[build/release] Refactor GH workflows. Add new Docker images (#3014)

This commit is contained in:
Salvador Girones Gil 2023-12-05 12:52:09 +01:00 committed by GitHub
parent 8e401a90c4
commit 7cd921b8eb
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 1671 additions and 819 deletions

View file

@ -1,3 +1 @@
* target
!amd64/surreal
!arm64/surreal

View file

@ -46,7 +46,7 @@ jobs:
cargo install --quiet --locked critcmp cargo-make cargo install --quiet --locked critcmp cargo-make
- name: Checkout changes - name: Checkout changes
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Run benchmark - name: Run benchmark
run: | run: |
@ -115,7 +115,7 @@ jobs:
# features: "protocol-ws" # features: "protocol-ws"
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Install stable toolchain - name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable

View file

@ -1,5 +1,7 @@
name: Continuous integration name: Continuous integration
run-name: "CI run '${{ github.head_ref || github.ref_name }}'"
on: on:
workflow_dispatch: workflow_dispatch:
push: push:
@ -13,6 +15,16 @@ defaults:
shell: bash shell: bash
jobs: jobs:
# TODO: Do not run it on CI because it's very slow right now.
# Build the Docker image but don't push it.
# docker:
# uses: ./.github/workflows/reusable_docker.yml
# with:
# git-ref: ${{ github.ref }}
# tag-prefix: ${{ github.head_ref || github.ref_name }}
# build: true
# push: false
# secrets: inherit
format: format:
name: Check format name: Check format
@ -26,7 +38,7 @@ jobs:
components: rustfmt components: rustfmt
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -50,7 +62,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -79,7 +91,7 @@ jobs:
targets: wasm32-unknown-unknown targets: wasm32-unknown-unknown
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -104,7 +116,7 @@ jobs:
components: clippy components: clippy
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -132,7 +144,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -170,7 +182,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -198,7 +210,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -226,7 +238,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -277,7 +289,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -314,7 +326,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -351,7 +363,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -388,7 +400,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -412,7 +424,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -436,7 +448,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -460,7 +472,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -484,7 +496,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
@ -528,7 +540,7 @@ jobs:
toolchain: 1.71.1 toolchain: 1.71.1
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Setup cache - name: Setup cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2

View file

@ -1,7 +1,20 @@
name: Nightly release name: Nightly release
run-name: "Nightly release '${{ inputs.git-ref || github.ref_name }}' (publish: ${{ inputs.publish || github.event_name == 'schedule' }})"
on: on:
workflow_dispatch: workflow_dispatch:
inputs:
git-ref:
required: true
type: string
description: "The github ref of this nightly version (i.e. main, 1234567)"
default: main
publish:
required: false
type: boolean
default: false
description: "Publish the nightly release"
schedule: schedule:
- cron: '0 0 * * *' - cron: '0 0 * * *'
@ -10,207 +23,22 @@ defaults:
shell: bash shell: bash
jobs: jobs:
release:
test: name: Prepare nightly release
name: Test uses: ./.github/workflows/reusable_publish_version.yml
runs-on: ubuntu-latest-16-cores
steps:
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with: with:
toolchain: 1.71.1 name: nightly
git-ref: ${{ inputs.git-ref || github.ref_name }}
- name: Checkout sources publish: ${{ inputs.publish || github.event_name == 'schedule' }}
uses: actions/checkout@v3 secrets: inherit
- name: Setup cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install dependencies
run: |
sudo apt-get -y update
- name: Free up some disk space
run: |
(set -x; df -h)
# Free up some disk space by removing unused files
(set -x; sudo rm -rf /imagegeneration || true)
(set -x; sudo rm -rf /opt/az || true)
(set -x; sudo rm -rf /opt/hostedtoolcache || true)
(set -x; sudo rm -rf /opt/google || true)
(set -x; sudo rm -rf /opt/pipx || true)
(set -x; df -h)
- name: Install cargo-llvm-cov
uses: taiki-e/install-action@cargo-llvm-cov
- name: Install cargo-make
run: cargo install --debug --locked cargo-make
- name: Test workspace + coverage
run: cargo make ci-workspace-coverage
- name: Debug info
if: always()
run: |
set -x
free -m
df -h
ps auxf
cat /tmp/surrealdb.log || true
- name: Upload coverage report
uses: actions/upload-artifact@v3
with:
name: code-coverage-report
path: target/llvm-cov/html/
retention-days: 5
lint:
name: Lint
runs-on: ubuntu-latest-16-cores
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Install dependencies
run: |
sudo apt-get -y update
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: 1.71.1
targets: wasm32-unknown-unknown
components: rustfmt, clippy
- name: Install cargo-make
run: cargo install --debug --locked cargo-make
- name: Check workspace
run: cargo make ci-check
- name: Check format
run: cargo make ci-format
- name: Check wasm
run: cargo make ci-check-wasm
- name: Check clippy
run: cargo make ci-clippy
build:
name: Build ${{ matrix.arch }}
needs: [test, lint]
strategy:
fail-fast: false
matrix:
include:
- arch: x86_64-apple-darwin
os: macos-latest-xl
file: surreal-nightly.darwin-amd64
opts: --features storage-tikv,http-compression
- arch: aarch64-apple-darwin
os: macos-latest-xl
file: surreal-nightly.darwin-arm64
opts: --features storage-tikv,http-compression
- arch: x86_64-unknown-linux-gnu
os: ubuntu-latest-16-cores
file: surreal-nightly.linux-amd64
opts: --features storage-tikv,http-compression
- arch: aarch64-unknown-linux-gnu
os: ubuntu-latest-16-cores
file: surreal-nightly.linux-arm64
opts: --features storage-tikv,http-compression
- arch: x86_64-pc-windows-msvc
os: windows-latest
file: surreal-nightly.windows-amd64
opts:
runs-on: ${{ matrix.os }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Prepare environment
if: contains(matrix.arch, 'windows') && endsWith(matrix.arch, '-gnu')
run: echo "C:\msys64\usr\bin;$Env:Path" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8
- name: Install dependencies
if: contains(matrix.arch, 'windows') && endsWith(matrix.arch, '-msvc')
run: |
vcpkg integrate install
- name: Install dependencies
if: contains(matrix.arch, 'apple') && endsWith(matrix.arch, '-darwin')
run: |
brew install protobuf
- name: Install dependencies
if: contains(matrix.arch, 'linux') && endsWith(matrix.arch, '-gnu')
run: |
sudo apt-get -y update
sudo apt-get -y install musl-tools qemu-user libc6-dev-arm64-cross
sudo apt-get -y install g++-aarch64-linux-gnu gcc-aarch64-linux-gnu
- name: Install FoundationDB
if: contains(matrix.arch, 'linux') && startsWith(matrix.arch, 'x86_64')
run: |
curl -sLO https://github.com/apple/foundationdb/releases/download/6.3.23/foundationdb-clients_6.3.23-1_amd64.deb
sudo dpkg -i --force-architecture foundationdb-clients_6.3.23-1_amd64.deb
rm -rf foundationdb-clients_6.3.23-1_amd64.deb
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: 1.71.1
targets: ${{ matrix.arch }}
- name: Output package versions
run: set -x; go version ; cargo version ; rustc --version ; cmake --version ; gcc --version ; g++ --version ; perl -v
- name: Run cargo build
run: cargo build ${{ matrix.opts }} --release --locked --target ${{ matrix.arch }}
env:
BINDGEN_EXTRA_CLANG_ARGS_aarch64-unknown-linux-gnu: "-I/usr/aarch64-linux-gnu/include/"
- name: Package binaries
if: ${{ !contains(matrix.arch, 'windows') }}
shell: bash
run: |
cd target/${{ matrix.arch }}/release
chmod +x surreal
tar -zcvf ${{ matrix.file }}.tgz surreal
echo $(shasum -a 256 ${{ matrix.file }}.tgz | cut -f1 -d' ') > ${{ matrix.file }}.txt
- name: Package binaries
if: ${{ contains(matrix.arch, 'windows') }}
shell: bash
run: |
cd target/${{ matrix.arch }}/release
cp surreal.exe ${{ matrix.file }}.exe
echo $(shasum -a 256 ${{ matrix.file }}.exe | cut -f1 -d' ') > ${{ matrix.file }}.txt
- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.file }}
path: |
target/${{ matrix.arch }}/release/surreal
target/${{ matrix.arch }}/release/${{ matrix.file }}.tgz
target/${{ matrix.arch }}/release/${{ matrix.file }}.txt
target/${{ matrix.arch }}/release/${{ matrix.file }}.exe
crate: crate:
name: Publish surrealdb-nightly to crates.io name: Publish surrealdb-nightly to crates.io
needs: [lint, test] needs: [release]
environment: crate-nightly
if: ${{ inputs.publish || github.event_name == 'schedule' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Install stable toolchain - name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable
with: with:
@ -218,11 +46,12 @@ jobs:
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
ref: ${{ inputs.git-ref || github.ref_name }}
- name: Publish - name: Publish
env: env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
shell: bash
run: | run: |
# Replace the crate name # Replace the crate name
# We are just going to replace the first occurance of surrealdb # We are just going to replace the first occurance of surrealdb
@ -256,191 +85,3 @@ jobs:
# Publish cargo crate # Publish cargo crate
/home/runner/.cargo/bin/release-plz release --config .config/release-nightly-plz.toml /home/runner/.cargo/bin/release-plz release --config .config/release-nightly-plz.toml
docker:
name: Build and publish Docker image
needs: [build]
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Download amd64 binary
uses: actions/download-artifact@v3
with:
name: surreal-nightly.linux-amd64
path: amd64
- name: Download arm64 binary
uses: actions/download-artifact@v3
with:
name: surreal-nightly.linux-arm64
path: arm64
- name: Set file permissions
shell: bash
run: |
chmod +x amd64/surreal arm64/surreal
- name: Configure DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Buildx
uses: docker/setup-buildx-action@v2
- name: Configure tag
shell: bash
run: |
VERSION=nightly
echo "VERSION=${VERSION}" >> $GITHUB_ENV
- name: Build the Docker image
uses: docker/build-push-action@v4
with:
context: .
load: true
tags: surrealdb/surrealdb:${{ env.VERSION }}
# Start the docker image as server and wait until it is ready
- name: Test the Docker image
run: |
docker run --net=host --rm surrealdb/surrealdb:${{ env.VERSION }} start 2>&1 >surreal.log &
retries=5
until docker run --net=host --rm surrealdb/surrealdb:${{ env.VERSION }} is-ready; do
retries=$((retries-1))
if [[ $retries -eq 0 ]]; then
echo "###"
echo "### The container is not ready after 5 seconds!"
echo "###"
cat surreal.log
echo "###"
echo "### ERROR: The docker image is not valid. Aborting."
echo "###"
exit 1
fi
sleep 1
done
# This second build reuses the cache from the build above
- name: Push the Docker image
uses: docker/build-push-action@v4
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
tags: surrealdb/surrealdb:${{ env.VERSION }}
publish:
name: Publish binaries for ${{ matrix.arch }}
needs: [docker]
strategy:
fail-fast: false
matrix:
include:
- arch: x86_64-apple-darwin
os: macos-latest
file: surreal-nightly.darwin-amd64
- arch: aarch64-apple-darwin
os: macos-latest
file: surreal-nightly.darwin-arm64
- arch: x86_64-unknown-linux-gnu
os: ubuntu-latest
file: surreal-nightly.linux-amd64
- arch: aarch64-unknown-linux-gnu
os: ubuntu-latest
file: surreal-nightly.linux-arm64
- arch: x86_64-pc-windows-msvc
os: windows-latest
file: surreal-nightly.windows-amd64
runs-on: ${{ matrix.os }}
steps:
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: ${{ matrix.file }}
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Publish non-windows binaries
if: ${{ !contains(matrix.arch, 'windows') }}
shell: bash
run: |
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.tgz s3://download.surrealdb.com/nightly/
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.txt s3://download.surrealdb.com/nightly/
- name: Publish windows binaries
if: ${{ contains(matrix.arch, 'windows') }}
shell: bash
run: |
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.exe s3://download.surrealdb.com/nightly/
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.txt s3://download.surrealdb.com/nightly/
package-macos:
name: Package macOS universal binary
needs: [publish]
runs-on: macos-latest
steps:
- name: Download amd64 binary
uses: actions/download-artifact@v3
with:
name: surreal-nightly.darwin-amd64
path: amd64
- name: Download arm64 binary
uses: actions/download-artifact@v3
with:
name: surreal-nightly.darwin-arm64
path: arm64
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Package universal MacOS binary
shell: bash
run: |
FILE="surreal-nightly.darwin-universal"
lipo -create -output surreal amd64/surreal arm64/surreal
chmod +x surreal
tar -zcvf $FILE.tgz surreal
echo $(shasum -a 256 $FILE.tgz | cut -f1 -d' ') > $FILE.txt
aws s3 cp --cache-control 'no-store' $FILE.tgz s3://download.surrealdb.com/nightly/
aws s3 cp --cache-control 'no-store' $FILE.txt s3://download.surrealdb.com/nightly/
deploy:
name: Deploy
needs: [publish, package-macos]
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Distribute binaries
shell: bash
run: |
regions=("af-south-1" "ap-east-1" "ap-south-1" "ap-southeast-1" "ap-southeast-2" "ca-central-1" "eu-central-1" "eu-west-2" "me-south-1" "sa-east-1" "us-west-2")
for region in ${regions[@]}; do
aws s3 sync --delete --storage-class INTELLIGENT_TIERING --source-region eu-west-2 --region ${region} s3://download.surrealdb.com s3://download.${region}.surrealdb.com
done

View file

@ -33,7 +33,7 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' }} || ${{ github.event.label.name == 'nix' }} if: ${{ github.ref == 'refs/heads/main' }} || ${{ github.event.label.name == 'nix' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- uses: cachix/install-nix-action@v20 - uses: cachix/install-nix-action@v20
- uses: cachix/cachix-action@v12 - uses: cachix/cachix-action@v12
with: with:
@ -48,7 +48,7 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' }} || ${{ github.event.label.name == 'nix' }} if: ${{ github.ref == 'refs/heads/main' }} || ${{ github.event.label.name == 'nix' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- uses: cachix/install-nix-action@v20 - uses: cachix/install-nix-action@v20
- uses: cachix/cachix-action@v12 - uses: cachix/cachix-action@v12
with: with:

View file

@ -1,6 +1,25 @@
name: Version release name: Tag release
run-name: "Tag release '${{ inputs.git-ref || github.ref_name }}' (publish: ${{ inputs.publish || github.event_name == 'push' }}, latest: ${{ inputs.latest || github.event_name == 'schedule' }})"
on: on:
workflow_dispatch:
inputs:
git-ref:
required: true
type: string
description: "The github ref of this release. If you are publishing it, use a tag (i.e. v1.0.0)."
default: main
latest:
required: false
type: boolean
default: false
description: "Consider this release as the latest one and update the Docker image tag and the binary pointer for the installers"
publish:
required: false
type: boolean
default: false
description: "Publish the release"
push: push:
tags: tags:
- "v*.*.*" - "v*.*.*"
@ -10,404 +29,24 @@ defaults:
shell: bash shell: bash
jobs: jobs:
checks:
test: name: Pre-release checks
name: Test runs-on: ubuntu-latest
runs-on: ubuntu-latest-16-cores
steps:
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: 1.71.1
- name: Checkout sources
uses: actions/checkout@v3
- name: Setup cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install dependencies
run: |
sudo apt-get -y update
- name: Free up some disk space
run: |
(set -x; df -h)
# Free up some disk space by removing unused files
(set -x; sudo rm -rf /imagegeneration || true)
(set -x; sudo rm -rf /opt/az || true)
(set -x; sudo rm -rf /opt/hostedtoolcache || true)
(set -x; sudo rm -rf /opt/google || true)
(set -x; sudo rm -rf /opt/pipx || true)
(set -x; df -h)
- name: Install cargo-llvm-cov
uses: taiki-e/install-action@cargo-llvm-cov
- name: Install cargo-make
run: cargo install --debug --locked cargo-make
- name: Test workspace + coverage
run: cargo make ci-workspace-coverage
- name: Debug info
if: always()
run: |
set -x
free -m
df -h
ps auxf
cat /tmp/surrealdb.log || true
- name: Upload coverage report
uses: actions/upload-artifact@v3
with:
name: code-coverage-report
path: target/llvm-cov/html/
retention-days: 30
lint:
name: Lint
runs-on: ubuntu-latest-16-cores
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Install dependencies
run: |
sudo apt-get -y update
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: 1.71.1
targets: wasm32-unknown-unknown
components: rustfmt, clippy
- name: Install cargo-make
run: cargo install --debug --locked cargo-make
- name: Check workspace
run: cargo make ci-check
- name: Check format
run: cargo make ci-format
- name: Check wasm
run: cargo make ci-check-wasm
- name: Check clippy
run: cargo make ci-clippy
build:
name: Build ${{ matrix.arch }}
needs: [test, lint]
strategy:
fail-fast: false
matrix:
include:
- arch: x86_64-apple-darwin
os: macos-latest-xl
file: surreal-${{ github.ref_name }}.darwin-amd64
opts: --features storage-tikv,http-compression
- arch: aarch64-apple-darwin
os: macos-latest-xl
file: surreal-${{ github.ref_name }}.darwin-arm64
opts: --features storage-tikv,http-compression
- arch: x86_64-unknown-linux-gnu
os: ubuntu-latest-16-cores
file: surreal-${{ github.ref_name }}.linux-amd64
opts: --features storage-tikv,http-compression
- arch: aarch64-unknown-linux-gnu
os: ubuntu-latest-16-cores
file: surreal-${{ github.ref_name }}.linux-arm64
opts: --features storage-tikv,http-compression
- arch: x86_64-pc-windows-msvc
os: windows-latest
file: surreal-${{ github.ref_name }}.windows-amd64
opts:
runs-on: ${{ matrix.os }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Prepare environment
if: contains(matrix.arch, 'windows') && endsWith(matrix.arch, '-gnu')
run: echo "C:\msys64\usr\bin;$Env:Path" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8
- name: Install dependencies
if: contains(matrix.arch, 'windows') && endsWith(matrix.arch, '-msvc')
run: |
vcpkg integrate install
- name: Install dependencies
if: contains(matrix.arch, 'apple') && endsWith(matrix.arch, '-darwin')
run: |
brew install protobuf
- name: Install dependencies
if: contains(matrix.arch, 'linux') && endsWith(matrix.arch, '-gnu')
run: |
sudo apt-get -y update
sudo apt-get -y install musl-tools qemu-user libc6-dev-arm64-cross
sudo apt-get -y install g++-aarch64-linux-gnu gcc-aarch64-linux-gnu
- name: Install FoundationDB
if: contains(matrix.arch, 'linux') && startsWith(matrix.arch, 'x86_64')
run: |
curl -sLO https://github.com/apple/foundationdb/releases/download/6.3.23/foundationdb-clients_6.3.23-1_amd64.deb
sudo dpkg -i --force-architecture foundationdb-clients_6.3.23-1_amd64.deb
rm -rf foundationdb-clients_6.3.23-1_amd64.deb
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: 1.71.1
targets: ${{ matrix.arch }}
- name: Output package versions
run: set -x; go version ; cargo version ; rustc --version ; cmake --version ; gcc --version ; g++ --version ; perl -v
- name: Run cargo build
run: cargo build ${{ matrix.opts }} --release --locked --target ${{ matrix.arch }}
env: env:
BINDGEN_EXTRA_CLANG_ARGS_aarch64-unknown-linux-gnu: "-I/usr/aarch64-linux-gnu/include/" GIT_REF: ${{ inputs.git-ref || github.ref_name }}
- name: Package binaries
if: ${{ !contains(matrix.arch, 'windows') }}
shell: bash
run: |
cd target/${{ matrix.arch }}/release
chmod +x surreal
tar -zcvf ${{ matrix.file }}.tgz surreal
echo $(shasum -a 256 ${{ matrix.file }}.tgz | cut -f1 -d' ') > ${{ matrix.file }}.txt
- name: Package binaries
if: ${{ contains(matrix.arch, 'windows') }}
shell: bash
run: |
cd target/${{ matrix.arch }}/release
cp surreal.exe ${{ matrix.file }}.exe
echo $(shasum -a 256 ${{ matrix.file }}.exe | cut -f1 -d' ') > ${{ matrix.file }}.txt
- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.file }}
path: |
target/${{ matrix.arch }}/release/surreal
target/${{ matrix.arch }}/release/${{ matrix.file }}.tgz
target/${{ matrix.arch }}/release/${{ matrix.file }}.txt
target/${{ matrix.arch }}/release/${{ matrix.file }}.exe
docker:
name: Build and publish Docker image
needs: [build]
runs-on: ubuntu-latest
steps: steps:
- name: Checkout sources - name: Verify that the provided git_ref is a tag when 'publish' is true
uses: actions/checkout@v3 if: ${{ inputs.publish || github.event_name == 'push' }}
- name: Download amd64 binary
uses: actions/download-artifact@v3
with:
name: surreal-${{ github.ref_name }}.linux-amd64
path: amd64
- name: Download arm64 binary
uses: actions/download-artifact@v3
with:
name: surreal-${{ github.ref_name }}.linux-arm64
path: arm64
- name: Set file permissions
shell: bash
run: | run: |
chmod +x amd64/surreal arm64/surreal git tag -l | grep -w $GIT_REF || (echo "The provided git_ref '$GIT_REF' is not a tag" && exit 1)
- name: Configure DockerHub release:
uses: docker/login-action@v2 name: Prepare release
needs: [checks]
uses: ./.github/workflows/reusable_publish_version.yml
with: with:
username: ${{ secrets.DOCKER_USER }} git-ref: ${{ inputs.git-ref || github.ref_name }}
password: ${{ secrets.DOCKER_PASS }} latest: ${{ inputs.latest || github.event_name == 'push' }}
publish: ${{ inputs.publish || github.event_name == 'push' }}
- name: Set up QEMU create-release: ${{ inputs.publish || github.event_name == 'push' }}
uses: docker/setup-qemu-action@v2 secrets: inherit
- name: Set up Buildx
uses: docker/setup-buildx-action@v2
- name: Configure tag
shell: bash
run: |
VERSION=${{ github.ref_name }}
echo "VERSION=${VERSION:1}" | tr + - >> $GITHUB_ENV
- name: Build the Docker image
uses: docker/build-push-action@v4
with:
context: .
load: true
tags: surrealdb/surrealdb:latest,surrealdb/surrealdb:${{ env.VERSION }}
# Start the docker image as server and wait until it is ready
- name: Test the Docker image
run: |
docker run --net=host --rm surrealdb/surrealdb:${{ env.VERSION }} start 2>&1 >surreal.log &
retries=5
until docker run --net=host --rm surrealdb/surrealdb:${{ env.VERSION }} is-ready; do
retries=$((retries-1))
if [[ $retries -eq 0 ]]; then
echo "###"
echo "### The container is not ready after 5 seconds!"
echo "###"
cat surreal.log
echo "###"
echo "### ERROR: The docker image is not valid. Aborting."
echo "###"
exit 1
fi
sleep 1
done
# This second build reuses the cache from the build above
- name: Push the Docker image
uses: docker/build-push-action@v4
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
tags: surrealdb/surrealdb:latest,surrealdb/surrealdb:${{ env.VERSION }}
publish:
name: Publish binaries for ${{ matrix.arch }}
needs: [docker]
strategy:
fail-fast: false
matrix:
include:
- arch: x86_64-apple-darwin
os: macos-latest
file: surreal-${{ github.ref_name }}.darwin-amd64
- arch: aarch64-apple-darwin
os: macos-latest
file: surreal-${{ github.ref_name }}.darwin-arm64
- arch: x86_64-unknown-linux-gnu
os: ubuntu-latest
file: surreal-${{ github.ref_name }}.linux-amd64
- arch: aarch64-unknown-linux-gnu
os: ubuntu-latest
file: surreal-${{ github.ref_name }}.linux-arm64
- arch: x86_64-pc-windows-msvc
os: windows-latest
file: surreal-${{ github.ref_name }}.windows-amd64
runs-on: ${{ matrix.os }}
steps:
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: ${{ matrix.file }}
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Publish non-windows binaries
if: ${{ !contains(matrix.arch, 'windows') }}
shell: bash
run: |
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.tgz s3://download.surrealdb.com/${{ github.ref_name }}/
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.txt s3://download.surrealdb.com/${{ github.ref_name }}/
- name: Publish windows binaries
if: ${{ contains(matrix.arch, 'windows') }}
shell: bash
run: |
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.exe s3://download.surrealdb.com/${{ github.ref_name }}/
aws s3 cp --cache-control 'no-store' ${{ matrix.file }}.txt s3://download.surrealdb.com/${{ github.ref_name }}/
package-macos:
name: Package macOS universal binary
needs: [publish]
runs-on: macos-latest
steps:
- name: Download amd64 binary
uses: actions/download-artifact@v3
with:
name: surreal-${{ github.ref_name }}.darwin-amd64
path: amd64
- name: Download arm64 binary
uses: actions/download-artifact@v3
with:
name: surreal-${{ github.ref_name }}.darwin-arm64
path: arm64
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Package universal MacOS binary
shell: bash
run: |
FILE="surreal-${{ github.ref_name }}.darwin-universal"
lipo -create -output surreal amd64/surreal arm64/surreal
chmod +x surreal
tar -zcvf $FILE.tgz surreal
echo $(shasum -a 256 $FILE.tgz | cut -f1 -d' ') > $FILE.txt
aws s3 cp --cache-control 'no-store' $FILE.tgz s3://download.surrealdb.com/${{ github.ref_name }}/
aws s3 cp --cache-control 'no-store' $FILE.txt s3://download.surrealdb.com/${{ github.ref_name }}/
deploy:
name: Deploy
needs: [publish, package-macos]
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Download artifacts
uses: actions/download-artifact@v3
- name: Publish release
uses: softprops/action-gh-release@v1
with:
name: "Release ${{ github.ref_name }}"
files: |
LICENSE
**/*.tgz
**/*.exe
- name: Set version
shell: bash
run: |
echo ${{ github.ref_name }} > latest.txt
aws s3 cp --cache-control 'no-store' latest.txt s3://download.surrealdb.com/
- name: Distribute binaries
shell: bash
run: |
regions=("af-south-1" "ap-east-1" "ap-south-1" "ap-southeast-1" "ap-southeast-2" "ca-central-1" "eu-central-1" "eu-west-2" "me-south-1" "sa-east-1" "us-west-2")
for region in ${regions[@]}; do
aws s3 sync --delete --storage-class INTELLIGENT_TIERING --source-region eu-west-2 --region ${region} s3://download.surrealdb.com s3://download.${region}.surrealdb.com
done

343
.github/workflows/reusable_docker.yml vendored Normal file
View file

@ -0,0 +1,343 @@
on:
workflow_call:
inputs:
git-ref:
required: true
type: string
description: "The github ref to checkout for building the Docker images."
tag-prefix:
required: true
type: string
description: "The prefix of the Docker image tag. i.e. 'nightly' for 'surrealdb/surrealdb:nightly-dev' or 'surrealdb/surrealdb:nightly-fdb'."
build:
required: false
type: boolean
default: true
description: "Build the Docker images."
push:
required: false
type: boolean
default: false
description: "Publish the Docker images."
latest:
required: false
type: boolean
default: false
description: "Update the latest tag of the Docker image."
secrets:
DOCKER_USER:
required: false
DOCKER_PASS:
required: false
AWS_CI_ACCESS_KEY_ID:
required: false
AWS_CI_SECRET_ACCESS_KEY:
required: false
defaults:
run:
shell: bash
jobs:
prepare:
name: Prepare steps
runs-on: ubuntu-latest
outputs:
with-ecr: ${{ steps.aws-credentials.outputs.with-ecr }}
tag-prefix: ${{ steps.tag-prefix.outputs.tag-prefix }}
build-matrix: ${{ steps.set-matrix.outputs.build-matrix }}
push-matrix: ${{ steps.set-matrix.outputs.push-matrix }}
steps:
- name: Check if AWS credentials are set
id: aws-credentials
run: |
if [[ "${{ secrets.AWS_CI_ACCESS_KEY_ID }}" == "" ]]; then
echo "###"
echo "### AWS credentials are not set. Will skip any AWS ECR action."
echo "###"
echo "with-ecr=false" >> $GITHUB_OUTPUT
else
echo "with-ecr=true" >> $GITHUB_OUTPUT
fi
- name: Sanitize tag name
id: tag-prefix
run: |
echo "tag-prefix=$(echo '${{ inputs.tag-prefix }}' | sed 's/[^a-zA-Z0-9_.-]/-/g' | tr '[:upper:]' '[:lower:]')" >> $GITHUB_OUTPUT
# Define matrix here so we don't need to search for it when making changes
- name: Set matrix
id: set-matrix
env:
BUILD_MATRIX: |
include:
########
# Binary image
########
- name: Binary image
dockerfile: Dockerfile.binary
platform: amd64
runner: ubuntu-latest-4-cores
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-binary
########################################
# Base images
########################################
# Prod AMD64 image
- &base_image
name: Base image
dockerfile: Dockerfile
build-target: prod
platform: amd64
runner: ubuntu-latest-4-cores
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}
# Prod ARM64 image
- <<: *base_image
platform: arm64
runner: ["self-hosted", "arm64", "4-cores"]
tag: arm64-${{ steps.tag-prefix.outputs.tag-prefix }}
# Dev AMD64 image
- <<: *base_image
build-target: dev
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-dev
# Dev ARM64 image
- <<: *base_image
build-target: dev
platform: arm64
runner: ["self-hosted", "arm64", "4-cores"]
tag: arm64-${{ steps.tag-prefix.outputs.tag-prefix }}-dev
########################################
# FoundationDB images (FDB client library is only available for amd64)
########################################
# Prod AMD64 image
- &fdb_image
name: FDB image
dockerfile: Dockerfile.fdb
build-target: prod
platform: amd64
runner: ubuntu-latest-4-cores
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-fdb
# Dev AMD64 image
- <<: *fdb_image
build-target: dev
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-fdb-dev
PUSH_MATRIX: |
include:
########################################
# Base images
########################################
# Prod images
- &base_image
platforms: linux/amd64,linux/arm64
tag: ${{ steps.tag-prefix.outputs.tag-prefix }}
tag-latest: latest
# Dev images
- <<: *base_image
platforms: linux/amd64,linux/arm64
tag: ${{ steps.tag-prefix.outputs.tag-prefix }}-dev
tag-latest: latest-dev
# TODO: Decide whether or not we want a dedicated image for FoundationDB
# ########################################
# # FoundationDB images (FDB client library is only available for amd64)
# ########################################
# # Prod images
# - &fdb_image
# platforms: linux/amd64
# tag: ${{ steps.tag-prefix.outputs.tag-prefix }}-fdb
# tag-latest: latest-fdb
# # Dev images
# - <<: *fdb_image
# tag: ${{ steps.tag-prefix.outputs.tag-prefix }}-fdb-dev
# tag-latest: latest-fdb-dev
run: |
echo '${{ env.BUILD_MATRIX }}' > build-matrix.yaml
echo "build-matrix=$(yq -o json -I=0 build-matrix.yaml)" >> $GITHUB_OUTPUT
echo '${{ env.PUSH_MATRIX }}' > push-matrix.yaml
echo "push-matrix=$(yq -o json -I=0 push-matrix.yaml)" >> $GITHUB_OUTPUT
build:
name: Build ${{ matrix.name }} (${{ matrix.build-target || 'default' }}, ${{ matrix.platform }})
runs-on: ${{ matrix.runner }}
needs: prepare
if: ${{ inputs.build }}
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.prepare.outputs.build-matrix) }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ inputs.git-ref }}
- name: Checkout docker
uses: actions/checkout@v4
with:
path: _docker
# Replace docker files. It allows us to test new Dockerfiles with workflow_dispatch and a custom git ref.
# When triggered by a push or a schedule, this git ref will be the same as 'inputs.git-ref'
- name: Replace docker files
env:
GH_TOKEN: ${{ github.token }}
run: |
rm -rf docker .dockerignore
mv _docker/docker .
mv _docker/.dockerignore .
rm -rf _docker
- name: Set up Buildx
uses: docker/setup-buildx-action@v3
- name: Build and export to Docker.
uses: docker/build-push-action@v5
id: build
with:
context: .
load: true
platforms: linux/${{ matrix.platform }}
file: docker/${{ matrix.dockerfile }}
target: ${{ matrix.build-target }}
tags: surrealdb-ci:${{ matrix.tag }}-${{ github.run_id }}
# Start the docker image as server and wait until it is ready
- name: Test the Docker image
run: |
docker run --net=host --rm ${{ steps.build.outputs.imageid }} start 2>&1 >surreal.log &
retries=5
until docker run --net=host --rm ${{ steps.build.outputs.imageid }} is-ready; do
retries=$((retries-1))
if [[ $retries -eq 0 ]]; then
echo "###"
echo "### The container is not ready after 5 seconds!"
echo "###"
cat surreal.log
echo "###"
echo "### ERROR: The docker image is not valid. Aborting."
echo "###"
exit 1
fi
sleep 1
done
- name: Configure AWS credentials
if: ${{ needs.prepare.outputs.with-ecr == 'true' }}
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: Login to Amazon ECR
if: ${{ needs.prepare.outputs.with-ecr == 'true' }}
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: Push individual images to CI registry.
if: ${{ needs.prepare.outputs.with-ecr == 'true' }}
run: |
docker tag ${{ steps.build.outputs.imageid }} ${{ steps.login-ecr.outputs.registry }}/surrealdb-ci:${{ matrix.tag }}-${{ github.run_id }}
docker push ${{ steps.login-ecr.outputs.registry }}/surrealdb-ci:${{ matrix.tag }}-${{ github.run_id }}
  # Push a multi-arch manifest to the CI registry.
  # Combines the per-architecture images the 'build' job pushed
  # (tagged '<arch>-<tag>-<run_id>') into one manifest per PUSH_MATRIX entry.
  push-all-to-ecr-ci:
    name: Push ${{ matrix.tag }} to CI registry
    runs-on: ubuntu-latest
    needs: [prepare, build]
    # Only runs when the per-arch images were just built AND ECR is reachable.
    if: ${{ inputs.build && needs.prepare.outputs.with-ecr == 'true' }}
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.prepare.outputs.push-matrix) }}
    steps:
      # Checkout the workflow code, we don't need the code to build SurrealDB, that's why we don't checkout "input.git-ref" here
      - name: Checkout
        uses: actions/checkout@v4
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Push multi-arch Docker manifest to CI registry
        uses: docker/build-push-action@v5
        with:
          context: .
          # Dockerfile.multi-arch is just a FROM that selects the per-arch
          # image via TARGETARCH, so buildx produces a manifest list.
          file: ./docker/Dockerfile.multi-arch
          platforms: ${{ matrix.platforms }}
          push: true
          tags: ${{ steps.login-ecr.outputs.registry }}/surrealdb-ci:${{ matrix.tag }}
          build-args: |
            IMAGE_REPO=${{ steps.login-ecr.outputs.registry }}/surrealdb-ci
            TAG=${{ matrix.tag }}-${{ github.run_id }}
  # Push a multi-arch manifest to DockerHub.
  # Note: the per-arch source images are pulled from the ECR CI registry
  # (IMAGE_REPO build-arg below), so a prior run with build=true must have
  # pushed them there; only the resulting manifest goes to DockerHub.
  push-all-to-dockerhub:
    name: Push ${{ matrix.tag }} to DockerHub
    runs-on: ubuntu-latest
    needs: [prepare]
    if: ${{ inputs.push }}
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.prepare.outputs.push-matrix) }}
    steps:
      # Checkout the workflow code, we don't need the code to build SurrealDB, that's why we don't checkout "input.git-ref" here
      - name: Checkout
        uses: actions/checkout@v4
      # ECR credentials are needed even here: buildx pulls the per-arch
      # images from the CI registry while assembling the manifest.
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Configure DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}
      - name: Push multi-arch Docker manifest to DockerHub
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./docker/Dockerfile.multi-arch
          platforms: ${{ matrix.platforms }}
          push: true
          tags: surrealdb/surrealdb:${{ matrix.tag }}
          build-args: |
            IMAGE_REPO=${{ steps.login-ecr.outputs.registry }}/surrealdb-ci
            TAG=${{ matrix.tag }}-${{ github.run_id }}
      # Same manifest, additionally tagged 'latest'/'latest-dev' when this
      # release is flagged as the latest one.
      - name: Tag multi-arch Docker manifest as latest
        uses: docker/build-push-action@v5
        if: ${{ inputs.latest }}
        with:
          context: .
          file: ./docker/Dockerfile.multi-arch
          platforms: ${{ matrix.platforms }}
          push: true
          tags: surrealdb/surrealdb:${{ matrix.tag-latest }}
          build-args: |
            IMAGE_REPO=${{ steps.login-ecr.outputs.registry }}/surrealdb-ci
            TAG=${{ matrix.tag }}-${{ github.run_id }}

View file

@ -0,0 +1,390 @@
on:
workflow_call:
inputs:
# i.e. nightly, 1.0.0
name:
required: false
type: string
description: "The name of this release version. It can be a 'nightly' version or a semver version that represents a tag (i.e. 'v1.0.0')"
git-ref:
required: true
type: string
description: "The git ref of this release version. All 'actions/checkout' steps will use it"
latest:
required: false
type: boolean
default: false
description: "Consider this release as the latest one and update the Docker image tag and the binary pointer for the installers"
publish:
required: false
type: boolean
default: false
description: "Whether to publish this release"
create-release:
required: false
type: boolean
default: false
description: "Create a GitHub release"
defaults:
run:
shell: bash
jobs:
  # Normalize the workflow inputs into two outputs every other job consumes:
  # 'git-ref' (what to check out) and 'name' (the release/version label).
  prepare-vars:
    name: Prepare vars
    runs-on: ubuntu-latest
    outputs:
      # The step id below is literally 'outputs', hence steps.outputs.outputs.*
      git-ref: ${{ steps.outputs.outputs.git-ref }}
      name: ${{ steps.outputs.outputs.name }}
    steps:
      - name: Set outputs
        id: outputs
        run: |
          echo "git-ref=${{ inputs.git-ref || github.ref_name }}" >> $GITHUB_OUTPUT
          # If the name is not provided, use the git_ref (i.e. v1.0.0)
          echo "name=${{ inputs.name || inputs.git-ref || github.ref_name }}" >> $GITHUB_OUTPUT
  # Run the workspace test suite with coverage against the release ref.
  test:
    name: Test
    needs: [prepare-vars]
    runs-on: ubuntu-latest-16-cores
    steps:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          # Pinned toolchain; keep in sync with the 'lint' job and CI.
          toolchain: 1.71.1
      - name: Checkout sources
        uses: actions/checkout@v4
        with:
          ref: ${{ needs.prepare-vars.outputs.git-ref }}
      - name: Setup cache
        uses: Swatinem/rust-cache@v2
        with:
          # Only write the cache from main so release branches don't churn it.
          save-if: ${{ needs.prepare-vars.outputs.git-ref == 'main' }}
      - name: Install cargo-llvm-cov
        uses: taiki-e/install-action@cargo-llvm-cov
      - name: Install cargo-make
        run: cargo install --debug --locked cargo-make
      - name: Test workspace + coverage
        run: cargo make ci-workspace-coverage
      # Dump machine state and the server log to help diagnose failures.
      - name: Debug info
        if: always()
        run: |
          set -x
          free -m
          df -h
          ps auxf
          cat /tmp/surrealdb.log || true
      - name: Upload coverage report
        uses: actions/upload-artifact@v3
        with:
          name: code-coverage-report
          path: target/llvm-cov/html/
          retention-days: 5
  # Static checks: cargo check, rustfmt, wasm target check, and clippy.
  lint:
    name: Lint
    needs: [prepare-vars]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
        with:
          ref: ${{ needs.prepare-vars.outputs.git-ref }}
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          # Pinned toolchain; keep in sync with the 'test' job.
          toolchain: 1.71.1
          targets: wasm32-unknown-unknown
          components: rustfmt, clippy
      - name: Install cargo-make
        run: cargo install --debug --locked cargo-make
      - name: Check workspace
        run: cargo make ci-check
      - name: Check format
        run: cargo make ci-format
      - name: Check wasm
        run: cargo make ci-check-wasm
      - name: Check clippy
        run: cargo make ci-clippy
  # Build (but do not publish) the Docker images for this release;
  # publishing happens later in 'docker-publish' once binaries are out.
  docker-build:
    name: Build Docker images
    needs: [prepare-vars]
    uses: ./.github/workflows/reusable_docker.yml
    with:
      git-ref: ${{ needs.prepare-vars.outputs.git-ref }}
      tag-prefix: ${{ needs.prepare-vars.outputs.name }}
      build: true
      push: false
    secrets: inherit
build:
name: Build ${{ matrix.arch }} binary
needs: [prepare-vars]
strategy:
fail-fast: false
matrix:
include:
# MacOS amd64
- arch: x86_64-apple-darwin
runner: macos-latest-xl
file: surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64
build-step: |
# Prepare deps
brew install protobuf
# Build
cargo build --features storage-tikv,http-compression --release --locked --target x86_64-apple-darwin
# Package
cp target/x86_64-apple-darwin/release/surreal surreal
chmod +x surreal
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64.tgz surreal
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64.txt
# MacOS arm64
- arch: aarch64-apple-darwin
runner: macos-latest-xl
file: surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64
build-step: |
# Prepare deps
brew install protobuf
# Build
cargo build --features storage-tikv,http-compression --release --locked --target aarch64-apple-darwin
# Package
cp target/aarch64-apple-darwin/release/surreal surreal
chmod +x surreal
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64.tgz surreal
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64.txt
# Linux amd64
- arch: x86_64-unknown-linux-gnu
runner: ubuntu-latest-16-cores
file: surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64
build-step: |
# Build
docker build \
--platform linux/amd64 \
--build-arg="CARGO_EXTRA_FEATURES=storage-tikv,http-compression" \
-t binary \
-f docker/Dockerfile.binary \
.
docker create --name binary binary
docker cp binary:/surrealdb/target/release/surreal surreal
# Package
chmod +x surreal
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64.tgz surreal
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64.txt
# Linux arm64
- arch: aarch64-unknown-linux-gnu
runner: ["self-hosted", "arm64", "4-cores"]
file: surreal-${{ needs.prepare-vars.outputs.name }}.linux-arm64
build-step: |
# Build
docker build \
--platform linux/arm64 \
--build-arg="CARGO_EXTRA_FEATURES=storage-tikv,http-compression" \
-t binary \
-f docker/Dockerfile.binary \
.
docker create --name binary binary
docker cp binary:/surrealdb/target/release/surreal surreal
# Package
chmod +x surreal
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.linux-arm64.tgz surreal
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.linux-arm64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.linux-arm64.txt
# Windows amd64
- arch: x86_64-pc-windows-msvc
runner: windows-latest
file: surreal-${{ needs.prepare-vars.outputs.name }}.windows-amd64
build-step: |
# Prepare deps
vcpkg integrate install
# Build
cargo build --features storage-tikv,http-compression --release --locked --target x86_64-pc-windows-msvc
# Package
cp target/x86_64-pc-windows-msvc/release/surreal.exe surreal-${{ needs.prepare-vars.outputs.name }}.windows-amd64.exe
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.windows-amd64.exe | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.windows-amd64.txt
runs-on: ${{ matrix.runner }}
steps:
- name: Checkout sources
uses: actions/checkout@v4
with:
ref: ${{ needs.prepare-vars.outputs.git-ref }}
- name: Checkout docker
uses: actions/checkout@v4
with:
path: _docker
# Replace docker files. It allows us to test new Dockerfiles with workflow_dispatch and a custom git ref.
# When triggered by a push or a schedule, this git ref will be the same as 'inputs.git-ref'
- name: Replace docker files
env:
GH_TOKEN: ${{ github.token }}
run: |
rm -rf docker .dockerignore
mv _docker/docker .
mv _docker/.dockerignore .
rm -rf _docker
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: 1.71.1
targets: ${{ matrix.arch }}
- name: Output package versions
run: |
set -x
set +e
go version ; cargo version ; rustc --version ; cmake --version ; gcc --version ; g++ --version ; perl -v
- name: Build step
run: ${{ matrix.build-step }}
- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.file }}
path: |
surreal
${{ matrix.file }}.tgz
${{ matrix.file }}.txt
${{ matrix.file }}.exe
publish:
name: Publish artifacts binaries
needs: [prepare-vars, test, lint, build, docker-build]
if: ${{ inputs.publish }}
environment: ${{ needs.prepare-vars.outputs.name == 'nightly' && 'nightly' || 'release' }}
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: actions/download-artifact@v3
with:
path: artifacts
- name: Publish release
uses: softprops/action-gh-release@v1
if: ${{ inputs.create-release }}
with:
name: "Release ${{ github.ref_name }}"
files: |
LICENSE
artifacts/*.tgz
artifacts/*.exe
- name: Set latest version
if: ${{ inputs.create-release }}
run: |
echo ${{ github.ref_name }} > latest.txt
aws s3 cp --cache-control 'no-store' latest.txt s3://download.surrealdb.com/latest.txt
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Publish binaries
run: |
for file in artifacts/**/*.{tgz,txt,exe}; do
aws s3 cp --cache-control 'no-store' $file s3://download.surrealdb.com/${{ needs.prepare-vars.outputs.name }}/
done
  # Push the Docker images built earlier (build: false, push: true): the
  # reusable workflow assembles manifests from the CI registry and publishes
  # them to DockerHub, tagging 'latest' when this release is flagged as such.
  docker-publish:
    name: Publish Docker images
    needs: [prepare-vars, publish]
    uses: ./.github/workflows/reusable_docker.yml
    with:
      git-ref: ${{ needs.prepare-vars.outputs.git-ref }}
      tag-prefix: ${{ needs.prepare-vars.outputs.name }}
      latest: ${{ inputs.latest }}
      build: false
      push: true
    secrets: inherit
  # Fuse the darwin amd64 + arm64 binaries into one universal binary with
  # lipo, then upload it alongside the per-arch packages.
  # NOTE(review): because this job 'needs: publish', it is skipped entirely
  # when inputs.publish is false, making the 'if' on the final step redundant
  # — confirm whether packaging without publishing was ever intended.
  package-macos:
    name: Package and publish macOS universal binary
    needs: [prepare-vars, publish]
    runs-on: macos-latest
    env:
      FILE: surreal-${{ needs.prepare-vars.outputs.name }}.darwin-universal
    steps:
      - name: Download amd64 binary
        uses: actions/download-artifact@v3
        with:
          name: surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64
          path: amd64
      - name: Download arm64 binary
        uses: actions/download-artifact@v3
        with:
          name: surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64
          path: arm64
      - name: Configure AWS
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: us-east-2
          aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
          aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
      - name: Package universal MacOS binary
        run: |
          # lipo merges both single-arch Mach-O binaries into one fat binary.
          lipo -create -output surreal amd64/surreal arm64/surreal
          chmod +x surreal
          tar -zcvf $FILE.tgz surreal
          echo $(shasum -a 256 $FILE.tgz | cut -f1 -d' ') > $FILE.txt
      - name: Publish universal MacOS binary
        if: ${{ inputs.publish }}
        run: |
          aws s3 cp --cache-control 'no-store' $FILE.tgz s3://download.surrealdb.com/${{ needs.prepare-vars.outputs.name }}/
          aws s3 cp --cache-control 'no-store' $FILE.txt s3://download.surrealdb.com/${{ needs.prepare-vars.outputs.name }}/
propagate:
name: Propagate binaries to all regions
if: ${{ inputs.publish }}
needs: [publish, package-macos]
runs-on: ubuntu-latest
steps:
- name: Configure AWS
uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: us-east-2
aws-access-key-id: ${{ secrets.AMAZON_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AMAZON_SECRET_KEY }}
- name: Distribute binaries
run: |
regions=("af-south-1" "ap-east-1" "ap-south-1" "ap-southeast-1" "ap-southeast-2" "ca-central-1" "eu-central-1" "eu-west-2" "me-south-1" "sa-east-1" "us-west-2")
for region in ${regions[@]}; do
aws s3 sync --delete --storage-class INTELLIGENT_TIERING --source-region eu-west-2 --region ${region} s3://download.surrealdb.com s3://download.${region}.surrealdb.com
done

1
.gitignore vendored
View file

@ -39,7 +39,6 @@ Temporary Items
.vscode/ .vscode/
/result /result
/bin/ /bin/
/docker/
/.direnv/ /.direnv/
# ----------------------------------- # -----------------------------------

View file

@ -1,11 +0,0 @@
# Use ChainGuard's glibc-dynamic image as the base image. More information about this image can be found at https://www.chainguard.dev/chainguard-images
FROM cgr.dev/chainguard/glibc-dynamic
# Declare a build-time argument for the target architecture
ARG TARGETARCH
# Add the binary file 'surreal' from the specified path on the host machine to the root directory in the container
ADD $TARGETARCH/surreal /
# Set the entry point for the container to be the 'surreal' binary
ENTRYPOINT ["/surreal"]

36
docker/Dockerfile Normal file
View file

@ -0,0 +1,36 @@
#
# Dockerfile that builds a SurrealDB docker image.
#
# Multi-stage build: 'builder' compiles the release binary; 'dev' and 'prod'
# copy only that binary onto Chainguard glibc runtime images.
#

# Chainguard Rust image with a dev toolchain for the build stage.
FROM cgr.dev/chainguard/rust:latest-dev as builder

USER root
# Native build dependencies for the vendored C/C++ parts of the backends.
RUN apk update
RUN apk add patch clang curl gcc cmake

# Linker used for aarch64 builds — presumably picked up when buildx targets
# linux/arm64; TODO(review) confirm the cross toolchain is present then.
ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-unknown-linux-gnu-gcc

RUN mkdir /surrealdb
WORKDIR /surrealdb
COPY . /surrealdb/

RUN cargo build --features http-compression,storage-tikv --release --locked

#
# Development image
#
FROM cgr.dev/chainguard/glibc-dynamic:latest-dev as dev

# -dev variant keeps a shell/utilities and root for debugging.
USER root

COPY --from=builder /surrealdb/target/release/surreal /surreal

ENTRYPOINT ["/surreal"]

#
# Production image
#
# Minimal distroless-style runtime: binary only, non-root by default.
FROM cgr.dev/chainguard/glibc-dynamic:latest as prod

COPY --from=builder /surrealdb/target/release/surreal /surreal

ENTRYPOINT ["/surreal"]

26
docker/Dockerfile.binary Normal file
View file

@ -0,0 +1,26 @@
#
# Dockerfile that builds the SurrealDB Linux binary and makes it depend on GLIBC 2.17.
#
FROM docker.io/ubuntu:18.04
ARG CARGO_EXTRA_FEATURES="http-compression,storage-tikv"
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y curl patch clang gpg build-essential git
# Install rust
COPY docker/files/rustup-init.sh /tmp/rustup-init.sh
RUN /tmp/rustup-init.sh -y
ENV PATH="/root/.cargo/bin:${PATH}"
RUN mkdir /surrealdb
WORKDIR /surrealdb
COPY . /surrealdb/
RUN cargo build --features ${CARGO_EXTRA_FEATURES} --release --locked
# For testing purposes
RUN cp target/release/surreal /surreal
ENTRYPOINT ["/surreal"]

39
docker/Dockerfile.fdb Normal file
View file

@ -0,0 +1,39 @@
#
# Dockerfile that builds a SurrealDB docker image with FoundationDB support.
#
FROM --platform=linux/amd64 cgr.dev/chainguard/rust:latest-dev as builder
USER root
RUN apk update
RUN apk add patch clang curl cmake
RUN mkdir /surrealdb
WORKDIR /surrealdb
COPY . /surrealdb/
RUN curl -L https://github.com/apple/foundationdb/releases/download/7.1.42/libfdb_c.x86_64.so -o libfdb_c.so && \
echo "9501a7910fe2d47b805c48c467fddaf485ccf4b1195863e3c5fb0c86648084f1 libfdb_c.so" | sha256sum -c -s - || exit 1 && \
mv libfdb_c.so /usr/lib/ && \
cargo build --features http-compression,storage-tikv,storage-fdb --release --locked
#
# Development image
#
FROM cgr.dev/chainguard/glibc-dynamic:latest-dev as dev
USER root
COPY --from=builder /surrealdb/target/release/surreal /surreal
COPY --from=builder /usr/lib/libfdb_c.so /usr/lib/libfdb_c.so
ENTRYPOINT ["/surreal"]
#
# Production image
#
FROM cgr.dev/chainguard/glibc-dynamic:latest as prod
COPY --from=builder /surrealdb/target/release/surreal /surreal
COPY --from=builder /usr/lib/libfdb_c.so /usr/lib/libfdb_c.so
ENTRYPOINT ["/surreal"]

View file

@ -0,0 +1,9 @@
#
# Dockerfile that builds SurrealDB docker images for multiple architectures.
#
ARG TAG IMAGE_REPO
FROM ${IMAGE_REPO}:${TARGETARCH}-${TAG}
ARG TARGETARCH

731
docker/files/rustup-init.sh Executable file
View file

@ -0,0 +1,731 @@
#!/bin/sh
# shellcheck shell=dash
# This is just a little script that can be downloaded from the internet to
# install rustup. It just does platform detection, downloads the installer
# and runs it.
# It runs on Unix shells like {a,ba,da,k,z}sh. It uses the common `local`
# extension. Note: Most shells limit `local` to 1 var per line, contra bash.
if [ "$KSH_VERSION" = 'Version JM 93t+ 2010-03-05' ]; then
# The version of ksh93 that ships with many illumos systems does not
# support the "local" extension. Print a message rather than fail in
# subtle ways later on:
echo 'rustup does not work with this ksh93 version; please try bash!' >&2
exit 1
fi
set -u
# If RUSTUP_UPDATE_ROOT is unset or empty, default it.
RUSTUP_UPDATE_ROOT="${RUSTUP_UPDATE_ROOT:-https://static.rust-lang.org/rustup}"
# NOTICE: If you change anything here, please make the same changes in setup_mode.rs
usage() {
cat <<EOF
rustup-init 1.26.0 (577bf51ae 2023-04-05)
The installer for rustup
USAGE:
rustup-init [OPTIONS]
OPTIONS:
-v, --verbose
Enable verbose output
-q, --quiet
Disable progress output
-y
Disable confirmation prompt.
--default-host <default-host>
Choose a default host triple
--default-toolchain <default-toolchain>
Choose a default toolchain to install. Use 'none' to not install any toolchains at all
--profile <profile>
[default: default] [possible values: minimal, default, complete]
-c, --component <components>...
Component name to also install
-t, --target <targets>...
Target name to also install
--no-update-default-toolchain
Don't update any existing default toolchain after install
--no-modify-path
Don't configure the PATH environment variable
-h, --help
Print help information
-V, --version
Print version information
EOF
}
main() {
downloader --check
need_cmd uname
need_cmd mktemp
need_cmd chmod
need_cmd mkdir
need_cmd rm
need_cmd rmdir
get_architecture || return 1
local _arch="$RETVAL"
assert_nz "$_arch" "arch"
local _ext=""
case "$_arch" in
*windows*)
_ext=".exe"
;;
esac
local _url="${RUSTUP_UPDATE_ROOT}/dist/${_arch}/rustup-init${_ext}"
local _dir
if ! _dir="$(ensure mktemp -d)"; then
# Because the previous command ran in a subshell, we must manually
# propagate exit status.
exit 1
fi
local _file="${_dir}/rustup-init${_ext}"
local _ansi_escapes_are_valid=false
if [ -t 2 ]; then
if [ "${TERM+set}" = 'set' ]; then
case "$TERM" in
xterm*|rxvt*|urxvt*|linux*|vt*)
_ansi_escapes_are_valid=true
;;
esac
fi
fi
# check if we have to use /dev/tty to prompt the user
local need_tty=yes
for arg in "$@"; do
case "$arg" in
--help)
usage
exit 0
;;
*)
OPTIND=1
if [ "${arg%%--*}" = "" ]; then
# Long option (other than --help);
# don't attempt to interpret it.
continue
fi
while getopts :hy sub_arg "$arg"; do
case "$sub_arg" in
h)
usage
exit 0
;;
y)
# user wants to skip the prompt --
# we don't need /dev/tty
need_tty=no
;;
*)
;;
esac
done
;;
esac
done
if $_ansi_escapes_are_valid; then
printf "\33[1minfo:\33[0m downloading installer\n" 1>&2
else
printf '%s\n' 'info: downloading installer' 1>&2
fi
ensure mkdir -p "$_dir"
ensure downloader "$_url" "$_file" "$_arch"
ensure chmod u+x "$_file"
if [ ! -x "$_file" ]; then
printf '%s\n' "Cannot execute $_file (likely because of mounting /tmp as noexec)." 1>&2
printf '%s\n' "Please copy the file to a location where you can execute binaries and run ./rustup-init${_ext}." 1>&2
exit 1
fi
if [ "$need_tty" = "yes" ] && [ ! -t 0 ]; then
# The installer is going to want to ask for confirmation by
# reading stdin. This script was piped into `sh` though and
# doesn't have stdin to pass to its children. Instead we're going
# to explicitly connect /dev/tty to the installer's stdin.
if [ ! -t 1 ]; then
err "Unable to run interactively. Run with -y to accept defaults, --help for additional options"
fi
ignore "$_file" "$@" < /dev/tty
else
ignore "$_file" "$@"
fi
local _retval=$?
ignore rm "$_file"
ignore rmdir "$_dir"
return "$_retval"
}
check_proc() {
# Check for /proc by looking for the /proc/self/exe link
# This is only run on Linux
if ! test -L /proc/self/exe ; then
err "fatal: Unable to find /proc/self/exe. Is /proc mounted? Installation cannot proceed without /proc."
fi
}
get_bitness() {
need_cmd head
# Architecture detection without dependencies beyond coreutils.
# ELF files start out "\x7fELF", and the following byte is
# 0x01 for 32-bit and
# 0x02 for 64-bit.
# The printf builtin on some shells like dash only supports octal
# escape sequences, so we use those.
local _current_exe_head
_current_exe_head=$(head -c 5 /proc/self/exe )
if [ "$_current_exe_head" = "$(printf '\177ELF\001')" ]; then
echo 32
elif [ "$_current_exe_head" = "$(printf '\177ELF\002')" ]; then
echo 64
else
err "unknown platform bitness"
fi
}
is_host_amd64_elf() {
need_cmd head
need_cmd tail
# ELF e_machine detection without dependencies beyond coreutils.
# Two-byte field at offset 0x12 indicates the CPU,
# but we're interested in it being 0x3E to indicate amd64, or not that.
local _current_exe_machine
_current_exe_machine=$(head -c 19 /proc/self/exe | tail -c 1)
[ "$_current_exe_machine" = "$(printf '\076')" ]
}
get_endianness() {
local cputype=$1
local suffix_eb=$2
local suffix_el=$3
# detect endianness without od/hexdump, like get_bitness() does.
need_cmd head
need_cmd tail
local _current_exe_endianness
_current_exe_endianness="$(head -c 6 /proc/self/exe | tail -c 1)"
if [ "$_current_exe_endianness" = "$(printf '\001')" ]; then
echo "${cputype}${suffix_el}"
elif [ "$_current_exe_endianness" = "$(printf '\002')" ]; then
echo "${cputype}${suffix_eb}"
else
err "unknown platform endianness"
fi
}
get_architecture() {
local _ostype _cputype _bitness _arch _clibtype
_ostype="$(uname -s)"
_cputype="$(uname -m)"
_clibtype="gnu"
if [ "$_ostype" = Linux ]; then
if [ "$(uname -o)" = Android ]; then
_ostype=Android
fi
if ldd --version 2>&1 | grep -q 'musl'; then
_clibtype="musl"
fi
fi
if [ "$_ostype" = Darwin ] && [ "$_cputype" = i386 ]; then
# Darwin `uname -m` lies
if sysctl hw.optional.x86_64 | grep -q ': 1'; then
_cputype=x86_64
fi
fi
if [ "$_ostype" = SunOS ]; then
# Both Solaris and illumos presently announce as "SunOS" in "uname -s"
# so use "uname -o" to disambiguate. We use the full path to the
# system uname in case the user has coreutils uname first in PATH,
# which has historically sometimes printed the wrong value here.
if [ "$(/usr/bin/uname -o)" = illumos ]; then
_ostype=illumos
fi
# illumos systems have multi-arch userlands, and "uname -m" reports the
# machine hardware name; e.g., "i86pc" on both 32- and 64-bit x86
# systems. Check for the native (widest) instruction set on the
# running kernel:
if [ "$_cputype" = i86pc ]; then
_cputype="$(isainfo -n)"
fi
fi
case "$_ostype" in
Android)
_ostype=linux-android
;;
Linux)
check_proc
_ostype=unknown-linux-$_clibtype
_bitness=$(get_bitness)
;;
FreeBSD)
_ostype=unknown-freebsd
;;
NetBSD)
_ostype=unknown-netbsd
;;
DragonFly)
_ostype=unknown-dragonfly
;;
Darwin)
_ostype=apple-darwin
;;
illumos)
_ostype=unknown-illumos
;;
MINGW* | MSYS* | CYGWIN* | Windows_NT)
_ostype=pc-windows-gnu
;;
*)
err "unrecognized OS type: $_ostype"
;;
esac
case "$_cputype" in
i386 | i486 | i686 | i786 | x86)
_cputype=i686
;;
xscale | arm)
_cputype=arm
if [ "$_ostype" = "linux-android" ]; then
_ostype=linux-androideabi
fi
;;
armv6l)
_cputype=arm
if [ "$_ostype" = "linux-android" ]; then
_ostype=linux-androideabi
else
_ostype="${_ostype}eabihf"
fi
;;
armv7l | armv8l)
_cputype=armv7
if [ "$_ostype" = "linux-android" ]; then
_ostype=linux-androideabi
else
_ostype="${_ostype}eabihf"
fi
;;
aarch64 | arm64)
_cputype=aarch64
;;
x86_64 | x86-64 | x64 | amd64)
_cputype=x86_64
;;
mips)
_cputype=$(get_endianness mips '' el)
;;
mips64)
if [ "$_bitness" -eq 64 ]; then
# only n64 ABI is supported for now
_ostype="${_ostype}abi64"
_cputype=$(get_endianness mips64 '' el)
fi
;;
ppc)
_cputype=powerpc
;;
ppc64)
_cputype=powerpc64
;;
ppc64le)
_cputype=powerpc64le
;;
s390x)
_cputype=s390x
;;
riscv64)
_cputype=riscv64gc
;;
loongarch64)
_cputype=loongarch64
;;
*)
err "unknown CPU type: $_cputype"
esac
# Detect 64-bit linux with 32-bit userland
if [ "${_ostype}" = unknown-linux-gnu ] && [ "${_bitness}" -eq 32 ]; then
case $_cputype in
x86_64)
if [ -n "${RUSTUP_CPUTYPE:-}" ]; then
_cputype="$RUSTUP_CPUTYPE"
else {
# 32-bit executable for amd64 = x32
if is_host_amd64_elf; then {
echo "This host is running an x32 userland; as it stands, x32 support is poor," 1>&2
echo "and there isn't a native toolchain -- you will have to install" 1>&2
echo "multiarch compatibility with i686 and/or amd64, then select one" 1>&2
echo "by re-running this script with the RUSTUP_CPUTYPE environment variable" 1>&2
echo "set to i686 or x86_64, respectively." 1>&2
echo 1>&2
echo "You will be able to add an x32 target after installation by running" 1>&2
echo " rustup target add x86_64-unknown-linux-gnux32" 1>&2
exit 1
}; else
_cputype=i686
fi
}; fi
;;
mips64)
_cputype=$(get_endianness mips '' el)
;;
powerpc64)
_cputype=powerpc
;;
aarch64)
_cputype=armv7
if [ "$_ostype" = "linux-android" ]; then
_ostype=linux-androideabi
else
_ostype="${_ostype}eabihf"
fi
;;
riscv64gc)
err "riscv64 with 32-bit userland unsupported"
;;
esac
fi
# Detect armv7 but without the CPU features Rust needs in that build,
# and fall back to arm.
# See https://github.com/rust-lang/rustup.rs/issues/587.
if [ "$_ostype" = "unknown-linux-gnueabihf" ] && [ "$_cputype" = armv7 ]; then
if ensure grep '^Features' /proc/cpuinfo | grep -q -v neon; then
# At least one processor does not have NEON.
_cputype=arm
fi
fi
_arch="${_cputype}-${_ostype}"
RETVAL="$_arch"
}
# Emit a status line on stdout, prefixed with the installer's name.
say() {
    printf '%s\n' "rustup: $1"
}
# Report a fatal error on stderr and terminate with exit status 1.
# The say() prefix is inlined so the message still reads "rustup: <msg>".
err() {
    printf 'rustup: %s\n' "$1" >&2
    exit 1
}
# Abort the installer unless the named command is available on PATH.
need_cmd() {
    check_cmd "$1" || err "need '$1' (command not found)"
}
# Predicate: succeed iff command "$1" resolves on the current PATH.
# The exit status is `command -v`'s own; all output is discarded.
check_cmd() {
    command -v "$1" 2> /dev/null > /dev/null
}
# Internal invariant check: abort with "assert_nz $2" if "$1" is empty.
assert_nz() {
    [ -n "$1" ] || err "assert_nz $2"
}
# Run a command that should never fail. On failure, terminate immediately
# with an error naming the full failing command line.
ensure() {
    "$@" || err "command failed: $*"
}
# This is just for indicating that commands' results are being
# intentionally ignored. Usually, because it's being executed
# as part of error handling.
# Runs "$@" verbatim and propagates its exit status unchanged; callers
# deliberately do not check it. Kept as a named wrapper so the intent
# is visible (and greppable) at every call site.
ignore() {
"$@"
}
# This wraps curl or wget. Try curl first, if not installed,
# use wget instead.
# Usage:
#   downloader --check <_> <arch>   only verify a downloader is installed
#   downloader <url> <file> <arch>  download <url> into <file>
# $3 (the platform string) is used only in warnings/errors and in the
# check_help_for capability probes. Returns the downloader's exit status;
# a 404 in the captured error output becomes a fatal err().
downloader() {
local _dld
local _ciphersuites
local _err
local _status
local _retry
# Prefer curl, fall back to wget; otherwise store a placeholder name so
# need_cmd (in the --check branch) prints a helpful "not found" message.
if check_cmd curl; then
_dld=curl
elif check_cmd wget; then
_dld=wget
else
_dld='curl or wget' # to be used in error message of need_cmd
fi
if [ "$1" = --check ]; then
need_cmd "$_dld"
elif [ "$_dld" = curl ]; then
# Probe optional curl capabilities, then use the most hardened
# invocation this curl supports (HTTPS-only + TLS 1.2 + strong ciphers).
check_curl_for_retry_support
_retry="$RETVAL"
get_ciphersuites_for_curl
_ciphersuites="$RETVAL"
if [ -n "$_ciphersuites" ]; then
# NOTE: $_retry is intentionally unquoted so "--retry 3 -C -" word-splits.
_err=$(curl $_retry --proto '=https' --tlsv1.2 --ciphers "$_ciphersuites" --silent --show-error --fail --location "$1" --output "$2" 2>&1)
_status=$?
else
echo "Warning: Not enforcing strong cipher suites for TLS, this is potentially less secure"
if ! check_help_for "$3" curl --proto --tlsv1.2; then
echo "Warning: Not enforcing TLS v1.2, this is potentially less secure"
_err=$(curl $_retry --silent --show-error --fail --location "$1" --output "$2" 2>&1)
_status=$?
else
_err=$(curl $_retry --proto '=https' --tlsv1.2 --silent --show-error --fail --location "$1" --output "$2" 2>&1)
_status=$?
fi
fi
if [ -n "$_err" ]; then
echo "$_err" >&2
# A trailing "404" in curl's error output means the artifact doesn't exist.
if echo "$_err" | grep -q 404$; then
err "installer for platform '$3' not found, this may be unsupported"
fi
fi
return $_status
elif [ "$_dld" = wget ]; then
# BusyBox wget lacks the TLS-hardening flags entirely, so skip the probes.
if [ "$(wget -V 2>&1|head -2|tail -1|cut -f1 -d" ")" = "BusyBox" ]; then
echo "Warning: using the BusyBox version of wget. Not enforcing strong cipher suites for TLS or TLS v1.2, this is potentially less secure"
_err=$(wget "$1" -O "$2" 2>&1)
_status=$?
else
get_ciphersuites_for_wget
_ciphersuites="$RETVAL"
if [ -n "$_ciphersuites" ]; then
_err=$(wget --https-only --secure-protocol=TLSv1_2 --ciphers "$_ciphersuites" "$1" -O "$2" 2>&1)
_status=$?
else
echo "Warning: Not enforcing strong cipher suites for TLS, this is potentially less secure"
if ! check_help_for "$3" wget --https-only --secure-protocol; then
echo "Warning: Not enforcing TLS v1.2, this is potentially less secure"
_err=$(wget "$1" -O "$2" 2>&1)
_status=$?
else
_err=$(wget --https-only --secure-protocol=TLSv1_2 "$1" -O "$2" 2>&1)
_status=$?
fi
fi
fi
if [ -n "$_err" ]; then
echo "$_err" >&2
if echo "$_err" | grep -q ' 404 Not Found$'; then
err "installer for platform '$3' not found, this may be unsupported"
fi
fi
return $_status
else
err "Unknown downloader" # should not reach here
fi
}
# Probe whether a command's --help output documents every flag we need.
#   $1: platform string (used only to special-case very old macOS)
#   $2: command to probe
#   $@: remaining args are the flags/strings to look for in the help text
# Returns 0 iff every flag is documented (always fails on macOS < 10.13,
# whose stock tools predate the modern TLS options).
# Fixes: macOS 11+ previously fell into a catch-all that printed a spurious
# "unknown macOS major version" warning (with a malformed message) on every
# modern macOS release; only the 10.x line needs version-specific handling.
check_help_for() {
    local _arch
    local _cmd
    local _arg
    _arch="$1"
    shift
    _cmd="$1"
    shift

    local _category
    # Newer curl hides most flags behind "--help all"; detect that layout.
    if "$_cmd" --help | grep -q 'For all options use the manual or "--help all".'; then
        _category="all"
    else
        _category=""
    fi

    case "$_arch" in
        *darwin*)
            if check_cmd sw_vers; then
                case $(sw_vers -productVersion) in
                    10.*)
                        # If we're running on macOS, older than 10.13, then we always
                        # fail to find these options to force fallback
                        if [ "$(sw_vers -productVersion | cut -d. -f2)" -lt 13 ]; then
                            # Older than 10.13
                            echo "Warning: Detected macOS platform older than 10.13"
                            return 1
                        fi
                        ;;
                    *)
                        # macOS 11 (Big Sur) and newer ship tooling that
                        # supports the modern TLS options; nothing to do.
                        ;;
                esac
            fi
            ;;
    esac

    for _arg in "$@"; do
        if ! "$_cmd" --help "$_category" | grep -q -- "$_arg"; then
            return 1
        fi
    done

    true # not strictly needed
}
# Probe curl for retry support; RETVAL receives the flags to pass to curl
# ("--retry 3", optionally with "-C -"), or the empty string if unsupported.
check_curl_for_retry_support() {
    local _retry_flags=""
    # "notspecified" arch: allows for old OSes using macports, homebrew, etc.
    if check_help_for "notspecified" "curl" "--retry"; then
        if check_help_for "notspecified" "curl" "--continue-at"; then
            # "-C -" tells curl to automatically find where to resume the download when retrying.
            _retry_flags="--retry 3 -C -"
        else
            _retry_flags="--retry 3"
        fi
    fi
    RETVAL="$_retry_flags"
}
# Return cipher suite string specified by user, otherwise return strong TLS 1.2-1.3 cipher suites
# if support by local tools is detected. Detection currently supports these curl backends:
# GnuTLS and OpenSSL (possibly also LibreSSL and BoringSSL). Result goes in RETVAL and can be empty.
# Improvement: `curl -V` is invariant across the branches, so it is now run
# once and its output reused (the original spawned curl up to four times).
get_ciphersuites_for_curl() {
    if [ -n "${RUSTUP_TLS_CIPHERSUITES-}" ]; then
        # user specified custom cipher suites, assume they know what they're doing
        RETVAL="$RUSTUP_TLS_CIPHERSUITES"
        return
    fi

    local _openssl_syntax="no"
    local _gnutls_syntax="no"
    local _backend_supported="yes"
    local _curl_version
    # Capture the backend banner once; an empty string falls through to
    # "unsupported backend", matching the original's failure behavior.
    _curl_version="$(curl -V)" || _curl_version=""
    # NOTE: the OpenSSL match is case-sensitive on purpose (as in upstream);
    # the LibreSSL/BoringSSL/GnuTLS matches are case-insensitive.
    if printf '%s\n' "$_curl_version" | grep -q ' OpenSSL/'; then
        _openssl_syntax="yes"
    elif printf '%s\n' "$_curl_version" | grep -iq ' LibreSSL/'; then
        _openssl_syntax="yes"
    elif printf '%s\n' "$_curl_version" | grep -iq ' BoringSSL/'; then
        _openssl_syntax="yes"
    elif printf '%s\n' "$_curl_version" | grep -iq ' GnuTLS/'; then
        _gnutls_syntax="yes"
    else
        _backend_supported="no"
    fi

    local _args_supported="no"
    if [ "$_backend_supported" = "yes" ]; then
        # "unspecified" is for arch, allows for possibility old OS using macports, homebrew, etc.
        if check_help_for "notspecified" "curl" "--tlsv1.2" "--ciphers" "--proto"; then
            _args_supported="yes"
        fi
    fi

    local _cs=""
    if [ "$_args_supported" = "yes" ]; then
        if [ "$_openssl_syntax" = "yes" ]; then
            _cs=$(get_strong_ciphersuites_for "openssl")
        elif [ "$_gnutls_syntax" = "yes" ]; then
            _cs=$(get_strong_ciphersuites_for "gnutls")
        fi
    fi

    RETVAL="$_cs"
}
# Return cipher suite string specified by user, otherwise return strong TLS 1.2-1.3 cipher suites
# if support by local tools is detected. Detection currently supports these wget backends:
# GnuTLS and OpenSSL (possibly also LibreSSL and BoringSSL). Result goes in RETVAL and can be empty.
# Improvement: `wget -V` is invariant across the branches, so it is now run
# once and its output reused (the original spawned wget up to twice).
get_ciphersuites_for_wget() {
    if [ -n "${RUSTUP_TLS_CIPHERSUITES-}" ]; then
        # user specified custom cipher suites, assume they know what they're doing
        RETVAL="$RUSTUP_TLS_CIPHERSUITES"
        return
    fi

    local _cs=""
    local _wget_version
    # Capture the build banner once; an empty string matches neither backend
    # marker, matching the original's failure behavior.
    _wget_version="$(wget -V)" || _wget_version=""
    if printf '%s\n' "$_wget_version" | grep -q '\-DHAVE_LIBSSL'; then
        # "unspecified" is for arch, allows for possibility old OS using macports, homebrew, etc.
        if check_help_for "notspecified" "wget" "TLSv1_2" "--ciphers" "--https-only" "--secure-protocol"; then
            _cs=$(get_strong_ciphersuites_for "openssl")
        fi
    elif printf '%s\n' "$_wget_version" | grep -q '\-DHAVE_LIBGNUTLS'; then
        # "unspecified" is for arch, allows for possibility old OS using macports, homebrew, etc.
        if check_help_for "notspecified" "wget" "TLSv1_2" "--ciphers" "--https-only" "--secure-protocol"; then
            _cs=$(get_strong_ciphersuites_for "gnutls")
        fi
    fi

    RETVAL="$_cs"
}
# Print strong TLS 1.2-1.3 cipher suites in the syntax of the named backend.
# TLS 1.2 excludes non-ECDHE and non-AEAD cipher suites; DHE is excluded due
# to bad DH params often found on servers (see RFC 7919). Sequence matches or
# is similar to Firefox 68 ESR with weak cipher suites disabled.
# $1 must be "openssl" or "gnutls"; anything else prints nothing.
get_strong_ciphersuites_for() {
    case "$1" in
        openssl)
            # OpenSSL is forgiving of unknown values, no problems with TLS 1.3 values on versions that don't support it yet.
            echo "TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_256_GCM_SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384"
            ;;
        gnutls)
            # GnuTLS isn't forgiving of unknown values, so this may require a GnuTLS version that supports TLS 1.3 even if wget doesn't.
            # Begin with SECURE128 (and higher) then remove/add to build cipher suites. Produces same 9 cipher suites as OpenSSL but in slightly different order.
            echo "SECURE128:-VERS-SSL3.0:-VERS-TLS1.0:-VERS-TLS1.1:-VERS-DTLS-ALL:-CIPHER-ALL:-MAC-ALL:-KX-ALL:+AEAD:+ECDHE-ECDSA:+ECDHE-RSA:+AES-128-GCM:+CHACHA20-POLY1305:+AES-256-GCM"
            ;;
    esac
}
# Entry point: run the installer, converting any failure into exit status 1.
main "$@" || exit 1