[ci/builds] Reuse binary builds for Docker. Static link ONNX runtime (#3382)
Commit 307d7d68fc (parent af412132c2)
12 changed files with 467 additions and 1211 deletions

351 .github/workflows/reusable_docker.yml (vendored)
@ -8,7 +8,7 @@ on:
tag-prefix:
required: true
type: string
description: "The prefix of the Docker image tag. i.e. 'nightly' for 'surrealdb/surrealdb:nightly-dev' or 'surrealdb/surrealdb:nightly-fdb'."
description: "The prefix of the Docker image tag. i.e. 'nightly' for 'surrealdb/surrealdb:nightly-dev'."
build:
required: false
type: boolean
@ -29,10 +29,6 @@ on:
required: false
DOCKER_PASS:
required: false
AWS_CI_ACCESS_KEY_ID:
required: false
AWS_CI_SECRET_ACCESS_KEY:
required: false

defaults:
run:
@ -43,328 +39,105 @@ jobs:
name: Prepare steps
runs-on: ubuntu-latest
outputs:
with-ecr: ${{ steps.aws-credentials.outputs.with-ecr }}
tag-prefix: ${{ steps.tag-prefix.outputs.tag-prefix }}
build-matrix: ${{ steps.set-matrix.outputs.build-matrix }}
push-matrix: ${{ steps.set-matrix.outputs.push-matrix }}
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:

- name: Check if AWS credentials are set
id: aws-credentials
run: |
if [[ "${{ secrets.AWS_CI_ACCESS_KEY_ID }}" == "" ]]; then
echo "###"
echo "### AWS credentials are not set. Will skip any AWS ECR action."
echo "###"

echo "with-ecr=false" >> $GITHUB_OUTPUT
else
echo "with-ecr=true" >> $GITHUB_OUTPUT
fi
- name: Sanitize tag name
id: tag-prefix
run: |
echo "tag-prefix=$(echo '${{ inputs.tag-prefix }}' | sed 's/[^a-zA-Z0-9_.-]/-/g' | tr '[:upper:]' '[:lower:]')" >> $GITHUB_OUTPUT

# Define matrix here so we don't need to search for it when making changes
- name: Set matrix
id: set-matrix
env:
BUILD_MATRIX: |
MATRIX: |
include:
########
# Binary image
########
- name: Binary image
dockerfile: Dockerfile.binary
platform: amd64
runner: ["self-hosted", "amd64", "builder"]
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-binary
########################################
# Base images
########################################
# Prod AMD64 image
# Prod image
- &base_image
name: Base image
dockerfile: Dockerfile
build-target: prod
platform: amd64
runner: ["self-hosted", "amd64", "builder"]
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}
# Prod ARM64 image
name: Prod image
build-target: prod-ci
# Dev image
- <<: *base_image
platform: arm64
runner: ["self-hosted", "arm64", "builder"]
tag: arm64-${{ steps.tag-prefix.outputs.tag-prefix }}
# Dev AMD64 image
- <<: *base_image
build-target: dev
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-dev
# Dev ARM64 image
- <<: *base_image
build-target: dev
platform: arm64
runner: ["self-hosted", "arm64", "builder"]
tag: arm64-${{ steps.tag-prefix.outputs.tag-prefix }}-dev

########################################
# FoundationDB images (FDB client library is only available for amd64)
########################################
# Prod AMD64 image
- &fdb_image
name: FDB image
dockerfile: Dockerfile.fdb
build-target: prod
platform: amd64
runner: ["self-hosted", "amd64", "builder"]
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-fdb
# Dev AMD64 image
- <<: *fdb_image
build-target: dev
tag: amd64-${{ steps.tag-prefix.outputs.tag-prefix }}-fdb-dev

PUSH_MATRIX: |
include:
########################################
# Base images
########################################
# Prod images
- &base_image
platforms: linux/amd64,linux/arm64
tag: ${{ steps.tag-prefix.outputs.tag-prefix }}
tag-latest: latest
# Dev images
- <<: *base_image
platforms: linux/amd64,linux/arm64
tag: ${{ steps.tag-prefix.outputs.tag-prefix }}-dev
tag-latest: latest-dev

# TODO: Decide whether or not we want a dedicated image for FoundationDB
# ########################################
# # FoundationDB images (FDB client library is only available for amd64)
# ########################################
# # Prod images
# - &fdb_image
# platforms: linux/amd64
# tag: ${{ steps.tag-prefix.outputs.tag-prefix }}-fdb
# tag-latest: latest-fdb
# # Dev images
# - <<: *fdb_image
# tag: ${{ steps.tag-prefix.outputs.tag-prefix }}-fdb-dev
# tag-latest: latest-fdb-dev
name: Dev image
build-target: dev-ci
tag-suffix: -dev

run: |
echo '${{ env.BUILD_MATRIX }}' > build-matrix.yaml
echo "build-matrix=$(yq -o json -I=0 build-matrix.yaml)" >> $GITHUB_OUTPUT
echo '${{ env.PUSH_MATRIX }}' > push-matrix.yaml
echo "push-matrix=$(yq -o json -I=0 push-matrix.yaml)" >> $GITHUB_OUTPUT
|
||||
|
||||
build:
|
||||
name: Build ${{ matrix.name }} (${{ matrix.build-target || 'default' }}, ${{ matrix.platform }})
|
||||
runs-on: ${{ matrix.runner }}
|
||||
echo '${{ env.MATRIX }}' > matrix.yaml
|
||||
echo "matrix=$(yq -o json -I=0 matrix.yaml)" >> $GITHUB_OUTPUT
|
||||
|
||||
docker:
|
||||
name: Build ${{ matrix.name }} (${{ matrix.build-target }})
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare
|
||||
if: ${{ inputs.build }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.prepare.outputs.build-matrix) }}
|
||||
matrix: ${{ fromJson(needs.prepare.outputs.matrix) }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.git-ref }}
|
||||
|
||||
- name: Checkout docker
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
path: _docker
|
||||
|
||||
# Replace docker files. It allows us to test new Dockerfiles with workflow_dispatch and a custom git ref.
|
||||
# When triggered by a push or a schedule, this git ref will be the same as 'inputs.git-ref'
|
||||
- name: Replace docker files
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
rm -rf docker .dockerignore
|
||||
mv _docker/docker .
|
||||
mv _docker/.dockerignore .
|
||||
rm -rf _docker
|
||||
|
||||
- name: Cleanup
|
||||
if: ${{ always() }}
|
||||
run: |
|
||||
set -x
|
||||
set +e # Don't fail, do what we can
|
||||
docker system prune -f
|
||||
docker image prune -a -f
|
||||
|
||||
docker image ls
|
||||
docker ps -a
|
||||
df -h
|
||||
|
||||
- name: Set up Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and export to Docker.
|
||||
uses: docker/build-push-action@v5
|
||||
id: build
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
context: .
|
||||
path: artifacts
|
||||
|
||||
- name: Build Docker image (amd64)
|
||||
uses: docker/build-push-action@v5
|
||||
id: build-amd64
|
||||
with:
|
||||
context: artifacts
|
||||
load: true
|
||||
platforms: linux/${{ matrix.platform }}
|
||||
file: docker/${{ matrix.dockerfile }}
|
||||
platforms: linux/amd64
|
||||
file: docker/Dockerfile
|
||||
target: ${{ matrix.build-target }}
|
||||
tags: surrealdb-ci:${{ matrix.tag }}-${{ github.run_id }}
|
||||
tags: surrealdb-local:amd64
|
||||
build-args: |
|
||||
ARTIFACT_PREFIX=surreal-${{ inputs.tag-prefix }}
|
||||
|
||||
- name: Build Docker image (arm64)
|
||||
uses: docker/build-push-action@v5
|
||||
id: build-arm64
|
||||
with:
|
||||
context: artifacts
|
||||
load: true
|
||||
platforms: linux/arm64
|
||||
file: docker/Dockerfile
|
||||
target: ${{ matrix.build-target }}
|
||||
tags: surrealdb-local:arm64
|
||||
build-args: |
|
||||
ARTIFACT_PREFIX=surreal-${{ inputs.tag-prefix }}
|
||||
|
||||
# Start the docker image as server and wait until it is ready
|
||||
- name: Test the Docker image
|
||||
run: |
|
||||
docker run --net=host --rm ${{ steps.build.outputs.imageid }} start 2>&1 >surreal.log &
|
||||
run: docker run --platform linux/amd64 --rm surrealdb-local:amd64 version
|
||||
|
||||
retries=5
|
||||
until docker run --net=host --rm ${{ steps.build.outputs.imageid }} is-ready; do
|
||||
retries=$((retries-1))
|
||||
if [[ $retries -eq 0 ]]; then
|
||||
echo "###"
|
||||
echo "### The container is not ready after 5 seconds!"
|
||||
echo "###"
|
||||
cat surreal.log
|
||||
echo "###"
|
||||
echo "### ERROR: The docker image is not valid. Aborting."
|
||||
echo "###"
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
- name: Configure AWS credentials
|
||||
if: ${{ needs.prepare.outputs.with-ecr == 'true' }}
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
if: ${{ needs.prepare.outputs.with-ecr == 'true' }}
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
|
||||
- name: Push individual images to CI registry.
|
||||
if: ${{ needs.prepare.outputs.with-ecr == 'true' }}
|
||||
run: |
|
||||
docker tag ${{ steps.build.outputs.imageid }} ${{ steps.login-ecr.outputs.registry }}/surrealdb-ci:${{ matrix.tag }}-${{ github.run_id }}
|
||||
docker push ${{ steps.login-ecr.outputs.registry }}/surrealdb-ci:${{ matrix.tag }}-${{ github.run_id }}
|
||||
|
||||
- name: Cleanup
|
||||
if: ${{ always() }}
|
||||
run: |
|
||||
set -x
|
||||
set +e # Don't fail, do what we can
|
||||
docker system prune -f
|
||||
docker image rm ${{ steps.build.outputs.imageid }}
|
||||
docker image rm ${{ steps.login-ecr.outputs.registry }}/surrealdb-ci:${{ matrix.tag }}-${{ github.run_id }}
|
||||
docker system prune -f
|
||||
|
||||
docker image ls
|
||||
docker ps -a
|
||||
df -h
|
||||
|
||||
# Push a multi-arch manifest to the CI registry
|
||||
push-all-to-ecr-ci:
|
||||
name: Push ${{ matrix.tag }} to CI registry
|
||||
runs-on: ubuntu-latest
|
||||
needs: [prepare, build]
|
||||
if: ${{ inputs.build && needs.prepare.outputs.with-ecr == 'true' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.prepare.outputs.push-matrix) }}
|
||||
steps:
|
||||
# Checkout the workflow code, we don't need the code to build SurrealDB, that's why we don't checkout "input.git-ref" here
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Push multi-arch Docker manifest to CI registry
|
||||
- name: Push to DockerHub
|
||||
uses: docker/build-push-action@v5
|
||||
if: ${{ inputs.push }}
|
||||
with:
|
||||
context: .
|
||||
file: ./docker/Dockerfile.multi-arch
|
||||
platforms: ${{ matrix.platforms }}
|
||||
context: artifacts
|
||||
push: true
|
||||
tags: ${{ steps.login-ecr.outputs.registry }}/surrealdb-ci:${{ matrix.tag }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
file: docker/Dockerfile
|
||||
target: ${{ matrix.build-target }}
|
||||
tags: surrealdb/surrealdb:${{ inputs.tag-prefix }}${{ matrix.tag-suffix }}
|
||||
build-args: |
|
||||
IMAGE_REPO=${{ steps.login-ecr.outputs.registry }}/surrealdb-ci
|
||||
TAG=${{ matrix.tag }}-${{ github.run_id }}
|
||||
ARTIFACT_PREFIX=surreal-${{ inputs.tag-prefix }}
|
||||
|
||||
# Push a multi-arch manifest to DockerHub
|
||||
push-all-to-dockerhub:
|
||||
name: Push ${{ matrix.tag }} to DockerHub
|
||||
runs-on: ubuntu-latest
|
||||
needs: [prepare]
|
||||
if: ${{ inputs.push }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.prepare.outputs.push-matrix) }}
|
||||
steps:
|
||||
# Checkout the workflow code, we don't need the code to build SurrealDB, that's why we don't checkout "input.git-ref" here
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Configure DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASS }}
|
||||
|
||||
- name: Push multi-arch Docker manifest to DockerHub
|
||||
- name: Push to DockerHub (latest)
|
||||
uses: docker/build-push-action@v5
|
||||
if: ${{ inputs.push && inputs.latest }}
|
||||
with:
|
||||
context: .
|
||||
file: ./docker/Dockerfile.multi-arch
|
||||
platforms: ${{ matrix.platforms }}
|
||||
context: artifacts
|
||||
push: true
|
||||
tags: surrealdb/surrealdb:${{ matrix.tag }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
file: docker/Dockerfile
|
||||
target: ${{ matrix.build-target }}
|
||||
tags: surrealdb/surrealdb:latest${{ matrix.tag-suffix }}
|
||||
build-args: |
|
||||
IMAGE_REPO=${{ steps.login-ecr.outputs.registry }}/surrealdb-ci
|
||||
TAG=${{ matrix.tag }}-${{ github.run_id }}
|
||||
|
||||
- name: Tag multi-arch Docker manifest as latest
|
||||
uses: docker/build-push-action@v5
|
||||
if: ${{ inputs.latest }}
|
||||
with:
|
||||
context: .
|
||||
file: ./docker/Dockerfile.multi-arch
|
||||
platforms: ${{ matrix.platforms }}
|
||||
push: true
|
||||
tags: surrealdb/surrealdb:${{ matrix.tag-latest }}
|
||||
build-args: |
|
||||
IMAGE_REPO=${{ steps.login-ecr.outputs.registry }}/surrealdb-ci
|
||||
TAG=${{ matrix.tag }}-${{ github.run_id }}
|
||||
|
||||
ARTIFACT_PREFIX=surreal-${{ inputs.tag-prefix }}
|
||||
|
|
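For reference, a minimal caller of this reusable workflow would look like the sketch below. The input names (git-ref, tag-prefix, build, push, latest) come from this diff; the job name and the concrete values are illustrative only.

  jobs:
    docker:
      uses: ./.github/workflows/reusable_docker.yml
      with:
        git-ref: main          # illustrative ref
        tag-prefix: nightly    # yields surrealdb/surrealdb:nightly and nightly-dev
        build: true            # build and smoke-test images from the downloaded binary artifacts
        push: true             # push multi-arch images to DockerHub
        latest: false          # when true, also tag latest / latest-dev
      secrets: inherit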
249 .github/workflows/reusable_publish_version.yml (vendored)
|
@ -39,6 +39,21 @@ on:
|
|||
type: boolean
|
||||
default: true
|
||||
description: "Enable ML support in binaries"
|
||||
rust_version:
|
||||
required: false
|
||||
type: string
|
||||
default: "1.75.0"
|
||||
description: "The Rust version to use for building binaries"
|
||||
onnx_version:
|
||||
required: false
|
||||
type: string
|
||||
default: "1.16.3"
|
||||
description: "The ONNX library version"
|
||||
secrets:
|
||||
AWS_CI_ACCESS_KEY_ID:
|
||||
description: "AWS access key ID"
|
||||
AWS_CI_SECRET_ACCESS_KEY:
|
||||
description: "AWS secret access key"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
|
@ -56,7 +71,7 @@ jobs:
|
|||
- name: Install stable toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: 1.74.1
|
||||
toolchain: ${{ inputs.rust_version }}
|
||||
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
|
@ -64,7 +79,10 @@ jobs:
|
|||
ref: ${{ inputs.git-ref }}
|
||||
|
||||
- name: Install a TOML parser
|
||||
run: cargo install --force --locked --version 0.8.1 taplo-cli
|
||||
run: |
|
||||
curl -L https://github.com/tamasfe/taplo/releases/download/0.8.1/taplo-full-linux-x86_64.gz | gunzip - > taplo
|
||||
chmod +x taplo
|
||||
sudo mv taplo /usr/bin/taplo
|
||||
|
||||
- name: Configure git
|
||||
run: |
|
||||
|
@ -77,7 +95,7 @@ jobs:
|
|||
run: |
|
||||
set -x
|
||||
|
||||
currentVersion=$(/home/runner/.cargo/bin/taplo get -f lib/Cargo.toml "package.version")
|
||||
currentVersion=$(taplo get -f lib/Cargo.toml "package.version")
|
||||
|
||||
if [[ $currentVersion == *"-beta"* ]]; then
|
||||
git push origin --delete releases/stable || true
|
||||
|
@ -107,7 +125,7 @@ jobs:
|
|||
run: |
|
||||
set -x
|
||||
|
||||
currentVersion=$(/home/runner/.cargo/bin/taplo get -f lib/Cargo.toml "package.version")
|
||||
currentVersion=$(taplo get -f lib/Cargo.toml "package.version")
|
||||
|
||||
if [[ $currentVersion == *"-beta"* ]]; then
|
||||
if [[ "${{ inputs.bump-version }}" == "true" ]]; then
|
||||
|
@ -152,7 +170,7 @@ jobs:
|
|||
run: |
|
||||
set -x
|
||||
|
||||
version=$(/home/runner/.cargo/bin/taplo get -f lib/Cargo.toml "package.version")
|
||||
version=$(taplo get -f lib/Cargo.toml "package.version")
|
||||
|
||||
if [[ "${{ inputs.publish }}" == "true" && ("${{ inputs.environment }}" == "beta" || "${{ inputs.environment }}" == "stable") ]]; then
|
||||
echo "git-ref=v${version}" >> $GITHUB_OUTPUT
|
||||
|
@ -179,7 +197,7 @@ jobs:
|
|||
- name: Install stable toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: 1.75.0
|
||||
toolchain: ${{ inputs.rust_version }}
|
||||
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
|
@ -221,7 +239,6 @@ jobs:
|
|||
needs: [prepare-vars]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
|
@ -230,7 +247,7 @@ jobs:
|
|||
- name: Install stable toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: 1.75.0
|
||||
toolchain: ${{ inputs.rust_version }}
|
||||
targets: wasm32-unknown-unknown
|
||||
components: rustfmt, clippy
|
||||
|
||||
|
@ -249,27 +266,69 @@ jobs:
|
|||
- name: Check clippy
|
||||
run: cargo make ci-clippy
|
||||
|
||||
docker-build:
|
||||
name: Build Docker images
|
||||
docker-builder:
|
||||
name: Prepare docker builder
|
||||
runs-on: ubuntu-latest
|
||||
needs: [prepare-vars]
|
||||
uses: ./.github/workflows/reusable_docker.yml
|
||||
with:
|
||||
git-ref: ${{ needs.prepare-vars.outputs.git-ref }}
|
||||
tag-prefix: ${{ needs.prepare-vars.outputs.name }}
|
||||
build: true
|
||||
push: false
|
||||
secrets: inherit
|
||||
outputs:
|
||||
name: ${{ steps.image.outputs.name }}
|
||||
tag: ${{ steps.image.outputs.tag }}
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
|
||||
- name: Prepare docker image name
|
||||
id: image
|
||||
run: |
|
||||
set -x
|
||||
|
||||
# Use the github branch name so we can use modified builders on custom branches
|
||||
tag=$(echo ${{ github.ref_name }} | sed -e 's/[^a-zA-Z0-9]/-/g')
|
||||
|
||||
echo "name=${{ steps.login-ecr.outputs.registry }}/surrealdb-builder" >> $GITHUB_OUTPUT
|
||||
echo "tag=${tag}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build & Push builder image
|
||||
uses: docker/build-push-action@v5
|
||||
id: build
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
file: ./docker/Dockerfile
|
||||
target: builder
|
||||
cache-from: |
|
||||
type=registry,ref=${{ steps.image.outputs.name }}:${{ steps.image.outputs.tag }}
|
||||
type=registry,ref=${{ steps.image.outputs.name }}:main
|
||||
cache-to: type=inline
|
||||
push: true
|
||||
tags: ${{ steps.image.outputs.name }}:${{ steps.image.outputs.tag }}
|
||||
build-args: |
|
||||
RUST_VERSION=${{ inputs.rust_version }}
|
||||
|
||||
build:
|
||||
name: Build ${{ matrix.arch }} binary
|
||||
needs: [prepare-vars]
|
||||
needs: [prepare-vars, docker-builder]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# MacOS amd64
|
||||
- arch: x86_64-apple-darwin
|
||||
runner: macos-latest-xl
|
||||
runner: macos-latest-large
|
||||
file: surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64
|
||||
build-step: |
|
||||
set -x
|
||||
|
@ -282,21 +341,28 @@ jobs:
|
|||
if [[ "${{ inputs.http-compression }}" == "true" ]]; then
|
||||
features=${features},http-compression
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.ml }}" == "true" ]]; then
|
||||
features=${features},ml
|
||||
|
||||
# Download libonnxruntime's static library and tell ORT crate to use it
|
||||
mkdir /tmp/onnxruntime
|
||||
curl -sSL https://github.com/surrealdb/onnxruntime-build/releases/download/v${{ inputs.onnx_version }}/onnxruntime-osx-x86_64-static_lib-${{ inputs.onnx_version }}.tgz | \
|
||||
tar -xz -C /tmp/onnxruntime/
|
||||
export ORT_STRATEGY=system ORT_LIB_LOCATION=/tmp/onnxruntime/lib
|
||||
fi
|
||||
|
||||
cargo build --features $features --release --locked --target x86_64-apple-darwin
|
||||
|
||||
# Package
|
||||
cp target/x86_64-apple-darwin/release/surreal surreal
|
||||
chmod +x surreal
|
||||
./surreal version
|
||||
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64.tgz surreal
|
||||
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.darwin-amd64.txt
|
||||
|
||||
# MacOS arm64
|
||||
- arch: aarch64-apple-darwin
|
||||
runner: macos-latest-xl
|
||||
runner: macos-latest-xlarge
|
||||
file: surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64
|
||||
build-step: |
|
||||
set -x
|
||||
|
@ -309,14 +375,21 @@ jobs:
|
|||
if [[ "${{ inputs.http-compression }}" == "true" ]]; then
|
||||
features=${features},http-compression
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.ml }}" == "true" ]]; then
|
||||
features=${features},ml
|
||||
|
||||
# Download libonnxruntime's static library and tell ORT crate to use it
|
||||
mkdir /tmp/onnxruntime
|
||||
curl -sSL https://github.com/surrealdb/onnxruntime-build/releases/download/v${{ inputs.onnx_version }}/onnxruntime-osx-arm64-static_lib-${{ inputs.onnx_version }}.tgz | \
|
||||
tar -xz -C /tmp/onnxruntime/
|
||||
export ORT_STRATEGY=system ORT_LIB_LOCATION=/tmp/onnxruntime/lib
|
||||
fi
|
||||
cargo build --features $features --release --locked --target aarch64-apple-darwin
|
||||
|
||||
# Package
|
||||
cp target/aarch64-apple-darwin/release/surreal surreal
|
||||
chmod +x surreal
|
||||
./surreal version
|
||||
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64.tgz surreal
|
||||
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.darwin-arm64.txt
|
||||
|
||||
|
@ -325,32 +398,43 @@ jobs:
|
|||
runner: ["self-hosted", "amd64", "builder"]
|
||||
file: surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64
|
||||
build-step: |
|
||||
set -x
|
||||
|
||||
# Build
|
||||
features=storage-tikv
|
||||
if [[ "${{ inputs.http-compression }}" == "true" ]]; then
|
||||
features=${features},http-compression
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.ml }}" == "true" ]]; then
|
||||
features=${features},ml
|
||||
|
||||
# Download libonnxruntime's static library and tell ORT crate to use it
|
||||
tmpdir=$(mktemp -d)
|
||||
curl -sSL https://github.com/surrealdb/onnxruntime-build/releases/download/v${{ inputs.onnx_version }}/onnxruntime-linux-x64-static_lib-${{ inputs.onnx_version }}.tgz | \
|
||||
tar -xz -C $tmpdir
|
||||
export ORT_STRATEGY=system ORT_LIB_LOCATION=$tmpdir/lib
|
||||
fi
|
||||
docker build \
|
||||
--platform linux/amd64 \
|
||||
--build-arg="CARGO_EXTRA_FEATURES=${features}" \
|
||||
--build-arg="SURREAL_BUILD_METADATA=${SURREAL_BUILD_METADATA}" \
|
||||
-t binary \
|
||||
-f docker/Dockerfile.binary \
|
||||
.
|
||||
docker create --name binary binary
|
||||
docker cp binary:/surrealdb/target/release/surreal surreal
|
||||
|
||||
|
||||
docker run \
|
||||
--rm -t \
|
||||
--pull always \
|
||||
-v $(pwd):/surrealdb \
|
||||
-e SURREAL_BUILD_METADATA=$SURREAL_BUILD_METADATA \
|
||||
-e ORT_STRATEGY=$ORT_STRATEGY \
|
||||
-e ORT_LIB_LOCATION=$ORT_LIB_LOCATION \
|
||||
-v $ORT_LIB_LOCATION:$ORT_LIB_LOCATION \
|
||||
${{ needs.docker-builder.outputs.name }}:${{ needs.docker-builder.outputs.tag }} \
|
||||
--target x86_64-unknown-linux-gnu --features ${features} --release --locked
|
||||
|
||||
# Package
|
||||
chmod +x surreal
|
||||
./surreal version
|
||||
cp target/x86_64-unknown-linux-gnu/release/surreal surreal
|
||||
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64.tgz surreal
|
||||
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.linux-amd64.txt
|
||||
|
||||
# Verify the binary is compatible with various Linux distributions
|
||||
docker run --platform linux/amd64 --rm -t -v ./target/x86_64-unknown-linux-gnu/release/surreal:/surreal ubuntu:20.04 /surreal version
|
||||
docker run --platform linux/amd64 --rm -t -v ./target/x86_64-unknown-linux-gnu/release/surreal:/surreal rockylinux:8 /surreal version
|
||||
docker run --platform linux/amd64 --rm -t -v ./target/x86_64-unknown-linux-gnu/release/surreal:/surreal debian:11 /surreal version
|
||||
|
||||
# Linux arm64
|
||||
- arch: aarch64-unknown-linux-gnu
|
||||
runner: ["self-hosted", "arm64", "builder"]
|
||||
|
@ -363,25 +447,38 @@ jobs:
|
|||
if [[ "${{ inputs.http-compression }}" == "true" ]]; then
|
||||
features=${features},http-compression
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.ml }}" == "true" ]]; then
|
||||
features=${features},ml
|
||||
|
||||
# Download libonnxruntime's static library and tell ORT crate to use it
|
||||
tmpdir=$(mktemp -d)
|
||||
curl -sSL https://github.com/surrealdb/onnxruntime-build/releases/download/v${{ inputs.onnx_version }}/onnxruntime-linux-aarch64-static_lib-${{ inputs.onnx_version }}.tgz | \
|
||||
tar -xz -C $tmpdir
|
||||
export ORT_STRATEGY=system ORT_LIB_LOCATION=$tmpdir/lib
|
||||
fi
|
||||
docker build \
|
||||
--platform linux/arm64 \
|
||||
--build-arg="CARGO_EXTRA_FEATURES=${features}" \
|
||||
--build-arg="SURREAL_BUILD_METADATA=${SURREAL_BUILD_METADATA}" \
|
||||
-t binary \
|
||||
-f docker/Dockerfile.binary \
|
||||
.
|
||||
docker create --name binary binary
|
||||
docker cp binary:/surrealdb/target/release/surreal surreal
|
||||
|
||||
docker run \
|
||||
--rm -t \
|
||||
--pull always \
|
||||
-v $(pwd):/surrealdb \
|
||||
-e SURREAL_BUILD_METADATA=$SURREAL_BUILD_METADATA \
|
||||
-e ORT_STRATEGY=$ORT_STRATEGY \
|
||||
-e ORT_LIB_LOCATION=$ORT_LIB_LOCATION \
|
||||
-v $ORT_LIB_LOCATION:$ORT_LIB_LOCATION \
|
||||
${{ needs.docker-builder.outputs.name }}:${{ needs.docker-builder.outputs.tag }} \
|
||||
--target aarch64-unknown-linux-gnu --features ${features} --release --locked
|
||||
|
||||
# Package
|
||||
chmod +x surreal
|
||||
./surreal version
|
||||
cp target/aarch64-unknown-linux-gnu/release/surreal surreal
|
||||
tar -zcvf surreal-${{ needs.prepare-vars.outputs.name }}.linux-arm64.tgz surreal
|
||||
echo $(shasum -a 256 surreal-${{ needs.prepare-vars.outputs.name }}.linux-arm64.tgz | cut -f1 -d' ') > surreal-${{ needs.prepare-vars.outputs.name }}.linux-arm64.txt
|
||||
|
||||
# Verify the binary is compatible with various Linux distributions
|
||||
docker run --platform linux/arm64 --rm -t -v ./target/aarch64-unknown-linux-gnu/release/surreal:/surreal ubuntu:20.04 /surreal version
|
||||
docker run --platform linux/arm64 --rm -t -v ./target/aarch64-unknown-linux-gnu/release/surreal:/surreal rockylinux:8 /surreal version
|
||||
docker run --platform linux/arm64 --rm -t -v ./target/aarch64-unknown-linux-gnu/release/surreal:/surreal debian:11 /surreal version
|
||||
|
||||
# Windows amd64
|
||||
- arch: x86_64-pc-windows-msvc
|
||||
runner: windows-latest
|
||||
|
@ -397,9 +494,17 @@ jobs:
|
|||
if [[ "${{ inputs.http-compression }}" == "true" ]]; then
|
||||
features=${features},http-compression
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.ml }}" == "true" ]]; then
|
||||
features=${features},ml
|
||||
|
||||
# Download libonnxruntime's static library and tell ORT crate to use it
|
||||
tmp_dir=$(mktemp -d)
|
||||
curl -sSL https://github.com/surrealdb/onnxruntime-build/releases/download/v${{ inputs.onnx_version }}/onnxruntime-win-x64-static_lib-${{ inputs.onnx_version }}.zip -o $tmp_dir/onnxruntime.zip
|
||||
unzip -d $tmp_dir $tmp_dir/onnxruntime.zip
|
||||
export ORT_STRATEGY=system ORT_LIB_LOCATION=$tmp_dir/lib
|
||||
fi
|
||||
|
||||
cargo build --features $features --release --locked --target x86_64-pc-windows-msvc
|
||||
|
||||
# Package
|
||||
|
@ -414,28 +519,25 @@ jobs:
|
|||
with:
|
||||
ref: ${{ needs.prepare-vars.outputs.git-ref }}
|
||||
|
||||
- name: Checkout docker
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
path: _docker
|
||||
|
||||
# Replace docker files. It allows us to test new Dockerfiles with workflow_dispatch and a custom git ref.
|
||||
# When triggered by a push or a schedule, this git ref will be the same as 'inputs.git-ref'
|
||||
- name: Replace docker files
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
rm -rf docker .dockerignore
|
||||
mv _docker/docker .
|
||||
mv _docker/.dockerignore .
|
||||
rm -rf _docker
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: 1.74.1
|
||||
toolchain: ${{ inputs.rust_version }}
|
||||
targets: ${{ matrix.arch }}
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
if: ${{ matrix.arch == 'x86_64-unknown-linux-gnu' || matrix.arch == 'aarch64-unknown-linux-gnu' }}
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_CI_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_CI_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
if: ${{ matrix.arch == 'x86_64-unknown-linux-gnu' || matrix.arch == 'aarch64-unknown-linux-gnu' }}
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
|
||||
- name: Output package versions
|
||||
run: |
|
||||
set -x
|
||||
|
@ -459,7 +561,7 @@ jobs:
|
|||
|
||||
publish:
|
||||
name: Publish crate and artifacts binaries
|
||||
needs: [prepare-vars, test, lint, build, docker-build]
|
||||
needs: [prepare-vars, test, lint, build]
|
||||
if: ${{ inputs.publish }}
|
||||
environment: ${{ inputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
|
@ -472,14 +574,17 @@ jobs:
|
|||
- name: Install stable toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: 1.74.1
|
||||
toolchain: ${{ inputs.rust_version }}
|
||||
|
||||
- name: Install release-plz
|
||||
run: cargo install --force --locked --version 0.3.30 release-plz
|
||||
|
||||
- name: Install a TOML parser
|
||||
if: ${{ inputs.environment == 'beta' }}
|
||||
run: cargo install --force --locked --version 0.8.1 taplo-cli
|
||||
run: |
|
||||
curl -L https://github.com/tamasfe/taplo/releases/download/0.8.1/taplo-full-linux-x86_64.gz | gunzip - > taplo
|
||||
chmod +x taplo
|
||||
sudo mv taplo /usr/bin/taplo
|
||||
|
||||
- name: Create a temporary branch
|
||||
run: git checkout -b crate
|
||||
|
@ -500,7 +605,7 @@ jobs:
|
|||
set -x
|
||||
|
||||
# Derive crate version
|
||||
currentVersion=$(/home/runner/.cargo/bin/taplo get -f lib/Cargo.toml "package.version")
|
||||
currentVersion=$(taplo get -f lib/Cargo.toml "package.version")
|
||||
major=$(echo $currentVersion | tr "." "\n" | sed -n 1p)
|
||||
minor=$(echo $currentVersion | tr "." "\n" | sed -n 2p)
|
||||
betaNum=$(echo $currentVersion | tr "." "\n" | sed -n 4p)
|
||||
|
@ -509,7 +614,7 @@ jobs:
|
|||
# Update crate version
|
||||
sed -i "s#^version = \".*\"#version = \"${version}\"#" lib/Cargo.toml
|
||||
|
||||
- name: Patch nightly crate version
|
||||
- name: Patch nightly crate version
|
||||
if: ${{ inputs.environment == 'nightly' }}
|
||||
run: |
|
||||
# Get the date and time of the last commit
|
||||
|
@ -586,15 +691,15 @@ jobs:
|
|||
aws s3 cp --cache-control 'no-store' $file s3://download.surrealdb.com/${{ needs.prepare-vars.outputs.name }}/
|
||||
done
|
||||
|
||||
docker-publish:
|
||||
name: Publish Docker images
|
||||
docker:
|
||||
name: Docker images
|
||||
needs: [prepare-vars, publish]
|
||||
uses: ./.github/workflows/reusable_docker.yml
|
||||
with:
|
||||
git-ref: ${{ needs.prepare-vars.outputs.git-ref }}
|
||||
tag-prefix: ${{ needs.prepare-vars.outputs.name }}
|
||||
latest: ${{ inputs.latest }}
|
||||
build: false
|
||||
build: true
|
||||
push: true
|
||||
secrets: inherit
|
||||
|
||||
|
|
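Across the new build steps above, the ML-enabled builds statically link ONNX Runtime instead of letting the ort crate fetch it at build time. The pattern reduces to the sketch below (shown for linux-x64; the URL and environment variables mirror the workflow, while the version and feature list are the defaults used here and otherwise illustrative):

  ONNX_VERSION=1.16.3   # default of the new onnx_version input
  tmpdir=$(mktemp -d)
  # Download the prebuilt static library and tell the ort crate to use it:
  # ORT_STRATEGY=system disables the download, ORT_LIB_LOCATION points at its lib/ directory.
  curl -sSL "https://github.com/surrealdb/onnxruntime-build/releases/download/v${ONNX_VERSION}/onnxruntime-linux-x64-static_lib-${ONNX_VERSION}.tgz" | tar -xz -C "$tmpdir"
  export ORT_STRATEGY=system ORT_LIB_LOCATION="$tmpdir/lib"
  cargo build --features storage-tikv,ml --release --locked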
48 Cargo.lock (generated)
|
@ -1198,7 +1198,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b"
|
||||
dependencies = [
|
||||
"ciborium-io",
|
||||
"half",
|
||||
"half 1.8.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2305,6 +2305,16 @@ version = "1.8.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
|
||||
|
||||
[[package]]
|
||||
name = "half"
|
||||
version = "2.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc52e53916c08643f1b56ec082790d1e86a32e58dc5268f897f313fbae7b4872"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"crunchy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hash32"
|
||||
version = "0.2.1"
|
||||
|
@ -3544,6 +3554,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "889dca4c98efa21b1ba54ddb2bde44fd4920d910f492b618351f839d8428d79d"
|
||||
dependencies = [
|
||||
"flate2",
|
||||
"half 2.3.1",
|
||||
"lazy_static",
|
||||
"libc",
|
||||
"libloading 0.7.4",
|
||||
|
@ -3551,8 +3562,10 @@ dependencies = [
|
|||
"tar",
|
||||
"thiserror",
|
||||
"tracing",
|
||||
"ureq",
|
||||
"vswhom",
|
||||
"winapi",
|
||||
"zip",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4883,7 +4896,7 @@ version = "0.11.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5"
|
||||
dependencies = [
|
||||
"half",
|
||||
"half 1.8.2",
|
||||
"serde",
|
||||
]
|
||||
|
||||
|
@ -5492,9 +5505,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "surrealml-core"
|
||||
version = "0.0.3"
|
||||
version = "0.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6cde4ceed0b05a8f512afe7fa1a260e4a67cfbba439eccb858e189f826fc3fff"
|
||||
checksum = "3614a87a606d39a6152a82452a1aa8946d985465c425972a2ef13e078c6df59a"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
|
@ -6271,6 +6284,21 @@ version = "0.9.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
|
||||
|
||||
[[package]]
|
||||
name = "ureq"
|
||||
version = "2.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"log",
|
||||
"once_cell",
|
||||
"rustls",
|
||||
"rustls-webpki",
|
||||
"url",
|
||||
"webpki-roots",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "2.5.0"
|
||||
|
@ -6802,6 +6830,18 @@ version = "1.7.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
|
||||
|
||||
[[package]]
|
||||
name = "zip"
|
||||
version = "0.6.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"crc32fast",
|
||||
"crossbeam-utils",
|
||||
"flate2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zstd"
|
||||
version = "0.13.0"
|
||||
|
|
53 Cargo.toml
|
@ -43,7 +43,12 @@ axum-server = { version = "0.5.1", features = ["tls-rustls"] }
|
|||
base64 = "0.21.5"
|
||||
bytes = "1.5.0"
|
||||
ciborium = "0.2.1"
|
||||
clap = { version = "4.4.11", features = ["env", "derive", "wrap_help", "unicode"] }
|
||||
clap = { version = "4.4.11", features = [
|
||||
"env",
|
||||
"derive",
|
||||
"wrap_help",
|
||||
"unicode",
|
||||
] }
|
||||
futures = "0.3.29"
|
||||
futures-util = "0.3.29"
|
||||
glob = "0.3.1"
|
||||
|
@ -57,21 +62,40 @@ opentelemetry = { version = "0.19", features = ["rt-tokio"] }
|
|||
opentelemetry-otlp = { version = "0.12.0", features = ["metrics"] }
|
||||
pin-project-lite = "0.2.13"
|
||||
rand = "0.8.5"
|
||||
reqwest = { version = "0.11.22", default-features = false, features = ["blocking", "gzip"] }
|
||||
reqwest = { version = "0.11.22", default-features = false, features = [
|
||||
"blocking",
|
||||
"gzip",
|
||||
] }
|
||||
rmpv = "1.0.1"
|
||||
rustyline = { version = "12.0.0", features = ["derive"] }
|
||||
serde = { version = "1.0.193", features = ["derive"] }
|
||||
serde_cbor = "0.11.2"
|
||||
serde_json = "1.0.108"
|
||||
serde_pack = { version = "1.1.2", package = "rmp-serde" }
|
||||
surrealdb = { path = "lib", features = ["protocol-http", "protocol-ws", "rustls"] }
|
||||
surrealml-core = { version = "0.0.3", optional = true}
|
||||
surrealdb = { path = "lib", features = [
|
||||
"protocol-http",
|
||||
"protocol-ws",
|
||||
"rustls",
|
||||
] }
|
||||
surrealml-core = { version = "0.0.7", optional = true }
|
||||
tempfile = "3.8.1"
|
||||
thiserror = "1.0.50"
|
||||
tokio = { version = "1.34.0", features = ["macros", "signal"] }
|
||||
tokio-util = { version = "0.7.10", features = ["io"] }
|
||||
tower = "0.4.13"
|
||||
tower-http = { version = "0.4.4", features = ["trace", "sensitive-headers", "auth", "request-id", "util", "catch-panic", "cors", "set-header", "limit", "add-extension", "compression-full"] }
|
||||
tower-http = { version = "0.4.4", features = [
|
||||
"trace",
|
||||
"sensitive-headers",
|
||||
"auth",
|
||||
"request-id",
|
||||
"util",
|
||||
"catch-panic",
|
||||
"cors",
|
||||
"set-header",
|
||||
"limit",
|
||||
"add-extension",
|
||||
"compression-full",
|
||||
] }
|
||||
tracing = "0.1"
|
||||
tracing-opentelemetry = "0.19.0"
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
|
@ -93,7 +117,12 @@ jemallocator = "0.5.4"
|
|||
[dev-dependencies]
|
||||
assert_fs = "1.0.13"
|
||||
env_logger = "0.10.1"
|
||||
opentelemetry-proto = { version = "0.2.0", features = ["gen-tonic", "traces", "metrics", "logs"] }
|
||||
opentelemetry-proto = { version = "0.2.0", features = [
|
||||
"gen-tonic",
|
||||
"traces",
|
||||
"metrics",
|
||||
"logs",
|
||||
] }
|
||||
rcgen = "0.11.3"
|
||||
serial_test = "2.0.0"
|
||||
temp-env = { version = "0.3.6", features = ["async_closure"] }
|
||||
|
@ -116,8 +145,16 @@ depends = "$auto"
|
|||
section = "utility"
|
||||
priority = "optional"
|
||||
assets = [
|
||||
["target/release/surreal", "usr/share/surrealdb/surreal", "755"],
|
||||
["pkg/deb/README", "usr/share/surrealdb/README", "644"],
|
||||
[
|
||||
"target/release/surreal",
|
||||
"usr/share/surrealdb/surreal",
|
||||
"755",
|
||||
],
|
||||
[
|
||||
"pkg/deb/README",
|
||||
"usr/share/surrealdb/README",
|
||||
"644",
|
||||
],
|
||||
]
|
||||
extended-description = "A scalable, distributed, collaborative, document-graph database, for the realtime web."
|
||||
license-file = ["LICENSE", "4"]
|
||||
|
|
1 build.rs
@ -8,6 +8,7 @@ fn main() {
println!("cargo:rerun-if-env-changed={BUILD_METADATA}");
println!("cargo:rerun-if-changed=lib");
println!("cargo:rerun-if-changed=src");
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-changed=Cargo.toml");
println!("cargo:rerun-if-changed=Cargo.lock");
if let Some(metadata) = build_metadata() {
docker/Dockerfile
@ -1,36 +1,85 @@
#
# Dockerfile that builds a SurrealDB docker image.
#
###
# STAGE: builder
# This stage is used to build the SurrealDB linux binary
###

FROM cgr.dev/chainguard/rust:latest-dev as builder
FROM docker.io/rockylinux:8 as builder

RUN yum install -y gcc-toolset-13 git cmake llvm-toolset patch zlib-devel python3.11

# Install rust
ARG RUST_VERSION=1.75.0
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > /tmp/rustup.sh
RUN bash /tmp/rustup.sh -y --default-toolchain ${RUST_VERSION}
ENV PATH="/root/.cargo/bin:${PATH}"

RUN rustup target add x86_64-unknown-linux-gnu
RUN rustup target add aarch64-unknown-linux-gnu

ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=/opt/rh/gcc-toolset-13/root/usr/bin/aarch64-redhat-linux-gcc

WORKDIR /surrealdb

COPY docker/builder-entrypoint.sh /builder-entrypoint.sh
RUN chmod +x /builder-entrypoint.sh

ENTRYPOINT ["/builder-entrypoint.sh"]

###
# Final Images
###

#
# Development image (built on the CI environment)
#
FROM --platform=$TARGETPLATFORM cgr.dev/chainguard/glibc-dynamic:latest-dev as dev-ci

ARG TARGETPLATFORM
ARG TARGETOS
ARG TARGETARCH

ARG ARTIFACT_PREFIX

USER root
RUN apk update
RUN apk add patch clang curl gcc cmake

ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-unknown-linux-gnu-gcc
COPY ${ARTIFACT_PREFIX}.${TARGETOS}-${TARGETARCH}/surreal /surreal

RUN mkdir /surrealdb
WORKDIR /surrealdb
COPY . /surrealdb/
RUN chmod +x /surreal

RUN cargo build --features storage-tikv --release --locked
ENTRYPOINT ["/surreal"]

#
# Development image
# Development image (built on the CI environment)
#
FROM --platform=$TARGETPLATFORM cgr.dev/chainguard/glibc-dynamic:latest as prod-ci

ARG TARGETPLATFORM

COPY --from=dev-ci /surreal /surreal

ENTRYPOINT ["/surreal"]

#
# Development image (built locally)
#
FROM cgr.dev/chainguard/glibc-dynamic:latest-dev as dev

ARG SURREALDB_BINARY=target/release/surreal

COPY ${SURREALDB_BINARY} /surreal

RUN chmod +x /surreal

USER root
COPY --from=builder /surrealdb/target/release/surreal /surreal

ENTRYPOINT ["/surreal"]

#
# Production image
# Production image (built locally)
#
FROM cgr.dev/chainguard/glibc-dynamic:latest as prod

COPY --from=builder /surrealdb/target/release/surreal /surreal
COPY --from=dev /surreal /surreal

ENTRYPOINT ["/surreal"]
|
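As a rough local equivalent of what the workflow does with the dev-ci and prod-ci targets above: the artifacts directory produced by the binary build jobs is used as the build context, and ARTIFACT_PREFIX selects which per-platform binary gets copied in. The directory names and image tag below are illustrative.

  # Assumed context layout (one directory per downloaded artifact):
  #   artifacts/surreal-nightly.linux-amd64/surreal
  #   artifacts/surreal-nightly.linux-arm64/surreal
  docker buildx build \
    --platform linux/amd64,linux/arm64 \
    --file docker/Dockerfile \
    --target prod-ci \
    --build-arg ARTIFACT_PREFIX=surreal-nightly \
    --tag surrealdb/surrealdb:nightly \
    artifacts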
docker/Dockerfile.binary (deleted file)
@ -1,27 +0,0 @@
#
# Dockerfile that builds the SurrealDB Linux binary and makes it depend on GLIBC 2.17.
#

FROM docker.io/ubuntu:20.04

ARG CARGO_EXTRA_FEATURES="storage-tikv"
ARG SURREAL_BUILD_METADATA=""

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y curl patch clang gpg build-essential git

# Install rust
COPY docker/files/rustup-init.sh /tmp/rustup-init.sh
RUN /tmp/rustup-init.sh -y --default-toolchain 1.74.1
ENV PATH="/root/.cargo/bin:${PATH}"

RUN mkdir /surrealdb
WORKDIR /surrealdb
COPY . /surrealdb/

RUN cargo build --features ${CARGO_EXTRA_FEATURES} --release --locked

# For testing purposes
RUN cp target/release/surreal /surreal

ENTRYPOINT ["/surreal"]
docker/Dockerfile.fdb (deleted file)
@ -1,39 +0,0 @@
#
# Dockerfile that builds a SurrealDB docker image with FoundationDB support.
#

FROM --platform=linux/amd64 cgr.dev/chainguard/rust:latest-dev as builder

USER root
RUN apk update
RUN apk add patch clang curl cmake

RUN mkdir /surrealdb
WORKDIR /surrealdb
COPY . /surrealdb/

RUN curl -L https://github.com/apple/foundationdb/releases/download/7.1.42/libfdb_c.x86_64.so -o libfdb_c.so && \
echo "9501a7910fe2d47b805c48c467fddaf485ccf4b1195863e3c5fb0c86648084f1 libfdb_c.so" | sha256sum -c -s - || exit 1 && \
mv libfdb_c.so /usr/lib/ && \
cargo build --features storage-tikv,storage-fdb --release --locked

#
# Development image
#
FROM cgr.dev/chainguard/glibc-dynamic:latest-dev as dev

USER root
COPY --from=builder /surrealdb/target/release/surreal /surreal
COPY --from=builder /usr/lib/libfdb_c.so /usr/lib/libfdb_c.so

ENTRYPOINT ["/surreal"]

#
# Production image
#
FROM cgr.dev/chainguard/glibc-dynamic:latest as prod

COPY --from=builder /surrealdb/target/release/surreal /surreal
COPY --from=builder /usr/lib/libfdb_c.so /usr/lib/libfdb_c.so

ENTRYPOINT ["/surreal"]
docker/Dockerfile.multi-arch (deleted file)
|
|||
#
|
||||
# Dockerfile that builds SurrealDB docker images for multiple architectures.
|
||||
#
|
||||
|
||||
ARG TAG IMAGE_REPO
|
||||
|
||||
FROM ${IMAGE_REPO}:${TARGETARCH}-${TAG}
|
||||
|
||||
ARG TARGETARCH
|
5 docker/builder-entrypoint.sh (new file)
@ -0,0 +1,5 @@
#!/bin/bash

. /opt/rh/gcc-toolset-13/enable

exec cargo build "$@"
|
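Everything after the image name in a docker run against this builder image is forwarded to cargo build by the entrypoint above. A trimmed-down version of the invocation used in the workflow (registry, tag, and feature list illustrative):

  docker run --rm -t \
    -v "$(pwd):/surrealdb" \
    -e SURREAL_BUILD_METADATA \
    -e ORT_STRATEGY -e ORT_LIB_LOCATION -v "$ORT_LIB_LOCATION:$ORT_LIB_LOCATION" \
    <ecr-registry>/surrealdb-builder:main \
    --target x86_64-unknown-linux-gnu --features storage-tikv,ml --release --locked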
docker/files/rustup-init.sh (deleted file)
@ -1,731 +0,0 @@
|
|||
#!/bin/sh
|
||||
# shellcheck shell=dash
|
||||
|
||||
# This is just a little script that can be downloaded from the internet to
|
||||
# install rustup. It just does platform detection, downloads the installer
|
||||
# and runs it.
|
||||
|
||||
# It runs on Unix shells like {a,ba,da,k,z}sh. It uses the common `local`
|
||||
# extension. Note: Most shells limit `local` to 1 var per line, contra bash.
|
||||
|
||||
if [ "$KSH_VERSION" = 'Version JM 93t+ 2010-03-05' ]; then
|
||||
# The version of ksh93 that ships with many illumos systems does not
|
||||
# support the "local" extension. Print a message rather than fail in
|
||||
# subtle ways later on:
|
||||
echo 'rustup does not work with this ksh93 version; please try bash!' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
set -u
|
||||
|
||||
# If RUSTUP_UPDATE_ROOT is unset or empty, default it.
|
||||
RUSTUP_UPDATE_ROOT="${RUSTUP_UPDATE_ROOT:-https://static.rust-lang.org/rustup}"
|
||||
|
||||
# NOTICE: If you change anything here, please make the same changes in setup_mode.rs
|
||||
usage() {
|
||||
cat <<EOF
|
||||
rustup-init 1.26.0 (577bf51ae 2023-04-05)
|
||||
The installer for rustup
|
||||
|
||||
USAGE:
|
||||
rustup-init [OPTIONS]
|
||||
|
||||
OPTIONS:
|
||||
-v, --verbose
|
||||
Enable verbose output
|
||||
|
||||
-q, --quiet
|
||||
Disable progress output
|
||||
|
||||
-y
|
||||
Disable confirmation prompt.
|
||||
|
||||
--default-host <default-host>
|
||||
Choose a default host triple
|
||||
|
||||
--default-toolchain <default-toolchain>
|
||||
Choose a default toolchain to install. Use 'none' to not install any toolchains at all
|
||||
|
||||
--profile <profile>
|
||||
[default: default] [possible values: minimal, default, complete]
|
||||
|
||||
-c, --component <components>...
|
||||
Component name to also install
|
||||
|
||||
-t, --target <targets>...
|
||||
Target name to also install
|
||||
|
||||
--no-update-default-toolchain
|
||||
Don't update any existing default toolchain after install
|
||||
|
||||
--no-modify-path
|
||||
Don't configure the PATH environment variable
|
||||
|
||||
-h, --help
|
||||
Print help information
|
||||
|
||||
-V, --version
|
||||
Print version information
|
||||
EOF
|
||||
}
|
||||
|
||||
main() {
|
||||
downloader --check
|
||||
need_cmd uname
|
||||
need_cmd mktemp
|
||||
need_cmd chmod
|
||||
need_cmd mkdir
|
||||
need_cmd rm
|
||||
need_cmd rmdir
|
||||
|
||||
get_architecture || return 1
|
||||
local _arch="$RETVAL"
|
||||
assert_nz "$_arch" "arch"
|
||||
|
||||
local _ext=""
|
||||
case "$_arch" in
|
||||
*windows*)
|
||||
_ext=".exe"
|
||||
;;
|
||||
esac
|
||||
|
||||
local _url="${RUSTUP_UPDATE_ROOT}/dist/${_arch}/rustup-init${_ext}"
|
||||
|
||||
local _dir
|
||||
if ! _dir="$(ensure mktemp -d)"; then
|
||||
# Because the previous command ran in a subshell, we must manually
|
||||
# propagate exit status.
|
||||
exit 1
|
||||
fi
|
||||
local _file="${_dir}/rustup-init${_ext}"
|
||||
|
||||
local _ansi_escapes_are_valid=false
|
||||
if [ -t 2 ]; then
|
||||
if [ "${TERM+set}" = 'set' ]; then
|
||||
case "$TERM" in
|
||||
xterm*|rxvt*|urxvt*|linux*|vt*)
|
||||
_ansi_escapes_are_valid=true
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
fi
|
||||
|
||||
# check if we have to use /dev/tty to prompt the user
|
||||
local need_tty=yes
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
--help)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
OPTIND=1
|
||||
if [ "${arg%%--*}" = "" ]; then
|
||||
# Long option (other than --help);
|
||||
# don't attempt to interpret it.
|
||||
continue
|
||||
fi
|
||||
while getopts :hy sub_arg "$arg"; do
|
||||
case "$sub_arg" in
|
||||
h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
y)
|
||||
# user wants to skip the prompt --
|
||||
# we don't need /dev/tty
|
||||
need_tty=no
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
done
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if $_ansi_escapes_are_valid; then
|
||||
printf "\33[1minfo:\33[0m downloading installer\n" 1>&2
|
||||
else
|
||||
printf '%s\n' 'info: downloading installer' 1>&2
|
||||
fi
|
||||
|
||||
ensure mkdir -p "$_dir"
|
||||
ensure downloader "$_url" "$_file" "$_arch"
|
||||
ensure chmod u+x "$_file"
|
||||
if [ ! -x "$_file" ]; then
|
||||
printf '%s\n' "Cannot execute $_file (likely because of mounting /tmp as noexec)." 1>&2
|
||||
printf '%s\n' "Please copy the file to a location where you can execute binaries and run ./rustup-init${_ext}." 1>&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$need_tty" = "yes" ] && [ ! -t 0 ]; then
|
||||
# The installer is going to want to ask for confirmation by
|
||||
# reading stdin. This script was piped into `sh` though and
|
||||
# doesn't have stdin to pass to its children. Instead we're going
|
||||
# to explicitly connect /dev/tty to the installer's stdin.
|
||||
if [ ! -t 1 ]; then
|
||||
err "Unable to run interactively. Run with -y to accept defaults, --help for additional options"
|
||||
fi
|
||||
|
||||
ignore "$_file" "$@" < /dev/tty
|
||||
else
|
||||
ignore "$_file" "$@"
|
||||
fi
|
||||
|
||||
local _retval=$?
|
||||
|
||||
ignore rm "$_file"
|
||||
ignore rmdir "$_dir"
|
||||
|
||||
return "$_retval"
|
||||
}
|
||||
|
||||
check_proc() {
|
||||
# Check for /proc by looking for the /proc/self/exe link
|
||||
# This is only run on Linux
|
||||
if ! test -L /proc/self/exe ; then
|
||||
err "fatal: Unable to find /proc/self/exe. Is /proc mounted? Installation cannot proceed without /proc."
|
||||
fi
|
||||
}
|
||||
|
||||
get_bitness() {
|
||||
need_cmd head
|
||||
# Architecture detection without dependencies beyond coreutils.
|
||||
# ELF files start out "\x7fELF", and the following byte is
|
||||
# 0x01 for 32-bit and
|
||||
# 0x02 for 64-bit.
|
||||
# The printf builtin on some shells like dash only supports octal
|
||||
# escape sequences, so we use those.
|
||||
local _current_exe_head
|
||||
_current_exe_head=$(head -c 5 /proc/self/exe )
|
||||
if [ "$_current_exe_head" = "$(printf '\177ELF\001')" ]; then
|
||||
echo 32
|
||||
elif [ "$_current_exe_head" = "$(printf '\177ELF\002')" ]; then
|
||||
echo 64
|
||||
else
|
||||
err "unknown platform bitness"
|
||||
fi
|
||||
}
|
||||
|
||||
is_host_amd64_elf() {
|
||||
need_cmd head
|
||||
need_cmd tail
|
||||
# ELF e_machine detection without dependencies beyond coreutils.
|
||||
# Two-byte field at offset 0x12 indicates the CPU,
|
||||
# but we're interested in it being 0x3E to indicate amd64, or not that.
|
||||
local _current_exe_machine
|
||||
_current_exe_machine=$(head -c 19 /proc/self/exe | tail -c 1)
|
||||
[ "$_current_exe_machine" = "$(printf '\076')" ]
|
||||
}
|
||||
|
||||
get_endianness() {
|
||||
local cputype=$1
|
||||
local suffix_eb=$2
|
||||
local suffix_el=$3
|
||||
|
||||
# detect endianness without od/hexdump, like get_bitness() does.
|
||||
need_cmd head
|
||||
need_cmd tail
|
||||
|
||||
local _current_exe_endianness
|
||||
_current_exe_endianness="$(head -c 6 /proc/self/exe | tail -c 1)"
|
||||
if [ "$_current_exe_endianness" = "$(printf '\001')" ]; then
|
||||
echo "${cputype}${suffix_el}"
|
||||
elif [ "$_current_exe_endianness" = "$(printf '\002')" ]; then
|
||||
echo "${cputype}${suffix_eb}"
|
    else
        err "unknown platform endianness"
    fi
}

get_architecture() {
    local _ostype _cputype _bitness _arch _clibtype
    _ostype="$(uname -s)"
    _cputype="$(uname -m)"
    _clibtype="gnu"

    if [ "$_ostype" = Linux ]; then
        if [ "$(uname -o)" = Android ]; then
            _ostype=Android
        fi
        if ldd --version 2>&1 | grep -q 'musl'; then
            _clibtype="musl"
        fi
    fi

    if [ "$_ostype" = Darwin ] && [ "$_cputype" = i386 ]; then
        # Darwin `uname -m` lies
        if sysctl hw.optional.x86_64 | grep -q ': 1'; then
            _cputype=x86_64
        fi
    fi

    if [ "$_ostype" = SunOS ]; then
        # Both Solaris and illumos presently announce as "SunOS" in "uname -s"
        # so use "uname -o" to disambiguate. We use the full path to the
        # system uname in case the user has coreutils uname first in PATH,
        # which has historically sometimes printed the wrong value here.
        if [ "$(/usr/bin/uname -o)" = illumos ]; then
            _ostype=illumos
        fi

        # illumos systems have multi-arch userlands, and "uname -m" reports the
        # machine hardware name; e.g., "i86pc" on both 32- and 64-bit x86
        # systems. Check for the native (widest) instruction set on the
        # running kernel:
        if [ "$_cputype" = i86pc ]; then
            _cputype="$(isainfo -n)"
        fi
    fi

    case "$_ostype" in

        Android)
            _ostype=linux-android
            ;;

        Linux)
            check_proc
            _ostype=unknown-linux-$_clibtype
            _bitness=$(get_bitness)
            ;;

        FreeBSD)
            _ostype=unknown-freebsd
            ;;

        NetBSD)
            _ostype=unknown-netbsd
            ;;

        DragonFly)
            _ostype=unknown-dragonfly
            ;;

        Darwin)
            _ostype=apple-darwin
            ;;

        illumos)
            _ostype=unknown-illumos
            ;;

        MINGW* | MSYS* | CYGWIN* | Windows_NT)
            _ostype=pc-windows-gnu
            ;;

        *)
            err "unrecognized OS type: $_ostype"
            ;;

    esac

    case "$_cputype" in

        i386 | i486 | i686 | i786 | x86)
            _cputype=i686
            ;;

        xscale | arm)
            _cputype=arm
            if [ "$_ostype" = "linux-android" ]; then
                _ostype=linux-androideabi
            fi
            ;;

        armv6l)
            _cputype=arm
            if [ "$_ostype" = "linux-android" ]; then
                _ostype=linux-androideabi
            else
                _ostype="${_ostype}eabihf"
            fi
            ;;

        armv7l | armv8l)
            _cputype=armv7
            if [ "$_ostype" = "linux-android" ]; then
                _ostype=linux-androideabi
            else
                _ostype="${_ostype}eabihf"
            fi
            ;;

        aarch64 | arm64)
            _cputype=aarch64
            ;;

        x86_64 | x86-64 | x64 | amd64)
            _cputype=x86_64
            ;;

        mips)
            _cputype=$(get_endianness mips '' el)
            ;;

        mips64)
            if [ "$_bitness" -eq 64 ]; then
                # only n64 ABI is supported for now
                _ostype="${_ostype}abi64"
                _cputype=$(get_endianness mips64 '' el)
            fi
            ;;

        ppc)
            _cputype=powerpc
            ;;

        ppc64)
            _cputype=powerpc64
            ;;

        ppc64le)
            _cputype=powerpc64le
            ;;

        s390x)
            _cputype=s390x
            ;;
        riscv64)
            _cputype=riscv64gc
            ;;
        loongarch64)
            _cputype=loongarch64
            ;;
        *)
            err "unknown CPU type: $_cputype"

    esac

    # Detect 64-bit linux with 32-bit userland
    if [ "${_ostype}" = unknown-linux-gnu ] && [ "${_bitness}" -eq 32 ]; then
        case $_cputype in
            x86_64)
                if [ -n "${RUSTUP_CPUTYPE:-}" ]; then
                    _cputype="$RUSTUP_CPUTYPE"
                else {
                    # 32-bit executable for amd64 = x32
                    if is_host_amd64_elf; then {
                        echo "This host is running an x32 userland; as it stands, x32 support is poor," 1>&2
                        echo "and there isn't a native toolchain -- you will have to install" 1>&2
                        echo "multiarch compatibility with i686 and/or amd64, then select one" 1>&2
                        echo "by re-running this script with the RUSTUP_CPUTYPE environment variable" 1>&2
                        echo "set to i686 or x86_64, respectively." 1>&2
                        echo 1>&2
                        echo "You will be able to add an x32 target after installation by running" 1>&2
                        echo " rustup target add x86_64-unknown-linux-gnux32" 1>&2
                        exit 1
                    }; else
                        _cputype=i686
                    fi
                }; fi
                ;;
            mips64)
                _cputype=$(get_endianness mips '' el)
                ;;
            powerpc64)
                _cputype=powerpc
                ;;
            aarch64)
                _cputype=armv7
                if [ "$_ostype" = "linux-android" ]; then
                    _ostype=linux-androideabi
                else
                    _ostype="${_ostype}eabihf"
                fi
                ;;
            riscv64gc)
                err "riscv64 with 32-bit userland unsupported"
                ;;
        esac
    fi

    # Detect armv7 but without the CPU features Rust needs in that build,
    # and fall back to arm.
    # See https://github.com/rust-lang/rustup.rs/issues/587.
    if [ "$_ostype" = "unknown-linux-gnueabihf" ] && [ "$_cputype" = armv7 ]; then
        if ensure grep '^Features' /proc/cpuinfo | grep -q -v neon; then
            # At least one processor does not have NEON.
            _cputype=arm
        fi
    fi

    _arch="${_cputype}-${_ostype}"

    RETVAL="$_arch"
}

say() {
    printf 'rustup: %s\n' "$1"
}

err() {
    say "$1" >&2
    exit 1
}

need_cmd() {
    if ! check_cmd "$1"; then
        err "need '$1' (command not found)"
    fi
}

check_cmd() {
    command -v "$1" > /dev/null 2>&1
}

assert_nz() {
    if [ -z "$1" ]; then err "assert_nz $2"; fi
}

# Run a command that should never fail. If the command fails execution
# will immediately terminate with an error showing the failing
# command.
ensure() {
    if ! "$@"; then err "command failed: $*"; fi
}

# This is just for indicating that commands' results are being
# intentionally ignored. Usually, because it's being executed
# as part of error handling.
ignore() {
    "$@"
}

# This wraps curl or wget. Try curl first, if not installed,
# use wget instead.
downloader() {
    local _dld
    local _ciphersuites
    local _err
    local _status
    local _retry
    if check_cmd curl; then
        _dld=curl
    elif check_cmd wget; then
        _dld=wget
    else
        _dld='curl or wget' # to be used in error message of need_cmd
    fi

    if [ "$1" = --check ]; then
        need_cmd "$_dld"
    elif [ "$_dld" = curl ]; then
        check_curl_for_retry_support
        _retry="$RETVAL"
        get_ciphersuites_for_curl
        _ciphersuites="$RETVAL"
        if [ -n "$_ciphersuites" ]; then
            _err=$(curl $_retry --proto '=https' --tlsv1.2 --ciphers "$_ciphersuites" --silent --show-error --fail --location "$1" --output "$2" 2>&1)
            _status=$?
        else
            echo "Warning: Not enforcing strong cipher suites for TLS, this is potentially less secure"
            if ! check_help_for "$3" curl --proto --tlsv1.2; then
                echo "Warning: Not enforcing TLS v1.2, this is potentially less secure"
                _err=$(curl $_retry --silent --show-error --fail --location "$1" --output "$2" 2>&1)
                _status=$?
            else
                _err=$(curl $_retry --proto '=https' --tlsv1.2 --silent --show-error --fail --location "$1" --output "$2" 2>&1)
                _status=$?
            fi
        fi
        if [ -n "$_err" ]; then
            echo "$_err" >&2
            if echo "$_err" | grep -q 404$; then
                err "installer for platform '$3' not found, this may be unsupported"
            fi
        fi
        return $_status
    elif [ "$_dld" = wget ]; then
        if [ "$(wget -V 2>&1|head -2|tail -1|cut -f1 -d" ")" = "BusyBox" ]; then
            echo "Warning: using the BusyBox version of wget. Not enforcing strong cipher suites for TLS or TLS v1.2, this is potentially less secure"
            _err=$(wget "$1" -O "$2" 2>&1)
            _status=$?
        else
            get_ciphersuites_for_wget
            _ciphersuites="$RETVAL"
            if [ -n "$_ciphersuites" ]; then
                _err=$(wget --https-only --secure-protocol=TLSv1_2 --ciphers "$_ciphersuites" "$1" -O "$2" 2>&1)
                _status=$?
            else
                echo "Warning: Not enforcing strong cipher suites for TLS, this is potentially less secure"
                if ! check_help_for "$3" wget --https-only --secure-protocol; then
                    echo "Warning: Not enforcing TLS v1.2, this is potentially less secure"
                    _err=$(wget "$1" -O "$2" 2>&1)
                    _status=$?
                else
                    _err=$(wget --https-only --secure-protocol=TLSv1_2 "$1" -O "$2" 2>&1)
                    _status=$?
                fi
            fi
        fi
        if [ -n "$_err" ]; then
            echo "$_err" >&2
            if echo "$_err" | grep -q ' 404 Not Found$'; then
                err "installer for platform '$3' not found, this may be unsupported"
            fi
        fi
        return $_status
    else
        err "Unknown downloader" # should not reach here
    fi
}

check_help_for() {
    local _arch
    local _cmd
    local _arg
    _arch="$1"
    shift
    _cmd="$1"
    shift

    local _category
    if "$_cmd" --help | grep -q 'For all options use the manual or "--help all".'; then
        _category="all"
    else
        _category=""
    fi

    case "$_arch" in

        *darwin*)
            if check_cmd sw_vers; then
                case $(sw_vers -productVersion) in
                    10.*)
                        # If we're running on macOS, older than 10.13, then we always
                        # fail to find these options to force fallback
                        if [ "$(sw_vers -productVersion | cut -d. -f2)" -lt 13 ]; then
                            # Older than 10.13
                            echo "Warning: Detected macOS platform older than 10.13"
                            return 1
                        fi
                        ;;
                    11.*)
                        # We assume Big Sur will be OK for now
                        ;;
                    *)
                        # Unknown product version, warn and continue
                        echo "Warning: Detected unknown macOS major version: $(sw_vers -productVersion)"
                        echo "Warning TLS capabilities detection may fail"
                        ;;
                esac
            fi
            ;;

    esac

    for _arg in "$@"; do
        if ! "$_cmd" --help "$_category" | grep -q -- "$_arg"; then
            return 1
        fi
    done

    true # not strictly needed
}

# Check if curl supports the --retry flag, then pass it to the curl invocation.
check_curl_for_retry_support() {
    local _retry_supported=""
    # "unspecified" is for arch, allows for possibility old OS using macports, homebrew, etc.
    if check_help_for "notspecified" "curl" "--retry"; then
        _retry_supported="--retry 3"
        if check_help_for "notspecified" "curl" "--continue-at"; then
            # "-C -" tells curl to automatically find where to resume the download when retrying.
            _retry_supported="--retry 3 -C -"
        fi
    fi

    RETVAL="$_retry_supported"
}

# Return cipher suite string specified by user, otherwise return strong TLS 1.2-1.3 cipher suites
# if support by local tools is detected. Detection currently supports these curl backends:
# GnuTLS and OpenSSL (possibly also LibreSSL and BoringSSL). Return value can be empty.
get_ciphersuites_for_curl() {
    if [ -n "${RUSTUP_TLS_CIPHERSUITES-}" ]; then
        # user specified custom cipher suites, assume they know what they're doing
        RETVAL="$RUSTUP_TLS_CIPHERSUITES"
        return
    fi

    local _openssl_syntax="no"
    local _gnutls_syntax="no"
    local _backend_supported="yes"
    if curl -V | grep -q ' OpenSSL/'; then
        _openssl_syntax="yes"
    elif curl -V | grep -iq ' LibreSSL/'; then
        _openssl_syntax="yes"
    elif curl -V | grep -iq ' BoringSSL/'; then
        _openssl_syntax="yes"
    elif curl -V | grep -iq ' GnuTLS/'; then
        _gnutls_syntax="yes"
    else
        _backend_supported="no"
    fi

    local _args_supported="no"
    if [ "$_backend_supported" = "yes" ]; then
        # "unspecified" is for arch, allows for possibility old OS using macports, homebrew, etc.
        if check_help_for "notspecified" "curl" "--tlsv1.2" "--ciphers" "--proto"; then
            _args_supported="yes"
        fi
    fi

    local _cs=""
    if [ "$_args_supported" = "yes" ]; then
        if [ "$_openssl_syntax" = "yes" ]; then
            _cs=$(get_strong_ciphersuites_for "openssl")
        elif [ "$_gnutls_syntax" = "yes" ]; then
            _cs=$(get_strong_ciphersuites_for "gnutls")
        fi
    fi

    RETVAL="$_cs"
}

# Return cipher suite string specified by user, otherwise return strong TLS 1.2-1.3 cipher suites
# if support by local tools is detected. Detection currently supports these wget backends:
# GnuTLS and OpenSSL (possibly also LibreSSL and BoringSSL). Return value can be empty.
get_ciphersuites_for_wget() {
    if [ -n "${RUSTUP_TLS_CIPHERSUITES-}" ]; then
        # user specified custom cipher suites, assume they know what they're doing
        RETVAL="$RUSTUP_TLS_CIPHERSUITES"
        return
    fi

    local _cs=""
    if wget -V | grep -q '\-DHAVE_LIBSSL'; then
        # "unspecified" is for arch, allows for possibility old OS using macports, homebrew, etc.
        if check_help_for "notspecified" "wget" "TLSv1_2" "--ciphers" "--https-only" "--secure-protocol"; then
            _cs=$(get_strong_ciphersuites_for "openssl")
        fi
    elif wget -V | grep -q '\-DHAVE_LIBGNUTLS'; then
        # "unspecified" is for arch, allows for possibility old OS using macports, homebrew, etc.
        if check_help_for "notspecified" "wget" "TLSv1_2" "--ciphers" "--https-only" "--secure-protocol"; then
            _cs=$(get_strong_ciphersuites_for "gnutls")
        fi
    fi

    RETVAL="$_cs"
}

# Return strong TLS 1.2-1.3 cipher suites in OpenSSL or GnuTLS syntax. TLS 1.2
# excludes non-ECDHE and non-AEAD cipher suites. DHE is excluded due to bad
# DH params often found on servers (see RFC 7919). Sequence matches or is
# similar to Firefox 68 ESR with weak cipher suites disabled via about:config.
# $1 must be openssl or gnutls.
get_strong_ciphersuites_for() {
    if [ "$1" = "openssl" ]; then
        # OpenSSL is forgiving of unknown values, no problems with TLS 1.3 values on versions that don't support it yet.
        echo "TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_256_GCM_SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384"
    elif [ "$1" = "gnutls" ]; then
        # GnuTLS isn't forgiving of unknown values, so this may require a GnuTLS version that supports TLS 1.3 even if wget doesn't.
        # Begin with SECURE128 (and higher) then remove/add to build cipher suites. Produces same 9 cipher suites as OpenSSL but in slightly different order.
        echo "SECURE128:-VERS-SSL3.0:-VERS-TLS1.0:-VERS-TLS1.1:-VERS-DTLS-ALL:-CIPHER-ALL:-MAC-ALL:-KX-ALL:+AEAD:+ECDHE-ECDSA:+ECDHE-RSA:+AES-128-GCM:+CHACHA20-POLY1305:+AES-256-GCM"
    fi
}

main "$@" || exit 1

@@ -10,7 +10,13 @@ description = "A scalable, distributed, collaborative, document-graph database,
repository = "https://github.com/surrealdb/surrealdb"
homepage = "https://github.com/surrealdb/surrealdb"
documentation = "https://docs.rs/surrealdb/"
keywords = ["database", "embedded-database", "key-value", "key-value-store", "kv-store"]
keywords = [
    "database",
    "embedded-database",
    "key-value",
    "key-value-store",
    "kv-store",
]
categories = ["database-implementations", "data-structures", "embedded"]
license-file = "../LICENSE"
resolver = "2"

@@ -35,11 +41,25 @@ kv-fdb-7_0 = ["foundationdb/fdb-7_0", "kv-fdb"]
kv-fdb-7_1 = ["foundationdb/fdb-7_1", "kv-fdb"]
scripting = ["dep:js"]
http = ["dep:reqwest"]
native-tls = ["dep:native-tls", "reqwest?/native-tls", "tokio-tungstenite?/native-tls"]
rustls = ["dep:rustls", "reqwest?/rustls-tls", "tokio-tungstenite?/rustls-tls-webpki-roots"]
native-tls = [
    "dep:native-tls",
    "reqwest?/native-tls",
    "tokio-tungstenite?/native-tls",
]
rustls = [
    "dep:rustls",
    "reqwest?/rustls-tls",
    "tokio-tungstenite?/rustls-tls-webpki-roots",
]
ml = ["surrealml-core", "ndarray"]
jwks = ["dep:reqwest"]
arbitrary = ["dep:arbitrary", "dep:regex-syntax", "rust_decimal/rust-fuzz", "geo-types/arbitrary", "uuid/arbitrary"]
arbitrary = [
    "dep:arbitrary",
    "dep:regex-syntax",
    "rust_decimal/rust-fuzz",
    "geo-types/arbitrary",
    "uuid/arbitrary",
]
experimental-parser = ["dep:phf", "dep:unicase"]
# Private features
kv-fdb = ["foundationdb", "tokio/time"]

@@ -47,10 +67,16 @@ kv-fdb = ["foundationdb", "tokio/time"]
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
features = [
    "protocol-ws", "protocol-http",
    "kv-mem", "kv-indxdb", "kv-rocksdb",
    "rustls", "native-tls",
    "http", "scripting", "jwks"
    "protocol-ws",
    "protocol-http",
    "kv-mem",
    "kv-indxdb",
    "kv-rocksdb",
    "rustls",
    "native-tls",
    "http",
    "scripting",
    "jwks",
]
targets = []

@@ -72,7 +98,9 @@ dmp = "0.2.0"
echodb = { version = "0.4.0", optional = true }
executor = { version = "1.8.0", package = "async-executor" }
flume = "0.11.0"
foundationdb = { version = "0.8.0", default-features = false, features = ["embedded-fdb-include"], optional = true }
foundationdb = { version = "0.8.0", default-features = false, features = [
    "embedded-fdb-include",
], optional = true }
fst = "0.4.7"
futures = "0.3.29"
futures-concurrency = "7.4.3"

@@ -82,7 +110,17 @@ hex = { version = "0.4.3", optional = false }
indexmap = { version = "2.1.0", features = ["serde"] }
indxdb = { version = "0.4.0", optional = true }
ipnet = "2.9.0"
js = { version = "0.4.2", package = "rquickjs", features = ["array-buffer", "bindgen", "classes", "futures", "loader", "macro", "parallel", "properties","rust-alloc"], optional = true }
js = { version = "0.4.2", package = "rquickjs", features = [
    "array-buffer",
    "bindgen",
    "classes",
    "futures",
    "loader",
    "macro",
    "parallel",
    "properties",
    "rust-alloc",
], optional = true }
jsonwebtoken = { version = "8.3.0-surreal.1", package = "surrealdb-jsonwebtoken" }
lexicmp = "0.1.0"
md-5 = "0.10.6"

@@ -100,7 +138,11 @@ quick_cache = "0.4.0"
radix_trie = { version = "0.2.1", features = ["serde"] }
rand = "0.8.5"
regex = "1.10.2"
reqwest = { version = "0.11.22", default-features = false, features = ["json", "stream", "multipart"], optional = true }
reqwest = { version = "0.11.22", default-features = false, features = [
    "json",
    "stream",
    "multipart",
], optional = true }
revision = "0.5.0"
roaring = { version = "0.10.2", features = ["serde"] }
rocksdb = { version = "0.21.0", features = ["lz4", "snappy"], optional = true }

@@ -116,7 +158,7 @@ sha2 = "0.10.8"
snap = "1.1.0"
speedb = { version = "0.0.4", features = ["lz4", "snappy"], optional = true }
storekey = "0.5.0"
surrealml-core = { version = "0.0.3", optional = true }
surrealml-core = { version = "0.0.7", optional = true }
thiserror = "1.0.50"
tikv = { version = "0.2.0-surreal.2", default-features = false, package = "surrealdb-tikv-client", optional = true }
tokio-util = { version = "0.7.10", optional = true, features = ["compat"] }

@@ -124,19 +166,19 @@ tracing = "0.1.40"
trice = "0.4.0"
ulid = { version = "1.1.0", features = ["serde"] }
url = "2.5.0"
phf = { version = "0.11.2", features = ["macros", "unicase"], optional=true }
phf = { version = "0.11.2", features = ["macros", "unicase"], optional = true }
unicase = { version = "2.7.0", optional = true }
arbitrary = { version = "1.3.2", features = ["derive"], optional = true }
regex-syntax = { version = "0.8.2", optional = true, features = ["arbitrary"] }
geo-types = { version = "0.7.12", features = ["arbitrary"] }

[dev-dependencies]
criterion = { version="0.5.1", features= ["async_tokio"] }
criterion = { version = "0.5.1", features = ["async_tokio"] }
env_logger = "0.10.1"
pprof = { version = "0.13.0", features = ["flamegraph", "criterion"] }
serial_test = "2.0.0"
temp-dir = "0.1.11"
test-log = { version="0.2.13", features = ["trace"] }
test-log = { version = "0.2.13", features = ["trace"] }
time = { version = "0.3.30", features = ["serde"] }
tokio = { version = "1.34.0", features = ["macros", "sync", "rt-multi-thread"] }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }

@@ -148,11 +190,21 @@ ring = { version = "0.17.7", features = ["wasm32_unknown_unknown_js"] } # Make r
tokio = { version = "1.34.0", default-features = false, features = ["rt", "sync"] }
uuid = { version = "1.6.1", features = ["serde", "js", "v4", "v7"] }
wasm-bindgen-futures = "0.4.39"
wasmtimer = { version = "0.2.0", default-features = false, features = ["tokio"] }
wasmtimer = { version = "0.2.0", default-features = false, features = [
    "tokio",
] }
ws_stream_wasm = "0.7.4"

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
tokio = { version = "1.34.0", default-features = false, features = ["macros", "io-util", "io-std", "fs", "rt-multi-thread", "time", "sync"] }
tokio = { version = "1.34.0", default-features = false, features = [
    "macros",
    "io-util",
    "io-std",
    "fs",
    "rt-multi-thread",
    "time",
    "sync",
] }
tokio-tungstenite = { version = "0.20.1", optional = true }
uuid = { version = "1.6.1", features = ["serde", "v4", "v7"] }