From 9657dd80c737bc3e6e7d28783d6d03c87d4f9640 Mon Sep 17 00:00:00 2001 From: Raphael Darley Date: Sun, 18 Aug 2024 19:49:27 +0100 Subject: [PATCH] Add graphql support (#3797) Co-authored-by: Gerard Guillemas Martos Co-authored-by: Gerard Guillemas Martos Co-authored-by: Tobie Morgan Hitchcock Co-authored-by: Micha de Vries --- .gitignore | 1 + Cargo.lock | 285 ++++++++- Cargo.toml | 11 +- cackle.toml | 458 ++++---------- core/Cargo.toml | 6 + core/src/cnf/mod.rs | 3 + core/src/gql/cache.rs | 87 +++ core/src/gql/error.rs | 58 ++ core/src/gql/ext.rs | 158 +++++ core/src/gql/mod.rs | 9 + core/src/gql/schema.rs | 1114 +++++++++++++++++++++++++++++++++ core/src/gql/utils.rs | 60 ++ core/src/lib.rs | 3 + core/src/rpc/basic_context.rs | 44 +- core/src/rpc/method.rs | 6 +- core/src/rpc/rpc_context.rs | 141 ++++- core/src/rpc/rpc_error.rs | 4 +- core/src/sql/query.rs | 6 + core/src/sql/value/value.rs | 9 + deny.toml | 1 + lib/Cargo.toml | 4 + src/cnf/mod.rs | 3 + src/gql/mod.rs | 134 ++++ src/main.rs | 2 + src/net/gql.rs | 17 + src/net/mod.rs | 25 +- src/rpc/connection.rs | 47 +- src/rpc/post_context.rs | 64 +- supply-chain/audits.toml | 90 +++ supply-chain/config.toml | 126 ++-- supply-chain/imports.lock | 145 ++++- 31 files changed, 2566 insertions(+), 555 deletions(-) create mode 100644 core/src/gql/cache.rs create mode 100644 core/src/gql/error.rs create mode 100644 core/src/gql/ext.rs create mode 100644 core/src/gql/mod.rs create mode 100644 core/src/gql/schema.rs create mode 100644 core/src/gql/utils.rs create mode 100644 src/gql/mod.rs create mode 100644 src/net/gql.rs diff --git a/.gitignore b/.gitignore index 466cb644..1330e03e 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,7 @@ Temporary Items **/*.rs.bk *.db +*.skv *.sw? *.skv diff --git a/Cargo.lock b/Cargo.lock index dffdbce1..aa51cf8f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,16 @@ # It is not intended for manual editing. 
version = 3 +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + [[package]] name = "actix-codec" version = "0.5.2" @@ -430,6 +440,12 @@ dependencies = [ "term", ] +[[package]] +name = "ascii_utils" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71938f30533e4d95a6d17aa530939da3842c2ab6f4f84b9dae68447e4129f74a" + [[package]] name = "assert-json-diff" version = "2.0.2" @@ -509,6 +525,80 @@ dependencies = [ "slab", ] +[[package]] +name = "async-graphql" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b76aba2f176af685c2229633881a3adeae51f87ae1811781e73910b7001c93e" +dependencies = [ + "async-graphql-derive", + "async-graphql-parser", + "async-graphql-value", + "async-stream", + "async-trait", + "base64 0.22.0", + "bytes", + "fast_chemail", + "fnv", + "futures-util", + "handlebars", + "http 1.1.0", + "indexmap 2.2.6", + "mime", + "multer 3.1.0", + "num-traits", + "once_cell", + "pin-project-lite", + "regex", + "serde", + "serde_json", + "serde_urlencoded", + "static_assertions_next", + "tempfile", + "thiserror", +] + +[[package]] +name = "async-graphql-derive" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72e2e26a6b44bc61df3ca8546402cf9204c28e30c06084cc8e75cd5e34d4f150" +dependencies = [ + "Inflector", + "async-graphql-parser", + "darling", + "proc-macro-crate 3.1.0", + "proc-macro2", + "quote", + "strum", + "syn 2.0.58", + "thiserror", +] + +[[package]] +name = "async-graphql-parser" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f801451484b4977d6fe67b29030f81353cabdcbb754e5a064f39493582dac0cf" +dependencies = [ + "async-graphql-value", + "pest", + "serde", + "serde_json", +] + +[[package]] +name = "async-graphql-value" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69117c43c01d81a69890a9f5dd6235f2f027ca8d1ec62d6d3c5e01ca0edb4f2b" +dependencies = [ + "bytes", + "indexmap 2.2.6", + "serde", + "serde_json", +] + [[package]] name = "async-lock" version = "3.3.0" @@ -572,9 +662,9 @@ checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" [[package]] name = "async-trait" -version = "0.1.79" +version = "0.1.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" +checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", @@ -1092,9 +1182,12 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.6.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" +checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" +dependencies = [ + "serde", +] [[package]] name = "bytestring" @@ -1301,7 +1394,7 @@ dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.1", + "strsim", "terminal_size", "unicase", "unicode-width", @@ -1534,9 +1627,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.8" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -1544,23 +1637,23 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.8" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", - "strsim 0.10.0", + "strsim", "syn 2.0.58", ] [[package]] name = "darling_macro" -version = "0.20.8" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", @@ -1932,6 +2025,15 @@ dependencies = [ "rand 0.7.3", ] +[[package]] +name = "fast_chemail" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "495a39d30d624c2caabe6312bfead73e7717692b44e0b32df168c275a2e8e9e4" +dependencies = [ + "ascii_utils", +] + [[package]] name = "fastrand" version = "2.0.2" @@ -2421,14 +2523,28 @@ dependencies = [ [[package]] name = "half" -version = "2.4.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5eceaaeec696539ddaf7b333340f1af35a5aa87ae3e4f3ead0532f72affab2e" +checksum = "bc52e53916c08643f1b56ec082790d1e86a32e58dc5268f897f313fbae7b4872" dependencies = [ "cfg-if", "crunchy", ] +[[package]] +name = "handlebars" +version = "5.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d08485b96a0e6393e9e4d1b8d48cf74ad6c063cd905eb33f42c1ce3f0377539b" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "hash32" version = "0.2.1" @@ -3408,6 +3524,23 @@ dependencies = [ "version_check", ] +[[package]] +name = "multer" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http 1.1.0", + "httparse", + "memchr", + "mime", + "spin 0.9.8", + "version_check", +] + [[package]] name = "multimap" version = "0.8.3" @@ -3890,6 +4023,51 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "pest" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.58", +] + +[[package]] +name = "pest_meta" +version = "2.7.11" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + [[package]] name = "petgraph" version = "0.6.4" @@ -4915,7 +5093,7 @@ dependencies = [ "indexmap 2.2.6", "log", "memchr", - "multer", + "multer 2.1.0", "num_cpus", "parking_lot", "pin-project-lite", @@ -5385,9 +5563,9 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.204" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" dependencies = [ "serde_derive", ] @@ -5412,9 +5590,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.204" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", @@ -5436,12 +5614,13 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.115" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed" dependencies = [ "indexmap 2.2.6", "itoa", + "memchr", "ryu", "serde", ] @@ -5734,6 +5913,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "static_assertions_next" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766" + [[package]] name = "storekey" version = "0.5.0" @@ -5784,18 +5969,34 @@ dependencies = [ "quote", ] -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.58", +] + [[package]] name = "subtle" version = "2.6.1" @@ -5808,6 +6009,7 @@ version = "2.0.0" dependencies = [ "argon2", "assert_fs", + "async-graphql", "axum 0.7.5", "axum-extra", "axum-server", @@ -5843,12 +6045,14 @@ dependencies = [ "revision 0.8.0", "rmp-serde", "rmpv", + "rust_decimal", "rustyline", "semver", "serde", "serde_json", "serial_test", "surrealdb", + "surrealdb-async-graphql-axum", "temp-env", "tempfile", "test-log", @@ -5860,6 +6064,7 @@ dependencies = [ "tonic 0.12.1", "tower", "tower-http", + 
"tower-service", "tracing", "tracing-opentelemetry", "tracing-subscriber", @@ -5873,6 +6078,7 @@ dependencies = [ name = "surrealdb" version = "2.0.0" dependencies = [ + "arrayvec", "async-channel 1.9.0", "bincode", "chrono", @@ -5925,6 +6131,24 @@ dependencies = [ "ws_stream_wasm", ] +[[package]] +name = "surrealdb-async-graphql-axum" +version = "7.0.7-surrealdb.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6761517bf15e976c363d9d71ddf7f215e545a8ed787dba52d77b42732cf1a3da" +dependencies = [ + "async-graphql", + "async-trait", + "axum 0.7.5", + "bytes", + "futures-util", + "serde_json", + "tokio", + "tokio-stream", + "tokio-util", + "tower-service", +] + [[package]] name = "surrealdb-core" version = "2.0.0" @@ -5937,6 +6161,7 @@ dependencies = [ "argon2", "async-channel 1.9.0", "async-executor", + "async-graphql", "async-recursion 1.1.0", "base64 0.21.7", "bcrypt", @@ -6851,6 +7076,12 @@ dependencies = [ "serde", ] +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + [[package]] name = "ulid" version = "1.1.2" diff --git a/Cargo.toml b/Cargo.toml index 5b4aacc5..a41b83e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -63,6 +63,7 @@ axum-extra = { version = "0.9.2", features = [ axum-server = { version = "0.7.1", features = ["tls-rustls-no-provider"] } base64 = "0.21.5" bytes = "1.5.0" +chrono = "0.4.31" ciborium = "0.2.1" clap = { version = "4.4.11", features = [ "env", @@ -111,6 +112,7 @@ revision = { version = "0.8.0", features = [ "uuid", ] } rmpv = "1.0.1" +rust_decimal = "1.35.0" rustyline = { version = "12.0.0", features = ["derive"] } semver = "1.0.20" serde = { version = "1.0.193", features = ["derive"] } @@ -124,6 +126,7 @@ surrealdb = { version = "2", path = "lib", features = [ tempfile = "3.8.1" thiserror = "1.0.50" tokio = { version = "1.34.0", features = ["macros", "signal"] } +tokio-stream = "0.1" tokio-util = { version = "0.7.10", features = ["io"] } tower = "0.4.13" tower-http = { version = "0.5.2", features = [ @@ -139,9 +142,12 @@ tower-http = { version = "0.5.2", features = [ "add-extension", "compression-full", ] } +tower-service = "0.3.2" urlencoding = "2.1.3" uuid = { version = "1.6.1", features = ["serde", "js", "v4", "v7"] } tokio-tungstenite = "0.23.0" +async-graphql-axum = { package = "surrealdb-async-graphql-axum", version = "7.0.7-surrealdb.1" } +async-graphql = { version = "7.0.7", default-features = false } [target.'cfg(unix)'.dependencies] nix = { version = "0.27.1", features = ["user"] } @@ -202,4 +208,7 @@ extended-description = "A scalable, distributed, collaborative, document-graph d license-file = ["LICENSE", "4"] [lints.rust] -unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docker)'] } +unexpected_cfgs = { level = "warn", check-cfg = [ + 'cfg(docker)', + 'cfg(surrealdb_unstable)', +] } diff --git a/cackle.toml b/cackle.toml index c9961d0f..04ec55c0 100644 --- a/cackle.toml +++ b/cackle.toml @@ -4,11 +4,7 @@ [common] version = 2 -import_std = [ - "fs", - "net", - "process", -] +import_std = ["fs", "net", "process"] features = ["default"] build_flags = ["--tests"] @@ -16,14 +12,8 @@ build_flags = ["--tests"] kind = "Bubblewrap" [api.fs] -include = [ - "rustix::fs", - "tokio::fs", -] -exclude = [ - "std::path::Path", - "std::path::PathBuf", -] +include = ["rustix::fs", "tokio::fs"] +exclude = ["std::path::Path", "std::path::PathBuf"] [api.net] include = [ @@ -39,18 +29,14 @@ 
include = [ "tracing", "tracing_core", ] -exclude = [ - "hashbrown::map", -] +exclude = ["hashbrown::map"] # # Crates Linking to Libraries # [pkg.ring] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] build.allow_build_instructions = [ "cargo:rustc-env=RING_CORE_PREFIX=*", "cargo:rustc-link-lib=static=ring", @@ -61,19 +47,13 @@ build.allow_build_instructions = [ "cargo:rustc-link-search=native=*", ] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.walkdir] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.zstd-sys] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] build.allow_build_instructions = [ "cargo:include=*", "cargo:root=*", @@ -82,10 +62,7 @@ build.allow_build_instructions = [ ] [pkg.libz-sys] -build.allow_apis = [ - "fs", - "process", -] +build.allow_apis = ["fs", "process"] build.allow_build_instructions = [ "cargo:include=*", "cargo:root=*", @@ -95,9 +72,7 @@ build.allow_build_instructions = [ allow_unsafe = true [pkg.bzip2-sys] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] build.allow_build_instructions = [ "cargo:include=*", "cargo:root=*", @@ -106,9 +81,7 @@ build.allow_build_instructions = [ ] [pkg.lz4-sys] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] build.allow_build_instructions = [ "cargo:include=*", "cargo:root=*", @@ -125,10 +98,7 @@ build.allow_build_instructions = [ ] [pkg.rquickjs-sys] -build.allow_apis = [ - "fs", - "process", -] +build.allow_apis = ["fs", "process"] build.allow_build_instructions = [ "cargo:rustc-link-lib=static=quickjs", "cargo:rustc-link-search=native=*", @@ -145,13 +115,8 @@ build.allow_build_instructions = [ [pkg.librocksdb-sys] allow_unsafe = true build.sandbox.allow_network = true -build.sandbox.make_writable = [ - "target", -] -build.allow_apis = [ - "fs", - "process", -] +build.sandbox.make_writable = ["target"] +build.allow_apis = ["fs", "process"] build.allow_build_instructions = [ "cargo:rustc-link-lib=static=rocksdb", "cargo:rustc-link-search=native=*", @@ -224,9 +189,6 @@ allow_proc_macro = true [pkg.async-trait] allow_proc_macro = true -build.allow_apis = [ - "process", -] allow_unsafe = true [pkg.clap_derive] @@ -250,10 +212,7 @@ allow_unsafe = true [pkg.rustversion] allow_proc_macro = true -build.allow_apis = [ - "fs", - "process", -] +build.allow_apis = ["fs", "process"] [pkg.bytecheck_derive] allow_proc_macro = true @@ -263,9 +222,7 @@ allow_proc_macro = true [pkg.rquickjs-macro] allow_proc_macro = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] allow_unsafe = true [pkg.async-stream-impl] @@ -283,9 +240,7 @@ allow_proc_macro = true [pkg.paste] allow_proc_macro = true -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] [pkg.surrealdb-derive] allow_proc_macro = true @@ -312,24 +267,16 @@ allow_unsafe = true allow_unsafe = true [pkg.serde] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.proc-macro2] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] allow_unsafe = true [pkg.libc] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] allow_unsafe = true [pkg.memchr] @@ -342,18 +289,13 @@ allow_unsafe = true allow_unsafe = true [pkg.autocfg] -from.build.allow_apis = [ - "fs", - "process", -] +from.build.allow_apis = ["fs", "process"] [pkg.scopeguard] allow_unsafe = true [pkg.version_check] -from.build.allow_apis = [ - "process", -] +from.build.allow_apis = ["process"] [pkg.zerocopy] allow_unsafe = 
true @@ -390,46 +332,32 @@ allow_unsafe = true [pkg.jobserver] allow_unsafe = true -from.build.allow_apis = [ - "fs", -] +from.build.allow_apis = ["fs"] [pkg.getrandom] allow_unsafe = true [pkg.syn] allow_unsafe = true -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] [pkg.mio] allow_unsafe = true -allow_apis = [ - "fs", - "net", -] +allow_apis = ["fs", "net"] [pkg.num_cpus] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.socket2] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.signal-hook-registry] allow_unsafe = true [pkg.rustix] -build.allow_apis = [ - "fs", - "process", -] +build.allow_apis = ["fs", "process"] allow_unsafe = true [pkg.tracing-core] @@ -442,30 +370,19 @@ allow_unsafe = true allow_unsafe = true [pkg.thiserror] -build.allow_apis = [ - "process", -] -build.allow_build_instructions = [ - "cargo:rustc-check-cfg=*", -] -allow_apis = [ - "fs", -] +build.allow_apis = ["process"] +build.allow_build_instructions = ["cargo:rustc-check-cfg=*"] +allow_apis = ["fs"] [pkg.ahash] allow_unsafe = true [pkg.cc] allow_unsafe = true -from.build.allow_apis = [ - "fs", - "process", -] +from.build.allow_apis = ["fs", "process"] [pkg.typenum] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] [pkg.rand_core] allow_unsafe = true @@ -484,16 +401,11 @@ allow_unsafe = true [pkg.tracing] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.tokio] allow_unsafe = true -allow_apis = [ - "net", - "process", -] +allow_apis = ["net", "process"] [pkg.regex-automata] allow_unsafe = true @@ -506,10 +418,7 @@ allow_unsafe = true [pkg.clang-sys] allow_unsafe = true -from.build.allow_apis = [ - "fs", - "process", -] +from.build.allow_apis = ["fs", "process"] [pkg.http] allow_unsafe = true @@ -531,9 +440,7 @@ allow_unsafe = true [pkg.indexmap] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.httparse] allow_unsafe = true @@ -552,9 +459,7 @@ allow_unsafe = true [pkg.tokio-util] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.nom] allow_unsafe = true @@ -567,9 +472,7 @@ allow_unsafe = true [pkg.prettyplease] allow_unsafe = true -build.allow_build_instructions = [ - "cargo:VERSION=*", -] +build.allow_build_instructions = ["cargo:VERSION=*"] [pkg.lazycell] allow_unsafe = true @@ -596,9 +499,7 @@ allow_unsafe = true allow_unsafe = true [pkg.semver] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] allow_unsafe = true [pkg.unicode-bidi] @@ -609,32 +510,22 @@ allow_unsafe = true [pkg.bindgen] allow_unsafe = true -from.build.allow_apis = [ - "fs", - "process", -] +from.build.allow_apis = ["fs", "process"] [pkg.pkg-config] -from.build.allow_apis = [ - "fs", - "process", -] +from.build.allow_apis = ["fs", "process"] [pkg.parking_lot_core] allow_unsafe = true [pkg.wasm-bindgen-shared] -build.allow_build_instructions = [ - "cargo:rustc-env=SCHEMA_FILE_HASH=*", -] +build.allow_build_instructions = ["cargo:rustc-env=SCHEMA_FILE_HASH=*"] [pkg.dirs-sys-next] allow_unsafe = true [pkg.crunchy] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] [pkg.is-terminal] allow_unsafe = true @@ -646,9 +537,7 @@ allow_unsafe = true allow_unsafe = true [pkg.anyhow] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] allow_unsafe = true [pkg.siphasher] @@ -658,18 +547,14 @@ allow_unsafe = true allow_unsafe = true [pkg.rustc_version] -from.build.allow_apis = [ - "process", -] +from.build.allow_apis = ["process"] [pkg.concurrent-queue] 
allow_unsafe = true [pkg.h2] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.fixedbitset] allow_unsafe = true @@ -729,22 +614,15 @@ allow_unsafe = true allow_unsafe = true [pkg.crc32fast] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] allow_unsafe = true [pkg.heapless] allow_unsafe = true -build.allow_apis = [ - "fs", - "process", -] +build.allow_apis = ["fs", "process"] [pkg.vcpkg] -from.build.allow_apis = [ - "fs", -] +from.build.allow_apis = ["fs"] [pkg.inout] allow_unsafe = true @@ -754,9 +632,7 @@ allow_unsafe = true [pkg.tokio-stream] allow_unsafe = true -from.test.allow_apis = [ - "net", -] +from.test.allow_apis = ["net"] [pkg.deranged] allow_unsafe = true @@ -764,14 +640,10 @@ allow_unsafe = true [pkg.same-file] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.doc-comment] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] [pkg.itertools] allow_unsafe = true @@ -784,9 +656,7 @@ allow_unsafe = true [pkg.hyper] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.brotli-decompressor] allow_unsafe = true @@ -798,9 +668,7 @@ allow_unsafe = true allow_unsafe = true [pkg.num-bigint] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] allow_unsafe = true [pkg.utf8parse] @@ -826,14 +694,10 @@ allow_unsafe = true [pkg.glob] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.which] -from.build.allow_apis = [ - "fs", -] +from.build.allow_apis = ["fs"] [pkg.geo-types] allow_unsafe = true @@ -857,32 +721,22 @@ allow_unsafe = true allow_unsafe = true [pkg.mime_guess] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] [pkg.term] -from.build.allow_apis = [ - "fs", -] +from.build.allow_apis = ["fs"] [pkg.lalrpop] -from.build.allow_apis = [ - "fs", -] +from.build.allow_apis = ["fs"] [pkg.smol_str] allow_unsafe = true [pkg.rust_decimal] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] [pkg.ref-cast] -build.allow_apis = [ - "process", -] +build.allow_apis = ["process"] allow_unsafe = true [pkg.atomic] @@ -890,9 +744,7 @@ allow_unsafe = true [pkg.iana-time-zone] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.bitmaps] allow_unsafe = true @@ -908,6 +760,7 @@ allow_unsafe = true [pkg.async-stream] allow_unsafe = true +allow_apis = ["net"] [pkg.prost] allow_unsafe = true @@ -923,9 +776,7 @@ allow_unsafe = true [pkg.chrono] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.anstyle-parse] allow_unsafe = true @@ -947,9 +798,7 @@ allow_unsafe = true [pkg.tracing-log] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.arrayvec] allow_unsafe = true @@ -958,9 +807,7 @@ allow_unsafe = true allow_unsafe = true [pkg.fst] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] [pkg.anstyle-query] allow_unsafe = true @@ -969,9 +816,7 @@ allow_unsafe = true allow_unsafe = true [pkg.snap] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] allow_unsafe = true [pkg.arc-swap] @@ -991,9 +836,7 @@ allow_unsafe = true [pkg.tracing-subscriber] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.imbl] allow_unsafe = true @@ -1003,9 +846,7 @@ allow_unsafe = true [pkg.hyper-util] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.terminal_size] allow_unsafe = true @@ -1048,19 +889,11 @@ allow_unsafe = true [pkg.quick_cache] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.surreal] -allow_apis = [ - "fs", - "net", - "process", -] 
-build.allow_build_instructions = [ - "cargo:rustc-env=*", -] +allow_apis = ["fs", "net", "process"] +build.allow_build_instructions = ["cargo:rustc-env=*"] [pkg.surreal.build.sandbox] allow_network = true @@ -1091,18 +924,14 @@ allow_unsafe = true [pkg.tracing-opentelemetry] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.predicates] allow_unsafe = true [pkg.rustyline] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.zstd-safe] allow_unsafe = true @@ -1124,116 +953,72 @@ allow_unsafe = true [pkg.reqwest] allow_unsafe = true -allow_apis = [ - "fs", - "net", -] +allow_apis = ["fs", "net"] [pkg.proc-macro-crate] -allow_apis = [ - "fs", -] +allow_apis = ["fs", "process"] [pkg.tonic] allow_unsafe = true -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.rocksdb] allow_unsafe = true -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.surrealdb] -allow_apis = [ - "fs", -] -build.allow_build_instructions = [ - "cargo::rustc-check-cfg=*", -] +allow_apis = ["fs"] +build.allow_build_instructions = ["cargo::rustc-check-cfg=*"] [pkg.surrealdb-core] allow_unsafe = true -allow_apis = [ - "net", - "fs", -] +allow_apis = ["net", "fs"] build.allow_build_instructions = [ + "cargo:rustc-cfg=*", "cargo::rustc-check-cfg=*", ] [pkg.assert_fs] -from.test.allow_apis = [ - "fs", -] +from.test.allow_apis = ["fs"] [pkg.tempfile] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.object_store] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.hyper-rustls] -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.wiremock] -from.test.allow_apis = [ - "net", -] +from.test.allow_apis = ["net"] [pkg.tokio-tungstenite] -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.path-clean] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.axum-server] -allow_apis = [ - "fs", - "net", -] +allow_apis = ["fs", "net"] [pkg.clap_builder] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.axum] -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.tower] -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.opentelemetry_sdk] -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.tower-http] -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.axum-core] -allow_apis = [ - "net", -] +allow_apis = ["net"] [pkg.cpp_demangle] allow_unsafe = true @@ -1283,6 +1068,27 @@ allow_unsafe = true [pkg.pprof] allow_unsafe = true +[pkg.pest_derive] +allow_proc_macro = true + +[pkg.pest] +allow_unsafe = true + +[pkg.pest_generator] +allow_apis = ["fs"] + +[pkg.async-graphql-derive] +allow_proc_macro = true + +[pkg.strum_macros] +allow_proc_macro = true + +[pkg.async-graphql] +allow_apis = ["fs", "net"] + +[pkg.multer] +allow_apis = ["net"] + # examples [pkg.pear_codegen] allow_proc_macro = true @@ -1294,9 +1100,7 @@ allow_proc_macro = true allow_proc_macro = true [pkg.ext-sort] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.phf_macros] allow_proc_macro = true @@ -1329,17 +1133,13 @@ allow_unsafe = true allow_unsafe = true [pkg.html5ever] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] [pkg.futf] allow_unsafe = true [pkg.markup5ever] -build.allow_apis = [ - "fs", -] +build.allow_apis = ["fs"] allow_unsafe = true [pkg.tendril] @@ -1349,9 +1149,7 @@ allow_unsafe = true allow_unsafe = true [pkg.surrealkv] -allow_apis = [ - "fs", -] +allow_apis = ["fs"] [pkg.atomic-waker] allow_unsafe = true @@ -1359,13 +1157,17 @@ allow_unsafe = true [pkg.rustls-pki-types] allow_unsafe = true +[pkg.static_assertions_next] +allow_unsafe = true + +[pkg.surrealdb-async-graphql-axum] 
+allow_apis = ["net"]
+
 [pkg.castaway]
 allow_unsafe = true
 
 [pkg.blake3]
-build.allow_apis = [
-    "fs",
-]
+build.allow_apis = ["fs"]
 build.allow_build_instructions = [
     "cargo:rustc-link-lib=static=blake3_avx512_assembly",
     "cargo:rustc-link-lib=static=blake3_sse2_sse41_avx2_assembly",
diff --git a/core/Cargo.toml b/core/Cargo.toml
index fb308d6b..393e22d8 100644
--- a/core/Cargo.toml
+++ b/core/Cargo.toml
@@ -153,6 +153,9 @@ trice = "0.4.0"
 ulid = { version = "1.1.0", features = ["serde"] }
 unicase = "2.7.0"
 url = "2.5.0"
+async-graphql = { version = "7.0.7", default-features = false, features = [
+	"dynamic-schema",
+] }
 castaway = "0.2.3"
 serde-content = "0.1.0"
@@ -196,5 +199,8 @@ tokio = { version = "1.34.0", default-features = false, features = [
 tokio-tungstenite = { version = "0.21.0", optional = true }
 uuid = { version = "1.6.1", features = ["serde", "v4", "v7"] }
 
+[lints.rust]
+unexpected_cfgs = { level = "warn", check-cfg = ['cfg(surrealdb_unstable)'] }
+
 [lib]
 name = "surrealdb_core"
diff --git a/core/src/cnf/mod.rs b/core/src/cnf/mod.rs
index 3d9d6027..d36ae5ff 100644
--- a/core/src/cnf/mod.rs
+++ b/core/src/cnf/mod.rs
@@ -51,6 +51,9 @@ pub static INSECURE_FORWARD_ACCESS_ERRORS: Lazy<bool> =
 pub static EXTERNAL_SORTING_BUFFER_LIMIT: Lazy<usize> =
 	lazy_env_parse!("SURREAL_EXTERNAL_SORTING_BUFFER_LIMIT", usize, 50_000);
 
+pub static GRAPHQL_ENABLE: Lazy<bool> =
+	lazy_env_parse!("SURREAL_EXPERIMENTAL_GRAPHQL", bool, false);
+
 /// Enable experimental bearer access and stateful access grant management. Still under active development.
 /// Using this experimental feature may introduce risks related to breaking changes and security issues.
 #[cfg(not(test))]
diff --git a/core/src/gql/cache.rs b/core/src/gql/cache.rs
new file mode 100644
index 00000000..042018ae
--- /dev/null
+++ b/core/src/gql/cache.rs
@@ -0,0 +1,87 @@
+use tokio::sync::RwLock;
+
+use std::{collections::BTreeMap, fmt::Debug, hash::Hash, marker::PhantomData, sync::Arc};
+
+use async_graphql::dynamic::Schema;
+
+use crate::dbs::Session;
+use crate::kvs::Datastore;
+
+use super::{error::GqlError, schema::generate_schema};
+
+pub trait Invalidator: Debug + Clone + Send + Sync + 'static {
+	type MetaData: Debug + Clone + Send + Sync + Hash;
+
+	fn is_valid(datastore: &Datastore, session: &Session, meta: &Self::MetaData) -> bool;
+
+	fn generate(
+		datastore: &Arc<Datastore>,
+		session: &Session,
+	) -> impl std::future::Future<Output = Result<(Schema, Self::MetaData), GqlError>> + std::marker::Send;
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct Pessimistic;
+impl Invalidator for Pessimistic {
+	type MetaData = ();
+
+	fn is_valid(_datastore: &Datastore, _session: &Session, _meta: &Self::MetaData) -> bool {
+		false
+	}
+
+	async fn generate(
+		datastore: &Arc<Datastore>,
+		session: &Session,
+	) -> Result<(Schema, Self::MetaData), GqlError> {
+		let schema = generate_schema(datastore, session).await?;
+		Ok((schema, ()))
+	}
+}
+
+#[derive(Clone)]
+pub struct SchemaCache<I: Invalidator = Pessimistic> {
+	#[allow(clippy::type_complexity)]
+	inner: Arc<RwLock<BTreeMap<(String, String), (Schema, I::MetaData)>>>,
+	pub datastore: Arc<Datastore>,
+	_invalidator: PhantomData<I>,
+}
+
+impl<I: Invalidator> Debug for SchemaCache<I> {
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		f.debug_struct("SchemaCache")
+			.field("inner", &self.inner)
+			.field("_invalidator", &self._invalidator)
+			.finish()
+	}
+}
+
+impl<I: Invalidator> SchemaCache<I> {
+	pub fn new(datastore: Arc<Datastore>) -> Self {
+		SchemaCache {
+			inner: Default::default(),
+			datastore,
+			_invalidator: PhantomData,
+		}
+	}
+	pub async fn get_schema(&self, session: &Session) -> Result<Schema, GqlError> {
+		let ns = session.ns.as_ref().expect("missing ns should have been caught");
+		let db = session.db.as_ref().expect("missing db should have been caught");
+		{
+			let guard = self.inner.read().await;
+			if let Some(cand) = guard.get(&(ns.to_owned(), db.to_owned())) {
+				if I::is_valid(&self.datastore, session, &cand.1) {
+					return Ok(cand.0.clone());
+				}
+			}
+		};
+
+		let (schema, meta) = I::generate(&self.datastore, session).await?;
+
+		{
+			let mut guard = self.inner.write().await;
+			guard.insert((ns.to_owned(), db.to_owned()), (schema.clone(), meta));
+		}
+
+		Ok(schema)
+	}
+}
diff --git a/core/src/gql/error.rs b/core/src/gql/error.rs
new file mode 100644
index 00000000..bd0c4e4f
--- /dev/null
+++ b/core/src/gql/error.rs
@@ -0,0 +1,58 @@
+use std::fmt::Debug;
+
+use async_graphql::{InputType, InputValueError};
+use thiserror::Error;
+
+use crate::sql::Kind;
+
+#[derive(Debug, Error)]
+pub enum GqlError {
+	#[error("Database error: {0}")]
+	DbError(crate::err::Error),
+	#[error("Error generating schema: {0}")]
+	SchemaError(String),
+	#[error("Error resolving request: {0}")]
+	ResolverError(String),
+	#[error("Internal Error: {0}")]
+	InternalError(String),
+	#[error("Error converting value: {val} to type: {target}")]
+	TypeError {
+		target: Kind,
+		val: async_graphql::Value,
+	},
+}
+
+pub fn schema_error(msg: impl Into<String>) -> GqlError {
+	GqlError::SchemaError(msg.into())
+}
+
+pub fn resolver_error(msg: impl Into<String>) -> GqlError {
+	GqlError::ResolverError(msg.into())
+}
+pub fn internal_error(msg: impl Into<String>) -> GqlError {
+	let msg = msg.into();
+	error!("{}", msg);
+	GqlError::InternalError(msg)
+}
+
+pub fn type_error(kind: Kind, val: &async_graphql::Value) -> GqlError {
+	GqlError::TypeError {
+		target: kind,
+		val: val.to_owned(),
+	}
+}
+
+impl From<crate::err::Error> for GqlError {
+	fn from(value: crate::err::Error) -> Self {
+		GqlError::DbError(value)
+	}
+}
+
+impl<T> From<InputValueError<T>> for GqlError
+where
+	T: InputType + Debug,
+{
+	fn from(value: InputValueError<T>) -> Self {
+		GqlError::ResolverError(format!("{value:?}"))
+	}
+}
diff --git a/core/src/gql/ext.rs b/core/src/gql/ext.rs
new file mode 100644
index 00000000..712ced85
--- /dev/null
+++ b/core/src/gql/ext.rs
@@ -0,0 +1,158 @@
+use std::mem;
+
+use crate::sql::{
+	statements::UseStatement, Cond, Ident, Idiom, Limit, Order, Orders, Part, Start, Table, Value,
+};
+use async_graphql::dynamic::Scalar;
+
+pub trait IntoExt<T> {
+	fn intox(self) -> T;
+}
+
+impl<S, T> IntoExt<T> for S
+where
+	T: FromExt<S>,
+{
+	fn intox(self) -> T {
+		T::from(self)
+	}
+}
+
+trait FromExt<T> {
+	fn from(value: T) -> Self;
+}
+
+impl<V> FromExt<V> for Cond
+where
+	V: Into<Value>,
+{
+	fn from(value: V) -> Self {
+		Self(value.into())
+	}
+}
+impl<V> FromExt<V> for Limit
+where
+	V: Into<Value>,
+{
+	fn from(value: V) -> Self {
+		Self(value.into())
+	}
+}
+
+impl<I> FromExt<(I, bool, bool, bool, bool)> for Order
+where
+	I: Into<Idiom>,
+{
+	fn from((order, random, collate, numeric, direction): (I, bool, bool, bool, bool)) -> Self {
+		Self {
+			order: order.into(),
+			random,
+			collate,
+			numeric,
+			direction,
+		}
+	}
+}
+
+impl<V> FromExt<V> for Start
+where
+	V: Into<Value>,
+{
+	fn from(value: V) -> Self {
+		Start(value.into())
+	}
+}
+
+impl FromExt<(&str, &str)> for UseStatement {
+	fn from(value: (&str, &str)) -> Self {
+		Self {
+			ns: Some(value.0.into()),
+			db: Some(value.1.into()),
+		}
+	}
+}
+impl FromExt<(String, String)> for UseStatement {
+	fn from(value: (String, String)) -> Self {
+		Self {
+			ns: Some(value.0),
+			db: Some(value.1),
+		}
+	}
+}
+impl FromExt<(Option<String>, Option<String>)> for UseStatement {
+	fn from(value: (Option<String>, Option<String>)) -> Self {
+		Self {
+			ns: value.0,
+			db: value.1,
+		}
+	}
+}
+
+impl<S> FromExt<S> for Table
+where
+	S: Into<String>,
+{
+	fn from(value: S) -> Self {
+		Table(value.into())
+	}
+}
+
+impl FromExt<Vec<Order>> for Orders {
+	fn from(value: Vec<Order>) -> Self {
+		Orders(value)
+	}
+}
+
+impl<S> FromExt<S> for Ident
+where
+	S: Into<String>,
+{
+	fn from(value: S) -> Self {
+		Ident(value.into())
+	}
+}
+
+impl<P> FromExt<P>
for Idiom +where + P: Into, +{ + fn from(value: P) -> Self { + Idiom(vec![value.into()]) + } +} + +pub trait ValidatorExt { + fn add_validator( + &mut self, + validator: impl Fn(&async_graphql::Value) -> bool + Send + Sync + 'static, + ) -> &mut Self; +} + +impl ValidatorExt for Scalar { + fn add_validator( + &mut self, + validator: impl Fn(&async_graphql::Value) -> bool + Send + Sync + 'static, + ) -> &mut Self { + let mut tmp = Scalar::new(""); + mem::swap(self, &mut tmp); + *self = tmp.validator(validator); + self + } +} + +use crate::sql::Object as SqlObject; +use crate::sql::Value as SqlValue; + +pub trait TryAsExt { + fn try_as_object(self) -> Result + where + Self: Sized; +} +impl TryAsExt for SqlValue { + fn try_as_object(self) -> Result { + match self { + SqlValue::Object(o) => Ok(o), + v => Err(v), + } + } +} diff --git a/core/src/gql/mod.rs b/core/src/gql/mod.rs new file mode 100644 index 00000000..69c34468 --- /dev/null +++ b/core/src/gql/mod.rs @@ -0,0 +1,9 @@ +pub mod cache; +pub mod error; +mod ext; +pub mod schema; +mod utils; + +pub use error::GqlError; + +pub use cache::*; diff --git a/core/src/gql/schema.rs b/core/src/gql/schema.rs new file mode 100644 index 00000000..df0a19dd --- /dev/null +++ b/core/src/gql/schema.rs @@ -0,0 +1,1114 @@ +use std::collections::BTreeMap; +use std::fmt::Display; +use std::sync::Arc; + +use crate::dbs::Session; +use crate::kvs::Datastore; +use crate::sql::statements::{DefineFieldStatement, SelectStatement}; +use crate::sql::Kind; +use crate::sql::{self, Table}; +use crate::sql::{Cond, Fields}; +use crate::sql::{Expression, Geometry}; +use crate::sql::{Statement, Thing}; +use async_graphql::dynamic::{Enum, FieldValue, ResolverContext, Type, Union}; +use async_graphql::dynamic::{Field, Interface}; +use async_graphql::dynamic::{FieldFuture, InterfaceField}; +use async_graphql::dynamic::{InputObject, Object}; +use async_graphql::dynamic::{InputValue, Schema}; +use async_graphql::dynamic::{Scalar, TypeRef}; +use async_graphql::indexmap::IndexMap; +use async_graphql::Name; +use async_graphql::Value as GqlValue; +use rust_decimal::prelude::FromPrimitive; +use rust_decimal::Decimal; +use serde_json::Number; + +use super::error::{resolver_error, GqlError}; +use super::ext::IntoExt; +use super::ext::ValidatorExt; +use crate::gql::error::{internal_error, schema_error, type_error}; +use crate::gql::ext::TryAsExt; +use crate::gql::utils::{get_record, GqlValueUtils}; +use crate::kvs::LockType; +use crate::kvs::TransactionType; +use crate::sql::Object as SqlObject; +use crate::sql::Value as SqlValue; + +macro_rules! limit_input { + () => { + InputValue::new("limit", TypeRef::named(TypeRef::INT)) + }; +} + +macro_rules! start_input { + () => { + InputValue::new("start", TypeRef::named(TypeRef::INT)) + }; +} + +macro_rules! id_input { + () => { + InputValue::new("id", TypeRef::named_nn(TypeRef::ID)) + }; +} + +macro_rules! 
order { + (asc, $field:expr) => {{ + let mut tmp = sql::Order::default(); + tmp.order = $field.into(); + tmp.direction = true; + tmp + }}; + (desc, $field:expr) => {{ + let mut tmp = sql::Order::default(); + tmp.order = $field.into(); + tmp + }}; +} + +fn filter_name_from_table(tb_name: impl Display) -> String { + format!("_filter_{tb_name}") +} + +pub async fn generate_schema( + datastore: &Arc, + session: &Session, +) -> Result { + let kvs = datastore.as_ref(); + let tx = kvs.transaction(TransactionType::Read, LockType::Optimistic).await?; + let ns = session.ns.as_ref().expect("missing ns should have been caught"); + let db = session.db.as_ref().expect("missing db should have been caught"); + let tbs = tx.all_tb(ns, db).await?; + let mut query = Object::new("Query"); + let mut types: Vec = Vec::new(); + + trace!(ns, db, ?tbs, "generating schema"); + + if tbs.len() == 0 { + return Err(schema_error("no tables found in database")); + } + + for tb in tbs.iter() { + trace!("Adding table: {}", tb.name); + let tb_name = tb.name.to_string(); + let first_tb_name = tb_name.clone(); + let second_tb_name = tb_name.clone(); + + let table_orderable_name = format!("_orderable_{tb_name}"); + let mut table_orderable = Enum::new(&table_orderable_name).item("id"); + let table_order_name = format!("_order_{tb_name}"); + let table_order = InputObject::new(&table_order_name) + .field(InputValue::new("asc", TypeRef::named(&table_orderable_name))) + .field(InputValue::new("desc", TypeRef::named(&table_orderable_name))) + .field(InputValue::new("then", TypeRef::named(&table_order_name))); + + let table_filter_name = filter_name_from_table(tb_name); + let mut table_filter = InputObject::new(&table_filter_name); + table_filter = table_filter + .field(InputValue::new("id", TypeRef::named("_filter_id"))) + .field(InputValue::new("and", TypeRef::named_nn_list(&table_filter_name))) + .field(InputValue::new("or", TypeRef::named_nn_list(&table_filter_name))) + .field(InputValue::new("not", TypeRef::named(&table_filter_name))); + types.push(Type::InputObject(filter_id())); + + let sess1 = session.to_owned(); + let fds = tx.all_tb_fields(ns, db, &tb.name.0).await?; + let fds1 = fds.clone(); + let kvs1 = datastore.clone(); + + query = query.field( + Field::new( + tb.name.to_string(), + TypeRef::named_nn_list_nn(tb.name.to_string()), + move |ctx| { + let tb_name = first_tb_name.clone(); + let sess1 = sess1.clone(); + let fds1 = fds1.clone(); + let kvs1 = kvs1.clone(); + FieldFuture::new(async move { + let kvs = kvs1.as_ref(); + + let args = ctx.args.as_index_map(); + trace!("received request with args: {args:?}"); + + let start = args.get("start").and_then(|v| v.as_i64()).map(|s| s.intox()); + + let limit = args.get("limit").and_then(|v| v.as_i64()).map(|l| l.intox()); + + let order = args.get("order"); + + let filter = args.get("filter"); + + let orders = match order { + Some(GqlValue::Object(o)) => { + let mut orders = vec![]; + let mut current = o; + loop { + let asc = current.get("asc"); + let desc = current.get("desc"); + match (asc, desc) { + (Some(_), Some(_)) => { + return Err("Found both asc and desc in order".into()); + } + (Some(GqlValue::Enum(a)), None) => { + orders.push(order!(asc, a.as_str())) + } + (None, Some(GqlValue::Enum(d))) => { + orders.push(order!(desc, d.as_str())) + } + (_, _) => { + break; + } + } + if let Some(GqlValue::Object(next)) = current.get("then") { + current = next; + } else { + break; + } + } + Some(orders) + } + _ => None, + }; + trace!("parsed orders: {orders:?}"); + + let cond = 
match filter { + Some(f) => { + let o = match f { + GqlValue::Object(o) => o, + f => { + error!("Found filter {f}, which should be object and should have been rejected by async graphql."); + return Err("Value in cond doesn't fit schema".into()); + } + }; + + let cond = cond_from_filter(o, &fds1)?; + + Some(cond) + } + None => None, + }; + + trace!("parsed filter: {cond:?}"); + + let ast = Statement::Select({ + SelectStatement { + what: vec![SqlValue::Table(tb_name.intox())].into(), + expr: Fields::all(), + order: orders.map(IntoExt::intox), + cond, + limit, + start, + ..Default::default() + } + }); + + trace!("generated query ast: {ast:?}"); + + let query = ast.into(); + trace!("generated query: {}", query); + + let res = kvs.process(query, &sess1, Default::default()).await?; + debug_assert_eq!(res.len(), 1); + let res = res + .into_iter() + .next() + .expect("response vector should have exactly one value") + .result?; + + let res_vec = + match res { + SqlValue::Array(a) => a, + v => { + error!("Found top level value, in result which should be array: {v:?}"); + return Err("Internal Error".into()); + } + }; + + let out: Result, SqlValue> = res_vec + .0 + .into_iter() + .map(|v| v.try_as_object().map(FieldValue::owned_any)) + .collect(); + + match out { + Ok(l) => Ok(Some(FieldValue::list(l))), + Err(v) => { + Err(internal_error(format!("expected object, found: {v:?}")).into()) + } + } + }) + }, + ) + .argument(limit_input!()) + .argument(start_input!()) + .argument(InputValue::new("order", TypeRef::named(&table_order_name))) + .argument(InputValue::new("filter", TypeRef::named(&table_filter_name))), + ); + + let sess2 = session.to_owned(); + let kvs2 = datastore.to_owned(); + query = query.field( + Field::new( + format!("_get_{}", tb.name), + TypeRef::named(tb.name.to_string()), + move |ctx| { + let tb_name = second_tb_name.clone(); + let kvs2 = kvs2.clone(); + FieldFuture::new({ + let sess2 = sess2.clone(); + async move { + let kvs = kvs2.as_ref(); + + let args = ctx.args.as_index_map(); + let id = match args.get("id").and_then(GqlValueUtils::as_string) { + Some(i) => i, + None => { + return Err(internal_error( + "Schema validation failed: No id found in _get_", + ) + .into()); + } + }; + let thing = match id.clone().try_into() { + Ok(t) => t, + Err(_) => Thing::from((tb_name, id)), + }; + + match get_record(kvs, &sess2, &thing).await? 
{ + SqlValue::Object(o) => Ok(Some(FieldValue::owned_any(o))), + _ => Ok(None), + } + } + }) + }, + ) + .argument(id_input!()), + ); + + let mut table_ty_obj = Object::new(tb.name.to_string()) + .field(Field::new( + "id", + TypeRef::named_nn(TypeRef::ID), + make_table_field_resolver( + datastore, + session, + "id", + Some(Kind::Record(vec![Table::from(tb.name.to_string())])), + ), + )) + .implement("record"); + + for fd in fds.iter() { + let Some(ref kind) = fd.kind else { + continue; + }; + let fd_name = Name::new(fd.name.to_string()); + let fd_type = kind_to_type(kind.clone(), &mut types)?; + table_orderable = table_orderable.item(fd_name.to_string()); + let type_filter_name = format!("_filter_{}", unwrap_type(fd_type.clone())); + + let type_filter = Type::InputObject(filter_from_type( + kind.clone(), + type_filter_name.clone(), + &mut types, + )?); + trace!("\n{type_filter:?}\n"); + types.push(type_filter); + + table_filter = table_filter + .field(InputValue::new(fd.name.to_string(), TypeRef::named(type_filter_name))); + + table_ty_obj = table_ty_obj.field(Field::new( + fd.name.to_string(), + fd_type, + make_table_field_resolver(datastore, session, fd_name.as_str(), fd.kind.clone()), + )); + } + + types.push(Type::Object(table_ty_obj)); + types.push(table_order.into()); + types.push(Type::Enum(table_orderable)); + types.push(Type::InputObject(table_filter)); + } + + let sess3 = session.to_owned(); + let kvs3 = datastore.to_owned(); + query = query.field( + Field::new("_get", TypeRef::named("record"), move |ctx| { + let kvs3 = kvs3.clone(); + FieldFuture::new({ + let sess3 = sess3.clone(); + async move { + let kvs = kvs3.as_ref(); + + let args = ctx.args.as_index_map(); + let id = match args.get("id").and_then(GqlValueUtils::as_string) { + Some(i) => i, + None => { + return Err(internal_error( + "Schema validation failed: No id found in _get", + ) + .into()); + } + }; + + let thing: Thing = match id.clone().try_into() { + Ok(t) => t, + Err(_) => return Err(resolver_error(format!("invalid id: {id}")).into()), + }; + + match get_record(kvs, &sess3, &thing).await? { + SqlValue::Object(o) => { + let out = FieldValue::owned_any(o).with_type(thing.tb.to_string()); + + Ok(Some(out)) + } + _ => Ok(None), + } + } + }) + }) + .argument(id_input!()), + ); + + trace!("current Query object for schema: {:?}", query); + + let mut schema = Schema::build("Query", None, None).register(query); + for ty in types { + trace!("adding type: {ty:?}"); + schema = schema.register(ty); + } + + macro_rules! 
scalar_debug_validated { + ($schema:ident, $name:expr, $kind:expr) => { + scalar_debug_validated!( + $schema, + $name, + $kind, + ::std::option::Option::<&str>::None, + ::std::option::Option::<&str>::None + ) + }; + ($schema:ident, $name:expr, $kind:expr, $desc:literal) => { + scalar_debug_validated!($schema, $name, $kind, std::option::Option::Some($desc), None) + }; + ($schema:ident, $name:expr, $kind:expr, $desc:literal, $url:literal) => { + scalar_debug_validated!( + $schema, + $name, + $kind, + std::option::Option::Some($desc), + Some($url) + ) + }; + ($schema:ident, $name:expr, $kind:expr, $desc:expr, $url:expr) => {{ + let new_type = Type::Scalar({ + let mut tmp = Scalar::new($name); + if let Some(desc) = $desc { + tmp = tmp.description(desc); + } + if let Some(url) = $url { + tmp = tmp.specified_by_url(url); + } + #[cfg(debug_assertions)] + tmp.add_validator(|v| gql_to_sql_kind(v, $kind).is_ok()); + tmp + }); + $schema = $schema.register(new_type); + }}; + } + + scalar_debug_validated!( + schema, + "uuid", + Kind::Uuid, + "a string encoded uuid", + "https://datatracker.ietf.org/doc/html/rfc4122" + ); + + scalar_debug_validated!(schema, "decimal", Kind::Decimal); + scalar_debug_validated!(schema, "number", Kind::Number); + scalar_debug_validated!(schema, "null", Kind::Null); + scalar_debug_validated!(schema, "datetime", Kind::Datetime); + scalar_debug_validated!(schema, "duration", Kind::Duration); + scalar_debug_validated!(schema, "object", Kind::Object); + scalar_debug_validated!(schema, "any", Kind::Any); + + let id_interface = + Interface::new("record").field(InterfaceField::new("id", TypeRef::named_nn(TypeRef::ID))); + schema = schema.register(id_interface); + + // TODO: when used get: `Result::unwrap()` on an `Err` value: SchemaError("Field \"like.in\" is not sub-type of \"relation.in\"") + let relation_interface = Interface::new("relation") + .field(InterfaceField::new("id", TypeRef::named_nn(TypeRef::ID))) + .field(InterfaceField::new("in", TypeRef::named_nn("record"))) + .field(InterfaceField::new("out", TypeRef::named_nn("record"))) + .implement("record"); + schema = schema.register(relation_interface); + + schema + .finish() + .map_err(|e| schema_error(format!("there was an error generating schema: {e:?}"))) +} + +fn make_table_field_resolver( + kvs: &Arc, + sess: &Session, + fd_name: impl Into, + kind: Option, +) -> impl for<'a> Fn(ResolverContext<'a>) -> FieldFuture<'a> + Send + Sync + 'static { + let fd_name = fd_name.into(); + let sess_field = Arc::new(sess.to_owned()); + let kvs_field = kvs.clone(); + move |ctx: ResolverContext| { + let sess_field = sess_field.clone(); + let fd_name = fd_name.clone(); + let kvs_field = kvs_field.clone(); + let field_kind = kind.clone(); + FieldFuture::new({ + let kvs_field = kvs_field.clone(); + async move { + let kvs = kvs_field.as_ref(); + + let record: &SqlObject = ctx + .parent_value + .downcast_ref::() + .ok_or_else(|| internal_error("failed to downcast"))?; + + let Some(val) = record.get(fd_name.as_str()) else { + return Ok(None); + }; + + let out = match val { + SqlValue::Thing(rid)if fd_name != "id" => match get_record(kvs, &sess_field, rid).await? 
+ { + SqlValue::Object(o) => { + let mut tmp = FieldValue::owned_any(o); + + match field_kind { + Some(Kind::Record(ts)) if ts.len() != 1 => {tmp = tmp.with_type(rid.tb.clone())} + _ => {} + } + + Ok(Some(tmp)) + } + v => Err(resolver_error(format!("expected object, but found (referential integrity might be broken): {v:?}")).into()), + } + v => { + match field_kind { + Some(Kind::Either(ks)) if ks.len() != 1 => { + } + _ => {} + } + let out = sql_value_to_gql_value(v.to_owned()) + .map_err(|_| "SQL to GQL translation failed")?; + Ok(Some(FieldValue::value(out))) + } + }; + out + } + }) + } +} + +pub fn sql_value_to_gql_value(v: SqlValue) -> Result { + let out = match v { + SqlValue::None => GqlValue::Null, + SqlValue::Null => GqlValue::Null, + SqlValue::Bool(b) => GqlValue::Boolean(b), + SqlValue::Number(n) => match n { + crate::sql::Number::Int(i) => GqlValue::Number(i.into()), + crate::sql::Number::Float(f) => GqlValue::Number( + Number::from_f64(f) + .ok_or(resolver_error("unimplemented: graceful NaN and Inf handling"))?, + ), + num @ crate::sql::Number::Decimal(_) => GqlValue::String(num.to_string()), + }, + SqlValue::Strand(s) => GqlValue::String(s.0), + d @ SqlValue::Duration(_) => GqlValue::String(d.to_string()), + SqlValue::Datetime(d) => GqlValue::String(d.to_rfc3339()), + SqlValue::Uuid(uuid) => GqlValue::String(uuid.to_string()), + SqlValue::Array(a) => { + GqlValue::List(a.into_iter().map(|v| sql_value_to_gql_value(v).unwrap()).collect()) + } + SqlValue::Object(o) => GqlValue::Object( + o.0.into_iter() + .map(|(k, v)| (Name::new(k), sql_value_to_gql_value(v).unwrap())) + .collect(), + ), + SqlValue::Geometry(_) => return Err(resolver_error("unimplemented: Geometry types")), + SqlValue::Bytes(b) => GqlValue::Binary(b.into_inner().into()), + SqlValue::Thing(t) => GqlValue::String(t.to_string()), + v => return Err(internal_error(format!("found unsupported value variant: {v:?}"))), + }; + Ok(out) +} + +fn kind_to_type(kind: Kind, types: &mut Vec) -> Result { + let (optional, match_kind) = match kind { + Kind::Option(op_ty) => (true, *op_ty), + _ => (false, kind), + }; + let out_ty = match match_kind { + Kind::Any => TypeRef::named("any"), + Kind::Null => TypeRef::named("null"), + Kind::Bool => TypeRef::named(TypeRef::BOOLEAN), + Kind::Bytes => TypeRef::named("bytes"), + Kind::Datetime => TypeRef::named("datetime"), + Kind::Decimal => TypeRef::named("decimal"), + Kind::Duration => TypeRef::named("duration"), + Kind::Float => TypeRef::named(TypeRef::FLOAT), + Kind::Int => TypeRef::named(TypeRef::INT), + Kind::Number => TypeRef::named("number"), + Kind::Object => TypeRef::named("object"), + Kind::Point => return Err(schema_error("Kind::Point is not yet supported")), + Kind::String => TypeRef::named(TypeRef::STRING), + Kind::Uuid => TypeRef::named("uuid"), + Kind::Record(mut r) => match r.len() { + 0 => TypeRef::named("record"), + 1 => TypeRef::named(r.pop().unwrap().0), + _ => { + let names: Vec = r.into_iter().map(|t| t.0).collect(); + let ty_name = names.join("_or_"); + + let mut tmp_union = Union::new(ty_name.clone()) + .description(format!("A record which is one of: {}", names.join(", "))); + for n in names { + tmp_union = tmp_union.possible_type(n); + } + + types.push(Type::Union(tmp_union)); + TypeRef::named(ty_name) + } + }, + Kind::Geometry(_) => return Err(schema_error("Kind::Geometry is not yet supported")), + Kind::Option(t) => { + let mut non_op_ty = *t; + while let Kind::Option(inner) = non_op_ty { + non_op_ty = *inner; + } + kind_to_type(non_op_ty, types)? 
+ } + Kind::Either(ts) => { + let pos_names: Result, GqlError> = + ts.into_iter().map(|k| kind_to_type(k, types)).collect(); + let pos_names: Vec = pos_names?.into_iter().map(|tr| tr.to_string()).collect(); + let ty_name = pos_names.join("_or_"); + + let mut tmp_union = Union::new(ty_name.clone()); + for n in pos_names { + tmp_union = tmp_union.possible_type(n); + } + + types.push(Type::Union(tmp_union)); + TypeRef::named(ty_name) + } + Kind::Set(_, _) => return Err(schema_error("Kind::Set is not yet supported")), + Kind::Array(k, _) => TypeRef::List(Box::new(kind_to_type(*k, types)?)), + Kind::Function(_, _) => return Err(schema_error("Kind::Function is not yet supported")), + }; + + let out = match optional { + true => out_ty, + false => TypeRef::NonNull(Box::new(out_ty)), + }; + Ok(out) +} + +macro_rules! filter_impl { + ($filter:ident, $ty:ident, $name:expr) => { + $filter = $filter.field(InputValue::new($name, $ty.clone())); + }; +} + +fn filter_id() -> InputObject { + let mut filter = InputObject::new("_filter_id"); + let ty = TypeRef::named(TypeRef::ID); + filter_impl!(filter, ty, "eq"); + filter_impl!(filter, ty, "ne"); + filter +} +fn filter_from_type( + kind: Kind, + filter_name: String, + types: &mut Vec, +) -> Result { + let ty = match &kind { + Kind::Record(ts) => match ts.len() { + 1 => TypeRef::named(filter_name_from_table( + ts.first().expect("ts should have exactly one element").as_str(), + )), + _ => TypeRef::named(TypeRef::ID), + }, + k => unwrap_type(kind_to_type(k.clone(), types)?), + }; + + let mut filter = InputObject::new(filter_name); + filter_impl!(filter, ty, "eq"); + filter_impl!(filter, ty, "ne"); + + match kind { + Kind::Any => {} + Kind::Null => {} + Kind::Bool => {} + Kind::Bytes => {} + Kind::Datetime => {} + Kind::Decimal => {} + Kind::Duration => {} + Kind::Float => {} + Kind::Int => {} + Kind::Number => {} + Kind::Object => {} + Kind::Point => {} + Kind::String => {} + Kind::Uuid => {} + Kind::Record(_) => {} + Kind::Geometry(_) => {} + Kind::Option(_) => {} + Kind::Either(_) => {} + Kind::Set(_, _) => {} + Kind::Array(_, _) => {} + Kind::Function(_, _) => {} + }; + Ok(filter) +} + +fn unwrap_type(ty: TypeRef) -> TypeRef { + match ty { + TypeRef::NonNull(t) => unwrap_type(*t), + _ => ty, + } +} + +fn cond_from_filter( + filter: &IndexMap, + fds: &[DefineFieldStatement], +) -> Result { + val_from_filter(filter, fds).map(IntoExt::intox) +} + +fn val_from_filter( + filter: &IndexMap, + fds: &[DefineFieldStatement], +) -> Result { + if filter.len() != 1 { + return Err(resolver_error("Table Filter must have one item")); + } + + let (k, v) = filter.iter().next().unwrap(); + + let cond = match k.as_str().to_lowercase().as_str() { + "or" => aggregate(v, AggregateOp::Or, fds), + "and" => aggregate(v, AggregateOp::And, fds), + "not" => negate(v, fds), + _ => binop(k.as_str(), v, fds), + }; + + cond +} + +fn parse_op(name: impl AsRef) -> Result { + match name.as_ref() { + "eq" => Ok(sql::Operator::Equal), + "ne" => Ok(sql::Operator::NotEqual), + op => Err(resolver_error(format!("Unsupported op: {op}"))), + } +} + +fn negate(filter: &GqlValue, fds: &[DefineFieldStatement]) -> Result { + let obj = filter.as_object().ok_or(resolver_error("Value of NOT must be object"))?; + let inner_cond = val_from_filter(obj, fds)?; + + Ok(Expression::Unary { + o: sql::Operator::Not, + v: inner_cond, + } + .into()) +} + +enum AggregateOp { + And, + Or, +} + +fn aggregate( + filter: &GqlValue, + op: AggregateOp, + fds: &[DefineFieldStatement], +) -> Result { + let op_str = match op { 
+ AggregateOp::And => "AND", + AggregateOp::Or => "OR", + }; + let op = match op { + AggregateOp::And => sql::Operator::And, + AggregateOp::Or => sql::Operator::Or, + }; + let list = + filter.as_list().ok_or(resolver_error(format!("Value of {op_str} should be a list")))?; + let filter_arr = list + .iter() + .map(|v| v.as_object().map(|o| val_from_filter(o, fds))) + .collect::, GqlError>>>() + .ok_or(resolver_error(format!("List of {op_str} should contain objects")))??; + + let mut iter = filter_arr.into_iter(); + + let mut cond = iter + .next() + .ok_or(resolver_error(format!("List of {op_str} should contain at least one object")))?; + + for clause in iter { + cond = Expression::Binary { + l: clause, + o: op.clone(), + r: cond, + } + .into(); + } + + Ok(cond) +} + +fn binop( + field_name: &str, + val: &GqlValue, + fds: &[DefineFieldStatement], +) -> Result { + let obj = val.as_object().ok_or(resolver_error("Field filter should be object"))?; + + let Some(fd) = fds.iter().find(|fd| fd.name.to_string() == field_name) else { + return Err(resolver_error(format!("Field `{field_name}` not found"))); + }; + + if obj.len() != 1 { + return Err(resolver_error("Field Filter must have one item")); + } + + let lhs = sql::Value::Idiom(field_name.intox()); + + let (k, v) = obj.iter().next().unwrap(); + let op = parse_op(k)?; + + let rhs = gql_to_sql_kind(v, fd.kind.clone().unwrap_or_default())?; + + let expr = sql::Expression::Binary { + l: lhs, + o: op, + r: rhs, + }; + + Ok(expr.into()) +} + +macro_rules! either_try_kind { + ($ks:ident, $val:expr, Kind::Array) => { + for arr_kind in $ks.iter().filter(|k| matches!(k, Kind::Array(_, _))).cloned() { + either_try_kind!($ks, $val, arr_kind); + } + }; + ($ks:ident, $val:expr, Array) => { + for arr_kind in $ks.iter().filter(|k| matches!(k, Kind::Array(_, _))).cloned() { + either_try_kind!($ks, $val, arr_kind); + } + }; + ($ks:ident, $val:expr, Record) => { + for arr_kind in $ks.iter().filter(|k| matches!(k, Kind::Array(_, _))).cloned() { + either_try_kind!($ks, $val, arr_kind); + } + }; + ($ks:ident, $val:expr, AllNumbers) => { + either_try_kind!($ks, $val, Kind::Int); + either_try_kind!($ks, $val, Kind::Float); + either_try_kind!($ks, $val, Kind::Decimal); + either_try_kind!($ks, $val, Kind::Number); + }; + ($ks:ident, $val:expr, $kind:expr) => { + if $ks.contains(&$kind) { + if let Ok(out) = gql_to_sql_kind($val, $kind) { + return Ok(out); + } + } + }; +} + +macro_rules! either_try_kinds { + ($ks:ident, $val:expr, $($kind:tt),+) => { + $(either_try_kind!($ks, $val, $kind));+ + }; +} + +macro_rules! any_try_kind { + ($val:expr, $kind:expr) => { + if let Ok(out) = gql_to_sql_kind($val, $kind) { + return Ok(out); + } + }; +} +macro_rules! 
any_try_kinds { + ($val:expr, $($kind:tt),+) => { + $(any_try_kind!($val, $kind));+ + }; +} + +fn gql_to_sql_kind(val: &GqlValue, kind: Kind) -> Result { + use crate::syn; + match kind { + Kind::Any => match val { + GqlValue::String(s) => { + use Kind::*; + any_try_kinds!(val, Datetime, Duration, Uuid); + syn::value_legacy_strand(s.as_str()).map_err(|_| type_error(kind, val)) + } + GqlValue::Null => Ok(SqlValue::Null), + obj @ GqlValue::Object(_) => gql_to_sql_kind(obj, Kind::Object), + num @ GqlValue::Number(_) => gql_to_sql_kind(num, Kind::Number), + GqlValue::Boolean(b) => Ok(SqlValue::Bool(*b)), + bin @ GqlValue::Binary(_) => gql_to_sql_kind(bin, Kind::Bytes), + GqlValue::Enum(s) => Ok(SqlValue::Strand(s.as_str().into())), + arr @ GqlValue::List(_) => gql_to_sql_kind(arr, Kind::Array(Box::new(Kind::Any), None)), + }, + Kind::Null => match val { + GqlValue::Null => Ok(SqlValue::Null), + _ => Err(type_error(kind, val)), + }, + Kind::Bool => match val { + GqlValue::Boolean(b) => Ok(SqlValue::Bool(*b)), + _ => Err(type_error(kind, val)), + }, + Kind::Bytes => match val { + GqlValue::Binary(b) => Ok(SqlValue::Bytes(b.to_owned().to_vec().into())), + _ => Err(type_error(kind, val)), + }, + Kind::Datetime => match val { + GqlValue::String(s) => match syn::datetime_raw(s) { + Ok(dt) => Ok(dt.into()), + Err(_) => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Decimal => match val { + GqlValue::Number(n) => { + if let Some(int) = n.as_i64() { + Ok(SqlValue::Number(sql::Number::Decimal(int.into()))) + } else if let Some(d) = n.as_f64().and_then(Decimal::from_f64) { + Ok(SqlValue::Number(sql::Number::Decimal(d))) + } else if let Some(uint) = n.as_u64() { + Ok(SqlValue::Number(sql::Number::Decimal(uint.into()))) + } else { + Err(type_error(kind, val)) + } + } + GqlValue::String(s) => match syn::value(s) { + Ok(SqlValue::Number(n)) => match n { + sql::Number::Int(i) => Ok(SqlValue::Number(sql::Number::Decimal(i.into()))), + sql::Number::Float(f) => match Decimal::from_f64(f) { + Some(d) => Ok(SqlValue::Number(sql::Number::Decimal(d))), + None => Err(type_error(kind, val)), + }, + sql::Number::Decimal(d) => Ok(SqlValue::Number(sql::Number::Decimal(d))), + }, + _ => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Duration => match val { + GqlValue::String(s) => match syn::duration(s) { + Ok(d) => Ok(d.into()), + Err(_) => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Float => match val { + GqlValue::Number(n) => { + if let Some(i) = n.as_i64() { + Ok(SqlValue::Number(sql::Number::Float(i as f64))) + } else if let Some(f) = n.as_f64() { + Ok(SqlValue::Number(sql::Number::Float(f))) + } else if let Some(uint) = n.as_u64() { + Ok(SqlValue::Number(sql::Number::Float(uint as f64))) + } else { + unreachable!("serde_json::Number must be either i64, u64 or f64") + } + } + GqlValue::String(s) => match syn::value(s) { + Ok(SqlValue::Number(n)) => match n { + sql::Number::Int(int) => Ok(SqlValue::Number(sql::Number::Float(int as f64))), + sql::Number::Float(float) => Ok(SqlValue::Number(sql::Number::Float(float))), + sql::Number::Decimal(d) => match d.try_into() { + Ok(f) => Ok(SqlValue::Number(sql::Number::Float(f))), + _ => Err(type_error(kind, val)), + }, + }, + _ => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Int => match val { + GqlValue::Number(n) => { + if let Some(i) = n.as_i64() { + Ok(SqlValue::Number(sql::Number::Int(i))) + } else { + Err(type_error(kind, val)) + } 
+ } + GqlValue::String(s) => match syn::value(s) { + Ok(SqlValue::Number(n)) => match n { + sql::Number::Int(int) => Ok(SqlValue::Number(sql::Number::Int(int))), + sql::Number::Float(float) => { + if float.fract() == 0.0 { + Ok(SqlValue::Number(sql::Number::Int(float as i64))) + } else { + Err(type_error(kind, val)) + } + } + sql::Number::Decimal(d) => match d.try_into() { + Ok(i) => Ok(SqlValue::Number(sql::Number::Int(i))), + _ => Err(type_error(kind, val)), + }, + }, + _ => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Number => match val { + GqlValue::Number(n) => { + if let Some(i) = n.as_i64() { + Ok(SqlValue::Number(sql::Number::Int(i))) + } else if let Some(f) = n.as_f64() { + Ok(SqlValue::Number(sql::Number::Float(f))) + } else if let Some(uint) = n.as_u64() { + Ok(SqlValue::Number(sql::Number::Decimal(uint.into()))) + } else { + unreachable!("serde_json::Number must be either i64, u64 or f64") + } + } + GqlValue::String(s) => match syn::value(s) { + Ok(SqlValue::Number(n)) => Ok(SqlValue::Number(n.clone())), + _ => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Object => match val { + GqlValue::Object(o) => { + let out: Result, GqlError> = o + .iter() + .map(|(k, v)| gql_to_sql_kind(v, Kind::Any).map(|sqlv| (k.to_string(), sqlv))) + .collect(); + Ok(SqlValue::Object(out?.into())) + } + GqlValue::String(s) => match syn::value_legacy_strand(s.as_str()) { + Ok(obj @ SqlValue::Object(_)) => Ok(obj), + _ => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Point => match val { + GqlValue::List(l) => match l.as_slice() { + [GqlValue::Number(x), GqlValue::Number(y)] => match (x.as_f64(), y.as_f64()) { + (Some(x), Some(y)) => Ok(SqlValue::Geometry(Geometry::Point((x, y).into()))), + _ => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::String => match val { + GqlValue::String(s) => Ok(SqlValue::Strand(s.to_owned().into())), + GqlValue::Enum(s) => Ok(SqlValue::Strand(s.as_str().into())), + _ => Err(type_error(kind, val)), + }, + Kind::Uuid => match val { + GqlValue::String(s) => match s.parse::() { + Ok(u) => Ok(SqlValue::Uuid(u.into())), + Err(_) => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + Kind::Record(ref ts) => match val { + GqlValue::String(s) => match syn::thing(s) { + Ok(t) => match ts.contains(&t.tb.as_str().into()) { + true => Ok(SqlValue::Thing(t)), + false => Err(type_error(kind, val)), + }, + Err(_) => Err(type_error(kind, val)), + }, + _ => Err(type_error(kind, val)), + }, + // TODO: add geometry + Kind::Geometry(_) => Err(resolver_error("Geometry is not yet supported")), + Kind::Option(k) => match val { + GqlValue::Null => Ok(SqlValue::None), + v => gql_to_sql_kind(v, *k), + }, + // TODO: handle nested eithers + Kind::Either(ref ks) => { + use Kind::*; + + match val { + GqlValue::Null => { + if ks.iter().any(|k| matches!(k, Kind::Option(_))) { + Ok(SqlValue::None) + } else if ks.contains(&Kind::Null) { + Ok(SqlValue::Null) + } else { + Err(type_error(kind, val)) + } + } + num @ GqlValue::Number(_) => { + either_try_kind!(ks, num, AllNumbers); + Err(type_error(kind, val)) + } + string @ GqlValue::String(_) => { + either_try_kinds!( + ks, string, Datetime, Duration, AllNumbers, Object, Uuid, Array, Any, + String + ); + Err(type_error(kind, val)) + } + bool @ GqlValue::Boolean(_) => { + either_try_kind!(ks, bool, Kind::Bool); + Err(type_error(kind, val)) + } + 
+			GqlValue::Binary(_) => {
+				Err(resolver_error("binary input for Either is not yet supported"))
+			}
+			GqlValue::Enum(n) => {
+				either_try_kind!(ks, &GqlValue::String(n.to_string()), Kind::String);
+				Err(type_error(kind, val))
+			}
+			list @ GqlValue::List(_) => {
+				either_try_kind!(ks, list, Kind::Array);
+				Err(type_error(kind, val))
+			}
+			// TODO: consider geometry and other types that can come from objects
+			obj @ GqlValue::Object(_) => {
+				either_try_kind!(ks, obj, Object);
+				Err(type_error(kind, val))
+			}
+			}
+		}
+		Kind::Set(_k, _n) => Err(resolver_error("Sets are not yet supported")),
+		Kind::Array(ref k, n) => match val {
+			GqlValue::List(l) => {
+				let list_iter = l.iter().map(|v| gql_to_sql_kind(v, *k.to_owned()));
+				let list: Result<Vec<SqlValue>, GqlError> = list_iter.collect();
+
+				match (list, n) {
+					(Ok(l), Some(n)) => {
+						if l.len() as u64 == n {
+							Ok(l.into())
+						} else {
+							Err(type_error(kind, val))
+						}
+					}
+					(Ok(l), None) => Ok(l.into()),
+					(Err(e), _) => Err(e),
+				}
+			}
+			_ => Err(type_error(kind, val)),
+		},
+		Kind::Function(_, _) => Err(resolver_error("Sets are not yet supported")),
+	}
+}
diff --git a/core/src/gql/utils.rs b/core/src/gql/utils.rs
new file mode 100644
index 00000000..09e28941
--- /dev/null
+++ b/core/src/gql/utils.rs
@@ -0,0 +1,60 @@
+use async_graphql::{dynamic::indexmap::IndexMap, Name, Value as GqlValue};
+pub(crate) trait GqlValueUtils {
+	fn as_i64(&self) -> Option<i64>;
+	fn as_string(&self) -> Option<String>;
+	fn as_list(&self) -> Option<&Vec<GqlValue>>;
+	fn as_object(&self) -> Option<&IndexMap<Name, GqlValue>>;
+}
+
+impl GqlValueUtils for GqlValue {
+	fn as_i64(&self) -> Option<i64> {
+		if let GqlValue::Number(n) = self {
+			n.as_i64()
+		} else {
+			None
+		}
+	}
+
+	fn as_string(&self) -> Option<String> {
+		if let GqlValue::String(s) = self {
+			Some(s.to_owned())
+		} else {
+			None
+		}
+	}
+	fn as_list(&self) -> Option<&Vec<GqlValue>> {
+		if let GqlValue::List(a) = self {
+			Some(a)
+		} else {
+			None
+		}
+	}
+	fn as_object(&self) -> Option<&IndexMap<Name, GqlValue>> {
+		if let GqlValue::Object(o) = self {
+			Some(o)
+		} else {
+			None
+		}
+	}
+}
+
+use crate::dbs::Session;
+use crate::kvs::Datastore;
+use crate::kvs::LockType;
+use crate::kvs::TransactionType;
+use crate::sql::{Thing, Value as SqlValue};
+
+use super::error::GqlError;
+
+pub async fn get_record(
+	kvs: &Datastore,
+	sess: &Session,
+	rid: &Thing,
+) -> Result<SqlValue, GqlError> {
+	let tx = kvs.transaction(TransactionType::Read, LockType::Optimistic).await?;
+	Ok(tx
+		.get_record(sess.ns.as_ref().unwrap(), sess.db.as_ref().unwrap(), &rid.tb, &rid.id)
+		.await?
+ .as_ref() + .to_owned()) +} diff --git a/core/src/lib.rs b/core/src/lib.rs index 44372d1b0..cf2ceb4e 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -24,6 +24,9 @@ pub mod env; pub mod err; #[doc(hidden)] pub mod fflags; +#[cfg(all(not(target_arch = "wasm32"), surrealdb_unstable))] +#[doc(hidden)] +pub mod gql; #[doc(hidden)] pub mod iam; #[doc(hidden)] diff --git a/core/src/rpc/basic_context.rs b/core/src/rpc/basic_context.rs index bb0ac2d1..c2960a91 100644 --- a/core/src/rpc/basic_context.rs +++ b/core/src/rpc/basic_context.rs @@ -1,13 +1,6 @@ use std::collections::BTreeMap; -use crate::{ - dbs::Session, - kvs::Datastore, - rpc::RpcContext, - sql::{Array, Value}, -}; - -use super::{args::Take, Data, RpcError}; +use crate::{dbs::Session, kvs::Datastore, rpc::RpcContext, sql::Value}; #[non_exhaustive] pub struct BasicRpcContext<'a> { @@ -57,39 +50,4 @@ impl RpcContext for BasicRpcContext<'_> { fn version_data(&self) -> impl Into { Value::Strand(self.version_string.clone().into()) } - - // reimplimentaions: - - async fn signup(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Object(v)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - let out: Result = - crate::iam::signup::signup(self.kvs, &mut self.session, v) - .await - .map(Into::into) - .map_err(Into::into); - - out - } - - async fn signin(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Object(v)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - let out: Result = - crate::iam::signin::signin(self.kvs, &mut self.session, v) - .await - .map(Into::into) - .map_err(Into::into); - out - } - - async fn authenticate(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Strand(token)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - crate::iam::verify::token(self.kvs, &mut self.session, &token.0).await?; - Ok(Value::None) - } } diff --git a/core/src/rpc/method.rs b/core/src/rpc/method.rs index 52c558a4..a7263d47 100644 --- a/core/src/rpc/method.rs +++ b/core/src/rpc/method.rs @@ -24,6 +24,7 @@ pub enum Method { Query, Relate, Run, + GraphQL, } impl Method { @@ -55,6 +56,7 @@ impl Method { "query" => Self::Query, "relate" => Self::Relate, "run" => Self::Run, + "graphql" => Self::GraphQL, _ => Self::Unknown, } } @@ -87,6 +89,7 @@ impl Method { Self::Query => "query", Self::Relate => "relate", Self::Run => "run", + Self::GraphQL => "graphql", } } } @@ -111,7 +114,8 @@ impl Method { | Method::Merge | Method::Patch | Method::Delete | Method::Version | Method::Query | Method::Relate - | Method::Run | Method::Unknown + | Method::Run | Method::GraphQL + | Method::Unknown ) } } diff --git a/core/src/rpc/rpc_context.rs b/core/src/rpc/rpc_context.rs index 435099fb..e1b49c83 100644 --- a/core/src/rpc/rpc_context.rs +++ b/core/src/rpc/rpc_context.rs @@ -1,7 +1,11 @@ -use std::collections::BTreeMap; +use std::{collections::BTreeMap, mem}; +#[cfg(all(not(target_arch = "wasm32"), surrealdb_unstable))] +use async_graphql::BatchRequest; use uuid::Uuid; +#[cfg(all(not(target_arch = "wasm32"), surrealdb_unstable))] +use crate::gql::SchemaCache; use crate::{ dbs::{QueryType, Response, Session}, kvs::Datastore, @@ -22,10 +26,18 @@ pub trait RpcContext { const LQ_SUPPORT: bool = false; fn handle_live(&self, _lqid: &Uuid) -> impl std::future::Future + Send { - async { unreachable!() } + async { unimplemented!("handle functions must be redefined if LQ_SUPPORT = true") } } fn handle_kill(&self, _lqid: &Uuid) -> impl std::future::Future + Send { 
- async { unreachable!() } + async { unimplemented!("handle functions must be redefined if LQ_SUPPORT = true") } + } + + #[cfg(all(not(target_arch = "wasm32"), surrealdb_unstable))] + const GQL_SUPPORT: bool = false; + + #[cfg(all(not(target_arch = "wasm32"), surrealdb_unstable))] + fn graphql_schema_cache(&self) -> &SchemaCache { + unimplemented!("graphql_schema_cache must be implemented if GQL_SUPPORT = true") } async fn execute(&mut self, method: Method, params: Array) -> Result { @@ -55,6 +67,7 @@ pub trait RpcContext { Method::Query => self.query(params).await.map(Into::into).map_err(Into::into), Method::Relate => self.relate(params).await.map(Into::into).map_err(Into::into), Method::Run => self.run(params).await.map(Into::into).map_err(Into::into), + Method::GraphQL => self.graphql(params).await.map(Into::into).map_err(Into::into), Method::Unknown => Err(RpcError::MethodNotFound), } } @@ -75,6 +88,7 @@ pub trait RpcContext { Method::Query => self.query(params).await.map(Into::into).map_err(Into::into), Method::Relate => self.relate(params).await.map(Into::into).map_err(Into::into), Method::Run => self.run(params).await.map(Into::into).map_err(Into::into), + Method::GraphQL => self.graphql(params).await.map(Into::into).map_err(Into::into), Method::Unknown => Err(RpcError::MethodNotFound), _ => Err(RpcError::MethodNotFound), } @@ -116,12 +130,14 @@ pub trait RpcContext { let Ok(Value::Object(v)) = params.needs_one() else { return Err(RpcError::InvalidParams); }; - let mut tmp_session = self.session().clone(); + let mut tmp_session = mem::take(self.session_mut()); + let out: Result = crate::iam::signup::signup(self.kvs(), &mut tmp_session, v) .await .map(Into::into) .map_err(Into::into); + *self.session_mut() = tmp_session; out @@ -131,7 +147,7 @@ pub trait RpcContext { let Ok(Value::Object(v)) = params.needs_one() else { return Err(RpcError::InvalidParams); }; - let mut tmp_session = self.session().clone(); + let mut tmp_session = mem::take(self.session_mut()); let out: Result = crate::iam::signin::signin(self.kvs(), &mut tmp_session, v) .await @@ -150,7 +166,7 @@ pub trait RpcContext { let Ok(Value::Strand(token)) = params.needs_one() else { return Err(RpcError::InvalidParams); }; - let mut tmp_session = self.session().clone(); + let mut tmp_session = mem::take(self.session_mut()); crate::iam::verify::token(self.kvs(), &mut tmp_session, &token.0).await?; *self.session_mut() = tmp_session; Ok(Value::None) @@ -603,6 +619,119 @@ pub trait RpcContext { res.remove(0).result.map_err(Into::into) } + // ------------------------------ + // Methods for querying with GraphQL + // ------------------------------ + + #[cfg(any(target_arch = "wasm32", not(surrealdb_unstable)))] + async fn graphql(&self, _params: Array) -> Result, RpcError> { + Result::::Err(RpcError::MethodNotFound) + } + + #[cfg(all(not(target_arch = "wasm32"), surrealdb_unstable))] + async fn graphql(&self, params: Array) -> Result, RpcError> { + if !*GRAPHQL_ENABLE { + return Err(RpcError::BadGQLConfig); + } + + use serde::Serialize; + + use crate::{cnf::GRAPHQL_ENABLE, gql}; + + if !Self::GQL_SUPPORT { + return Err(RpcError::BadGQLConfig); + } + + let Ok((query, options)) = params.needs_one_or_two() else { + return Err(RpcError::InvalidParams); + }; + + enum GraphQLFormat { + Json, + Cbor, + } + + let mut pretty = false; + let mut format = GraphQLFormat::Json; + match options { + Value::Object(o) => { + for (k, v) in o { + match (k.as_str(), v) { + ("pretty", Value::Bool(b)) => pretty = b, + ("format", Value::Strand(s)) => match 
s.as_str() { + "json" => format = GraphQLFormat::Json, + "cbor" => format = GraphQLFormat::Cbor, + _ => return Err(RpcError::InvalidParams), + }, + _ => return Err(RpcError::InvalidParams), + } + } + } + _ => return Err(RpcError::InvalidParams), + } + + let req = match query { + Value::Strand(s) => match format { + GraphQLFormat::Json => { + let tmp: BatchRequest = + serde_json::from_str(s.as_str()).map_err(|_| RpcError::ParseError)?; + tmp.into_single().map_err(|_| RpcError::ParseError)? + } + GraphQLFormat::Cbor => { + return Err(RpcError::Thrown("Cbor is not yet supported".to_string())) + } + }, + Value::Object(mut o) => { + let mut tmp = match o.remove("query") { + Some(Value::Strand(s)) => async_graphql::Request::new(s), + _ => return Err(RpcError::InvalidParams), + }; + + match o.remove("variables").or(o.remove("vars")) { + Some(obj @ Value::Object(_)) => { + let gql_vars = gql::schema::sql_value_to_gql_value(obj) + .map_err(|_| RpcError::InvalidRequest)?; + + tmp = tmp.variables(async_graphql::Variables::from_value(gql_vars)); + } + Some(_) => return Err(RpcError::InvalidParams), + None => {} + } + + match o.remove("operationName").or(o.remove("operation")) { + Some(Value::Strand(s)) => tmp = tmp.operation_name(s), + Some(_) => return Err(RpcError::InvalidParams), + None => {} + } + + tmp + } + _ => return Err(RpcError::InvalidParams), + }; + + let schema = self + .graphql_schema_cache() + .get_schema(self.session()) + .await + .map_err(|e| RpcError::Thrown(e.to_string()))?; + + let res = schema.execute(req).await; + + let out = match pretty { + true => { + let mut buf = Vec::new(); + let formatter = serde_json::ser::PrettyFormatter::with_indent(b" "); + let mut ser = serde_json::Serializer::with_formatter(&mut buf, formatter); + + res.serialize(&mut ser).ok().and_then(|_| String::from_utf8(buf).ok()) + } + false => serde_json::to_string(&res).ok(), + } + .ok_or(RpcError::Thrown("Serialization Error".to_string()))?; + + Ok(Value::Strand(out.into())) + } + // ------------------------------ // Private methods // ------------------------------ diff --git a/core/src/rpc/rpc_error.rs b/core/src/rpc/rpc_error.rs index 9c24e6a5..07ff6e75 100644 --- a/core/src/rpc/rpc_error.rs +++ b/core/src/rpc/rpc_error.rs @@ -17,8 +17,10 @@ pub enum RpcError { InternalError(err::Error), #[error("Live Query was made, but is not supported")] LqNotSuported, - #[error("RT is enabled for the session, but LQ is not supported with the context")] + #[error("RT is enabled for the session, but LQ is not supported by the context")] BadLQConfig, + #[error("A GraphQL request was made, but GraphQL is not supported by the context")] + BadGQLConfig, #[error("Error: {0}")] Thrown(String), } diff --git a/core/src/sql/query.rs b/core/src/sql/query.rs index 9a60d5aa..28e5a10a 100644 --- a/core/src/sql/query.rs +++ b/core/src/sql/query.rs @@ -36,6 +36,12 @@ impl From for Query { } } +impl From> for Query { + fn from(s: Vec) -> Self { + Query(Statements(s)) + } +} + impl Deref for Query { type Target = Vec; fn deref(&self) -> &Self::Target { diff --git a/core/src/sql/value/value.rs b/core/src/sql/value/value.rs index 9e7707b0..e5f6ce95 100644 --- a/core/src/sql/value/value.rs +++ b/core/src/sql/value/value.rs @@ -38,6 +38,15 @@ pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Value"; #[non_exhaustive] pub struct Values(pub Vec); +impl From for Values +where + V: Into>, +{ + fn from(value: V) -> Self { + Self(value.into()) + } +} + impl Deref for Values { type Target = Vec; fn deref(&self) -> &Self::Target { diff 
--git a/deny.toml b/deny.toml index 844fae91..f25c4fb0 100644 --- a/deny.toml +++ b/deny.toml @@ -85,6 +85,7 @@ allow = [ "Apache-2.0", "BSD-2-Clause", "BSD-3-Clause", + "BSD-2-Clause", "Unlicense", ] # The confidence threshold for detecting a license from license text. diff --git a/lib/Cargo.toml b/lib/Cargo.toml index 59d7560b..4e34e300 100644 --- a/lib/Cargo.toml +++ b/lib/Cargo.toml @@ -108,6 +108,7 @@ trice = { version = "0.4.0", optional = true } url = "2.5.0" reblessive = { version = "0.4.0", features = ["tree"] } serde-content = "0.1.0" +arrayvec = "=0.7.4" [dev-dependencies] ciborium = "0.2.1" @@ -155,6 +156,9 @@ tokio = { version = "1.34.0", default-features = false, features = [ tokio-tungstenite = { version = "0.23.1", optional = true, features = ["url"] } uuid = { version = "1.6.1", features = ["serde", "v4", "v7"] } +[lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(surrealdb_unstable)'] } + [lib] name = "surrealdb" # Needed for the nightly crate as we will be renaming it bench = false diff --git a/src/cnf/mod.rs b/src/cnf/mod.rs index ee6605ca..553d4d1c 100644 --- a/src/cnf/mod.rs +++ b/src/cnf/mod.rs @@ -66,3 +66,6 @@ pub static PKG_VERSION: Lazy = Lazy::new(|| match option_env!("SURREAL_B } _ => env!("CARGO_PKG_VERSION").to_owned(), }); + +pub static GRAPHQL_ENABLE: Lazy = + lazy_env_parse!("SURREAL_EXPERIMENTAL_GRAPHQL", bool, false); diff --git a/src/gql/mod.rs b/src/gql/mod.rs new file mode 100644 index 00000000..88269777 --- /dev/null +++ b/src/gql/mod.rs @@ -0,0 +1,134 @@ +use std::{ + convert::Infallible, + sync::Arc, + task::{Context, Poll}, + time::Duration, +}; + +use async_graphql::{ + http::{create_multipart_mixed_stream, is_accept_multipart_mixed}, + Executor, ParseRequestError, +}; +use async_graphql_axum::{ + rejection::GraphQLRejection, GraphQLBatchRequest, GraphQLRequest, GraphQLResponse, +}; +use axum::{ + body::{Body, HttpBody}, + extract::FromRequest, + http::{Request as HttpRequest, Response as HttpResponse}, + response::IntoResponse, + BoxError, +}; +use bytes::Bytes; +use futures_util::{future::BoxFuture, StreamExt}; +use surrealdb::dbs::Session; +use surrealdb::gql::cache::{Invalidator, SchemaCache}; +use surrealdb::gql::error::resolver_error; +use surrealdb::kvs::Datastore; +use tower_service::Service; + +/// A GraphQL service. +#[derive(Clone)] +pub struct GraphQL { + cache: SchemaCache, + // datastore: Arc, +} + +impl GraphQL { + /// Create a GraphQL handler. 
+ pub fn new(invalidator: I, datastore: Arc) -> Self { + let _ = invalidator; + GraphQL { + cache: SchemaCache::new(datastore), + // datastore, + } + } +} + +impl Service> for GraphQL +where + B: HttpBody + Send + 'static, + B::Data: Into, + B::Error: Into, + I: Invalidator, +{ + type Response = HttpResponse; + type Error = Infallible; + type Future = BoxFuture<'static, Result>; + + fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, req: HttpRequest) -> Self::Future { + let cache = self.cache.clone(); + let req = req.map(Body::new); + + Box::pin(async move { + let session = + req.extensions().get::().expect("session extractor should always succeed"); + + let Some(_ns) = session.ns.as_ref() else { + return Ok(to_rejection(resolver_error("No namespace specified")).into_response()); + }; + let Some(_db) = session.db.as_ref() else { + return Ok(to_rejection(resolver_error("No database specified")).into_response()); + }; + + #[cfg(debug_assertions)] + let state = req.extensions() + .get::() + .expect("state extractor should always succeed"); + debug_assert!(Arc::ptr_eq(&state.datastore, &cache.datastore)); + + let executor = match cache.get_schema(session).await { + Ok(e) => e, + Err(e) => { + info!(?e, "error generating schema"); + return Ok(to_rejection(e).into_response()); + } + }; + + let is_accept_multipart_mixed = req + .headers() + .get("accept") + .and_then(|value| value.to_str().ok()) + .map(is_accept_multipart_mixed) + .unwrap_or_default(); + + if is_accept_multipart_mixed { + let req = match GraphQLRequest::::from_request(req, &()).await { + Ok(req) => req, + Err(err) => return Ok(err.into_response()), + }; + + let stream = Executor::execute_stream(&executor, req.0, None); + let body = Body::from_stream( + create_multipart_mixed_stream( + stream, + tokio_stream::wrappers::IntervalStream::new(tokio::time::interval( + Duration::from_secs(30), + )) + .map(|_| ()), + ) + .map(Ok::<_, std::io::Error>), + ); + Ok(HttpResponse::builder() + .header("content-type", "multipart/mixed; boundary=graphql") + .body(body) + .expect("BUG: invalid response")) + } else { + let req = + match GraphQLBatchRequest::::from_request(req, &()).await { + Ok(req) => req, + Err(err) => return Ok(err.into_response()), + }; + Ok(GraphQLResponse(executor.execute_batch(req.0).await).into_response()) + } + }) + } +} + +fn to_rejection(err: impl std::error::Error + Send + Sync + 'static) -> GraphQLRejection { + GraphQLRejection(ParseRequestError::InvalidRequest(Box::new(err))) +} diff --git a/src/main.rs b/src/main.rs index a800838b..695a6804 100644 --- a/src/main.rs +++ b/src/main.rs @@ -21,6 +21,8 @@ mod cnf; mod dbs; mod env; mod err; +#[cfg(surrealdb_unstable)] +mod gql; mod mem; mod net; mod rpc; diff --git a/src/net/gql.rs b/src/net/gql.rs new file mode 100644 index 00000000..54fb1496 --- /dev/null +++ b/src/net/gql.rs @@ -0,0 +1,17 @@ +use axum::Router; +use std::sync::Arc; + +use axum::routing::post_service; + +use surrealdb::gql::cache::Pessimistic; +use surrealdb::kvs::Datastore; + +use crate::gql::GraphQL; + +pub(super) async fn router(ds: Arc) -> Router +where + S: Clone + Send + Sync + 'static, +{ + let service = GraphQL::new(Pessimistic, ds); + Router::new().route("/graphql", post_service(service)) +} diff --git a/src/net/mod.rs b/src/net/mod.rs index 8667f487..73d29a9e 100644 --- a/src/net/mod.rs +++ b/src/net/mod.rs @@ -1,6 +1,8 @@ mod auth; pub mod client_ip; mod export; +#[cfg(surrealdb_unstable)] +mod gql; pub(crate) mod headers; mod health; 
mod import; @@ -21,7 +23,7 @@ mod version; mod ml; use crate::cli::CF; -use crate::cnf; +use crate::cnf::{self, GRAPHQL_ENABLE}; use crate::err::Error; use crate::net::signals::graceful_shutdown; use crate::rpc::{notifications, RpcState}; @@ -59,9 +61,9 @@ const LOG: &str = "surrealdb::net"; /// AppState is used to share data between routes. /// #[derive(Clone)] -struct AppState { - client_ip: client_ip::ClientIp, - datastore: Arc, +pub struct AppState { + pub client_ip: client_ip::ClientIp, + pub datastore: Arc, } pub async fn init(ds: Arc, ct: CancellationToken) -> Result<(), Error> { @@ -173,6 +175,21 @@ pub async fn init(ds: Arc, ct: CancellationToken) -> Result<(), Error .merge(signup::router()) .merge(key::router()); + let axum_app = if *GRAPHQL_ENABLE { + #[cfg(surrealdb_unstable)] + { + warn!("❌🔒IMPORTANT: GraphQL is a pre-release feature with known security flaws. This is not recommended for production use.🔒❌"); + axum_app.merge(gql::router(ds.clone()).await) + } + #[cfg(not(surrealdb_unstable))] + { + warn!("GraphQL is a pre-release feature and only available on builds with the surrealdb_unstable flag"); + axum_app + } + } else { + axum_app + }; + #[cfg(feature = "ml")] let axum_app = axum_app.merge(ml::router()); diff --git a/src/rpc/connection.rs b/src/rpc/connection.rs index addbc856..5b00a715 100644 --- a/src/rpc/connection.rs +++ b/src/rpc/connection.rs @@ -17,12 +17,13 @@ use std::collections::BTreeMap; use std::sync::Arc; use surrealdb::channel::{self, Receiver, Sender}; use surrealdb::dbs::Session; +#[cfg(surrealdb_unstable)] +use surrealdb::gql::{Pessimistic, SchemaCache}; use surrealdb::kvs::Datastore; -use surrealdb::rpc::args::Take; use surrealdb::rpc::format::Format; use surrealdb::rpc::method::Method; +use surrealdb::rpc::Data; use surrealdb::rpc::RpcContext; -use surrealdb::rpc::{Data, RpcError}; use surrealdb::sql::Array; use surrealdb::sql::Value; use tokio::sync::{RwLock, Semaphore}; @@ -44,6 +45,8 @@ pub struct Connection { pub(crate) channels: (Sender, Receiver), pub(crate) state: Arc, pub(crate) datastore: Arc, + #[cfg(surrealdb_unstable)] + pub(crate) gql_schema: SchemaCache, } impl Connection { @@ -67,6 +70,8 @@ impl Connection { canceller: CancellationToken::new(), channels: channel::bounded(*WEBSOCKET_MAX_CONCURRENT_REQUESTS), state, + #[cfg(surrealdb_unstable)] + gql_schema: SchemaCache::new(datastore.clone()), datastore, })) } @@ -407,38 +412,10 @@ impl RpcContext for Connection { } } - // reimplimentaions - - async fn signup(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Object(v)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - let out: Result = - surrealdb::iam::signup::signup(&self.datastore, &mut self.session, v) - .await - .map(Into::into) - .map_err(Into::into); - - out - } - - async fn signin(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Object(v)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - let out: Result = - surrealdb::iam::signin::signin(&self.datastore, &mut self.session, v) - .await - .map(Into::into) - .map_err(Into::into); - out - } - - async fn authenticate(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Strand(token)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - surrealdb::iam::verify::token(&self.datastore, &mut self.session, &token.0).await?; - Ok(Value::None) + #[cfg(surrealdb_unstable)] + const GQL_SUPPORT: bool = true; + #[cfg(surrealdb_unstable)] + fn graphql_schema_cache(&self) -> 
&SchemaCache { + &self.gql_schema } } diff --git a/src/rpc/post_context.rs b/src/rpc/post_context.rs index 1cb62b3f..6bc19295 100644 --- a/src/rpc/post_context.rs +++ b/src/rpc/post_context.rs @@ -1,34 +1,40 @@ use std::collections::BTreeMap; +use std::sync::Arc; use crate::cnf::{PKG_NAME, PKG_VERSION}; use surrealdb::dbs::Session; +#[cfg(surrealdb_unstable)] +use surrealdb::gql::{Pessimistic, SchemaCache}; use surrealdb::kvs::Datastore; -use surrealdb::rpc::args::Take; use surrealdb::rpc::Data; use surrealdb::rpc::RpcContext; use surrealdb::rpc::RpcError; use surrealdb::sql::Array; use surrealdb::sql::Value; -pub struct PostRpcContext<'a> { - pub kvs: &'a Datastore, +pub struct PostRpcContext { + pub kvs: Arc, pub session: Session, pub vars: BTreeMap, + #[cfg(surrealdb_unstable)] + pub gql_schema: SchemaCache, } -impl<'a> PostRpcContext<'a> { - pub fn new(kvs: &'a Datastore, session: Session, vars: BTreeMap) -> Self { +impl PostRpcContext { + pub fn new(kvs: &Arc, session: Session, vars: BTreeMap) -> Self { Self { - kvs, + kvs: kvs.clone(), session, vars, + #[cfg(surrealdb_unstable)] + gql_schema: SchemaCache::new(kvs.clone()), } } } -impl RpcContext for PostRpcContext<'_> { +impl RpcContext for PostRpcContext { fn kvs(&self) -> &Datastore { - self.kvs + &self.kvs } fn session(&self) -> &Session { @@ -52,6 +58,13 @@ impl RpcContext for PostRpcContext<'_> { val } + #[cfg(surrealdb_unstable)] + const GQL_SUPPORT: bool = true; + #[cfg(surrealdb_unstable)] + fn graphql_schema_cache(&self) -> &SchemaCache { + &self.gql_schema + } + // disable: // doesn't do anything so shouldn't be supported @@ -65,39 +78,4 @@ impl RpcContext for PostRpcContext<'_> { let out: Result = Err(RpcError::MethodNotFound); out } - - // reimplimentaions: - - async fn signup(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Object(v)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - let out: Result = - surrealdb::iam::signup::signup(self.kvs, &mut self.session, v) - .await - .map(Into::into) - .map_err(Into::into); - - out - } - - async fn signin(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Object(v)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - let out: Result = - surrealdb::iam::signin::signin(self.kvs, &mut self.session, v) - .await - .map(Into::into) - .map_err(Into::into); - out - } - - async fn authenticate(&mut self, params: Array) -> Result, RpcError> { - let Ok(Value::Strand(token)) = params.needs_one() else { - return Err(RpcError::InvalidParams); - }; - surrealdb::iam::verify::token(self.kvs, &mut self.session, &token.0).await?; - Ok(Value::None) - } } diff --git a/supply-chain/audits.toml b/supply-chain/audits.toml index 9132f2c0..e332a53f 100644 --- a/supply-chain/audits.toml +++ b/supply-chain/audits.toml @@ -22,6 +22,18 @@ user-id = 3987 # Rushmore Mushambi (rushmorem) start = "2021-02-25" end = "2025-01-24" +[[trusted.anyhow]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-10-05" +end = "2025-08-06" + +[[trusted.async-trait]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-07-23" +end = "2025-08-06" + [[trusted.dmp]] criteria = "safe-to-deploy" user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) @@ -118,12 +130,36 @@ user-id = 359 # Sean McArthur (seanmonstar) start = "2019-06-10" end = "2025-08-06" +[[trusted.paste]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-03-19" +end = "2025-08-06" + 
+[[trusted.prettyplease]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2022-01-04" +end = "2025-08-06" + [[trusted.psl-types]] criteria = "safe-to-deploy" user-id = 3987 # Rushmore Mushambi (rushmorem) start = "2021-03-12" end = "2025-01-24" +[[trusted.ref-cast]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-05-05" +end = "2025-08-06" + +[[trusted.ref-cast-impl]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-05-05" +end = "2025-08-06" + [[trusted.reqwest]] criteria = "safe-to-deploy" user-id = 359 # Sean McArthur (seanmonstar) @@ -142,6 +178,42 @@ user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) start = "2023-08-09" end = "2025-01-24" +[[trusted.ryu]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-05-02" +end = "2025-08-06" + +[[trusted.serde]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-03-01" +end = "2025-08-18" + +[[trusted.serde_bytes]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-02-25" +end = "2025-08-06" + +[[trusted.serde_derive]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-03-01" +end = "2025-08-18" + +[[trusted.serde_json]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-02-28" +end = "2025-08-06" + +[[trusted.serde_path_to_error]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-08-20" +end = "2025-08-06" + [[trusted.storekey]] criteria = "safe-to-deploy" user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) @@ -154,6 +226,12 @@ user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) start = "2022-07-19" end = "2025-01-24" +[[trusted.surrealdb-async-graphql-axum]] +criteria = "safe-to-deploy" +user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) +start = "2024-08-07" +end = "2025-08-07" + [[trusted.surrealdb-core]] criteria = "safe-to-deploy" user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) @@ -166,6 +244,12 @@ user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) start = "2022-02-26" end = "2025-01-24" +[[trusted.surrealdb-jsonwebtoken]] +criteria = "safe-to-deploy" +user-id = 3987 # Rushmore Mushambi (rushmorem) +start = "2023-08-29" +end = "2025-01-24" + [[trusted.surrealdb-tikv-client]] criteria = "safe-to-deploy" user-id = 217605 # Yusuke Kuoka (mumoshu) @@ -184,6 +268,12 @@ user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) start = "2023-10-31" end = "2025-01-24" +[[trusted.syn]] +criteria = "safe-to-deploy" +user-id = 3618 # David Tolnay (dtolnay) +start = "2019-03-01" +end = "2025-08-06" + [[trusted.trice]] criteria = "safe-to-deploy" user-id = 145457 # Tobie Morgan Hitchcock (tobiemh) diff --git a/supply-chain/config.toml b/supply-chain/config.toml index d71c8dbf..8a0ef2dc 100644 --- a/supply-chain/config.toml +++ b/supply-chain/config.toml @@ -31,6 +31,10 @@ audit-as-crates-io = true [policy.surrealdb-core] audit-as-crates-io = true +[[exemptions.Inflector]] +version = "0.11.4" +criteria = "safe-to-deploy" + [[exemptions.actix-codec]] version = "0.5.2" criteria = "safe-to-deploy" @@ -131,10 +135,6 @@ criteria = "safe-to-deploy" version = "0.3.2" criteria = "safe-to-deploy" -[[exemptions.anyhow]] -version = "1.0.81" -criteria = "safe-to-deploy" - [[exemptions.approx]] version = "0.4.0" criteria = "safe-to-deploy" @@ -159,6 +159,10 @@ criteria = "safe-to-deploy" version = "3.0.0" criteria = "safe-to-deploy" 
+[[exemptions.ascii_utils]] +version = "0.9.3" +criteria = "safe-to-deploy" + [[exemptions.assert_fs]] version = "1.1.1" criteria = "safe-to-run" @@ -179,6 +183,22 @@ criteria = "safe-to-deploy" version = "1.9.1" criteria = "safe-to-deploy" +[[exemptions.async-graphql]] +version = "7.0.7" +criteria = "safe-to-deploy" + +[[exemptions.async-graphql-derive]] +version = "7.0.7" +criteria = "safe-to-deploy" + +[[exemptions.async-graphql-parser]] +version = "7.0.7" +criteria = "safe-to-deploy" + +[[exemptions.async-graphql-value]] +version = "7.0.7" +criteria = "safe-to-deploy" + [[exemptions.async-lock]] version = "3.3.0" criteria = "safe-to-deploy" @@ -195,10 +215,6 @@ criteria = "safe-to-deploy" version = "4.7.0" criteria = "safe-to-deploy" -[[exemptions.async-trait]] -version = "0.1.79" -criteria = "safe-to-deploy" - [[exemptions.async_io_stream]] version = "0.3.3" criteria = "safe-to-deploy" @@ -328,7 +344,7 @@ version = "1.5.0" criteria = "safe-to-deploy" [[exemptions.bytes]] -version = "1.6.0" +version = "1.7.1" criteria = "safe-to-deploy" [[exemptions.bytestring]] @@ -464,15 +480,15 @@ version = "0.8.19" criteria = "safe-to-deploy" [[exemptions.darling]] -version = "0.20.8" +version = "0.20.9" criteria = "safe-to-deploy" [[exemptions.darling_core]] -version = "0.20.8" +version = "0.20.9" criteria = "safe-to-deploy" [[exemptions.darling_macro]] -version = "0.20.8" +version = "0.20.9" criteria = "safe-to-deploy" [[exemptions.dashmap]] @@ -575,6 +591,10 @@ criteria = "safe-to-deploy" version = "0.4.0" criteria = "safe-to-deploy" +[[exemptions.fast_chemail]] +version = "0.9.6" +criteria = "safe-to-deploy" + [[exemptions.figment]] version = "0.10.15" criteria = "safe-to-deploy" @@ -700,7 +720,11 @@ version = "0.9.1" criteria = "safe-to-run" [[exemptions.half]] -version = "2.4.0" +version = "2.3.1" +criteria = "safe-to-deploy" + +[[exemptions.handlebars]] +version = "5.1.2" criteria = "safe-to-deploy" [[exemptions.hash32]] @@ -955,6 +979,10 @@ criteria = "safe-to-deploy" version = "2.1.0" criteria = "safe-to-deploy" +[[exemptions.multer]] +version = "3.1.0" +criteria = "safe-to-deploy" + [[exemptions.multimap]] version = "0.8.3" criteria = "safe-to-deploy" @@ -1059,10 +1087,6 @@ criteria = "safe-to-deploy" version = "0.5.0" criteria = "safe-to-deploy" -[[exemptions.paste]] -version = "1.0.14" -criteria = "safe-to-deploy" - [[exemptions.path-clean]] version = "1.0.1" criteria = "safe-to-deploy" @@ -1083,6 +1107,22 @@ criteria = "safe-to-deploy" version = "3.0.3" criteria = "safe-to-deploy" +[[exemptions.pest]] +version = "2.7.11" +criteria = "safe-to-deploy" + +[[exemptions.pest_derive]] +version = "2.7.11" +criteria = "safe-to-deploy" + +[[exemptions.pest_generator]] +version = "2.7.11" +criteria = "safe-to-deploy" + +[[exemptions.pest_meta]] +version = "2.7.11" +criteria = "safe-to-deploy" + [[exemptions.petgraph]] version = "0.6.4" criteria = "safe-to-deploy" @@ -1147,10 +1187,6 @@ criteria = "safe-to-deploy" version = "3.1.0" criteria = "safe-to-run" -[[exemptions.prettyplease]] -version = "0.2.17" -criteria = "safe-to-deploy" - [[exemptions.proc-macro-crate]] version = "1.3.1" criteria = "safe-to-deploy" @@ -1279,14 +1315,6 @@ criteria = "safe-to-deploy" version = "0.4.5" criteria = "safe-to-deploy" -[[exemptions.ref-cast]] -version = "1.0.22" -criteria = "safe-to-deploy" - -[[exemptions.ref-cast-impl]] -version = "1.0.22" -criteria = "safe-to-deploy" - [[exemptions.regex]] version = "1.10.4" criteria = "safe-to-deploy" @@ -1455,10 +1483,6 @@ criteria = "safe-to-deploy" version = "0.9.0" 
criteria = "safe-to-deploy" -[[exemptions.ryu]] -version = "1.0.17" -criteria = "safe-to-deploy" - [[exemptions.salsa20]] version = "0.10.2" criteria = "safe-to-deploy" @@ -1507,22 +1531,10 @@ criteria = "safe-to-deploy" version = "0.1.0" criteria = "safe-to-deploy" -[[exemptions.serde_bytes]] -version = "0.11.15" -criteria = "safe-to-deploy" - [[exemptions.serde_html_form]] version = "0.2.6" criteria = "safe-to-deploy" -[[exemptions.serde_json]] -version = "1.0.115" -criteria = "safe-to-deploy" - -[[exemptions.serde_path_to_error]] -version = "0.1.16" -criteria = "safe-to-deploy" - [[exemptions.serde_spanned]] version = "0.6.5" criteria = "safe-to-deploy" @@ -1619,6 +1631,10 @@ criteria = "safe-to-deploy" version = "0.6.0" criteria = "safe-to-deploy" +[[exemptions.static_assertions_next]] +version = "1.1.2" +criteria = "safe-to-deploy" + [[exemptions.str-buf]] version = "1.0.6" criteria = "safe-to-deploy" @@ -1639,6 +1655,14 @@ criteria = "safe-to-deploy" version = "0.11.1" criteria = "safe-to-deploy" +[[exemptions.strum]] +version = "0.26.3" +criteria = "safe-to-deploy" + +[[exemptions.strum_macros]] +version = "0.26.4" +criteria = "safe-to-deploy" + [[exemptions.symbolic-common]] version = "12.8.0" criteria = "safe-to-deploy" @@ -1647,14 +1671,6 @@ criteria = "safe-to-deploy" version = "12.8.0" criteria = "safe-to-deploy" -[[exemptions.syn]] -version = "1.0.109" -criteria = "safe-to-deploy" - -[[exemptions.syn]] -version = "2.0.58" -criteria = "safe-to-deploy" - [[exemptions.syn_derive]] version = "0.1.8" criteria = "safe-to-deploy" @@ -1831,6 +1847,10 @@ criteria = "safe-to-deploy" version = "0.10.4" criteria = "safe-to-deploy" +[[exemptions.ucd-trie]] +version = "0.1.6" +criteria = "safe-to-deploy" + [[exemptions.ulid]] version = "1.1.2" criteria = "safe-to-deploy" diff --git a/supply-chain/imports.lock b/supply-chain/imports.lock index 2db9d57b..3fcbced7 100644 --- a/supply-chain/imports.lock +++ b/supply-chain/imports.lock @@ -16,6 +16,13 @@ user-id = 3987 user-login = "rushmorem" user-name = "Rushmore Mushambi" +[[publisher.anyhow]] +version = "1.0.81" +when = "2024-03-12" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + [[publisher.arbitrary]] version = "1.3.2" when = "2023-10-30" @@ -23,6 +30,13 @@ user-id = 696 user-login = "fitzgen" user-name = "Nick Fitzgerald" +[[publisher.async-trait]] +version = "0.1.81" +when = "2024-07-07" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + [[publisher.bumpalo]] version = "3.15.4" when = "2024-03-07" @@ -198,6 +212,20 @@ user-id = 359 user-login = "seanmonstar" user-name = "Sean McArthur" +[[publisher.paste]] +version = "1.0.14" +when = "2023-07-15" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.prettyplease]] +version = "0.2.17" +when = "2024-03-25" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + [[publisher.psl-types]] version = "2.0.11" when = "2022-08-10" @@ -205,6 +233,20 @@ user-id = 3987 user-login = "rushmorem" user-name = "Rushmore Mushambi" +[[publisher.ref-cast]] +version = "1.0.22" +when = "2024-01-02" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.ref-cast-impl]] +version = "1.0.22" +when = "2024-01-02" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + [[publisher.reqwest]] version = "0.12.5" when = "2024-06-17" @@ -240,6 +282,55 @@ user-id = 145457 user-login = "tobiemh" user-name = "Tobie Morgan Hitchcock" +[[publisher.ryu]] +version = "1.0.17" +when = "2024-02-19" +user-id 
= 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.serde]] +version = "1.0.208" +when = "2024-08-15" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.serde_bytes]] +version = "0.11.15" +when = "2024-06-25" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.serde_derive]] +version = "1.0.208" +when = "2024-08-15" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.serde_json]] +version = "1.0.122" +when = "2024-08-01" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.serde_json]] +version = "1.0.125" +when = "2024-08-15" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.serde_path_to_error]] +version = "0.1.16" +when = "2024-03-09" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + [[publisher.storekey]] version = "0.5.0" when = "2023-04-28" @@ -254,6 +345,13 @@ user-id = 145457 user-login = "tobiemh" user-name = "Tobie Morgan Hitchcock" +[[publisher.surrealdb-async-graphql-axum]] +version = "7.0.7-surrealdb.1" +when = "2024-08-07" +user-id = 145457 +user-login = "tobiemh" +user-name = "Tobie Morgan Hitchcock" + [[publisher.surrealdb-core]] version = "2.0.0-alpha.2" when = "2024-01-31" @@ -276,8 +374,8 @@ user-login = "mumoshu" user-name = "Yusuke Kuoka" [[publisher.surrealkv]] -version = "0.3.2" -when = "2024-08-12" +version = "0.3.3" +when = "2024-08-14" user-id = 145457 user-login = "tobiemh" user-name = "Tobie Morgan Hitchcock" @@ -289,6 +387,20 @@ user-id = 145457 user-login = "tobiemh" user-name = "Tobie Morgan Hitchcock" +[[publisher.syn]] +version = "1.0.109" +when = "2023-02-24" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + +[[publisher.syn]] +version = "2.0.58" +when = "2024-04-03" +user-id = 3618 +user-login = "dtolnay" +user-name = "David Tolnay" + [[publisher.trice]] version = "0.4.0" when = "2024-01-04" @@ -1169,17 +1281,6 @@ description contains a link to a document with an additional security review. """ aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" -[[audits.google.audits.strsim]] -who = "danakj@chromium.org" -criteria = "safe-to-deploy" -version = "0.10.0" -notes = """ -Reviewed in https://crrev.com/c/5171063 - -Previously reviewed during security review and the audit is grandparented in. -""" -aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" - [[audits.google.audits.take_mut]] who = "David Koloski " criteria = "safe-to-deploy" @@ -1996,6 +2097,24 @@ delta = "0.2.6 -> 0.3.0" notes = "Replaces some `unsafe` code by bumping MSRV to 1.66 (to access `core::hint::black_box`)." 
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml" +[[audits.zcash.audits.darling]] +who = "Jack Grigg " +criteria = "safe-to-deploy" +delta = "0.20.9 -> 0.20.10" +aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml" + +[[audits.zcash.audits.darling_core]] +who = "Jack Grigg " +criteria = "safe-to-deploy" +delta = "0.20.9 -> 0.20.10" +aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml" + +[[audits.zcash.audits.darling_macro]] +who = "Jack Grigg " +criteria = "safe-to-deploy" +delta = "0.20.9 -> 0.20.10" +aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml" + [[audits.zcash.audits.errno]] who = "Jack Grigg " criteria = "safe-to-deploy"
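
Taken together, the changes above gate GraphQL behind the SURREAL_EXPERIMENTAL_GRAPHQL environment variable (default false) and, on surrealdb_unstable builds, mount a POST /graphql route (src/net/gql.rs) alongside a new "graphql" RPC method (core/src/rpc/rpc_context.rs). The sketch below shows how a client might exercise the HTTP endpoint once the feature is enabled; it is an illustrative assumption rather than part of the patch: the bind address, credentials, namespace/database header names, the person table, and the filter and field names in the query are all placeholders that depend on the running instance and on the schema the generator derives from it.

    // Hypothetical client-side sketch in Rust, using reqwest and serde_json
    // (both already appear in the workspace dependency graph). Assumes a
    // server built with surrealdb_unstable, started with
    // SURREAL_EXPERIMENTAL_GRAPHQL=true, and listening on 127.0.0.1:8000.
    use serde_json::json;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let client = reqwest::Client::new();

        // POST /graphql takes a standard GraphQL request body; the session
        // (and therefore the generated schema) is derived from the usual
        // authentication and namespace/database headers.
        let resp = client
            .post("http://127.0.0.1:8000/graphql")
            .basic_auth("root", Some("root")) // placeholder credentials
            .header("surreal-ns", "test") // placeholder namespace header name
            .header("surreal-db", "test") // placeholder database header name
            .header("Accept", "application/json")
            .json(&json!({
                // placeholder table and field names; the eq/ne filter objects
                // mirror those built in core/src/gql/schema.rs
                "query": "{ person(filter: { age: { eq: 5 } }) { id age } }"
            }))
            .send()
            .await?;

        println!("{}", resp.text().await?);
        Ok(())
    }

The same query can also be sent over the RPC interface with the new "graphql" method: per the rpc_context.rs hunk, the first parameter is either the query string or an object with query, variables (or vars) and operationName (or operation) keys, and the second parameter is an options object such as { pretty: true, format: "json" } that controls serialization ("cbor" is declared but still rejected as unsupported).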