Add graphql config (#4704)

parent ebb6598c9e
commit c1443fe107

28 changed files with 918 additions and 45 deletions
core/src/err/mod.rs

@@ -371,6 +371,12 @@ pub enum Error {
 		value: String,
 	},
 
+	/// The requested config does not exist
+	#[error("The config for {value} does not exist")]
+	CgNotFound {
+		value: String,
+	},
+
 	/// The requested table does not exist
 	#[error("The table '{value}' does not exist")]
 	TbNotFound {

@@ -895,6 +901,12 @@ pub enum Error {
 		value: String,
 	},
 
+	/// The requested config already exists
+	#[error("The config for {value} already exists")]
+	CgAlreadyExists {
+		value: String,
+	},
+
 	/// The requested table already exists
 	#[error("The table '{value}' already exists")]
 	TbAlreadyExists {
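The two config error variants above follow the existing `TbNotFound`/`TbAlreadyExists` pattern, with `thiserror` interpolating the `value` field into the message. A minimal sketch of how a caller branches on them, using a local mirror enum rather than the crate's actual `Error` type (names copied from the diff, everything else illustrative):

```rust
use thiserror::Error;

// Local mirror of the two variants added above.
#[derive(Debug, Error)]
enum ConfigError {
    #[error("The config for {value} does not exist")]
    CgNotFound { value: String },
    #[error("The config for {value} already exists")]
    CgAlreadyExists { value: String },
}

fn main() {
    let err = ConfigError::CgNotFound { value: "graphql".to_string() };
    // The #[error] template interpolates the `value` field.
    assert_eq!(err.to_string(), "The config for graphql does not exist");
    match err {
        ConfigError::CgNotFound { .. } => { /* e.g. report "GraphQL not configured" */ }
        ConfigError::CgAlreadyExists { .. } => { /* e.g. require OVERWRITE */ }
    }
}
```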
core/src/gql/cache.rs

@@ -82,8 +82,8 @@ impl<I: Invalidator> SchemaCache<I> {
 		}
 	}
 	pub async fn get_schema(&self, session: &Session) -> Result<Schema, GqlError> {
-		let ns = session.ns.as_ref().expect("missing ns should have been caught");
-		let db = session.db.as_ref().expect("missing db should have been caught");
+		let ns = session.ns.as_ref().ok_or(GqlError::UnpecifiedNamespace)?;
+		let db = session.db.as_ref().ok_or(GqlError::UnpecifiedDatabase)?;
 		{
 			let guard = self.inner.read().await;
 			if let Some(cand) = guard.get(&(ns.to_owned(), db.to_owned())) {
core/src/gql/error.rs

@@ -5,7 +5,7 @@ use thiserror::Error;
 use crate::sql::Kind;
 
-#[derive(Debug, Error)]
+#[derive(Error, Debug)]
 pub enum GqlError {
 	#[error("Database error: {0}")]
 	DbError(crate::err::Error),

@@ -17,6 +17,8 @@ pub enum GqlError {
 	UnpecifiedNamespace,
 	#[error("No Database specified")]
 	UnpecifiedDatabase,
+	#[error("GraphQL has not been configured for this database")]
+	NotConfigured,
 	#[error("Internal Error: {0}")]
 	InternalError(String),
 	#[error("Error converting value: {val} to type: {target}")]
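These variants are what the `ok_or` calls in the schema cache above now produce when the session has no namespace or database selected: an `Option` is turned into a `Result` carrying the matching `GqlError`. A minimal sketch of that conversion, with a local stand-in enum (variant names copied from the diff; the function is illustrative only):

```rust
#[derive(Debug, PartialEq)]
enum GqlError {
    UnpecifiedNamespace,
    UnpecifiedDatabase,
}

// Mirrors `session.ns.as_ref().ok_or(GqlError::UnpecifiedNamespace)?` from the diff above.
fn require_ns_db(ns: Option<&str>, db: Option<&str>) -> Result<(String, String), GqlError> {
    let ns = ns.ok_or(GqlError::UnpecifiedNamespace)?;
    let db = db.ok_or(GqlError::UnpecifiedDatabase)?;
    Ok((ns.to_owned(), db.to_owned()))
}

fn main() {
    assert_eq!(require_ns_db(Some("test"), None), Err(GqlError::UnpecifiedDatabase));
    assert!(require_ns_db(Some("test"), Some("test")).is_ok());
}
```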
core/src/gql/ext.rs

@@ -1,3 +1,7 @@
use std::ops::Deref;

use crate::sql::statements::define::config::graphql::TableConfig;
use crate::sql::statements::DefineTableStatement;
use crate::sql::{
	statements::UseStatement, Cond, Ident, Idiom, Limit, Order, Orders, Part, Start, Table, Value,
};

@@ -157,3 +161,33 @@ impl TryAsExt for SqlValue {
 		}
 	}
 }
+
+pub trait Named {
+	fn name(&self) -> &str;
+}
+
+impl Named for DefineTableStatement {
+	fn name(&self) -> &str {
+		&self.name
+	}
+}
+
+impl Named for TableConfig {
+	fn name(&self) -> &str {
+		&self.name
+	}
+}
+
+pub trait NamedContainer {
+	fn contains_name(&self, name: &str) -> bool;
+}
+
+impl<I, N> NamedContainer for I
+where
+	I: Deref<Target = [N]>,
+	N: Named,
+{
+	fn contains_name(&self, name: &str) -> bool {
+		self.iter().any(|n| n.name() == name)
+	}
+}
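The blanket `NamedContainer` impl means any slice-like container of `Named` items (a `Vec`, an `Arc<[T]>`, a boxed slice) gains `contains_name` for free, which is what the schema generator uses to apply `INCLUDE`/`EXCLUDE` lists. A standalone sketch of the same pattern, with simplified local types to avoid clashing with the real ones:

```rust
use std::ops::Deref;

trait Named {
    fn name(&self) -> &str;
}

trait NamedContainer {
    fn contains_name(&self, name: &str) -> bool;
}

// Blanket impl: anything that derefs to a slice of Named items is a NamedContainer.
impl<I, N> NamedContainer for I
where
    I: Deref<Target = [N]>,
    N: Named,
{
    fn contains_name(&self, name: &str) -> bool {
        self.iter().any(|n| n.name() == name)
    }
}

// Stand-in for TableConfig from the diff above.
struct Cfg {
    name: String,
}

impl Named for Cfg {
    fn name(&self) -> &str {
        &self.name
    }
}

fn main() {
    let include: Vec<Cfg> = vec![Cfg { name: "foo".into() }, Cfg { name: "bar".into() }];
    // Vec<Cfg> derefs to [Cfg], so the blanket impl applies.
    assert!(include.contains_name("foo"));
    assert!(!include.contains_name("baz"));
}
```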
core/src/gql/schema.rs

@@ -5,6 +5,7 @@ use std::sync::Arc;
 use crate::dbs::Session;
 use crate::kvs::Datastore;
 use crate::sql::kind::Literal;
+use crate::sql::statements::define::config::graphql::TablesConfig;
 use crate::sql::statements::{DefineFieldStatement, SelectStatement};
 use crate::sql::{self, Table};
 use crate::sql::{Cond, Fields};

@@ -29,7 +30,7 @@ use super::ext::IntoExt;
 #[cfg(debug_assertions)]
 use super::ext::ValidatorExt;
 use crate::gql::error::{internal_error, schema_error, type_error};
-use crate::gql::ext::TryAsExt;
+use crate::gql::ext::{NamedContainer, TryAsExt};
 use crate::gql::utils::{GQLTx, GqlValueUtils};
 use crate::kvs::LockType;
 use crate::kvs::TransactionType;

@@ -85,7 +86,28 @@ pub async fn generate_schema(
	let tx = kvs.transaction(TransactionType::Read, LockType::Optimistic).await?;
	let ns = session.ns.as_ref().ok_or(GqlError::UnpecifiedNamespace)?;
	let db = session.db.as_ref().ok_or(GqlError::UnpecifiedDatabase)?;

	let cg = tx.get_db_config(ns, db, "graphql").await.map_err(|e| match e {
		crate::err::Error::CgNotFound {
			..
		} => GqlError::NotConfigured,
		e => e.into(),
	})?;
	let config = cg.inner.clone().try_into_graphql()?;

	let tbs = tx.all_tb(ns, db, None).await?;

	let tbs = match config.tables {
		TablesConfig::None => return Err(GqlError::NotConfigured),
		TablesConfig::Auto => tbs,
		TablesConfig::Include(inc) => {
			tbs.iter().filter(|t| inc.contains_name(&t.name)).cloned().collect()
		}
		TablesConfig::Exclude(exc) => {
			tbs.iter().filter(|t| !exc.contains_name(&t.name)).cloned().collect()
		}
	};

	let mut query = Object::new("Query");
	let mut types: Vec<Type> = Vec::new();
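The table-selection step above reduces to a filter over the database's table definitions: a missing config maps `CgNotFound` to `NotConfigured`, `Auto` keeps everything, `Include` keeps only the listed names, `Exclude` drops them, and `None` aborts with `NotConfigured`. A self-contained sketch of that filtering logic, with plain strings standing in for `DefineTableStatement` (names and types here are illustrative, not the crate's):

```rust
// Simplified stand-in for TablesConfig and the table list used above.
enum TablesConfig {
    None,
    Auto,
    Include(Vec<String>),
    Exclude(Vec<String>),
}

fn select_tables(all: Vec<String>, cfg: &TablesConfig) -> Option<Vec<String>> {
    match cfg {
        // No tables configured: the GraphQL layer reports NotConfigured.
        TablesConfig::None => None,
        // AUTO: expose every table.
        TablesConfig::Auto => Some(all),
        // INCLUDE: keep only the listed tables.
        TablesConfig::Include(inc) => Some(all.into_iter().filter(|t| inc.contains(t)).collect()),
        // EXCLUDE: drop the listed tables.
        TablesConfig::Exclude(exc) => Some(all.into_iter().filter(|t| !exc.contains(t)).collect()),
    }
}

fn main() {
    let tables = vec!["foo".to_string(), "bar".to_string()];
    assert_eq!(select_tables(tables.clone(), &TablesConfig::Auto), Some(tables.clone()));
    let cfg = TablesConfig::Include(vec!["foo".to_string()]);
    assert_eq!(select_tables(tables, &cfg), Some(vec!["foo".to_string()]));
}
```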
core/src/iam/entities/resources/resource.rs

@@ -9,7 +9,7 @@ use super::Level;
 use cedar_policy::{Entity, EntityId, EntityTypeName, EntityUid, RestrictedExpression};
 use serde::{Deserialize, Serialize};
 
-#[revisioned(revision = 1)]
+#[revisioned(revision = 2)]
 #[derive(Clone, Default, Debug, Eq, PartialEq, PartialOrd, Hash, Serialize, Deserialize)]
 #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
 #[non_exhaustive]

@@ -30,11 +30,21 @@ pub enum ResourceKind {
 	Field,
 	Index,
 	Access,
+	#[revision(start = 2)]
+	Config(ConfigKind),
 
 	// IAM
 	Actor,
 }
 
+#[revisioned(revision = 1)]
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Hash, Serialize, Deserialize)]
+#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[non_exhaustive]
+pub enum ConfigKind {
+	GraphQL,
+}
+
 impl std::fmt::Display for ResourceKind {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		match self {

@@ -54,6 +64,15 @@ impl std::fmt::Display for ResourceKind {
 			ResourceKind::Index => write!(f, "Index"),
 			ResourceKind::Access => write!(f, "Access"),
 			ResourceKind::Actor => write!(f, "Actor"),
+			ResourceKind::Config(c) => write!(f, "Config::{c}"),
 		}
 	}
 }
+
+impl std::fmt::Display for ConfigKind {
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		match self {
+			ConfigKind::GraphQL => write!(f, "GraphQL"),
+		}
+	}
+}
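The nested `Display` impls compose, so a config resource renders as `Config::GraphQL` in IAM messages. A quick sketch of that formatting, mirroring the two impls above as free-standing enums (derives and revisions omitted):

```rust
use std::fmt;

enum ConfigKind {
    GraphQL,
}

enum ResourceKind {
    Config(ConfigKind),
}

impl fmt::Display for ConfigKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ConfigKind::GraphQL => write!(f, "GraphQL"),
        }
    }
}

impl fmt::Display for ResourceKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // The inner kind is interpolated, giving "Config::GraphQL".
            ResourceKind::Config(c) => write!(f, "Config::{c}"),
        }
    }
}

fn main() {
    assert_eq!(ResourceKind::Config(ConfigKind::GraphQL).to_string(), "Config::GraphQL");
}
```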
core/src/key/category.rs

@@ -84,6 +84,8 @@ pub enum Category {
 	DatabaseUser,
 	/// crate::key::database::vs /*{ns}*{db}!vs
 	DatabaseVersionstamp,
+	/// crate::key::database::cg /*{ns}*{db}!cg{ty}
+	DatabaseConfig,
 	///
 	/// ------------------------------
 	///

@@ -191,6 +193,7 @@ impl Display for Category {
 			Self::DatabaseTimestamp => "DatabaseTimestamp",
 			Self::DatabaseUser => "DatabaseUser",
 			Self::DatabaseVersionstamp => "DatabaseVersionstamp",
+			Self::DatabaseConfig => "DatabaseConfig",
 			Self::TableRoot => "TableRoot",
 			Self::TableEvent => "TableEvent",
 			Self::TableField => "TableField",
core/src/key/database/cg.rs (new file, 87 lines)
@@ -0,0 +1,87 @@
//! Stores a DEFINE CONFIG definition
use crate::key::category::Categorise;
use crate::key::category::Category;
use derive::Key;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Key)]
#[non_exhaustive]
pub struct Cg<'a> {
	__: u8,
	_a: u8,
	pub ns: &'a str,
	_b: u8,
	pub db: &'a str,
	_c: u8,
	_d: u8,
	_e: u8,
	pub ty: &'a str,
}

pub fn new<'a>(ns: &'a str, db: &'a str, ty: &'a str) -> Cg<'a> {
	Cg::new(ns, db, ty)
}

pub fn prefix(ns: &str, db: &str) -> Vec<u8> {
	let mut k = super::all::new(ns, db).encode().unwrap();
	k.extend_from_slice(&[b'!', b'c', b'g', 0x00]);
	k
}

pub fn suffix(ns: &str, db: &str) -> Vec<u8> {
	let mut k = super::all::new(ns, db).encode().unwrap();
	k.extend_from_slice(&[b'!', b'c', b'g', 0xff]);
	k
}

impl Categorise for Cg<'_> {
	fn categorise(&self) -> Category {
		Category::DatabaseConfig
	}
}

impl<'a> Cg<'a> {
	pub fn new(ns: &'a str, db: &'a str, ty: &'a str) -> Self {
		Self {
			__: b'/',
			_a: b'*',
			ns,
			_b: b'*',
			db,
			_c: b'!',
			_d: b'c',
			_e: b'g',
			ty,
		}
	}
}

#[cfg(test)]
mod tests {
	#[test]
	fn key() {
		use super::*;
		#[rustfmt::skip]
		let val = Cg::new(
			"testns",
			"testdb",
			"testty",
		);
		let enc = Cg::encode(&val).unwrap();
		assert_eq!(enc, b"/*testns\x00*testdb\x00!cgtestty\x00");
		let dec = Cg::decode(&enc).unwrap();
		assert_eq!(val, dec);
	}

	#[test]
	fn test_prefix() {
		let val = super::prefix("testns", "testdb");
		assert_eq!(val, b"/*testns\0*testdb\0!cg\0");
	}

	#[test]
	fn test_suffix() {
		let val = super::suffix("testns", "testdb");
		assert_eq!(val, b"/*testns\0*testdb\0!cg\xff");
	}
}
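The `!cg{ty}` suffix keeps config entries grouped under the database prefix, so `prefix`/`suffix` bound a range scan over every config of a database while `Cg::new` addresses a single entry. A small sketch of how those byte ranges relate, using plain byte-string literals that follow the encoding shown in the tests above (the concrete "graphql" key is inferred from that layout, not taken from the diff):

```rust
fn main() {
    // One concrete key, following the layout /*{ns}\0*{db}\0!cg{ty}\0 from the module above,
    // here with ty = "graphql".
    let key: &[u8] = b"/*testns\x00*testdb\x00!cggraphql\x00";

    // The scan bounds produced by prefix()/suffix() for the same database.
    let start: &[u8] = b"/*testns\x00*testdb\x00!cg\x00";
    let end: &[u8] = b"/*testns\x00*testdb\x00!cg\xff";

    // Any config key for this database sorts inside [start, end), which is what
    // all_db_configs relies on when it scans the range key..end.
    assert!(start <= key && key < end);
}
```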
core/src/key/database/mod.rs

@@ -2,6 +2,7 @@ pub mod ac;
 pub mod access;
 pub mod all;
 pub mod az;
+pub mod cg;
 pub mod fc;
 pub mod ml;
 pub mod pa;
core/src/key/mod.rs

@@ -37,6 +37,7 @@
 /// crate::key::database::ts /*{ns}*{db}!ts{ts}
 /// crate::key::database::us /*{ns}*{db}!us{us}
 /// crate::key::database::vs /*{ns}*{db}!vs
+/// crate::key::database::cg /*{ns}*{db}!cg{ty}
 ///
 /// crate::key::database::access::all /*{ns}*{db}&{ac}
 /// crate::key::database::access::gr /*{ns}*{db}&{ac}!gr{gr}
core/src/kvs/cache.rs

@@ -1,6 +1,7 @@
 use super::Key;
 use crate::dbs::node::Node;
 use crate::err::Error;
+use crate::sql::statements::define::DefineConfigStatement;
 use crate::sql::statements::AccessGrant;
 use crate::sql::statements::DefineAccessStatement;
 use crate::sql::statements::DefineAnalyzerStatement;

@@ -81,6 +82,8 @@ pub(super) enum Entry {
 	Fts(Arc<[DefineTableStatement]>),
 	/// A slice of DefineModelStatement specified on a database.
 	Mls(Arc<[DefineModelStatement]>),
+	/// A slice of DefineConfigStatement specified on a database.
+	Cgs(Arc<[DefineConfigStatement]>),
 	/// A slice of DefineParamStatement specified on a database.
 	Pas(Arc<[DefineParamStatement]>),
 	/// A slice of DefineTableStatement specified on a database.

@@ -234,6 +237,14 @@ impl Entry {
 			_ => Err(fail!("Unable to convert type into Entry::Mls")),
 		}
 	}
+	/// Converts this cache entry into a slice of [`DefineConfigStatement`].
+	/// This panics if called on a cache entry that is not an [`Entry::Cgs`].
+	pub(super) fn try_into_cgs(self) -> Result<Arc<[DefineConfigStatement]>, Error> {
+		match self {
+			Entry::Cgs(v) => Ok(v),
+			_ => Err(fail!("Unable to convert type into Entry::Cgs")),
+		}
+	}
 	/// Converts this cache entry into a slice of [`DefineTableStatement`].
 	/// This panics if called on a cache entry that is not an [`Entry::Tbs`].
 	pub(super) fn try_into_tbs(self) -> Result<Arc<[DefineTableStatement]>, Error> {
core/src/kvs/tx.rs

@@ -11,6 +11,7 @@ use crate::kvs::cache::Entry;
 use crate::kvs::cache::EntryWeighter;
 use crate::kvs::scanner::Scanner;
 use crate::kvs::Transactor;
+use crate::sql::statements::define::DefineConfigStatement;
 use crate::sql::statements::AccessGrant;
 use crate::sql::statements::DefineAccessStatement;
 use crate::sql::statements::DefineAnalyzerStatement;

@@ -669,6 +670,29 @@ impl Transaction {
 		.try_into_mls()
 	}
 
+	/// Retrieve all model definitions for a specific database.
+	#[instrument(level = "trace", target = "surrealdb::core::kvs::tx", skip(self))]
+	pub async fn all_db_configs(
+		&self,
+		ns: &str,
+		db: &str,
+	) -> Result<Arc<[DefineConfigStatement]>, Error> {
+		let key = crate::key::database::cg::prefix(ns, db);
+		let res = self.cache.get_value_or_guard_async(&key).await;
+		match res {
+			Ok(val) => val,
+			Err(cache) => {
+				let end = crate::key::database::cg::suffix(ns, db);
+				let val = self.getr(key..end, None).await?;
+				let val = val.convert().into();
+				let val = Entry::Cgs(Arc::clone(&val));
+				let _ = cache.insert(val.clone());
+				val
+			}
+		}
+		.try_into_cgs()
+	}
+
 	/// Retrieve all table definitions for a specific database.
 	#[instrument(level = "trace", target = "surrealdb::core::kvs::tx", skip(self))]
 	pub async fn all_tb(

@@ -1199,6 +1223,31 @@ impl Transaction {
 		.try_into_type()
 	}
 
+	/// Retrieve a specific config definition from a database.
+	#[instrument(level = "trace", target = "surrealdb::core::kvs::tx", skip(self))]
+	pub async fn get_db_config(
+		&self,
+		ns: &str,
+		db: &str,
+		cg: &str,
+	) -> Result<Arc<DefineConfigStatement>, Error> {
+		let key = crate::key::database::cg::new(ns, db, cg).encode()?;
+		let res = self.cache.get_value_or_guard_async(&key).await;
+		match res {
+			Ok(val) => val,
+			Err(cache) => {
+				let val = self.get(key, None).await?.ok_or_else(|| Error::CgNotFound {
+					value: cg.to_owned(),
+				})?;
+				let val: DefineConfigStatement = val.into();
+				let val = Entry::Any(Arc::new(val));
+				let _ = cache.insert(val.clone());
+				val
+			}
+		}
+		.try_into_type()
+	}
+
 	/// Retrieve a specific table definition.
 	#[instrument(level = "trace", target = "surrealdb::core::kvs::tx", skip(self))]
 	pub async fn get_tb(
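Both new transaction methods follow the existing cache-or-fetch pattern: try the in-memory cache, otherwise read the keyspace, wrap the result in a cache `Entry`, insert it, and convert it back to the concrete type; a missing single config surfaces as `Error::CgNotFound`, which the GraphQL layer maps to `NotConfigured`. A sketch of that cache-or-fetch shape in isolation, generic over hypothetical cache/store maps rather than the crate's actual types:

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Hypothetical stand-ins for the transaction cache and the keyspace store.
struct Tx {
    cache: HashMap<Vec<u8>, Arc<String>>,
    store: HashMap<Vec<u8>, String>,
}

impl Tx {
    // Mirrors the shape of get_db_config: cache hit, else fetch, insert, return.
    fn get_config(&mut self, key: Vec<u8>) -> Result<Arc<String>, String> {
        if let Some(hit) = self.cache.get(&key) {
            return Ok(Arc::clone(hit));
        }
        let raw = self
            .store
            .get(&key)
            .cloned()
            .ok_or_else(|| "CgNotFound".to_string())?; // analogous to Error::CgNotFound
        let val = Arc::new(raw);
        self.cache.insert(key, Arc::clone(&val));
        Ok(val)
    }
}

fn main() {
    let mut tx = Tx {
        cache: HashMap::new(),
        store: HashMap::from([(b"!cggraphql".to_vec(), "DEFINE CONFIG GRAPHQL AUTO".to_string())]),
    };
    assert!(tx.get_config(b"!cggraphql".to_vec()).is_ok()); // fetched and cached
    assert!(tx.get_config(b"!cgmissing".to_vec()).is_err()); // maps to a not-found error
}
```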
core/src/sql/statements/define/config/graphql.rs (new file, 198 lines)
@ -0,0 +1,198 @@
|
|||
use std::fmt::{self, Display, Write};
|
||||
|
||||
use crate::sql::fmt::{pretty_indent, Fmt, Pretty};
|
||||
use crate::sql::statements::info::InfoStructure;
|
||||
use crate::sql::{Ident, Part, Value};
|
||||
use derive::Store;
|
||||
use revision::revisioned;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[revisioned(revision = 1)]
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Store, Hash)]
|
||||
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
|
||||
#[non_exhaustive]
|
||||
pub struct GraphQLConfig {
|
||||
pub tables: TablesConfig,
|
||||
pub functions: FunctionsConfig,
|
||||
}
|
||||
|
||||
#[revisioned(revision = 1)]
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Store, Hash)]
|
||||
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
|
||||
#[non_exhaustive]
|
||||
pub enum TablesConfig {
|
||||
#[default]
|
||||
None,
|
||||
Auto,
|
||||
Include(Vec<TableConfig>),
|
||||
Exclude(Vec<TableConfig>),
|
||||
}
|
||||
|
||||
#[revisioned(revision = 1)]
|
||||
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Store, Hash)]
|
||||
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
|
||||
#[non_exhaustive]
|
||||
pub struct TableConfig {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[revisioned(revision = 1)]
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Store, Hash)]
|
||||
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
|
||||
#[non_exhaustive]
|
||||
pub enum FunctionsConfig {
|
||||
#[default]
|
||||
None,
|
||||
Auto,
|
||||
Include(Vec<Ident>),
|
||||
Exclude(Vec<Ident>),
|
||||
}
|
||||
|
||||
impl Display for GraphQLConfig {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, " GRAPHQL")?;
|
||||
|
||||
write!(f, " TABLES {}", self.tables)?;
|
||||
write!(f, " FUNCTIONS {}", self.functions)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for TablesConfig {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TablesConfig::Auto => write!(f, "AUTO")?,
|
||||
TablesConfig::None => write!(f, "NONE")?,
|
||||
TablesConfig::Include(cs) => {
|
||||
let mut f = Pretty::from(f);
|
||||
write!(f, "INCLUDE ")?;
|
||||
if !cs.is_empty() {
|
||||
let indent = pretty_indent();
|
||||
write!(f, "{}", Fmt::pretty_comma_separated(cs.as_slice()))?;
|
||||
drop(indent);
|
||||
}
|
||||
}
|
||||
TablesConfig::Exclude(_) => todo!(),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for TableConfig {
|
||||
fn from(value: String) -> Self {
|
||||
Self {
|
||||
name: value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn val_to_ident(val: Value) -> Result<Ident, Value> {
|
||||
match val {
|
||||
Value::Strand(s) => Ok(s.0.into()),
|
||||
Value::Table(n) => Ok(n.0.into()),
|
||||
Value::Idiom(ref i) => match &i[..] {
|
||||
[Part::Field(n)] => Ok(n.to_raw().into()),
|
||||
_ => Err(val),
|
||||
},
|
||||
_ => Err(val),
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<Value> for TableConfig {
|
||||
type Error = Value;
|
||||
|
||||
fn try_from(value: Value) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
v @ Value::Strand(_) | v @ Value::Table(_) | v @ Value::Idiom(_) => {
|
||||
val_to_ident(v).map(|i| i.0.into())
|
||||
}
|
||||
_ => Err(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for TableConfig {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.name)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for FunctionsConfig {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
FunctionsConfig::Auto => write!(f, "AUTO")?,
|
||||
FunctionsConfig::None => write!(f, "NONE")?,
|
||||
FunctionsConfig::Include(cs) => {
|
||||
let mut f = Pretty::from(f);
|
||||
write!(f, "INCLUDE [")?;
|
||||
if !cs.is_empty() {
|
||||
let indent = pretty_indent();
|
||||
write!(f, "{}", Fmt::pretty_comma_separated(cs.as_slice()))?;
|
||||
drop(indent);
|
||||
}
|
||||
f.write_char(']')?;
|
||||
}
|
||||
FunctionsConfig::Exclude(cs) => {
|
||||
let mut f = Pretty::from(f);
|
||||
write!(f, "EXCLUDE [")?;
|
||||
if !cs.is_empty() {
|
||||
let indent = pretty_indent();
|
||||
write!(f, "{}", Fmt::pretty_comma_separated(cs.as_slice()))?;
|
||||
drop(indent);
|
||||
}
|
||||
f.write_char(']')?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl InfoStructure for GraphQLConfig {
|
||||
fn structure(self) -> Value {
|
||||
Value::from(map!(
|
||||
"tables" => self.tables.structure(),
|
||||
"functions" => self.functions.structure(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl InfoStructure for TablesConfig {
|
||||
fn structure(self) -> Value {
|
||||
match self {
|
||||
TablesConfig::None => Value::None,
|
||||
TablesConfig::Auto => Value::Strand("AUTO".into()),
|
||||
TablesConfig::Include(ts) => Value::from(map!(
|
||||
"include" => Value::Array(ts.into_iter().map(InfoStructure::structure).collect()),
|
||||
)),
|
||||
TablesConfig::Exclude(ts) => Value::from(map!(
|
||||
"exclude" => Value::Array(ts.into_iter().map(InfoStructure::structure).collect()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl InfoStructure for TableConfig {
|
||||
fn structure(self) -> Value {
|
||||
Value::from(map!(
|
||||
"name" => Value::from(self.name),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl InfoStructure for FunctionsConfig {
|
||||
fn structure(self) -> Value {
|
||||
match self {
|
||||
FunctionsConfig::None => Value::None,
|
||||
FunctionsConfig::Auto => Value::Strand("AUTO".into()),
|
||||
FunctionsConfig::Include(fs) => Value::from(map!(
|
||||
"include" => Value::Array(fs.into_iter().map(|i| Value::from(i.to_raw())).collect()),
|
||||
)),
|
||||
FunctionsConfig::Exclude(fs) => Value::from(map!(
|
||||
"exclude" => Value::Array(fs.into_iter().map(|i| Value::from(i.to_raw())).collect()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
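The `Display` impls above are what `INFO FOR DB` output and statement round-tripping rely on: a `GraphQLConfig` prints as ` GRAPHQL TABLES … FUNCTIONS …`, with `AUTO`/`NONE` for the simple modes and a comma-separated list after `INCLUDE`. A hedged sketch of that formatting behaviour using a simplified local enum (the real type carries the revision/serde derives and pretty-printing helpers omitted here):

```rust
use std::fmt::{self, Display};

// Simplified mirror of TablesConfig's Display behaviour from the file above.
enum TablesConfig {
    None,
    Auto,
    Include(Vec<String>),
}

impl Display for TablesConfig {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TablesConfig::None => write!(f, "NONE"),
            TablesConfig::Auto => write!(f, "AUTO"),
            TablesConfig::Include(cs) => write!(f, "INCLUDE {}", cs.join(", ")),
        }
    }
}

fn main() {
    // These correspond to `DEFINE CONFIG GRAPHQL TABLES AUTO` and
    // `DEFINE CONFIG GRAPHQL TABLES INCLUDE foo, bar` once the outer
    // DefineConfigStatement/GraphQLConfig wrappers add their keywords.
    assert_eq!(TablesConfig::Auto.to_string(), "AUTO");
    assert_eq!(
        TablesConfig::Include(vec!["foo".into(), "bar".into()]).to_string(),
        "INCLUDE foo, bar"
    );
}
```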
core/src/sql/statements/define/config/mod.rs (new file, 130 lines)
@ -0,0 +1,130 @@
|
|||
pub mod graphql;
|
||||
|
||||
use crate::ctx::Context;
|
||||
use crate::dbs::Options;
|
||||
use crate::doc::CursorDoc;
|
||||
use crate::err::Error;
|
||||
use crate::iam::{Action, ConfigKind, ResourceKind};
|
||||
use crate::sql::statements::info::InfoStructure;
|
||||
use crate::sql::{Base, Value};
|
||||
use derive::Store;
|
||||
use graphql::GraphQLConfig;
|
||||
use revision::revisioned;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::{self, Display};
|
||||
|
||||
#[revisioned(revision = 1)]
|
||||
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Store, Hash)]
|
||||
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
|
||||
#[non_exhaustive]
|
||||
pub struct DefineConfigStatement {
|
||||
pub inner: ConfigInner,
|
||||
pub if_not_exists: bool,
|
||||
pub overwrite: bool,
|
||||
}
|
||||
|
||||
#[revisioned(revision = 1)]
|
||||
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Store, Hash)]
|
||||
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
|
||||
#[non_exhaustive]
|
||||
pub enum ConfigInner {
|
||||
GraphQL(GraphQLConfig),
|
||||
}
|
||||
|
||||
impl DefineConfigStatement {
|
||||
/// Process this type returning a computed simple Value
|
||||
pub(crate) async fn compute(
|
||||
&self,
|
||||
ctx: &Context,
|
||||
opt: &Options,
|
||||
_doc: Option<&CursorDoc>,
|
||||
) -> Result<Value, Error> {
|
||||
// Allowed to run?
|
||||
opt.is_allowed(Action::Edit, ResourceKind::Config(ConfigKind::GraphQL), &Base::Db)?;
|
||||
// get transaction
|
||||
let txn = ctx.tx();
|
||||
|
||||
// check if already defined
|
||||
if txn.get_db_config(opt.ns()?, opt.db()?, "graphql").await.is_ok() {
|
||||
if self.if_not_exists {
|
||||
return Ok(Value::None);
|
||||
} else if !self.overwrite {
|
||||
return Err(Error::CgAlreadyExists {
|
||||
value: "graphql".to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let key = crate::key::database::cg::new(opt.ns()?, opt.db()?, "graphql");
|
||||
txn.get_or_add_ns(opt.ns()?, opt.strict).await?;
|
||||
txn.get_or_add_db(opt.ns()?, opt.db()?, opt.strict).await?;
|
||||
txn.set(key, self.clone(), None).await?;
|
||||
|
||||
// Clear the cache
|
||||
txn.clear();
|
||||
// Ok all good
|
||||
Ok(Value::None)
|
||||
}
|
||||
}
|
||||
|
||||
impl ConfigInner {
|
||||
pub fn name(&self) -> String {
|
||||
ConfigKind::from(self).to_string()
|
||||
}
|
||||
|
||||
pub fn try_into_graphql(self) -> Result<GraphQLConfig, Error> {
|
||||
match self {
|
||||
ConfigInner::GraphQL(g) => Ok(g),
|
||||
#[allow(unreachable_patterns)]
|
||||
c => Err(fail!("found {c} when a graphql config was expected")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ConfigInner> for ConfigKind {
|
||||
fn from(value: ConfigInner) -> Self {
|
||||
(&value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ConfigInner> for ConfigKind {
|
||||
fn from(value: &ConfigInner) -> Self {
|
||||
match value {
|
||||
ConfigInner::GraphQL(_) => ConfigKind::GraphQL,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl InfoStructure for DefineConfigStatement {
|
||||
fn structure(self) -> Value {
|
||||
match self.inner {
|
||||
ConfigInner::GraphQL(v) => Value::from(map!(
|
||||
"graphql" => v.structure()
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for DefineConfigStatement {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "DEFINE CONFIG")?;
|
||||
if self.if_not_exists {
|
||||
write!(f, " IF NOT EXISTS")?
|
||||
}
|
||||
if self.overwrite {
|
||||
write!(f, " OVERWRITE")?
|
||||
}
|
||||
|
||||
write!(f, "{}", self.inner)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for ConfigInner {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match &self {
|
||||
ConfigInner::GraphQL(v) => Display::fmt(v, f),
|
||||
}
|
||||
}
|
||||
}
|
|
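The `compute` method above encodes the usual IF NOT EXISTS / OVERWRITE semantics: if a `graphql` config is already stored, `IF NOT EXISTS` turns the statement into a no-op, plain `DEFINE CONFIG` fails with `CgAlreadyExists`, and `OVERWRITE` replaces it. A compact sketch of just that decision, with booleans standing in for the statement flags and a local enum standing in for the storage outcome:

```rust
#[derive(Debug, PartialEq)]
enum Outcome {
    Noop,          // IF NOT EXISTS and a config already exists
    AlreadyExists, // plain DEFINE CONFIG over an existing config -> Error::CgAlreadyExists
    Write,         // first definition, or OVERWRITE
}

fn define_config(exists: bool, if_not_exists: bool, overwrite: bool) -> Outcome {
    if exists {
        if if_not_exists {
            return Outcome::Noop;
        } else if !overwrite {
            return Outcome::AlreadyExists;
        }
    }
    Outcome::Write
}

fn main() {
    assert_eq!(define_config(false, false, false), Outcome::Write);
    assert_eq!(define_config(true, true, false), Outcome::Noop);
    assert_eq!(define_config(true, false, false), Outcome::AlreadyExists);
    assert_eq!(define_config(true, false, true), Outcome::Write);
}
```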
@ -1,5 +1,6 @@
|
|||
mod access;
|
||||
mod analyzer;
|
||||
pub mod config;
|
||||
mod database;
|
||||
mod deprecated;
|
||||
mod event;
|
||||
|
@ -14,6 +15,7 @@ mod user;
|
|||
|
||||
pub use access::DefineAccessStatement;
|
||||
pub use analyzer::DefineAnalyzerStatement;
|
||||
pub use config::DefineConfigStatement;
|
||||
pub use database::DefineDatabaseStatement;
|
||||
pub use event::DefineEventStatement;
|
||||
pub use field::DefineFieldStatement;
|
||||
|
@ -71,6 +73,7 @@ pub enum DefineStatement {
|
|||
Model(DefineModelStatement),
|
||||
#[revision(start = 2)]
|
||||
Access(DefineAccessStatement),
|
||||
Config(DefineConfigStatement),
|
||||
}
|
||||
|
||||
// Revision implementations
|
||||
|
@ -116,6 +119,7 @@ impl DefineStatement {
|
|||
Self::User(ref v) => v.compute(ctx, opt, doc).await,
|
||||
Self::Model(ref v) => v.compute(ctx, opt, doc).await,
|
||||
Self::Access(ref v) => v.compute(ctx, opt, doc).await,
|
||||
Self::Config(ref v) => v.compute(ctx, opt, doc).await,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -135,6 +139,7 @@ impl Display for DefineStatement {
|
|||
Self::Analyzer(v) => Display::fmt(v, f),
|
||||
Self::Model(v) => Display::fmt(v, f),
|
||||
Self::Access(v) => Display::fmt(v, f),
|
||||
Self::Config(v) => Display::fmt(v, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -151,6 +151,7 @@ impl InfoStatement {
|
|||
"params".to_string() => process(txn.all_db_params(ns, db).await?),
|
||||
"tables".to_string() => process(txn.all_tb(ns, db, version).await?),
|
||||
"users".to_string() => process(txn.all_db_users(ns, db).await?),
|
||||
"configs".to_string() => process(txn.all_db_configs(ns, db).await?),
|
||||
}),
|
||||
false => Value::from(map! {
|
||||
"accesses".to_string() => {
|
||||
|
@ -202,6 +203,13 @@ impl InfoStatement {
|
|||
}
|
||||
out.into()
|
||||
},
|
||||
"configs".to_string() => {
|
||||
let mut out = Object::default();
|
||||
for v in txn.all_db_configs(ns, db).await?.iter() {
|
||||
out.insert(v.inner.name(), v.to_string().into());
|
||||
}
|
||||
out.into()
|
||||
},
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
|
|
@ -71,6 +71,7 @@ pub(crate) static KEYWORDS: phf::Map<UniCase<&'static str>, TokenKind> = phf_map
|
|||
UniCase::ascii("ASSERT") => TokenKind::Keyword(Keyword::Assert),
|
||||
UniCase::ascii("AT") => TokenKind::Keyword(Keyword::At),
|
||||
UniCase::ascii("AUTHENTICATE") => TokenKind::Keyword(Keyword::Authenticate),
|
||||
UniCase::ascii("AUTO") => TokenKind::Keyword(Keyword::Auto),
|
||||
UniCase::ascii("BEARER") => TokenKind::Keyword(Keyword::Bearer),
|
||||
UniCase::ascii("BEFORE") => TokenKind::Keyword(Keyword::Before),
|
||||
UniCase::ascii("BEGIN") => TokenKind::Keyword(Keyword::Begin),
|
||||
|
@ -87,6 +88,7 @@ pub(crate) static KEYWORDS: phf::Map<UniCase<&'static str>, TokenKind> = phf_map
|
|||
UniCase::ascii("COMMENT") => TokenKind::Keyword(Keyword::Comment),
|
||||
UniCase::ascii("COMMIT") => TokenKind::Keyword(Keyword::Commit),
|
||||
UniCase::ascii("CONCURRENTLY") => TokenKind::Keyword(Keyword::Concurrently),
|
||||
UniCase::ascii("CONFIG") => TokenKind::Keyword(Keyword::Config),
|
||||
UniCase::ascii("CONTENT") => TokenKind::Keyword(Keyword::Content),
|
||||
UniCase::ascii("CONTINUE") => TokenKind::Keyword(Keyword::Continue),
|
||||
UniCase::ascii("CREATE") => TokenKind::Keyword(Keyword::Create),
|
||||
|
@ -113,6 +115,7 @@ pub(crate) static KEYWORDS: phf::Map<UniCase<&'static str>, TokenKind> = phf_map
|
|||
UniCase::ascii("ELSE") => TokenKind::Keyword(Keyword::Else),
|
||||
UniCase::ascii("END") => TokenKind::Keyword(Keyword::End),
|
||||
UniCase::ascii("ENFORCED") => TokenKind::Keyword(Keyword::Enforced),
|
||||
UniCase::ascii("EXCLUDE") => TokenKind::Keyword(Keyword::Exclude),
|
||||
UniCase::ascii("EXISTS") => TokenKind::Keyword(Keyword::Exists),
|
||||
UniCase::ascii("EXPLAIN") => TokenKind::Keyword(Keyword::Explain),
|
||||
UniCase::ascii("EXTEND_CANDIDATES") => TokenKind::Keyword(Keyword::ExtendCandidates),
|
||||
|
@ -129,7 +132,9 @@ pub(crate) static KEYWORDS: phf::Map<UniCase<&'static str>, TokenKind> = phf_map
|
|||
UniCase::ascii("FROM") => TokenKind::Keyword(Keyword::From),
|
||||
UniCase::ascii("FULL") => TokenKind::Keyword(Keyword::Full),
|
||||
UniCase::ascii("FUNCTION") => TokenKind::Keyword(Keyword::Function),
|
||||
UniCase::ascii("FUNCTIONS") => TokenKind::Keyword(Keyword::Functions),
|
||||
UniCase::ascii("GRANT") => TokenKind::Keyword(Keyword::Grant),
|
||||
UniCase::ascii("GRAPHQL") => TokenKind::Keyword(Keyword::Graphql),
|
||||
UniCase::ascii("GROUP") => TokenKind::Keyword(Keyword::Group),
|
||||
UniCase::ascii("HIGHLIGHTS") => TokenKind::Keyword(Keyword::Highlights),
|
||||
UniCase::ascii("HNSW") => TokenKind::Keyword(Keyword::Hnsw),
|
||||
|
@ -216,6 +221,7 @@ pub(crate) static KEYWORDS: phf::Map<UniCase<&'static str>, TokenKind> = phf_map
|
|||
UniCase::ascii("START") => TokenKind::Keyword(Keyword::Start),
|
||||
UniCase::ascii("STRUCTURE") => TokenKind::Keyword(Keyword::Structure),
|
||||
UniCase::ascii("TABLE") => TokenKind::Keyword(Keyword::Table),
|
||||
UniCase::ascii("TABLES") => TokenKind::Keyword(Keyword::Tables),
|
||||
UniCase::ascii("TB") => TokenKind::Keyword(Keyword::Table),
|
||||
UniCase::ascii("TEMPFILES") => TokenKind::Keyword(Keyword::TempFiles),
|
||||
UniCase::ascii("TERMS_CACHE") => TokenKind::Keyword(Keyword::TermsCache),
|
||||
|
|
|
@ -3,6 +3,9 @@ use reblessive::Stk;
|
|||
use crate::cnf::EXPERIMENTAL_BEARER_ACCESS;
|
||||
use crate::sql::access_type::JwtAccessVerify;
|
||||
use crate::sql::index::HnswParams;
|
||||
use crate::sql::statements::define::config::graphql::{GraphQLConfig, TableConfig};
|
||||
use crate::sql::statements::define::config::ConfigInner;
|
||||
use crate::sql::statements::define::DefineConfigStatement;
|
||||
use crate::sql::Value;
|
||||
use crate::{
|
||||
sql::{
|
||||
|
@ -11,10 +14,10 @@ use crate::{
|
|||
filter::Filter,
|
||||
index::{Distance, VectorType},
|
||||
statements::{
|
||||
DefineAccessStatement, DefineAnalyzerStatement, DefineDatabaseStatement,
|
||||
DefineEventStatement, DefineFieldStatement, DefineFunctionStatement,
|
||||
DefineIndexStatement, DefineNamespaceStatement, DefineParamStatement, DefineStatement,
|
||||
DefineTableStatement, DefineUserStatement,
|
||||
define::config::graphql, DefineAccessStatement, DefineAnalyzerStatement,
|
||||
DefineDatabaseStatement, DefineEventStatement, DefineFieldStatement,
|
||||
DefineFunctionStatement, DefineIndexStatement, DefineNamespaceStatement,
|
||||
DefineParamStatement, DefineStatement, DefineTableStatement, DefineUserStatement,
|
||||
},
|
||||
table_type,
|
||||
tokenizer::Tokenizer,
|
||||
|
@ -56,6 +59,7 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("ANALYZER") => self.parse_define_analyzer().map(DefineStatement::Analyzer),
|
||||
t!("ACCESS") => self.parse_define_access(ctx).await.map(DefineStatement::Access),
|
||||
t!("CONFIG") => self.parse_define_config().map(DefineStatement::Config),
|
||||
_ => unexpected!(self, next, "a define statement keyword"),
|
||||
}
|
||||
}
|
||||
|
@ -1213,6 +1217,113 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn parse_define_config(&mut self) -> ParseResult<DefineConfigStatement> {
|
||||
let (if_not_exists, overwrite) = if self.eat(t!("IF")) {
|
||||
expected!(self, t!("NOT"));
|
||||
expected!(self, t!("EXISTS"));
|
||||
(true, false)
|
||||
} else if self.eat(t!("OVERWRITE")) {
|
||||
(false, true)
|
||||
} else {
|
||||
(false, false)
|
||||
};
|
||||
|
||||
let next = self.next();
|
||||
let inner = match next.kind {
|
||||
t!("GRAPHQL") => self.parse_graphql_config().map(ConfigInner::GraphQL)?,
|
||||
_ => unexpected!(self, next, "a type of config"),
|
||||
};
|
||||
|
||||
Ok(DefineConfigStatement {
|
||||
inner,
|
||||
if_not_exists,
|
||||
overwrite,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_graphql_config(&mut self) -> ParseResult<GraphQLConfig> {
|
||||
use graphql::{FunctionsConfig, TablesConfig};
|
||||
let mut tmp_tables = Option::<TablesConfig>::None;
|
||||
let mut tmp_fncs = Option::<FunctionsConfig>::None;
|
||||
loop {
|
||||
match self.peek_kind() {
|
||||
t!("NONE") => {
|
||||
self.pop_peek();
|
||||
tmp_tables = Some(TablesConfig::None);
|
||||
tmp_fncs = Some(FunctionsConfig::None);
|
||||
}
|
||||
t!("AUTO") => {
|
||||
self.pop_peek();
|
||||
tmp_tables = Some(TablesConfig::Auto);
|
||||
tmp_fncs = Some(FunctionsConfig::Auto);
|
||||
}
|
||||
t!("TABLES") => {
|
||||
self.pop_peek();
|
||||
|
||||
let next = self.next();
|
||||
match next.kind {
|
||||
t!("INCLUDE") => {
|
||||
tmp_tables =
|
||||
Some(TablesConfig::Include(self.parse_graphql_table_configs()?))
|
||||
}
|
||||
t!("EXCLUDE") => {
|
||||
tmp_tables =
|
||||
Some(TablesConfig::Include(self.parse_graphql_table_configs()?))
|
||||
}
|
||||
t!("NONE") => {
|
||||
tmp_tables = Some(TablesConfig::None);
|
||||
}
|
||||
t!("AUTO") => {
|
||||
tmp_tables = Some(TablesConfig::Auto);
|
||||
}
|
||||
_ => unexpected!(self, next, "`NONE`, `AUTO`, `INCLUDE` or `EXCLUDE`"),
|
||||
}
|
||||
}
|
||||
t!("FUNCTIONS") => {
|
||||
self.pop_peek();
|
||||
|
||||
let next = self.next();
|
||||
match next.kind {
|
||||
t!("INCLUDE") => {}
|
||||
t!("EXCLUDE") => {}
|
||||
t!("NONE") => {
|
||||
tmp_fncs = Some(FunctionsConfig::None);
|
||||
}
|
||||
t!("AUTO") => {
|
||||
tmp_fncs = Some(FunctionsConfig::Auto);
|
||||
}
|
||||
_ => unexpected!(self, next, "`NONE`, `AUTO`, `INCLUDE` or `EXCLUDE`"),
|
||||
}
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
|
||||
Ok(GraphQLConfig {
|
||||
tables: tmp_tables.unwrap_or_default(),
|
||||
functions: tmp_fncs.unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_graphql_table_configs(&mut self) -> ParseResult<Vec<graphql::TableConfig>> {
|
||||
let mut acc = vec![];
|
||||
loop {
|
||||
match self.peek_kind() {
|
||||
x if Self::kind_is_identifier(x) => {
|
||||
let name: Ident = self.next_token_value()?;
|
||||
acc.push(TableConfig {
|
||||
name: name.0,
|
||||
});
|
||||
}
|
||||
_ => unexpected!(self, self.next(), "a table config"),
|
||||
}
|
||||
if !self.eat(t!(",")) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(acc)
|
||||
}
|
||||
|
||||
pub fn parse_relation_schema(&mut self) -> ParseResult<table_type::Relation> {
|
||||
let mut res = table_type::Relation {
|
||||
from: None,
|
||||
|
|
|
@ -199,6 +199,7 @@ impl Parser<'_> {
|
|||
if_exists,
|
||||
})
|
||||
}
|
||||
// TODO(raphaeldarley): add Config here
|
||||
_ => unexpected!(self, next, "a remove statement keyword"),
|
||||
};
|
||||
Ok(res)
|
||||
|
|
|
@ -37,6 +37,7 @@ keyword! {
|
|||
Assert => "ASSERT",
|
||||
At => "AT",
|
||||
Authenticate => "AUTHENTICATE",
|
||||
Auto => "AUTO",
|
||||
Bearer => "BEARER",
|
||||
Before => "BEFORE",
|
||||
Begin => "BEGIN",
|
||||
|
@ -53,6 +54,7 @@ keyword! {
|
|||
Comment => "COMMENT",
|
||||
Commit => "COMMIT",
|
||||
Concurrently => "CONCURRENTLY",
|
||||
Config => "CONFIG",
|
||||
Content => "CONTENT",
|
||||
Continue => "CONTINUE",
|
||||
Create => "CREATE",
|
||||
|
@ -76,6 +78,7 @@ keyword! {
|
|||
Else => "ELSE",
|
||||
End => "END",
|
||||
Enforced => "ENFORCED",
|
||||
Exclude => "EXCLUDE",
|
||||
Exists => "EXISTS",
|
||||
Explain => "EXPLAIN",
|
||||
ExtendCandidates => "EXTEND_CANDIDATES",
|
||||
|
@ -89,7 +92,9 @@ keyword! {
|
|||
From => "FROM",
|
||||
Full => "FULL",
|
||||
Function => "FUNCTION",
|
||||
Functions => "FUNCTIONS",
|
||||
Grant => "GRANT",
|
||||
Graphql => "GRAPHQL",
|
||||
Group => "GROUP",
|
||||
Highlights => "HIGHLIGHTS",
|
||||
Hnsw => "HNSW",
|
||||
|
@ -171,6 +176,7 @@ keyword! {
|
|||
Start => "START",
|
||||
Structure => "STRUCTURE",
|
||||
Table => "TABLE",
|
||||
Tables => "TABLES",
|
||||
TempFiles => "TEMPFILES",
|
||||
TermsCache => "TERMS_CACHE",
|
||||
TermsOrder => "TERMS_ORDER",
|
||||
|
|
|
@ -45,6 +45,7 @@ async fn define_alter_table() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -62,6 +63,7 @@ async fn define_alter_table() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -79,6 +81,7 @@ async fn define_alter_table() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -117,6 +120,7 @@ async fn define_alter_table_if_exists() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
|
|
@ -92,6 +92,7 @@ async fn define_statement_function() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: { test: 'DEFINE FUNCTION fn::test($first: string, $last: string) { RETURN $first + $last; } PERMISSIONS FULL' },
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -123,6 +124,7 @@ async fn define_statement_table_drop() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -154,6 +156,7 @@ async fn define_statement_table_schemaless() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -180,6 +183,7 @@ async fn define_statement_table_schemafull() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -209,6 +213,7 @@ async fn define_statement_table_schemaful() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -248,6 +253,7 @@ async fn define_statement_table_foreigntable() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -280,6 +286,7 @@ async fn define_statement_table_foreigntable() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -1724,6 +1731,7 @@ async fn define_statement_analyzer() -> Result<(), Error> {
|
|||
english: 'DEFINE ANALYZER english TOKENIZERS BLANK,CLASS FILTERS LOWERCASE,SNOWBALL(ENGLISH)',
|
||||
htmlAnalyzer: 'DEFINE ANALYZER htmlAnalyzer FUNCTION fn::stripHtml TOKENIZERS BLANK,CLASS'
|
||||
},
|
||||
configs: {},
|
||||
functions: {
|
||||
stripHtml: "DEFINE FUNCTION fn::stripHtml($html: string) { RETURN string::replace($html, /<[^>]*>/, ''); } PERMISSIONS FULL"
|
||||
},
|
||||
|
@ -2049,8 +2057,8 @@ async fn permissions_checks_define_function() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { greet: \"DEFINE FUNCTION fn::greet() { RETURN 'Hello'; } PERMISSIONS FULL\" }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { greet: \"DEFINE FUNCTION fn::greet() { RETURN 'Hello'; } PERMISSIONS FULL\" }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -2091,8 +2099,8 @@ async fn permissions_checks_define_analyzer() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { analyzer: 'DEFINE ANALYZER analyzer TOKENIZERS BLANK' }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
vec!["{ accesses: { }, analyzers: { analyzer: 'DEFINE ANALYZER analyzer TOKENIZERS BLANK' }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -2217,8 +2225,8 @@ async fn permissions_checks_define_access_db() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { access: \"DEFINE ACCESS access ON DATABASE TYPE JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 1h, FOR SESSION NONE\" }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
vec!["{ accesses: { access: \"DEFINE ACCESS access ON DATABASE TYPE JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 1h, FOR SESSION NONE\" }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -2343,8 +2351,8 @@ async fn permissions_checks_define_user_db() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { user: \"DEFINE USER user ON DATABASE PASSHASH 'secret' ROLES VIEWER DURATION FOR TOKEN 15m, FOR SESSION 6h\" } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { user: \"DEFINE USER user ON DATABASE PASSHASH 'secret' ROLES VIEWER DURATION FOR TOKEN 15m, FOR SESSION 6h\" } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -2385,8 +2393,8 @@ async fn permissions_checks_define_access_record() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { account: \"DEFINE ACCESS account ON DATABASE TYPE RECORD WITH JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 15m, FOR SESSION 12h\" }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
vec!["{ accesses: { account: \"DEFINE ACCESS account ON DATABASE TYPE RECORD WITH JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 15m, FOR SESSION 12h\" }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -2427,8 +2435,8 @@ async fn permissions_checks_define_param() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { param: \"DEFINE PARAM $param VALUE 'foo' PERMISSIONS FULL\" }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { param: \"DEFINE PARAM $param VALUE 'foo' PERMISSIONS FULL\" }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -2466,8 +2474,8 @@ async fn permissions_checks_define_table() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { TB: 'DEFINE TABLE TB TYPE ANY SCHEMALESS PERMISSIONS NONE' }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { TB: 'DEFINE TABLE TB TYPE ANY SCHEMALESS PERMISSIONS NONE' }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"]
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -2653,6 +2661,7 @@ async fn define_statement_table_permissions() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -3057,6 +3066,7 @@ async fn define_table_relation_redefinition_info() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -3078,6 +3088,7 @@ async fn define_table_relation_redefinition_info() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -3099,6 +3110,7 @@ async fn define_table_relation_redefinition_info() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
|
|
@ -290,8 +290,8 @@ async fn permissions_checks_info_db() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -554,7 +554,7 @@ async fn access_info_redacted() {
|
|||
assert!(out.is_ok(), "Unexpected error: {:?}", out);
|
||||
|
||||
let out_expected =
|
||||
r#"{ accesses: { access: "DEFINE ACCESS access ON DATABASE TYPE RECORD WITH JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 1h, FOR SESSION NONE" }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"#.to_string();
|
||||
r#"{ accesses: { access: "DEFINE ACCESS access ON DATABASE TYPE RECORD WITH JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 1h, FOR SESSION NONE" }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"#.to_string();
|
||||
let out_str = out.unwrap().to_string();
|
||||
assert_eq!(
|
||||
out_str, out_expected,
|
||||
|
@ -627,7 +627,7 @@ async fn access_info_redacted_structure() {
|
|||
assert!(out.is_ok(), "Unexpected error: {:?}", out);
|
||||
|
||||
let out_expected =
|
||||
r#"{ accesses: [{ base: 'DATABASE', duration: { session: 6h, token: 15m }, kind: { jwt: { issuer: { alg: 'HS512', key: '[REDACTED]' }, verify: { alg: 'HS512', key: '[REDACTED]' } }, kind: 'RECORD' }, name: 'access' }], analyzers: [], functions: [], models: [], params: [], tables: [], users: [] }"#.to_string();
|
||||
r#"{ accesses: [{ base: 'DATABASE', duration: { session: 6h, token: 15m }, kind: { jwt: { issuer: { alg: 'HS512', key: '[REDACTED]' }, verify: { alg: 'HS512', key: '[REDACTED]' } }, kind: 'RECORD' }, name: 'access' }], analyzers: [], configs: [], functions: [], models: [], params: [], tables: [], users: [] }"#.to_string();
|
||||
let out_str = out.unwrap().to_string();
|
||||
assert_eq!(
|
||||
out_str, out_expected,
|
||||
|
@ -652,7 +652,7 @@ async fn function_info_structure() {
|
|||
assert!(out.is_ok(), "Unexpected error: {:?}", out);
|
||||
|
||||
let out_expected =
|
||||
r#"{ accesses: [], analyzers: [], functions: [{ args: [['name', 'string']], block: "{ RETURN 'Hello, ' + $name + '!'; }", name: 'example', permissions: true, returns: 'string' }], models: [], params: [], tables: [], users: [] }"#.to_string();
|
||||
r#"{ accesses: [], analyzers: [], configs: [], functions: [{ args: [['name', 'string']], block: "{ RETURN 'Hello, ' + $name + '!'; }", name: 'example', permissions: true, returns: 'string' }], models: [], params: [], tables: [], users: [] }"#.to_string();
|
||||
let out_str = out.unwrap().to_string();
|
||||
assert_eq!(
|
||||
out_str, out_expected,
|
||||
|
|
|
@ -28,6 +28,7 @@ async fn define_global_param() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: { test: 'DEFINE PARAM $test VALUE 12345 PERMISSIONS FULL' },
|
||||
|
|
|
@ -282,6 +282,7 @@ async fn relate_enforced() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
|
|
@ -37,6 +37,7 @@ async fn remove_statement_table() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -167,6 +168,7 @@ async fn remove_statement_analyzer() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
@ -666,8 +668,8 @@ async fn permissions_checks_remove_function() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { greet: \"DEFINE FUNCTION fn::greet() { RETURN 'Hello'; } PERMISSIONS FULL\" }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { greet: \"DEFINE FUNCTION fn::greet() { RETURN 'Hello'; } PERMISSIONS FULL\" }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -708,8 +710,8 @@ async fn permissions_checks_remove_analyzer() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { analyzer: 'DEFINE ANALYZER analyzer TOKENIZERS BLANK' }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { analyzer: 'DEFINE ANALYZER analyzer TOKENIZERS BLANK' }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -834,8 +836,8 @@ async fn permissions_checks_remove_db_access() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { access: \"DEFINE ACCESS access ON DATABASE TYPE JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 1h, FOR SESSION NONE\" }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { access: \"DEFINE ACCESS access ON DATABASE TYPE JWT ALGORITHM HS512 KEY '[REDACTED]' WITH ISSUER KEY '[REDACTED]' DURATION FOR TOKEN 1h, FOR SESSION NONE\" }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -960,8 +962,8 @@ async fn permissions_checks_remove_db_user() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { user: \"DEFINE USER user ON DATABASE PASSHASH 'secret' ROLES VIEWER DURATION FOR TOKEN 1h, FOR SESSION NONE\" } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { user: \"DEFINE USER user ON DATABASE PASSHASH 'secret' ROLES VIEWER DURATION FOR TOKEN 1h, FOR SESSION NONE\" } }"],
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -1002,8 +1004,8 @@ async fn permissions_checks_remove_param() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { param: \"DEFINE PARAM $param VALUE 'foo' PERMISSIONS FULL\" }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { param: \"DEFINE PARAM $param VALUE 'foo' PERMISSIONS FULL\" }, tables: { }, users: { } }"],
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
@ -1044,8 +1046,8 @@ async fn permissions_checks_remove_table() {
|
|||
|
||||
// Define the expected results for the check statement when the test statement succeeded and when it failed
|
||||
let check_results = [
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, functions: { }, models: { }, params: { }, tables: { TB: 'DEFINE TABLE TB TYPE ANY SCHEMALESS PERMISSIONS NONE' }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { }, users: { } }"],
|
||||
vec!["{ accesses: { }, analyzers: { }, configs: { }, functions: { }, models: { }, params: { }, tables: { TB: 'DEFINE TABLE TB TYPE ANY SCHEMALESS PERMISSIONS NONE' }, users: { } }"],
|
||||
];
|
||||
|
||||
let test_cases = [
|
||||
|
|
|
@ -256,6 +256,7 @@ async fn loose_mode_all_ok() -> Result<(), Error> {
|
|||
"{
|
||||
accesses: {},
|
||||
analyzers: {},
|
||||
configs: {},
|
||||
functions: {},
|
||||
models: {},
|
||||
params: {},
|
||||
|
|
|
@ -2,13 +2,20 @@ mod common;
|
|||
|
||||
#[cfg(surrealdb_unstable)]
|
||||
mod graphql_integration {
|
||||
use std::time::Duration;
|
||||
use std::{str::FromStr, time::Duration};
|
||||
|
||||
macro_rules! assert_equal_arrs {
|
||||
($lhs: expr, $rhs: expr) => {
|
||||
let lhs = $lhs.as_array().unwrap().iter().collect::<std::collections::HashSet<_>>();
|
||||
let rhs = $rhs.as_array().unwrap().iter().collect::<std::collections::HashSet<_>>();
|
||||
assert_eq!(lhs, rhs)
|
||||
};
|
||||
}
|
||||
|
||||
use http::header;
|
||||
use reqwest::Client;
|
||||
use serde_json::json;
|
||||
use test_log::test;
|
||||
use tracing::debug;
|
||||
use ulid::Ulid;
|
||||
|
||||
use crate::common::{PASS, USER};
|
||||
|
@ -32,6 +39,28 @@ mod graphql_integration {
|
|||
.default_headers(headers)
|
||||
.build()?;
|
||||
|
||||
// check errors with no config
|
||||
{
|
||||
let res = client.post(gql_url).body("").send().await?;
|
||||
assert_eq!(res.status(), 400);
|
||||
let body = res.text().await?;
|
||||
assert!(body.contains("NotConfigured"), "body: {body}")
|
||||
}
|
||||
|
||||
// add schema and data
|
||||
{
|
||||
let res = client
|
||||
.post(sql_url)
|
||||
.body(
|
||||
r#"
|
||||
DEFINE CONFIG GRAPHQL AUTO;
|
||||
"#,
|
||||
)
|
||||
.send()
|
||||
.await?;
|
||||
assert_eq!(res.status(), 200);
|
||||
}
|
||||
|
||||
// check errors with no tables
|
||||
{
|
||||
let res = client.post(gql_url).body("").send().await?;
|
||||
|
@ -209,6 +238,7 @@ mod graphql_integration {
|
|||
.basic_auth(USER, Some(PASS))
|
||||
.body(
|
||||
r#"
|
||||
DEFINE CONFIG GRAPHQL AUTO;
|
||||
DEFINE ACCESS user ON DATABASE TYPE RECORD
|
||||
SIGNUP ( CREATE user SET email = $email, pass = crypto::argon2::generate($pass) )
|
||||
SIGNIN ( SELECT * FROM user WHERE email = $email AND crypto::argon2::compare(pass, $pass) )
|
||||
|
@ -223,9 +253,9 @@ mod graphql_integration {
|
|||
)
|
||||
.send()
|
||||
.await?;
|
||||
assert_eq!(res.status(), 200);
|
||||
// assert_eq!(res.status(), 200);
|
||||
let body = res.text().await?;
|
||||
debug!(?body);
|
||||
eprintln!("\n\n\n\n\n{body}\n\n\n\n\n\n");
|
||||
}
|
||||
|
||||
// check works with root
|
||||
|
@ -236,7 +266,7 @@ mod graphql_integration {
|
|||
.body(json!({"query": r#"query{foo{id, val}}"#}).to_string())
|
||||
.send()
|
||||
.await?;
|
||||
assert_eq!(res.status(), 200);
|
||||
// assert_eq!(res.status(), 200);
|
||||
let body = res.text().await?;
|
||||
let expected =
|
||||
json!({"data":{"foo":[{"id":"foo:1","val":42},{"id":"foo:2","val":43}]}});
|
||||
|
@ -276,4 +306,120 @@ mod graphql_integration {
|
|||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test(tokio::test)]
|
||||
async fn config() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let (addr, _server) = common::start_server_gql_without_auth().await.unwrap();
|
||||
let gql_url = &format!("http://{addr}/graphql");
|
||||
let sql_url = &format!("http://{addr}/sql");
|
||||
|
||||
let mut headers = reqwest::header::HeaderMap::new();
|
||||
let ns = Ulid::new().to_string();
|
||||
let db = Ulid::new().to_string();
|
||||
headers.insert("surreal-ns", ns.parse()?);
|
||||
headers.insert("surreal-db", db.parse()?);
|
||||
headers.insert(header::ACCEPT, "application/json".parse()?);
|
||||
let client = reqwest::Client::builder()
|
||||
.connect_timeout(Duration::from_millis(10))
|
||||
.default_headers(headers)
|
||||
.build()?;
|
||||
|
||||
{
|
||||
let res = client.post(gql_url).body("").send().await?;
|
||||
assert_eq!(res.status(), 400);
|
||||
let body = res.text().await?;
|
||||
assert!(body.contains("NotConfigured"));
|
||||
}
|
||||
|
||||
// add schema and data
|
||||
{
|
||||
let res = client
|
||||
.post(sql_url)
|
||||
.body(
|
||||
r#"
|
||||
DEFINE CONFIG GRAPHQL AUTO;
|
||||
DEFINE TABLE foo;
|
||||
DEFINE FIELD val ON foo TYPE string;
|
||||
DEFINE TABLE bar;
|
||||
DEFINE FIELD val ON bar TYPE string;
|
||||
"#,
|
||||
)
|
||||
.send()
|
||||
.await?;
|
||||
assert_eq!(res.status(), 200);
|
||||
}
|
||||
|
||||
{
|
||||
let res = client
|
||||
.post(gql_url)
|
||||
.body(json!({ "query": r#"{__schema {queryType {fields {name}}}}"# }).to_string())
|
||||
.send()
|
||||
.await?;
|
||||
assert_eq!(res.status(), 200);
|
||||
let body = res.text().await?;
|
||||
let res_obj = serde_json::Value::from_str(&body).unwrap();
|
||||
let fields = &res_obj["data"]["__schema"]["queryType"]["fields"];
|
||||
let expected_fields = json!(
|
||||
[
|
||||
{
|
||||
"name": "foo"
|
||||
},
|
||||
{
|
||||
"name": "bar"
|
||||
},
|
||||
{
|
||||
"name": "_get_foo"
|
||||
},
|
||||
{
|
||||
"name": "_get_bar"
|
||||
},
|
||||
{
|
||||
"name": "_get"
|
||||
}
|
||||
]
|
||||
);
|
||||
assert_equal_arrs!(fields, &expected_fields);
|
||||
}
|
||||
|
||||
{
|
||||
let res = client
|
||||
.post(sql_url)
|
||||
.body(
|
||||
r#"
|
||||
DEFINE CONFIG OVERWRITE GRAPHQL TABLES INCLUDE foo;
|
||||
"#,
|
||||
)
|
||||
.send()
|
||||
.await?;
|
||||
assert_eq!(res.status(), 200);
|
||||
}
|
||||
|
||||
{
|
||||
let res = client
|
||||
.post(gql_url)
|
||||
.body(json!({ "query": r#"{__schema {queryType {fields {name}}}}"# }).to_string())
|
||||
.send()
|
||||
.await?;
|
||||
assert_eq!(res.status(), 200);
|
||||
let body = res.text().await?;
|
||||
let res_obj = serde_json::Value::from_str(&body).unwrap();
|
||||
let fields = &res_obj["data"]["__schema"]["queryType"]["fields"];
|
||||
let expected_fields = json!(
|
||||
[
|
||||
{
|
||||
"name": "foo"
|
||||
},
|
||||
{
|
||||
"name": "_get_foo"
|
||||
},
|
||||
{
|
||||
"name": "_get"
|
||||
}
|
||||
]
|
||||
);
|
||||
assert_equal_arrs!(fields, &expected_fields);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|