Drop the old parser (#3814)
This commit is contained in: parent 9f97f0bc48, commit e842515882
139 changed files with 584 additions and 13515 deletions
@@ -49,7 +49,6 @@ arbitrary = [
 	"geo-types/arbitrary",
 	"uuid/arbitrary",
 ]
-experimental-parser = ["dep:phf", "dep:unicase"]
 # Private features
 kv-fdb = ["tokio/time"]
@@ -109,7 +108,7 @@ num_cpus = "1.16.0"
 object_store = { version = "0.8.0", optional = false }
 once_cell = "1.18.0"
 pbkdf2 = { version = "0.12.2", features = ["simple"] }
-phf = { version = "0.11.2", features = ["macros", "unicase"], optional = true }
+phf = { version = "0.11.2", features = ["macros", "unicase"] }
 pin-project-lite = "0.2.13"
 quick_cache = "0.4.0"
 radix_trie = { version = "0.2.1", features = ["serde"] }
@@ -144,7 +143,7 @@ tikv = { version = "0.2.0-surreal.2", default-features = false, package = "surre
 tracing = "0.1.40"
 trice = "0.4.0"
 ulid = { version = "1.1.0", features = ["serde"] }
-unicase = { version = "2.7.0", optional = true }
+unicase = "2.7.0"
 url = "2.5.0"
 
 [dev-dependencies]
@@ -53,30 +53,6 @@ pub fn quote_str(s: &str) -> String {
 
 #[inline]
 pub fn quote_plain_str(s: &str) -> String {
-	#[cfg(not(feature = "experimental-parser"))]
-	{
-		if crate::syn::thing(s).is_ok() {
-			let mut ret = quote_str(s);
-			ret.insert(0, 's');
-			return ret;
-		}
-
-		let mut ret = quote_str(s);
-		// HACK: We need to prefix strands which look like records, uuids, or datetimes with an `s`
-		// otherwise the strands will parsed as a different type when parsed again.
-		// This is not required for the new parser.
-		// Because this only required for the old parse we just reference the partial parsers
-		// directly to avoid having to create a common interface between the old and new parser.
-		if crate::syn::v1::literal::uuid(&ret).is_ok()
-			|| crate::syn::v1::literal::datetime(&ret).is_ok()
-			|| crate::syn::thing(&ret).is_ok()
-		{
-			ret.insert(0, 's');
-		}
-		ret
-	}
-
-	#[cfg(feature = "experimental-parser")]
 	quote_str(s)
 }
@@ -95,7 +71,6 @@ pub fn escape_rid(s: &str) -> Cow<'_, str> {
 #[inline]
 /// Escapes an ident if necessary
 pub fn escape_ident(s: &str) -> Cow<'_, str> {
-	#[cfg(feature = "experimental-parser")]
 	if let Some(x) = escape_reserved_keyword(s) {
 		return Cow::Owned(x);
 	}
@@ -115,37 +90,10 @@ pub fn escape_normal<'a>(s: &'a str, l: char, r: char, e: &str) -> Cow<'a, str>
 	Cow::Borrowed(s)
 }
 
-#[cfg(not(feature = "experimental-parser"))]
-#[inline]
-pub fn escape_numeric<'a>(s: &'a str, l: char, r: char, e: &str) -> Cow<'a, str> {
-	// Presume this is numeric
-	let mut numeric = true;
-	// Loop over each character
-	for x in s.bytes() {
-		// Check if character is allowed
-		if !(x.is_ascii_alphanumeric() || x == b'_') {
-			return Cow::Owned(format!("{l}{}{r}", s.replace(r, e)));
-		}
-		// Check if character is non-numeric
-		if !x.is_ascii_digit() {
-			numeric = false;
-		}
-	}
-	// Output the id value
-	match numeric {
-		// This is numeric so escape it
-		true => Cow::Owned(format!("{l}{}{r}", s.replace(r, e))),
-		// No need to escape the value
-		_ => Cow::Borrowed(s),
-	}
-}
-
-#[cfg(feature = "experimental-parser")]
 pub fn escape_reserved_keyword(s: &str) -> Option<String> {
-	crate::syn::v2::could_be_reserved_keyword(s).then(|| format!("`{}`", s))
+	crate::syn::could_be_reserved_keyword(s).then(|| format!("`{}`", s))
 }
 
-#[cfg(feature = "experimental-parser")]
 #[inline]
 pub fn escape_numeric<'a>(s: &'a str, l: char, r: char, e: &str) -> Cow<'a, str> {
 	// Loop over each character
@@ -127,22 +127,3 @@ pub(crate) mod no_nul_bytes {
 		deserializer.deserialize_string(NoNulBytesVisitor)
 	}
 }
-
-#[cfg(test)]
-mod test {
-
-	#[cfg(not(feature = "experimental-parser"))]
-	#[test]
-	fn ensure_strands_are_prefixed() {
-		use super::Strand;
-
-		let strand = Strand("a:b".to_owned());
-		assert_eq!(strand.to_string().as_str(), "s'a:b'");
-
-		let strand = Strand("2012-04-23T18:25:43.0000511Z".to_owned());
-		assert_eq!(strand.to_string().as_str(), "s'2012-04-23T18:25:43.0000511Z'");
-
-		let strand = Strand("b19bc00b-aa98-486c-ae37-c8e1c54295b1".to_owned());
-		assert_eq!(strand.to_string().as_str(), "s'b19bc00b-aa98-486c-ae37-c8e1c54295b1'");
-	}
-}
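The removed test above documents the behaviour that goes away with the old parser: strands whose content looks like a record id, datetime, or uuid had to be serialised with an `s` prefix so they would round-trip as strings. A minimal sketch of that dropped behaviour, where the hypothetical `looks_like_record_id` stands in for the removed `syn::v1` partial parsers:

// Sketch of the dropped behaviour only; `looks_like_record_id` is a stand-in
// for the removed partial parsers (crate::syn::v1::literal::* / crate::syn::thing).
fn looks_like_record_id(s: &str) -> bool {
	// "a:b" would otherwise re-parse as the record id a:b instead of a string
	s.contains(':')
}

fn quote_plain_str_old(s: &str) -> String {
	let mut ret = format!("'{}'", s);
	if looks_like_record_id(s) {
		// Prefix with `s` so the strand stays a string when parsed again.
		ret.insert(0, 's');
	}
	ret
}

fn main() {
	assert_eq!(quote_plain_str_old("a:b"), "s'a:b'");
	assert_eq!(quote_plain_str_old("plain text"), "'plain text'");
}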
@@ -1,6 +1,4 @@
-#[cfg(feature = "experimental-parser")]
-use super::v2::token::Span;
-#[cfg(feature = "experimental-parser")]
+use super::token::Span;
 use std::ops::Range;
 
 /// A human readable location inside a string.
@@ -49,7 +47,6 @@ impl Location {
 		unreachable!()
 	}
 
-	#[cfg(feature = "experimental-parser")]
 	pub fn of_offset(source: &str, offset: usize) -> Self {
 		assert!(offset <= source.len(), "tried to find location of substring in unrelated string");
 		// Bytes of input prior to line being iterated.
@@ -77,7 +74,6 @@ impl Location {
 		unreachable!()
 	}
 
-	#[cfg(feature = "experimental-parser")]
 	pub fn of_span_start(source: &str, span: Span) -> Self {
 		// Bytes of input before substr.
@@ -85,14 +81,12 @@ impl Location {
 		Self::of_offset(source, offset)
 	}
 
-	#[cfg(feature = "experimental-parser")]
 	pub fn of_span_end(source: &str, span: Span) -> Self {
 		// Bytes of input before substr.
 		let offset = span.offset as usize + span.len as usize;
 		Self::of_offset(source, offset)
 	}
 
-	#[cfg(feature = "experimental-parser")]
 	pub fn range_of_span(source: &str, span: Span) -> Range<Self> {
 		// Bytes of input before substr.
 		let offset = span.offset as usize;
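These `Location` helpers are now compiled unconditionally. As the hunk above shows, a span's end offset is its start offset plus its length; a small illustrative sketch, assuming the `Span { offset, len }` layout used above:

// Illustrative only, assuming Span carries a byte offset and length as above.
struct Span {
	offset: u32,
	len: u32,
}

// Mirrors of_span_start / of_span_end: the byte range a span covers in the source.
fn byte_range(span: &Span) -> std::ops::Range<usize> {
	let start = span.offset as usize;
	start..start + span.len as usize
}

fn main() {
	let span = Span { offset: 7, len: 3 };
	assert_eq!(byte_range(&span), 7..10);
}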
@@ -1,4 +1,4 @@
-use crate::syn::v2::{
+use crate::syn::{
 	lexer::{
 		unicode::{byte, chars},
 		Error, Lexer,
@@ -1,4 +1,4 @@
-use crate::syn::v2::{
+use crate::syn::{
 	lexer::{CharError, Lexer},
 	token::{t, Token},
 };
@@ -5,7 +5,7 @@ use thiserror::Error;
 
 use crate::{
 	sql::Datetime,
-	syn::v2::token::{Token, TokenKind},
+	syn::token::{Token, TokenKind},
 };
 
 use super::{Error as LexError, Lexer};
@@ -6,7 +6,7 @@ use crate::{
 		Duration, SECONDS_PER_DAY, SECONDS_PER_HOUR, SECONDS_PER_MINUTE, SECONDS_PER_WEEK,
 		SECONDS_PER_YEAR,
 	},
-	syn::v2::token::{Token, TokenKind},
+	syn::token::{Token, TokenKind},
 };
 
 use super::{Error as LexError, Lexer};
@@ -2,8 +2,8 @@ use std::mem;
 
 use unicase::UniCase;
 
-use crate::syn::v2::lexer::{keywords::KEYWORDS, Error, Lexer};
-use crate::syn::v2::token::{NumberKind, Token, TokenKind};
+use crate::syn::lexer::{keywords::KEYWORDS, Error, Lexer};
+use crate::syn::token::{NumberKind, Token, TokenKind};
 
 use super::unicode::{chars, U8Ext};
@@ -1,4 +1,4 @@
-use crate::syn::v2::token::Span;
+use crate::syn::token::Span;
 
 use super::{unicode::chars::JS_LINE_TERIMATORS, Error, Lexer};
@@ -1,7 +1,7 @@
 use crate::{
 	sql::change_feed_include::ChangeFeedInclude,
 	sql::{language::Language, Algorithm},
-	syn::v2::token::{DistanceKind, Keyword, TokenKind},
+	syn::token::{DistanceKind, Keyword, TokenKind},
 };
 use phf::{phf_map, phf_set};
 use unicase::UniCase;
@@ -1,6 +1,6 @@
 use crate::{
 	sql::{Datetime, Duration, Regex, Uuid},
-	syn::v2::token::{Span, Token, TokenKind},
+	syn::token::{Span, Token, TokenKind},
 };
 use thiserror::Error;
@@ -1,4 +1,4 @@
-use crate::syn::v2::{
+use crate::syn::{
 	lexer::{unicode::U8Ext, Error as LexError, Lexer},
 	token::{NumberKind, Token, TokenKind},
 };
@@ -1,6 +1,6 @@
 use thiserror::Error;
 
-use crate::syn::v2::token::Span;
+use crate::syn::token::Span;
 use std::fmt;
 
 #[derive(Error, Debug)]
@@ -2,7 +2,7 @@
 
 use std::mem;
 
-use crate::syn::v2::token::{Token, TokenKind};
+use crate::syn::token::{Token, TokenKind};
 
 use super::{unicode::chars, Error, Lexer};
@@ -1,10 +1,10 @@
 use chrono::{FixedOffset, NaiveDate, Offset, TimeZone, Utc};
 
-use crate::syn::v2::token::{t, NumberKind, TokenKind};
+use crate::syn::token::{t, NumberKind, TokenKind};
 
 macro_rules! test_case(
 	($source:expr => [$($token:expr),*$(,)?]) => {
-		let mut lexer = crate::syn::v2::lexer::Lexer::new($source.as_bytes());
+		let mut lexer = crate::syn::lexer::Lexer::new($source.as_bytes());
 		let mut i = 0;
 		$(
 			let next = lexer.next();
@@ -253,9 +253,8 @@ fn keyword() {
 
 #[test]
 fn uuid() {
-	let mut lexer = crate::syn::v2::lexer::Lexer::new(
-		r#" u"e72bee20-f49b-11ec-b939-0242ac120002" "#.as_bytes(),
-	);
+	let mut lexer =
+		crate::syn::lexer::Lexer::new(r#" u"e72bee20-f49b-11ec-b939-0242ac120002" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {} @ ", error);
@@ -264,9 +263,8 @@ fn uuid() {
 	let uuid = lexer.uuid.take().unwrap();
 	assert_eq!(uuid.0.to_string(), "e72bee20-f49b-11ec-b939-0242ac120002");
 
-	let mut lexer = crate::syn::v2::lexer::Lexer::new(
-		r#" u"b19bc00b-aa98-486c-ae37-c8e1c54295b1" "#.as_bytes(),
-	);
+	let mut lexer =
+		crate::syn::lexer::Lexer::new(r#" u"b19bc00b-aa98-486c-ae37-c8e1c54295b1" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {} @ ", error);
@@ -278,7 +276,7 @@ fn uuid() {
 
 #[test]
 fn date_time_just_date() {
-	let mut lexer = crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23" "#.as_bytes());
+	let mut lexer = crate::syn::lexer::Lexer::new(r#" d"2012-04-23" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {} @ ", error);
@@ -299,7 +297,7 @@ fn date_time_just_date() {
 
 #[test]
 fn date_zone_time() {
-	let mut lexer = crate::syn::v2::lexer::Lexer::new(r#" d"2020-01-01T00:00:00Z" "#.as_bytes());
+	let mut lexer = crate::syn::lexer::Lexer::new(r#" d"2020-01-01T00:00:00Z" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {} @ ", error);
@@ -320,7 +318,7 @@ fn date_zone_time() {
 
 #[test]
 fn date_time_with_time() {
-	let mut lexer = crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23T18:25:43Z" "#.as_bytes());
+	let mut lexer = crate::syn::lexer::Lexer::new(r#" d"2012-04-23T18:25:43Z" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {} @ ", error);
@@ -341,8 +339,7 @@ fn date_time_with_time() {
 
 #[test]
 fn date_time_nanos() {
-	let mut lexer =
-		crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.5631Z" "#.as_bytes());
+	let mut lexer = crate::syn::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.5631Z" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {} @ ", error);
@@ -366,7 +363,7 @@ fn date_time_nanos() {
 #[test]
 fn date_time_timezone_utc() {
 	let mut lexer =
-		crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.0000511Z" "#.as_bytes());
+		crate::syn::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.0000511Z" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {}", error);
@@ -390,7 +387,7 @@ fn date_time_timezone_utc() {
 #[test]
 fn date_time_timezone_pacific() {
 	let mut lexer =
-		crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.511-08:00" "#.as_bytes());
+		crate::syn::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.511-08:00" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {}", error);
@@ -414,7 +411,7 @@ fn date_time_timezone_pacific() {
 #[test]
 fn date_time_timezone_pacific_partial() {
 	let mut lexer =
-		crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.511+08:30" "#.as_bytes());
+		crate::syn::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.511+08:30" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {}", error);
@@ -438,7 +435,7 @@ fn date_time_timezone_pacific_partial() {
 #[test]
 fn date_time_timezone_utc_nanoseconds() {
 	let mut lexer =
-		crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.5110000Z" "#.as_bytes());
+		crate::syn::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.5110000Z" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {}", error);
@@ -462,7 +459,7 @@ fn date_time_timezone_utc_nanoseconds() {
 #[test]
 fn date_time_timezone_utc_sub_nanoseconds() {
 	let mut lexer =
-		crate::syn::v2::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.0000511Z" "#.as_bytes());
+		crate::syn::lexer::Lexer::new(r#" d"2012-04-23T18:25:43.0000511Z" "#.as_bytes());
 	let token = lexer.next_token();
 	if let Some(error) = lexer.error {
 		println!("ERROR: {}", error);
@@ -1,6 +1,6 @@
 use crate::{
 	sql::Uuid,
-	syn::v2::token::{Token, TokenKind},
+	syn::token::{Token, TokenKind},
 };
 
 use super::{Error as LexError, Lexer};
@@ -1,20 +1,17 @@
-#![allow(dead_code)]
-
 //! Module containing the implementation of the surrealql tokens, lexer, and parser.
 
 use crate::{
 	err::Error,
 	sql::{Datetime, Duration, Idiom, Query, Range, Subquery, Thing, Value},
 };
 
 pub mod common;
 pub mod error;
-
-#[cfg(not(feature = "experimental-parser"))]
-pub mod v1;
-#[cfg(not(feature = "experimental-parser"))]
-pub use v1::{datetime_raw, duration, idiom, json, parse, range, subquery, thing, value};
-
-#[cfg(feature = "experimental-parser")]
-pub mod v2;
-#[cfg(feature = "experimental-parser")]
-pub use v2::{
-	datetime_raw, duration, idiom, json, json_legacy_strand, parse, range, subquery, thing, value,
-	value_legacy_strand,
-};
+pub mod lexer;
+pub mod parser;
+pub mod token;
 
 #[cfg(test)]
 pub trait Parse<T> {
@@ -22,15 +19,165 @@ pub trait Parse<T> {
 }
 
 #[cfg(test)]
-mod test {
-	use super::parse;
-
-	#[test]
-	fn test_error_in_lineterminator() {
-		let q = r#"
-select * from person
-CREATE person CONTENT { foo:'bar'};
-"#;
-		parse(q).unwrap_err();
-	}
-}
+mod test;
+
+use lexer::Lexer;
+use parser::{ParseError, ParseErrorKind, Parser};
+use reblessive::Stack;
+
+/// Takes a string and returns if it could be a reserved keyword in certain contexts.
+pub fn could_be_reserved_keyword(s: &str) -> bool {
+	lexer::keywords::could_be_reserved(s)
+}
+
+/// Parses a SurrealQL [`Query`]
+///
+/// During query parsing, the total depth of calls to parse values (including arrays, expressions,
+/// functions, objects, sub-queries), Javascript values, and geometry collections count against
+/// a computation depth limit. If the limit is reached, parsing will return
+/// [`Error::ComputationDepthExceeded`], as opposed to spending more time and potentially
+/// overflowing the call stack.
+///
+/// If you encounter this limit and believe that it should be increased,
+/// please [open an issue](https://github.com/surrealdb/surrealdb/issues)!
+#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
+pub fn parse(input: &str) -> Result<Query, Error> {
+	debug!("parsing query, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	stack
+		.enter(|stk| parser.parse_query(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parses a SurrealQL [`Value`].
+#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
+pub fn value(input: &str) -> Result<Value, Error> {
+	debug!("parsing value, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	stack
+		.enter(|stk| parser.parse_value_field(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parses a SurrealQL [`Value`].
+#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
+pub fn value_legacy_strand(input: &str) -> Result<Value, Error> {
+	debug!("parsing value with legacy strings, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	parser.allow_legacy_strand(true);
+	stack
+		.enter(|stk| parser.parse_value(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parses JSON into an inert SurrealQL [`Value`]
+#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
+pub fn json(input: &str) -> Result<Value, Error> {
+	debug!("parsing json, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	stack
+		.enter(|stk| parser.parse_json(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parses JSON into an inert SurrealQL [`Value`]
+#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
+pub fn json_legacy_strand(input: &str) -> Result<Value, Error> {
+	debug!("parsing json with legacy strings, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	parser.allow_legacy_strand(true);
+	stack
+		.enter(|stk| parser.parse_json(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parses a SurrealQL Subquery [`Subquery`]
+#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
+pub fn subquery(input: &str) -> Result<Subquery, Error> {
+	debug!("parsing subquery, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	stack
+		.enter(|stk| parser.parse_full_subquery(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parses a SurrealQL [`Idiom`]
+#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
+pub fn idiom(input: &str) -> Result<Idiom, Error> {
+	debug!("parsing idiom, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	stack
+		.enter(|stk| parser.parse_plain_idiom(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parse a datetime without enclosing delimiters from a string.
+pub fn datetime_raw(input: &str) -> Result<Datetime, Error> {
+	debug!("parsing datetime, input = {input}");
+	let mut lexer = Lexer::new(input.as_bytes());
+	lexer
+		.lex_datetime_raw_err()
+		.map_err(|e| {
+			ParseError::new(
+				ParseErrorKind::InvalidToken(lexer::Error::DateTime(e)),
+				lexer.current_span(),
+			)
+		})
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parse a duration from a string.
+pub fn duration(input: &str) -> Result<Duration, Error> {
+	debug!("parsing duration, input = {input}");
+	let mut lexer = Lexer::new(input.as_bytes());
+	lexer
+		.lex_only_duration()
+		.map_err(|e| ParseError::new(ParseErrorKind::InvalidToken(e), lexer.current_span()))
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parse a range
+pub fn range(input: &str) -> Result<Range, Error> {
+	debug!("parsing range, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	stack
+		.enter(|stk| parser.parse_range(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
+
+/// Parse a record id.
+pub fn thing(input: &str) -> Result<Thing, Error> {
+	debug!("parsing thing, input = {input}");
+	let mut parser = Parser::new(input.as_bytes());
+	let mut stack = Stack::new();
+	stack
+		.enter(|stk| parser.parse_thing(stk))
+		.finish()
+		.map_err(|e| e.render_on(input))
+		.map_err(Error::InvalidQuery)
+}
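With the `v2` module promoted to the root of `crate::syn`, the entry points above are called without the version segment. A hedged usage sketch from inside the crate (paths and error type as in the hunk above; the inputs are illustrative):

// Sketch only: exercising the promoted entry points shown above.
fn example() -> Result<(), crate::err::Error> {
	// Previously crate::syn::v2::parse / crate::syn::v2::thing.
	let query = crate::syn::parse("SELECT * FROM person WHERE age > 18;")?;
	let thing = crate::syn::thing("person:tobie")?;
	println!("{query} {thing}");
	Ok(())
}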
@ -2,7 +2,7 @@ use crate::{
|
|||
sql::{
|
||||
language::Language, Datetime, Duration, Ident, Number, Param, Regex, Strand, Table, Uuid,
|
||||
},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::mac::unexpected,
|
||||
token::{t, NumberKind, Token, TokenKind},
|
||||
},
|
|
@ -1,7 +1,7 @@
|
|||
use super::{ParseResult, Parser};
|
||||
use crate::{
|
||||
sql::{Constant, Function, Ident, Value},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{mac::expected, ParseError, ParseErrorKind},
|
||||
token::{t, Span},
|
||||
},
|
|
@ -1,10 +1,8 @@
|
|||
use crate::syn::{
|
||||
common::Location,
|
||||
error::{RenderedError, Snippet},
|
||||
v2::{
|
||||
lexer::Error as LexError,
|
||||
token::{Span, TokenKind},
|
||||
},
|
||||
lexer::Error as LexError,
|
||||
token::{Span, TokenKind},
|
||||
};
|
||||
use std::{
|
||||
fmt::Write,
|
|
@ -5,7 +5,7 @@ use reblessive::Stk;
|
|||
use super::mac::unexpected;
|
||||
use super::ParseError;
|
||||
use crate::sql::{value::TryNeg, Cast, Expression, Number, Operator, Value};
|
||||
use crate::syn::v2::{
|
||||
use crate::syn::{
|
||||
parser::{mac::expected, ParseErrorKind, ParseResult, Parser},
|
||||
token::{t, NumberKind, TokenKind},
|
||||
};
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{Function, Ident, Model},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::mac::{expected, unexpected},
|
||||
token::{t, NumberKind, TokenKind},
|
||||
},
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{Dir, Edges, Field, Fields, Graph, Ident, Idiom, Part, Table, Tables, Value},
|
||||
syn::v2::token::{t, Span, TokenKind},
|
||||
syn::token::{t, Span, TokenKind},
|
||||
};
|
||||
|
||||
use super::{mac::unexpected, ParseError, ParseErrorKind, ParseResult, Parser};
|
|
@ -4,7 +4,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{Array, Ident, Object, Strand, Value},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::mac::expected,
|
||||
token::{t, Span, TokenKind},
|
||||
},
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::Kind,
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::mac::expected,
|
||||
token::{t, Keyword, Span, TokenKind},
|
||||
},
|
|
@ -2,17 +2,17 @@
|
|||
macro_rules! unexpected {
|
||||
($parser:expr, $found:expr, $expected:expr) => {
|
||||
match $found {
|
||||
$crate::syn::v2::token::TokenKind::Invalid => {
|
||||
$crate::syn::token::TokenKind::Invalid => {
|
||||
let error = $parser.lexer.error.take().unwrap();
|
||||
return Err($crate::syn::v2::parser::ParseError::new(
|
||||
$crate::syn::v2::parser::ParseErrorKind::InvalidToken(error),
|
||||
return Err($crate::syn::parser::ParseError::new(
|
||||
$crate::syn::parser::ParseErrorKind::InvalidToken(error),
|
||||
$parser.recent_span(),
|
||||
));
|
||||
}
|
||||
$crate::syn::v2::token::TokenKind::Eof => {
|
||||
$crate::syn::token::TokenKind::Eof => {
|
||||
let expected = $expected;
|
||||
return Err($crate::syn::v2::parser::ParseError::new(
|
||||
$crate::syn::v2::parser::ParseErrorKind::UnexpectedEof {
|
||||
return Err($crate::syn::parser::ParseError::new(
|
||||
$crate::syn::parser::ParseErrorKind::UnexpectedEof {
|
||||
expected,
|
||||
},
|
||||
$parser.recent_span(),
|
||||
|
@ -20,8 +20,8 @@ macro_rules! unexpected {
|
|||
}
|
||||
x => {
|
||||
let expected = $expected;
|
||||
return Err($crate::syn::v2::parser::ParseError::new(
|
||||
$crate::syn::v2::parser::ParseErrorKind::Unexpected {
|
||||
return Err($crate::syn::parser::ParseError::new(
|
||||
$crate::syn::parser::ParseErrorKind::Unexpected {
|
||||
found: x,
|
||||
expected,
|
||||
},
|
||||
|
@ -38,27 +38,27 @@ macro_rules! expected {
|
|||
let token = $parser.next();
|
||||
match token.kind {
|
||||
$($kind)* => token,
|
||||
$crate::syn::v2::parser::TokenKind::Invalid => {
|
||||
$crate::syn::parser::TokenKind::Invalid => {
|
||||
let error = $parser.lexer.error.take().unwrap();
|
||||
return Err($crate::syn::v2::parser::ParseError::new(
|
||||
$crate::syn::v2::parser::ParseErrorKind::InvalidToken(error),
|
||||
return Err($crate::syn::parser::ParseError::new(
|
||||
$crate::syn::parser::ParseErrorKind::InvalidToken(error),
|
||||
$parser.recent_span(),
|
||||
));
|
||||
}
|
||||
x => {
|
||||
let expected = $($kind)*.as_str();
|
||||
let kind = if let $crate::syn::v2::token::TokenKind::Eof = x {
|
||||
$crate::syn::v2::parser::ParseErrorKind::UnexpectedEof {
|
||||
let kind = if let $crate::syn::token::TokenKind::Eof = x {
|
||||
$crate::syn::parser::ParseErrorKind::UnexpectedEof {
|
||||
expected,
|
||||
}
|
||||
} else {
|
||||
$crate::syn::v2::parser::ParseErrorKind::Unexpected {
|
||||
$crate::syn::parser::ParseErrorKind::Unexpected {
|
||||
found: x,
|
||||
expected,
|
||||
}
|
||||
};
|
||||
|
||||
return Err($crate::syn::v2::parser::ParseError::new(kind, $parser.last_span()));
|
||||
return Err($crate::syn::parser::ParseError::new(kind, $parser.last_span()));
|
||||
}
|
||||
}
|
||||
}};
|
||||
|
@ -68,7 +68,7 @@ macro_rules! expected {
|
|||
#[macro_export]
|
||||
macro_rules! test_parse {
|
||||
($func:ident$( ( $($e:expr),* $(,)? ))? , $t:literal) => {{
|
||||
let mut parser = $crate::syn::v2::parser::Parser::new($t.as_bytes());
|
||||
let mut parser = $crate::syn::parser::Parser::new($t.as_bytes());
|
||||
let mut stack = reblessive::Stack::new();
|
||||
stack.enter(|ctx| parser.$func(ctx,$($($e),*)*)).finish()
|
||||
}};
|
||||
|
@ -78,19 +78,19 @@ macro_rules! test_parse {
|
|||
macro_rules! enter_object_recursion {
|
||||
($name:ident = $this:expr => { $($t:tt)* }) => {{
|
||||
if $this.object_recursion == 0 {
|
||||
return Err($crate::syn::v2::parser::ParseError::new(
|
||||
$crate::syn::v2::parser::ParseErrorKind::ExceededObjectDepthLimit,
|
||||
return Err($crate::syn::parser::ParseError::new(
|
||||
$crate::syn::parser::ParseErrorKind::ExceededObjectDepthLimit,
|
||||
$this.last_span(),
|
||||
));
|
||||
}
|
||||
struct Dropper<'a, 'b>(&'a mut $crate::syn::v2::parser::Parser<'b>);
|
||||
struct Dropper<'a, 'b>(&'a mut $crate::syn::parser::Parser<'b>);
|
||||
impl Drop for Dropper<'_, '_> {
|
||||
fn drop(&mut self) {
|
||||
self.0.object_recursion += 1;
|
||||
}
|
||||
}
|
||||
impl<'a> ::std::ops::Deref for Dropper<'_,'a>{
|
||||
type Target = $crate::syn::v2::parser::Parser<'a>;
|
||||
type Target = $crate::syn::parser::Parser<'a>;
|
||||
|
||||
fn deref(&self) -> &Self::Target{
|
||||
self.0
|
||||
|
@ -115,19 +115,19 @@ macro_rules! enter_object_recursion {
|
|||
macro_rules! enter_query_recursion {
|
||||
($name:ident = $this:expr => { $($t:tt)* }) => {{
|
||||
if $this.query_recursion == 0 {
|
||||
return Err($crate::syn::v2::parser::ParseError::new(
|
||||
$crate::syn::v2::parser::ParseErrorKind::ExceededQueryDepthLimit,
|
||||
return Err($crate::syn::parser::ParseError::new(
|
||||
$crate::syn::parser::ParseErrorKind::ExceededQueryDepthLimit,
|
||||
$this.last_span(),
|
||||
));
|
||||
}
|
||||
struct Dropper<'a, 'b>(&'a mut $crate::syn::v2::parser::Parser<'b>);
|
||||
struct Dropper<'a, 'b>(&'a mut $crate::syn::parser::Parser<'b>);
|
||||
impl Drop for Dropper<'_, '_> {
|
||||
fn drop(&mut self) {
|
||||
self.0.query_recursion += 1;
|
||||
}
|
||||
}
|
||||
impl<'a> ::std::ops::Deref for Dropper<'_,'a>{
|
||||
type Target = $crate::syn::v2::parser::Parser<'a>;
|
||||
type Target = $crate::syn::parser::Parser<'a>;
|
||||
|
||||
fn deref(&self) -> &Self::Target{
|
||||
self.0
|
|
@ -19,7 +19,7 @@
|
|||
use self::token_buffer::TokenBuffer;
|
||||
use crate::{
|
||||
sql,
|
||||
syn::v2::{
|
||||
syn::{
|
||||
lexer::{Error as LexError, Lexer},
|
||||
token::{t, Span, Token, TokenKind},
|
||||
},
|
|
@ -6,7 +6,7 @@ use reblessive::Stk;
|
|||
use crate::{
|
||||
enter_object_recursion,
|
||||
sql::{Block, Geometry, Object, Strand, Value},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{mac::expected, ParseError, ParseErrorKind, ParseResult, Parser},
|
||||
token::{t, Span, TokenKind},
|
||||
},
|
|
@ -8,7 +8,7 @@ use crate::{
|
|||
Array, Dir, Function, Geometry, Ident, Idiom, Mock, Part, Script, Strand, Subquery, Table,
|
||||
Value,
|
||||
},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
lexer::Lexer,
|
||||
parser::{
|
||||
mac::{expected, unexpected},
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{statements::CreateStatement, Values},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{ParseResult, Parser},
|
||||
token::t,
|
||||
},
|
|
@ -14,7 +14,7 @@ use crate::{
|
|||
tokenizer::Tokenizer,
|
||||
Ident, Idioms, Index, Kind, Param, Permissions, Scoring, Strand, TableType, Values,
|
||||
},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{
|
||||
mac::{expected, unexpected},
|
||||
ParseResult, Parser,
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{statements::DeleteStatement, Values},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{ParseResult, Parser},
|
||||
token::t,
|
||||
},
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::statements::IfelseStatement,
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{
|
||||
mac::{expected, unexpected},
|
||||
ParseResult, Parser,
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{statements::InsertStatement, Data, Value},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{mac::expected, ParseResult, Parser},
|
||||
token::t,
|
||||
},
|
|
@ -8,8 +8,8 @@ use crate::sql::statements::{
|
|||
KillStatement, LiveStatement, OptionStatement, SetStatement, ThrowStatement,
|
||||
};
|
||||
use crate::sql::{Fields, Ident, Param};
|
||||
use crate::syn::v2::parser::{ParseError, ParseErrorKind};
|
||||
use crate::syn::v2::token::{t, TokenKind};
|
||||
use crate::syn::parser::{ParseError, ParseErrorKind};
|
||||
use crate::syn::token::{t, TokenKind};
|
||||
use crate::{
|
||||
sql::{
|
||||
statements::{
|
||||
|
@ -19,7 +19,7 @@ use crate::{
|
|||
},
|
||||
Expression, Operator, Statement, Statements, Value,
|
||||
},
|
||||
syn::v2::parser::mac::unexpected,
|
||||
syn::parser::mac::unexpected,
|
||||
};
|
||||
|
||||
use super::{mac::expected, ParseResult, Parser};
|
|
@ -8,7 +8,7 @@ use crate::{
|
|||
Cond, Data, Duration, Fetch, Fetchs, Field, Fields, Group, Groups, Ident, Idiom, Output,
|
||||
Permission, Permissions, Tables, Timeout, Value, View,
|
||||
},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{
|
||||
error::MissingKind,
|
||||
mac::{expected, unexpected},
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{statements::RelateStatement, Subquery, Value},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{
|
||||
mac::{expected, unexpected},
|
||||
ParseResult, Parser,
|
|
@ -8,7 +8,7 @@ use crate::{
|
|||
},
|
||||
Param,
|
||||
},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{
|
||||
mac::{expected, unexpected},
|
||||
ParseResult, Parser,
|
|
@ -5,7 +5,7 @@ use crate::{
|
|||
statements::SelectStatement, Explain, Field, Fields, Ident, Idioms, Limit, Order, Orders,
|
||||
Split, Splits, Start, Values, Version, With,
|
||||
},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{
|
||||
error::MissingKind,
|
||||
mac::{expected, unexpected},
|
|
@ -2,7 +2,7 @@ use reblessive::Stk;
|
|||
|
||||
use crate::{
|
||||
sql::{statements::UpdateStatement, Values},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{ParseResult, Parser},
|
||||
token::t,
|
||||
},
|
|
@ -1,6 +1,6 @@
|
|||
use reblessive::Stack;
|
||||
|
||||
use crate::syn::v2::parser::Parser;
|
||||
use crate::syn::parser::Parser;
|
||||
|
||||
#[test]
|
||||
fn object_depth() {
|
|
@ -27,7 +27,7 @@ use crate::{
|
|||
Permission, Permissions, Scoring, Split, Splits, Start, Statement, Strand, Subquery, Table,
|
||||
TableType, Tables, Thing, Timeout, Uuid, Value, Values, Version, With,
|
||||
},
|
||||
syn::v2::parser::mac::test_parse,
|
||||
syn::parser::mac::test_parse,
|
||||
};
|
||||
use chrono::{offset::TimeZone, NaiveDate, Offset, Utc};
|
||||
|
|
@ -23,7 +23,7 @@ use crate::{
|
|||
Permission, Permissions, Scoring, Split, Splits, Start, Statement, Strand, Subquery, Table,
|
||||
TableType, Tables, Thing, Timeout, Uuid, Value, Values, Version, With,
|
||||
},
|
||||
syn::v2::parser::{Parser, PartialResult},
|
||||
syn::parser::{Parser, PartialResult},
|
||||
};
|
||||
use chrono::{offset::TimeZone, NaiveDate, Offset, Utc};
|
||||
use reblessive::Stack;
|
|
@ -6,7 +6,7 @@ use crate::{
|
|||
sql::{
|
||||
Array, Constant, Id, Number, Object, Query, Statement, Statements, Strand, Thing, Value,
|
||||
},
|
||||
syn::v2::parser::{mac::test_parse, Parser},
|
||||
syn::parser::{mac::test_parse, Parser},
|
||||
};
|
||||
|
||||
#[test]
|
|
@ -3,7 +3,7 @@ use reblessive::Stk;
|
|||
use super::{ParseResult, Parser};
|
||||
use crate::{
|
||||
sql::{id::Gen, Id, Ident, Range, Thing, Value},
|
||||
syn::v2::{
|
||||
syn::{
|
||||
parser::{
|
||||
mac::{expected, unexpected},
|
||||
ParseError, ParseErrorKind,
|
|
@ -1,4 +1,4 @@
|
|||
use crate::syn::v2::token::Token;
|
||||
use crate::syn::token::Token;
|
||||
|
||||
#[non_exhaustive]
|
||||
pub struct TokenBuffer<const S: usize> {
|
|
@ -1,10 +1,11 @@
|
|||
use reblessive::Stack;
|
||||
|
||||
use super::super::Parse;
|
||||
use super::lexer::Lexer;
|
||||
use super::parse;
|
||||
use super::parser::Parser;
|
||||
use super::Parse;
|
||||
use crate::sql::{Array, Expression, Ident, Idiom, Param, Script, Thing, Value};
|
||||
use crate::syn::v2::token::{t, TokenKind};
|
||||
use crate::syn::token::{t, TokenKind};
|
||||
|
||||
impl Parse<Self> for Value {
|
||||
fn parse(val: &str) -> Self {
|
||||
|
@ -60,3 +61,12 @@ impl Parse<Self> for Expression {
|
|||
panic!("not an expression");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_error_in_lineterminator() {
|
||||
let q = r#"
|
||||
select * from person
|
||||
CREATE person CONTENT { foo:'bar'};
|
||||
"#;
|
||||
parse(q).unwrap_err();
|
||||
}
|
|
@@ -17,7 +17,7 @@ macro_rules! keyword {
 
 macro_rules! keyword_t {
 	$(($value) => {
-		$crate::syn::v2::token::Keyword::$name
+		$crate::syn::token::Keyword::$name
 	};)*
 }
 };
core/src/syn/token/mac.rs (new file, 313 lines)
@ -0,0 +1,313 @@
|
|||
/// A shorthand for token kinds.
|
||||
macro_rules! t {
|
||||
("invalid") => {
|
||||
$crate::syn::token::TokenKind::Invalid
|
||||
};
|
||||
("eof") => {
|
||||
$crate::syn::token::TokenKind::Eof
|
||||
};
|
||||
("[") => {
|
||||
$crate::syn::token::TokenKind::OpenDelim($crate::syn::token::Delim::Bracket)
|
||||
};
|
||||
("{") => {
|
||||
$crate::syn::token::TokenKind::OpenDelim($crate::syn::token::Delim::Brace)
|
||||
};
|
||||
("(") => {
|
||||
$crate::syn::token::TokenKind::OpenDelim($crate::syn::token::Delim::Paren)
|
||||
};
|
||||
("]") => {
|
||||
$crate::syn::token::TokenKind::CloseDelim($crate::syn::token::Delim::Bracket)
|
||||
};
|
||||
("}") => {
|
||||
$crate::syn::token::TokenKind::CloseDelim($crate::syn::token::Delim::Brace)
|
||||
};
|
||||
(")") => {
|
||||
$crate::syn::token::TokenKind::CloseDelim($crate::syn::token::Delim::Paren)
|
||||
};
|
||||
|
||||
("r\"") => {
|
||||
$crate::syn::token::TokenKind::OpenRecordString {
|
||||
double: true,
|
||||
}
|
||||
};
|
||||
("r'") => {
|
||||
$crate::syn::token::TokenKind::OpenRecordString {
|
||||
double: false,
|
||||
}
|
||||
};
|
||||
|
||||
("\"r") => {
|
||||
$crate::syn::token::TokenKind::CloseRecordString {
|
||||
double: true,
|
||||
}
|
||||
};
|
||||
("'r") => {
|
||||
$crate::syn::token::TokenKind::CloseRecordString {
|
||||
double: false,
|
||||
}
|
||||
};
|
||||
|
||||
("<") => {
|
||||
$crate::syn::token::TokenKind::LeftChefron
|
||||
};
|
||||
(">") => {
|
||||
$crate::syn::token::TokenKind::RightChefron
|
||||
};
|
||||
("<|") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::KnnOpen)
|
||||
};
|
||||
("|>") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::KnnClose)
|
||||
};
|
||||
|
||||
(";") => {
|
||||
$crate::syn::token::TokenKind::SemiColon
|
||||
};
|
||||
(",") => {
|
||||
$crate::syn::token::TokenKind::Comma
|
||||
};
|
||||
("|") => {
|
||||
$crate::syn::token::TokenKind::Vert
|
||||
};
|
||||
("...") => {
|
||||
$crate::syn::token::TokenKind::DotDotDot
|
||||
};
|
||||
("..") => {
|
||||
$crate::syn::token::TokenKind::DotDot
|
||||
};
|
||||
(".") => {
|
||||
$crate::syn::token::TokenKind::Dot
|
||||
};
|
||||
("::") => {
|
||||
$crate::syn::token::TokenKind::PathSeperator
|
||||
};
|
||||
(":") => {
|
||||
$crate::syn::token::TokenKind::Colon
|
||||
};
|
||||
("<-") => {
|
||||
$crate::syn::token::TokenKind::ArrowLeft
|
||||
};
|
||||
("<->") => {
|
||||
$crate::syn::token::TokenKind::BiArrow
|
||||
};
|
||||
("->") => {
|
||||
$crate::syn::token::TokenKind::ArrowRight
|
||||
};
|
||||
|
||||
("*") => {
|
||||
$crate::syn::token::TokenKind::Star
|
||||
};
|
||||
("$") => {
|
||||
$crate::syn::token::TokenKind::Dollar
|
||||
};
|
||||
|
||||
("+") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Add)
|
||||
};
|
||||
("-") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Subtract)
|
||||
};
|
||||
("**") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Power)
|
||||
};
|
||||
("*=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AllEqual)
|
||||
};
|
||||
("*~") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AllLike)
|
||||
};
|
||||
("/") => {
|
||||
$crate::syn::token::TokenKind::ForwardSlash
|
||||
};
|
||||
("<=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::LessEqual)
|
||||
};
|
||||
(">=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::GreaterEqual)
|
||||
};
|
||||
("@") => {
|
||||
$crate::syn::token::TokenKind::At
|
||||
};
|
||||
("||") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Or)
|
||||
};
|
||||
("&&") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::And)
|
||||
};
|
||||
("×") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Mult)
|
||||
};
|
||||
("÷") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Divide)
|
||||
};
|
||||
|
||||
("$param") => {
|
||||
$crate::syn::token::TokenKind::Parameter
|
||||
};
|
||||
("123") => {
|
||||
$crate::syn::token::TokenKind::Number(_)
|
||||
};
|
||||
|
||||
("!") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Not)
|
||||
};
|
||||
("!~") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::NotLike)
|
||||
};
|
||||
("!=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::NotEqual)
|
||||
};
|
||||
|
||||
("?") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Like)
|
||||
};
|
||||
("?:") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Tco)
|
||||
};
|
||||
("??") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Nco)
|
||||
};
|
||||
("==") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Exact)
|
||||
};
|
||||
("!=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::NotEqual)
|
||||
};
|
||||
("*=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AllEqual)
|
||||
};
|
||||
("?=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AnyEqual)
|
||||
};
|
||||
("=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Equal)
|
||||
};
|
||||
("!~") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::NotLike)
|
||||
};
|
||||
("*~") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AllLike)
|
||||
};
|
||||
("?~") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AnyLike)
|
||||
};
|
||||
("~") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Like)
|
||||
};
|
||||
("+?=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Ext)
|
||||
};
|
||||
("+=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Inc)
|
||||
};
|
||||
("-=") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Dec)
|
||||
};
|
||||
|
||||
("∋") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Contains)
|
||||
};
|
||||
("∌") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::NotContains)
|
||||
};
|
||||
("∈") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::Inside)
|
||||
};
|
||||
("∉") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::NotInside)
|
||||
};
|
||||
("⊇") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::ContainsAll)
|
||||
};
|
||||
("⊃") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::ContainsAny)
|
||||
};
|
||||
("⊅") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::ContainsNone)
|
||||
};
|
||||
("⊆") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AllInside)
|
||||
};
|
||||
("⊂") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::AnyInside)
|
||||
};
|
||||
("⊄") => {
|
||||
$crate::syn::token::TokenKind::Operator($crate::syn::token::Operator::NoneInside)
|
||||
};
|
||||
|
||||
// algorithms
|
||||
("EDDSA") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::EdDSA)
|
||||
};
|
||||
("ES256") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Es256)
|
||||
};
|
||||
("ES384") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Es384)
|
||||
};
|
||||
("ES512") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Es512)
|
||||
};
|
||||
("HS256") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Hs256)
|
||||
};
|
||||
("HS384") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Hs384)
|
||||
};
|
||||
("HS512") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Hs512)
|
||||
};
|
||||
("PS256") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Ps256)
|
||||
};
|
||||
("PS384") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Ps384)
|
||||
};
|
||||
("PS512") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Ps512)
|
||||
};
|
||||
("RS256") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Rs256)
|
||||
};
|
||||
("RS384") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Rs384)
|
||||
};
|
||||
("RS512") => {
|
||||
$crate::syn::token::TokenKind::Algorithm($crate::sql::Algorithm::Rs512)
|
||||
};
|
||||
|
||||
// Distance
|
||||
("CHEBYSHEV") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Chebyshev)
|
||||
};
|
||||
("COSINE") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Cosine)
|
||||
};
|
||||
("EUCLIDEAN") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Euclidean)
|
||||
};
|
||||
("HAMMING") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Hamming)
|
||||
};
|
||||
("JACCARD") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Jaccard)
|
||||
};
|
||||
("MANHATTAN") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Manhattan)
|
||||
};
|
||||
("MAHALANOBIS") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Mahalanobis)
|
||||
};
|
||||
("MINKOWSKI") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Minkowski)
|
||||
};
|
||||
("PEARSON") => {
|
||||
$crate::syn::token::TokenKind::Distance($crate::syn::token::DistanceKind::Pearson)
|
||||
};
|
||||
|
||||
($t:tt) => {
|
||||
$crate::syn::token::TokenKind::Keyword($crate::syn::token::keyword_t!($t))
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use t;
|
|
@ -1,105 +0,0 @@
|
|||
use super::{
|
||||
comment::mightbespace,
|
||||
common::{closebraces, colons, expect_delimited, openbraces},
|
||||
stmt::{
|
||||
create, define, delete, foreach, ifelse, insert, output, r#break, r#continue, relate,
|
||||
remove, select, set, throw, update,
|
||||
},
|
||||
value::value,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::{block::Entry, Block};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
combinator::map,
|
||||
multi::{many0, separated_list0},
|
||||
sequence::delimited,
|
||||
};
|
||||
|
||||
pub fn block(i: &str) -> IResult<&str, Block> {
|
||||
expect_delimited(
|
||||
openbraces,
|
||||
|i| {
|
||||
let (i, v) = separated_list0(colons, entry)(i)?;
|
||||
let (i, _) = many0(colons)(i)?;
|
||||
Ok((i, Block(v)))
|
||||
},
|
||||
closebraces,
|
||||
)(i)
|
||||
}
|
||||
|
||||
pub fn entry(i: &str) -> IResult<&str, Entry> {
|
||||
delimited(
|
||||
mightbespace,
|
||||
alt((
|
||||
map(set, Entry::Set),
|
||||
map(output, Entry::Output),
|
||||
map(ifelse, Entry::Ifelse),
|
||||
map(select, Entry::Select),
|
||||
map(create, Entry::Create),
|
||||
map(update, Entry::Update),
|
||||
map(relate, Entry::Relate),
|
||||
map(delete, Entry::Delete),
|
||||
map(insert, Entry::Insert),
|
||||
map(define, Entry::Define),
|
||||
map(remove, Entry::Remove),
|
||||
map(throw, Entry::Throw),
|
||||
map(r#break, Entry::Break),
|
||||
map(r#continue, Entry::Continue),
|
||||
map(foreach, Entry::Foreach),
|
||||
map(value, Entry::Value),
|
||||
)),
|
||||
mightbespace,
|
||||
)(i)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn block_empty() {
|
||||
let sql = "{}";
|
||||
let res = block(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn block_value() {
|
||||
let sql = "{ 80 }";
|
||||
let res = block(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn block_ifelse() {
|
||||
let sql = "{ RETURN IF true THEN 50 ELSE 40 END; }";
|
||||
let res = block(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn block_multiple() {
|
||||
let sql = r#"{
|
||||
|
||||
LET $person = (SELECT * FROM person WHERE first = $first AND last = $last AND birthday = $birthday);
|
||||
|
||||
RETURN IF $person[0].id THEN
|
||||
$person[0]
|
||||
ELSE
|
||||
(CREATE person SET first = $first, last = $last, birthday = $birthday)
|
||||
END;
|
||||
|
||||
}"#;
|
||||
let res = block(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{:#}", out))
|
||||
}
|
||||
}
|
|
@ -1,562 +0,0 @@
|
|||
use super::{literal::ident_raw, ParseError};
|
||||
use crate::sql::constant;
|
||||
use nom::{
|
||||
bytes::complete::{tag, tag_no_case},
|
||||
combinator::{opt, peek, value},
|
||||
Err, IResult,
|
||||
};
|
||||
|
||||
#[derive(Clone, Eq, PartialEq, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum BuiltinName<I> {
|
||||
Function(I),
|
||||
Constant(constant::Constant),
|
||||
}
|
||||
|
||||
/// A macro to generate a parser which is able to parse all the different functions, returning an
|
||||
/// error of the function does not exists.
|
||||
macro_rules! impl_builtins {
|
||||
($($name:ident$( ( $s:ident ) )? $(= $rename:expr)? => { $($t:tt)* }),*$(,)?) => {
|
||||
fn _parse_builtin_name(i: &str) -> IResult<&str, BuiltinName<&str>, ParseError<&str>> {
|
||||
$(
|
||||
impl_builtins!{
|
||||
@variant,
|
||||
impl_builtins!(@rename, $name, $($rename)?),
|
||||
$name,
|
||||
$($s)?,
|
||||
$($rename)?,
|
||||
{ $($t)* }
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let (i, Some(x)) = opt($name)(i)?{
|
||||
return Ok((i,x))
|
||||
}
|
||||
)*
|
||||
Err(Err::Error(ParseError::Base(i)))
|
||||
}
|
||||
};
|
||||
|
||||
(@variant, $full:expr, $name:ident, $($s:ident)?,$($rename:expr)?, { fn }) => {
|
||||
fn $name(i: &str) -> IResult<&str, BuiltinName<&str>, ParseError<&str>>{
|
||||
let parser = tag_no_case(impl_builtins!(@rename,$name,$($rename)?));
|
||||
let res = value(BuiltinName::Function($full),parser)(i)?;
|
||||
Ok(res)
|
||||
}
|
||||
};
|
||||
(@variant, $full:expr, $name:ident,$($s:ident)?,$($rename:expr)?, { const = $value:expr}) => {
|
||||
#[allow(non_snake_case)]
|
||||
fn $name(i: &str) -> IResult<&str, BuiltinName<&str>, ParseError<&str>>{
|
||||
let parser = tag_no_case(impl_builtins!(@rename,$name,$($rename)?));
|
||||
let res = value(BuiltinName::Constant($value),parser)(i)?;
|
||||
Ok(res)
|
||||
}
|
||||
};
|
||||
(@variant, $full:expr, $name:ident,$($s:ident)*,$($rename:expr)?, { $($t:tt)* }) => {
|
||||
fn $name(i: &str) -> IResult<&str, BuiltinName<&str>, ParseError<&str>>{
|
||||
let (i,_) = tag_no_case(impl_builtins!(@rename,$name,$($rename)?))(i)?;
|
||||
let (i,_) = impl_builtins!(@sep, i,$full, $($s)*);
|
||||
|
||||
let (i,_) = impl_builtins!{@block,i, $full, { $($t)* }};
|
||||
|
||||
if let Ok((i, Some(_))) = peek(opt(ident_raw))(i){
|
||||
Err(Err::Failure(ParseError::InvalidPath{
|
||||
tried: i,
|
||||
parent: $full
|
||||
}))
|
||||
}else{
|
||||
Err(Err::Failure(ParseError::Expected{
|
||||
tried: i,
|
||||
expected: "a identifier"
|
||||
}))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
(@block, $i:ident, $full:expr, { $($name:ident $(($s:ident))? $(= $rename:expr)? => { $($t:tt)* }),* $(,)? }) => {
|
||||
{
|
||||
$(
|
||||
impl_builtins!{@variant,
|
||||
concat!($full,"::",impl_builtins!(@rename, $name, $($rename)?)),
|
||||
$name,
|
||||
$($s)?,
|
||||
$($rename)?,
|
||||
{ $($t) * }
|
||||
}
|
||||
)*
|
||||
|
||||
$(
|
||||
match $name($i){
|
||||
Ok((i,x)) => return Ok((i,x)),
|
||||
Err(Err::Failure(x)) => return Err(Err::Failure(x)),
|
||||
_ => {}
|
||||
}
|
||||
)*
|
||||
($i,())
|
||||
}
|
||||
};
|
||||
|
||||
(@sep, $input:expr, $full:expr, func) => {
|
||||
match tag::<_,_,ParseError<&str>>("::")($input) {
|
||||
Ok(x) => x,
|
||||
Err(_) => {
|
||||
return Ok(($input, BuiltinName::Function($full)))
|
||||
}
|
||||
}
|
||||
};
|
||||
(@sep, $input:expr, $full:expr, cons) => {
|
||||
match tag::<_,_,ParseError<&str>>("::")($input) {
|
||||
Ok(x) => x,
|
||||
Err(_) => {
|
||||
return Ok(($input, BuiltinName::Constant($full)))
|
||||
}
|
||||
}
|
||||
};
|
||||
(@sep, $input:expr,$full:expr, ) => {{
|
||||
match tag::<_,_,ParseError<&str>>("::")($input) {
|
||||
Ok(x) => x,
|
||||
Err(_) => {
|
||||
return Err(Err::Error(ParseError::Expected{
|
||||
tried: $input,
|
||||
expected: "a path separator `::`"
|
||||
}))
|
||||
}
|
||||
}
|
||||
}};
|
||||
|
||||
(@rename, $name:ident, $rename:expr) => {
|
||||
$rename
|
||||
};
|
||||
|
||||
(@rename, $name:ident,) => {
|
||||
stringify!($name)
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) fn builtin_name(i: &str) -> IResult<&str, BuiltinName<&str>, ParseError<&str>> {
|
||||
impl_builtins! {
|
||||
array => {
|
||||
add => { fn },
|
||||
all => { fn },
|
||||
any => { fn },
|
||||
append => { fn },
|
||||
at => { fn },
|
||||
boolean_and => { fn },
|
||||
boolean_not => { fn },
|
||||
boolean_or => { fn },
|
||||
boolean_xor => { fn },
|
||||
clump => { fn },
|
||||
combine => { fn },
|
||||
complement => { fn },
|
||||
concat => { fn },
|
||||
difference => { fn },
|
||||
distinct => { fn },
|
||||
filter_index => { fn },
|
||||
find_index => { fn },
|
||||
first => { fn },
|
||||
flatten => { fn },
|
||||
group => { fn },
|
||||
insert => { fn },
|
||||
intersect=> { fn },
|
||||
join => { fn },
|
||||
last=> { fn },
|
||||
len => { fn },
|
||||
logical_and => { fn },
|
||||
logical_or => { fn },
|
||||
logical_xor => { fn },
|
||||
matches => { fn },
|
||||
max => { fn },
|
||||
min => { fn },
|
||||
pop => { fn },
|
||||
prepend => { fn },
|
||||
push => { fn },
|
||||
remove => { fn },
|
||||
reverse => { fn },
|
||||
slice => { fn },
|
||||
// says that sort is also itself a function
|
||||
sort(func) => {
|
||||
asc => {fn },
|
||||
desc => {fn },
|
||||
},
|
||||
transpose => { fn },
|
||||
r#union = "union" => { fn },
|
||||
},
|
||||
bytes => {
|
||||
len => { fn }
|
||||
},
|
||||
crypto => {
|
||||
argon2 => {
|
||||
compare => { fn },
|
||||
generate => { fn }
|
||||
},
|
||||
bcrypt => {
|
||||
compare => { fn },
|
||||
generate => { fn }
|
||||
},
|
||||
pbkdf2 => {
|
||||
compare => { fn },
|
||||
generate => { fn }
|
||||
},
|
||||
scrypt => {
|
||||
compare => { fn },
|
||||
generate => { fn }
|
||||
},
|
||||
md5 => { fn },
|
||||
sha1 => { fn },
|
||||
sha256 => { fn },
|
||||
sha512 => { fn }
|
||||
},
|
||||
duration => {
|
||||
days => { fn },
|
||||
hours => { fn },
|
||||
micros => { fn },
|
||||
millis => { fn },
|
||||
mins => { fn },
|
||||
nanos => { fn },
|
||||
secs => { fn },
|
||||
weeks => { fn },
|
||||
years => { fn },
|
||||
from => {
|
||||
days => { fn },
|
||||
hours => { fn },
|
||||
micros => { fn },
|
||||
millis => { fn },
|
||||
mins => { fn },
|
||||
nanos => { fn },
|
||||
secs => { fn },
|
||||
weeks => { fn },
|
||||
},
|
||||
},
|
||||
encoding => {
|
||||
base64 => {
|
||||
decode => { fn },
|
||||
encode => { fn },
|
||||
}
|
||||
},
|
||||
geo => {
|
||||
area => { fn },
|
||||
bearing => { fn },
|
||||
centroid => { fn },
|
||||
distance => { fn },
|
||||
hash => {
|
||||
decode => { fn },
|
||||
encode => { fn },
|
||||
},
|
||||
},
|
||||
http => {
|
||||
head => { fn },
|
||||
get => { fn },
|
||||
put => { fn },
|
||||
post => { fn },
|
||||
patch => { fn },
|
||||
delete => { fn },
|
||||
},
|
||||
math => {
|
||||
abs => { fn },
|
||||
bottom => { fn },
|
||||
ceil => { fn },
|
||||
fixed => { fn },
|
||||
floor => { fn },
|
||||
interquartile => { fn },
|
||||
max => { fn },
|
||||
mean => { fn },
|
||||
median => { fn },
|
||||
midhinge => { fn },
|
||||
min => { fn },
|
||||
mode => { fn },
|
||||
nearestrank => { fn },
|
||||
percentile => { fn },
|
||||
pow => { fn },
|
||||
product => { fn },
|
||||
round => { fn },
|
||||
spread => { fn },
|
||||
SQRT_2 => { const = constant::Constant::MathSqrt2 },
|
||||
sqrt => { fn },
|
||||
stddev => { fn },
|
||||
sum => { fn },
|
||||
top => { fn },
|
||||
trimean => { fn },
|
||||
variance => { fn },
|
||||
E => { const = constant::Constant::MathE },
|
||||
FRAC_1_PI => { const = constant::Constant::MathFrac1Pi },
|
||||
FRAC_1_SQRT_2 => { const = constant::Constant::MathFrac1Sqrt2 },
|
||||
FRAC_2_PI => { const = constant::Constant::MathFrac2Pi },
|
||||
FRAC_2_SQRT_PI => { const = constant::Constant::MathFrac2SqrtPi },
|
||||
FRAC_PI_2 => { const = constant::Constant::MathFracPi2 },
|
||||
FRAC_PI_3 => { const = constant::Constant::MathFracPi3 },
|
||||
FRAC_PI_4 => { const = constant::Constant::MathFracPi4 },
|
||||
FRAC_PI_6 => { const = constant::Constant::MathFracPi6 },
|
||||
FRAC_PI_8 => { const = constant::Constant::MathFracPi8 },
|
||||
INF => { const = constant::Constant::MathInf },
|
||||
LN_10 => { const = constant::Constant::MathLn10 },
|
||||
LN_2 => { const = constant::Constant::MathLn2 },
|
||||
LOG10_2 => { const = constant::Constant::MathLog102 },
|
||||
LOG10_E => { const = constant::Constant::MathLog10E },
|
||||
LOG2_10 => { const = constant::Constant::MathLog210 },
|
||||
LOG2_E => { const = constant::Constant::MathLog2E },
|
||||
PI => { const = constant::Constant::MathPi },
|
||||
TAU => { const = constant::Constant::MathTau },
|
||||
},
|
||||
meta => {
|
||||
id => { fn },
|
||||
table => { fn },
|
||||
tb => { fn },
|
||||
},
|
||||
object => {
|
||||
entries => { fn },
|
||||
from_entries => { fn },
|
||||
keys => { fn },
|
||||
len => { fn },
|
||||
values => { fn },
|
||||
},
|
||||
parse => {
|
||||
email => {
|
||||
host => { fn },
|
||||
user => { fn },
|
||||
},
|
||||
url => {
|
||||
domain => { fn },
|
||||
fragment => { fn },
|
||||
host => { fn },
|
||||
path => { fn },
|
||||
port => { fn },
|
||||
query => { fn },
|
||||
scheme => { fn },
|
||||
}
|
||||
},
|
||||
rand(func) => {
|
||||
r#bool = "bool" => { fn },
|
||||
r#enum = "enum" => { fn },
|
||||
float => { fn },
|
||||
guid => { fn },
|
||||
int => { fn },
|
||||
string => { fn },
|
||||
time => { fn },
|
||||
ulid => { fn },
|
||||
uuid(func) => {
|
||||
v4 => { fn },
|
||||
v7 => { fn },
|
||||
},
|
||||
},
|
||||
search => {
|
||||
analyze => { fn },
|
||||
score => { fn },
|
||||
highlight => { fn },
|
||||
offsets => { fn },
|
||||
},
|
||||
session => {
|
||||
db => { fn },
|
||||
id => { fn },
|
||||
ip => { fn },
|
||||
ns => { fn },
|
||||
origin => { fn },
|
||||
sc => { fn },
|
||||
sd => { fn },
|
||||
token => { fn },
|
||||
},
|
||||
string => {
|
||||
concat => { fn },
|
||||
contains => { fn },
|
||||
ends_with = "endsWith" => { fn },
|
||||
join => { fn },
|
||||
len => { fn },
|
||||
lowercase => { fn },
|
||||
matches => { fn },
|
||||
repeat => { fn },
|
||||
replace => { fn },
|
||||
reverse => { fn },
|
||||
slice => { fn },
|
||||
slug => { fn },
|
||||
split => { fn },
|
||||
starts_with = "startsWith" => { fn },
|
||||
trim => { fn },
|
||||
uppercase => { fn },
|
||||
words => { fn },
|
||||
distance => {
|
||||
hamming => { fn },
|
||||
levenshtein => { fn },
|
||||
},
|
||||
similarity => {
|
||||
fuzzy => { fn },
|
||||
jaro => { fn },
|
||||
smithwaterman => { fn },
|
||||
},
|
||||
is => {
|
||||
alphanum => { fn },
|
||||
alpha => { fn },
|
||||
ascii => { fn },
|
||||
datetime => { fn },
|
||||
domain => { fn },
|
||||
email => { fn },
|
||||
hexadecimal => { fn },
|
||||
latitude => { fn },
|
||||
longitude => { fn },
|
||||
numeric => { fn },
|
||||
semver => { fn },
|
||||
url => { fn },
|
||||
uuid => { fn },
|
||||
},
|
||||
semver => {
|
||||
compare => { fn },
|
||||
major => { fn },
|
||||
minor => { fn },
|
||||
patch => { fn },
|
||||
inc => {
|
||||
major => { fn },
|
||||
minor => { fn },
|
||||
patch => { fn },
|
||||
},
|
||||
set => {
|
||||
major => { fn },
|
||||
minor => { fn },
|
||||
patch => { fn },
|
||||
}
|
||||
}
|
||||
},
|
||||
time => {
|
||||
ceil => { fn },
|
||||
day => { fn },
|
||||
floor => { fn },
|
||||
format => { fn },
|
||||
group => { fn },
|
||||
hour => { fn },
|
||||
minute => { fn },
|
||||
max => { fn },
|
||||
min => { fn },
|
||||
month => { fn },
|
||||
nano => { fn },
|
||||
micros => { fn },
|
||||
millis => { fn },
|
||||
now => { fn },
|
||||
round => { fn },
|
||||
second => { fn },
|
||||
timezone => { fn },
|
||||
unix => { fn },
|
||||
wday => { fn },
|
||||
week => { fn },
|
||||
yday => { fn },
|
||||
year => { fn },
|
||||
from => {
|
||||
nanos => { fn },
|
||||
micros => { fn },
|
||||
millis => { fn },
|
||||
unix => { fn },
|
||||
secs => { fn },
|
||||
}
|
||||
},
|
||||
r#type = "type" => {
|
||||
r#bool = "bool" => { fn },
|
||||
datetime => { fn },
|
||||
decimal => { fn },
|
||||
duration => { fn },
|
||||
fields => { fn },
|
||||
field => { fn },
|
||||
float => { fn },
|
||||
int => { fn },
|
||||
number => { fn },
|
||||
point => { fn },
|
||||
string => { fn },
|
||||
table => { fn },
|
||||
thing => { fn },
|
||||
range => { fn },
|
||||
is => {
|
||||
array => { fn },
|
||||
r#bool = "bool" => { fn },
|
||||
bytes => { fn },
|
||||
collection => { fn },
|
||||
datetime => { fn },
|
||||
decimal => { fn },
|
||||
duration => { fn },
|
||||
float => { fn },
|
||||
geometry => { fn },
|
||||
int => { fn },
|
||||
line => { fn },
|
||||
none => { fn },
|
||||
null => { fn },
|
||||
multiline => { fn },
|
||||
multipoint => { fn },
|
||||
multipolygon => { fn },
|
||||
number => { fn },
|
||||
object => { fn },
|
||||
point => { fn },
|
||||
polygon => { fn },
|
||||
record => { fn },
|
||||
string => { fn },
|
||||
uuid => { fn },
|
||||
}
|
||||
},
|
||||
vector => {
|
||||
add => { fn },
|
||||
angle => { fn },
|
||||
divide => { fn },
|
||||
cross => { fn },
|
||||
dot => { fn },
|
||||
magnitude => { fn },
|
||||
multiply => { fn },
|
||||
normalize => { fn },
|
||||
project => { fn },
|
||||
subtract => { fn },
|
||||
distance => {
|
||||
chebyshev => { fn },
|
||||
euclidean => { fn },
|
||||
hamming => { fn },
|
||||
mahalanobis => { fn },
|
||||
manhattan => { fn },
|
||||
minkowski => { fn },
|
||||
},
|
||||
similarity => {
|
||||
cosine => { fn },
|
||||
jaccard => { fn },
|
||||
pearson => { fn },
|
||||
spearman => { fn },
|
||||
}
|
||||
},
|
||||
count => { fn },
|
||||
not => { fn },
|
||||
sleep => { fn },
|
||||
}
|
||||
_parse_builtin_name(i)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::sql::constant::Constant;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn constant_lowercase() {
|
||||
let sql = "math::pi";
|
||||
let res = builtin_name(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(out, BuiltinName::Constant(Constant::MathPi));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn constant_uppercase() {
|
||||
let sql = "MATH::PI";
|
||||
let res = builtin_name(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(out, BuiltinName::Constant(Constant::MathPi));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn constant_mixedcase() {
|
||||
let sql = "math::PI";
|
||||
let res = builtin_name(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(out, BuiltinName::Constant(Constant::MathPi));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn constant_sqrt_2() {
|
||||
let sql = "math::SqRt_2";
|
||||
let res = builtin_name(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(out, BuiltinName::Constant(Constant::MathSqrt2));
|
||||
}
|
||||
}
|
|
@ -1,70 +0,0 @@
|
|||
use super::IResult;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{tag, take_until},
|
||||
character::complete::{char, multispace0, multispace1, not_line_ending},
|
||||
multi::{many0, many1},
|
||||
};
|
||||
|
||||
pub fn mightbespace(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = many0(alt((comment, space)))(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn shouldbespace(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = many1(alt((comment, space)))(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn comment(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = multispace0(i)?;
|
||||
let (i, _) = many1(alt((block, slash, dash, hash)))(i)?;
|
||||
let (i, _) = multispace0(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn block(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = multispace0(i)?;
|
||||
let (i, _) = tag("/*")(i)?;
|
||||
let (i, _) = take_until("*/")(i)?;
|
||||
let (i, _) = tag("*/")(i)?;
|
||||
let (i, _) = multispace0(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn slash(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = multispace0(i)?;
|
||||
let (i, _) = tag("//")(i)?;
|
||||
let (i, _) = not_line_ending(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn dash(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = multispace0(i)?;
|
||||
let (i, _) = tag("--")(i)?;
|
||||
let (i, _) = not_line_ending(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn hash(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = multispace0(i)?;
|
||||
let (i, _) = char('#')(i)?;
|
||||
let (i, _) = not_line_ending(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
fn space(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = multispace1(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::sql::parse;
|
||||
|
||||
#[test]
|
||||
fn any_whitespace() {
|
||||
let sql = "USE /* white space and comment between */ NS test;";
|
||||
parse(sql).unwrap();
|
||||
}
|
||||
}
|
|
@ -1,342 +0,0 @@
|
|||
use super::{
|
||||
comment::{mightbespace, shouldbespace},
|
||||
error::ParseError,
|
||||
IResult,
|
||||
};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{take_while, take_while_m_n},
|
||||
character::complete::char,
|
||||
combinator::map_res,
|
||||
multi::many1,
|
||||
Err, InputLength, Parser,
|
||||
};
|
||||
use std::ops::RangeBounds;
|
||||
|
||||
pub fn colons(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, _) = many1(char(';'))(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn commas(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, _) = char(',')(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn verbar(i: &str) -> IResult<&str, ()> {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, _) = char('|')(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, ()))
|
||||
}
|
||||
|
||||
pub fn commasorspace(i: &str) -> IResult<&str, ()> {
|
||||
alt((commas, shouldbespace))(i)
|
||||
}
|
||||
|
||||
pub fn openparentheses(s: &str) -> IResult<&str, &str> {
|
||||
let (i, _) = char('(')(s)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
pub fn closeparentheses(i: &str) -> IResult<&str, &str> {
|
||||
let (s, _) = mightbespace(i)?;
|
||||
let (i, _) = char(')')(s)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
pub fn openbraces(s: &str) -> IResult<&str, &str> {
|
||||
let (i, _) = char('{')(s)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
pub fn closebraces(i: &str) -> IResult<&str, &str> {
|
||||
let (s, _) = mightbespace(i)?;
|
||||
let (i, _) = char('}')(s)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
pub fn openbracket(s: &str) -> IResult<&str, &str> {
|
||||
let (i, _) = char('[')(s)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
pub fn closebracket(i: &str) -> IResult<&str, &str> {
|
||||
let (s, _) = mightbespace(i)?;
|
||||
let (i, _) = char(']')(s)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
pub fn openchevron(s: &str) -> IResult<&str, &str> {
|
||||
let (i, _) = char('<')(s)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
pub fn closechevron(i: &str) -> IResult<&str, &str> {
|
||||
let (s, _) = mightbespace(i)?;
|
||||
let (i, _) = char('>')(s)?;
|
||||
Ok((i, s))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn is_hex(chr: char) -> bool {
|
||||
chr.is_ascii_hexdigit()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn is_digit(chr: char) -> bool {
|
||||
chr.is_ascii_digit()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn val_char(chr: char) -> bool {
|
||||
chr.is_ascii_alphanumeric() || chr == '_'
|
||||
}
|
||||
|
||||
pub fn take_u64(i: &str) -> IResult<&str, u64> {
|
||||
map_res(take_while(is_digit), |s: &str| s.parse::<u64>())(i)
|
||||
}
|
||||
|
||||
pub fn take_u32_len(i: &str) -> IResult<&str, (u32, usize)> {
|
||||
map_res(take_while(is_digit), |s: &str| s.parse::<u32>().map(|x| (x, s.len())))(i)
|
||||
}
|
||||
|
||||
pub fn take_digits(i: &str, n: usize) -> IResult<&str, u32> {
|
||||
map_res(take_while_m_n(n, n, is_digit), |s: &str| s.parse::<u32>())(i)
|
||||
}
|
||||
|
||||
pub fn take_digits_range(i: &str, n: usize, range: impl RangeBounds<u32>) -> IResult<&str, u32> {
|
||||
let (i, v) = take_while_m_n(n, n, is_digit)(i)?;
|
||||
match v.parse::<u32>() {
|
||||
Ok(v) => {
|
||||
if range.contains(&v) {
|
||||
Ok((i, v))
|
||||
} else {
|
||||
Result::Err(Err::Error(ParseError::RangeError {
|
||||
tried: i,
|
||||
lower: range.start_bound().cloned(),
|
||||
upper: range.end_bound().cloned(),
|
||||
}))
|
||||
}
|
||||
}
|
||||
Err(error) => Result::Err(Err::Error(ParseError::ParseInt {
|
||||
tried: v,
|
||||
error,
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a parser delimited by two other parsers.
|
||||
///
|
||||
/// This parser fails (not errors) if the second delimiting parser returns an error.
|
||||
pub fn expect_delimited<I, D, V, T, O, O1>(
|
||||
mut prefix: D,
|
||||
mut value: V,
|
||||
mut terminator: T,
|
||||
) -> impl FnMut(I) -> IResult<I, O, ParseError<I>>
|
||||
where
|
||||
I: Clone + InputLength,
|
||||
V: Parser<I, O, ParseError<I>>,
|
||||
D: Parser<I, I, ParseError<I>>,
|
||||
T: Parser<I, O1, ParseError<I>>,
|
||||
{
|
||||
move |i| {
|
||||
let (i, s) = prefix.parse(i)?;
|
||||
let (i, res) = value.parse(i)?;
|
||||
match terminator.parse(i) {
|
||||
Ok((i, _)) => Result::Ok((i, res)),
|
||||
Result::Err(Err::Failure(e)) | Result::Err(Err::Error(e)) => {
|
||||
Result::Err(Err::Failure(ParseError::MissingDelimiter {
|
||||
opened: s,
|
||||
tried: e.tried(),
|
||||
}))
|
||||
}
|
||||
Result::Err(Err::Incomplete(e)) => Result::Err(Err::Incomplete(e)),
|
||||
}
|
||||
}
|
||||
}
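A minimal usage sketch (not from the original file; it reuses the openparentheses, closeparentheses and take_u64 helpers defined in this module): wrapping a value parser with expect_delimited turns a missing closing delimiter into a hard failure carrying the span of the opening delimiter, so callers higher up cannot silently backtrack past it.

// Hypothetical example: parses "( 42 )" and turns a missing ")" into a
// failure (not a recoverable error) pointing at the opening "(".
fn parenthesised_u64(i: &str) -> IResult<&str, u64> {
    expect_delimited(openparentheses, take_u64, closeparentheses)(i)
}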
|
||||
|
||||
pub fn expect_terminator<P, I, O>(
|
||||
open_span: I,
|
||||
mut terminator: P,
|
||||
) -> impl FnMut(I) -> IResult<I, O, ParseError<I>>
|
||||
where
|
||||
I: Clone,
|
||||
P: Parser<I, O, ParseError<I>>,
|
||||
{
|
||||
move |i| match terminator.parse(i) {
|
||||
Ok((i, x)) => Ok((i, x)),
|
||||
Result::Err(Err::Failure(e)) | Result::Err(Err::Error(e)) => {
|
||||
Result::Err(Err::Failure(ParseError::MissingDelimiter {
|
||||
opened: open_span.clone(),
|
||||
tried: e.tried(),
|
||||
}))
|
||||
}
|
||||
Result::Err(Err::Incomplete(e)) => Result::Err(Err::Incomplete(e)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a delimited list with an optional trailing separator in the form of:
|
||||
///
|
||||
///```text
|
||||
/// PREFIX $(PARSER)SEPARATOR* $(SEPARATOR)? TERMINATOR
|
||||
///```
|
||||
///
|
||||
/// This parses productions like
|
||||
/// (a,b,c,) or [a,b]
|
||||
///
|
||||
/// First parses the prefix and returns its error if there is one.
|
||||
/// Then it tries to parse the terminator. If the terminator is present the parser completes,
|
||||
/// otherwise it tries to parse a value.
|
||||
/// After each value it tries to parse the separator; if one is present it starts again, trying the
|
||||
/// terminator first and then another value. If there is no separator it expects the terminator,
|
||||
/// and if that is also missing it returns a failure. Otherwise it completes with a vec of the
|
||||
/// parsed values.
|
||||
///
|
||||
pub fn delimited_list0<I, D, S, V, T, O, O1, O2>(
|
||||
mut prefix: D,
|
||||
mut separator: S,
|
||||
mut value: V,
|
||||
mut terminator: T,
|
||||
) -> impl FnMut(I) -> IResult<I, Vec<O>, ParseError<I>>
|
||||
where
|
||||
I: Clone + InputLength,
|
||||
V: Parser<I, O, ParseError<I>>,
|
||||
D: Parser<I, I, ParseError<I>>,
|
||||
S: Parser<I, O1, ParseError<I>>,
|
||||
T: Parser<I, O2, ParseError<I>>,
|
||||
{
|
||||
move |i| {
|
||||
let (i, s) = prefix.parse(i)?;
|
||||
let mut res = Vec::new();
|
||||
let mut input = i;
|
||||
loop {
|
||||
match terminator.parse(input.clone()) {
|
||||
Err(Err::Error(_)) => {}
|
||||
Err(e) => return Err(e),
|
||||
Ok((i, _)) => {
|
||||
input = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
let (i, value) = value.parse(input)?;
|
||||
res.push(value);
|
||||
match separator.parse(i.clone()) {
|
||||
Ok((i, _)) => {
|
||||
input = i;
|
||||
}
|
||||
Err(Err::Error(_)) => match terminator.parse(i.clone()) {
|
||||
Ok((i, _)) => {
|
||||
input = i;
|
||||
break;
|
||||
}
|
||||
Result::Err(Err::Error(_)) => {
|
||||
return Err(Err::Failure(ParseError::MissingDelimiter {
|
||||
opened: s,
|
||||
tried: i,
|
||||
}))
|
||||
}
|
||||
Result::Err(e) => return Err(e),
|
||||
},
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
}
|
||||
Ok((input, res))
|
||||
}
|
||||
}
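A small illustrative call, assuming the bracket and digit helpers defined earlier in this module; it accepts an empty list and a trailing comma, and reports a missing closing bracket as a failure rather than an error.

// Hypothetical example: parses "[]", "[1,2]" and "[1, 2, 3,]" into Vec<u64>.
fn u64_list(i: &str) -> IResult<&str, Vec<u64>> {
    delimited_list0(openbracket, commas, take_u64, closebracket)(i)
}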
|
||||
|
||||
/// Parses a delimited list with an optional trailing separator in the form of:
|
||||
///
|
||||
///```text
|
||||
/// PREFIX $(PARSER)SEPARATOR+ $(SEPARATOR)? TERMINATOR
|
||||
///```
|
||||
///
|
||||
/// This parses productions like
|
||||
/// (a,b,c,) or [a,b] but not empty lists
|
||||
///
|
||||
/// First parses the prefix and returns its error if there is one.
|
||||
/// Then it parses a required first value, so the list may never be empty.
|
||||
/// After each value it tries to parse the separator; if one is present it starts again, trying the
|
||||
/// terminator first and then another value. If there is no separator it expects the terminator,
|
||||
/// and if that is also missing it returns a failure. Otherwise it completes with a vec of the
|
||||
/// parsed values.
|
||||
///
|
||||
pub fn delimited_list1<I, D, S, V, T, O, O1, O2>(
|
||||
mut prefix: D,
|
||||
mut separator: S,
|
||||
mut value: V,
|
||||
mut terminator: T,
|
||||
) -> impl FnMut(I) -> IResult<I, Vec<O>, ParseError<I>>
|
||||
where
|
||||
I: Clone + InputLength,
|
||||
V: Parser<I, O, ParseError<I>>,
|
||||
D: Parser<I, I, ParseError<I>>,
|
||||
S: Parser<I, O1, ParseError<I>>,
|
||||
T: Parser<I, O2, ParseError<I>>,
|
||||
{
|
||||
move |i| {
|
||||
let (i, s) = prefix.parse(i)?;
|
||||
let mut input = i;
|
||||
let (i, v) = value.parse(input)?;
|
||||
let mut res = vec![v];
|
||||
|
||||
match separator.parse(i.clone()) {
|
||||
Ok((i, _)) => {
|
||||
input = i;
|
||||
}
|
||||
Err(Err::Error(_)) => match terminator.parse(i.clone()) {
|
||||
Ok((i, _)) => return Ok((i, res)),
|
||||
Result::Err(Err::Error(_)) => {
|
||||
return Err(Err::Failure(ParseError::MissingDelimiter {
|
||||
opened: s,
|
||||
tried: i,
|
||||
}))
|
||||
}
|
||||
Result::Err(e) => return Err(e),
|
||||
},
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
|
||||
loop {
|
||||
match terminator.parse(input.clone()) {
|
||||
Err(Err::Error(_)) => {}
|
||||
Err(e) => return Err(e),
|
||||
Ok((i, _)) => {
|
||||
input = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
let (i, v) = value.parse(input)?;
|
||||
res.push(v);
|
||||
match separator.parse(i.clone()) {
|
||||
Ok((i, _)) => {
|
||||
input = i;
|
||||
}
|
||||
Err(Err::Error(_)) => match terminator.parse(i.clone()) {
|
||||
Ok((i, _)) => {
|
||||
input = i;
|
||||
break;
|
||||
}
|
||||
Result::Err(Err::Error(_)) => {
|
||||
return Err(Err::Failure(ParseError::MissingDelimiter {
|
||||
opened: s,
|
||||
tried: i,
|
||||
}))
|
||||
}
|
||||
Result::Err(e) => return Err(e),
|
||||
},
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
}
|
||||
Ok((input, res))
|
||||
}
|
||||
}
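The same sketch for the non-empty variant, again with assumed helper names from this module: at least one element is required, so "()" is rejected while a trailing comma is still accepted.

// Hypothetical example: parses "(1)" and "(1, 2,)" but not "()".
fn u64_args(i: &str) -> IResult<&str, Vec<u64>> {
    delimited_list1(openparentheses, commas, take_u64, closeparentheses)(i)
}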
|
|
@ -1,380 +0,0 @@
|
|||
use super::ParseError;
|
||||
use crate::cnf::MAX_COMPUTATION_DEPTH;
|
||||
use nom::Err;
|
||||
use std::cell::Cell;
|
||||
use std::thread::panicking;
|
||||
|
||||
thread_local! {
|
||||
/// How many recursion levels deep parsing is currently.
|
||||
static DEPTH: Cell<u8> = Cell::default();
|
||||
}
|
||||
|
||||
/// Scale down `MAX_COMPUTATION_DEPTH` for parsing because:
|
||||
/// - Only a few intermediate parsers, collectively sufficient to limit depth, call dive.
|
||||
/// - Some of the depth budget during execution is for futures, graph traversal, and
|
||||
/// other operations that don't exist during parsing.
|
||||
/// - The parser currently runs in exponential time, so a lower limit guards against
|
||||
/// CPU-intensive, time-consuming parsing.
|
||||
const DEPTH_PER_DIVE: u8 = 4;
|
||||
|
||||
/// Call when starting the parser to reset the recursion depth.
|
||||
#[inline(never)]
|
||||
pub(super) fn reset() {
|
||||
DEPTH.with(|cell| {
|
||||
debug_assert_eq!(cell.get(), 0, "previous parsing stopped abruptly");
|
||||
cell.set(0)
|
||||
});
|
||||
}
|
||||
|
||||
/// Call at least once in recursive parsing code paths to limit recursion depth.
|
||||
#[inline(never)]
|
||||
#[must_use = "must store and implicitly drop when returning"]
|
||||
pub(crate) fn dive<I>(position: I) -> Result<Diving, Err<ParseError<I>>> {
|
||||
DEPTH.with(|cell| {
|
||||
let depth = cell.get().saturating_add(DEPTH_PER_DIVE);
|
||||
if depth <= *MAX_COMPUTATION_DEPTH {
|
||||
cell.replace(depth);
|
||||
Ok(Diving)
|
||||
} else {
|
||||
Err(Err::Failure(ParseError::ExcessiveDepth(position)))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
#[non_exhaustive]
|
||||
pub(crate) struct Diving;
|
||||
|
||||
impl Drop for Diving {
|
||||
fn drop(&mut self) {
|
||||
DEPTH.with(|cell| {
|
||||
if let Some(depth) = cell.get().checked_sub(DEPTH_PER_DIVE) {
|
||||
cell.replace(depth);
|
||||
} else {
|
||||
debug_assert!(panicking());
|
||||
}
|
||||
});
|
||||
}
|
||||
}
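The Diving value returned by dive is an RAII guard: a recursive parser acquires it on entry, and the depth counter is decremented again when the guard is dropped at the end of the call, as the Drop impl above shows. A minimal sketch of the intended call pattern, where inner_value, Value and the IResult alias are assumed to be in scope for illustration:

// Hypothetical sketch of a recursive production guarded by dive().
// `inner_value` stands in for whichever recursive parser is being limited.
fn guarded_value(i: &str) -> IResult<&str, Value, ParseError<&str>> {
    // Fails with ParseError::ExcessiveDepth once the depth budget is spent.
    let _diving = dive(i)?;
    inner_value(i)
    // `_diving` is dropped here, restoring the previous depth.
}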
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::super::{super::super::syn, query};
|
||||
use super::*;
|
||||
use crate::sql::{Query, Value};
|
||||
use nom::Finish;
|
||||
use serde::Serialize;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn no_ending() {
|
||||
let sql = "SELECT * FROM test";
|
||||
syn::parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_query_string() {
|
||||
let sql = "SELECT * FROM test;";
|
||||
syn::parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trim_query_string() {
|
||||
let sql = " SELECT * FROM test ; ";
|
||||
syn::parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_complex_rubbish() {
|
||||
let sql = " SELECT * FROM test ; /* shouldbespace */ ;;; ";
|
||||
syn::parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_complex_failure() {
|
||||
let sql = " SELECT * FROM { }} ";
|
||||
syn::parse(sql).unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ok_recursion() {
|
||||
let sql = "SELECT * FROM ((SELECT * FROM (5))) * 5;";
|
||||
syn::parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ok_recursion_deeper() {
|
||||
let sql = "SELECT * FROM (((( SELECT * FROM ((5)) + ((5)) + ((5)) )))) * ((( function() {return 5;} )));";
|
||||
let start = Instant::now();
|
||||
syn::parse(sql).unwrap();
|
||||
let elapsed = start.elapsed();
|
||||
assert!(
|
||||
elapsed < Duration::from_millis(2000),
|
||||
"took {}ms, previously took ~1000ms in debug",
|
||||
elapsed.as_millis()
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ok_recursion_ifelse() {
|
||||
let depth = 20;
|
||||
let sql = format!("{} {}", "IF A {".repeat(depth), "}".repeat(depth));
|
||||
syn::parse(&sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ko_recursion_ifelse() {
|
||||
use crate::err::Error;
|
||||
let depth = 2000;
|
||||
let sql = format!("{} {}", "IF A {".repeat(depth), "}".repeat(depth));
|
||||
let err = syn::parse(&sql).unwrap_err();
|
||||
assert!(
|
||||
matches!(err, Error::InvalidQuery(_)),
|
||||
"expected invalid query due to computation depth exceeded, got {:?}",
|
||||
err
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ok_recursion_relate() {
|
||||
let depth = 20;
|
||||
let sql =
|
||||
format!("{} {} {}", "(RELATE ".repeat(depth), "a:1", " -> b:1 -> c:1)".repeat(depth));
|
||||
syn::parse(&sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ko_recursion_relate() {
|
||||
use crate::err::Error;
|
||||
let depth = 2000;
|
||||
let sql =
|
||||
format!("{} {} {}", "(RELATE ".repeat(depth), "a:1", " -> b:1 -> c:1)".repeat(depth));
|
||||
let err = syn::parse(&sql).unwrap_err();
|
||||
assert!(
|
||||
matches!(err, Error::InvalidQuery(_)),
|
||||
"expected invalid query due to computation depth exceeded, got {:?}",
|
||||
err
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ok_recursion_basic_idiom() {
|
||||
let depth = 2;
|
||||
let sql = format!("{}{}", "[a".repeat(depth), "]".repeat(depth));
|
||||
syn::parse(&sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ko_recursion_basic_idiom() {
|
||||
use crate::err::Error;
|
||||
let depth = 2000;
|
||||
let sql = format!("{}{}", "[a".repeat(depth), "]".repeat(depth));
|
||||
let err = syn::parse(&sql).unwrap_err();
|
||||
assert!(
|
||||
matches!(err, Error::InvalidQuery(_)),
|
||||
"expected invalid query due to computation depth exceeded, got {:?}",
|
||||
err
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_cast() {
|
||||
for n in [10, 100, 500] {
|
||||
recursive("SELECT * FROM ", "<int>", "5", "", n, n > 50);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_geometry() {
|
||||
for n in [1, 50, 100] {
|
||||
recursive(
|
||||
"SELECT * FROM ",
|
||||
r#"{type: "GeometryCollection",geometries: ["#,
|
||||
r#"{type: "MultiPoint",coordinates: [[10.0, 11.2],[10.5, 11.9]]}"#,
|
||||
"]}",
|
||||
n,
|
||||
n > 25,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_javascript() {
|
||||
for n in [10, 1000] {
|
||||
recursive("SELECT * FROM ", "function() {", "return 5;", "}", n, n > 500);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_mixed() {
|
||||
for n in [3, 15, 75] {
|
||||
recursive("", "SELECT * FROM ((((", "5 * 5", ")))) * 5", n, n > 5);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_select() {
|
||||
for n in [5, 10, 100] {
|
||||
recursive("SELECT * FROM ", "(SELECT * FROM ", "5", ")", n, n > 15);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_value_subquery() {
|
||||
for p in 1..=4 {
|
||||
recursive("SELECT * FROM ", "(", "5", ")", 10usize.pow(p), p > 1);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_if_subquery() {
|
||||
for p in 1..=3 {
|
||||
recursive("SELECT * FROM ", "IF true THEN ", "5", " ELSE 4 END", 6usize.pow(p), p > 1);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parser_try() {
|
||||
let sql = "
|
||||
SELECT
|
||||
*,
|
||||
tags[$].value,
|
||||
3s as duration,
|
||||
1.345 AS number,
|
||||
test AS `some thing`,
|
||||
'2012-04-23T18:25:43.511Z' AS utctime,
|
||||
'2012-04-23T18:25:43.511-08:00' AS pacifictime,
|
||||
{ key: (3 + 1 + 2), other: 9 * 7, 'some thing': { otherkey: 'text', } } AS object
|
||||
FROM $param, test, temp, test:thingy, |test:10|, |test:1..10|
|
||||
WHERE IF true THEN 'YAY' ELSE 'OOPS' END
|
||||
AND (0.1341, 0.5719) INSIDE { type: 'Polygon', coordinates: [[[0.1341, 0.5719], [0.1341, 0.5719]]] }
|
||||
AND (3 + 3 * 4)=6
|
||||
AND 3 + 3 * 4 = 6
|
||||
AND ages CONTAINS 18
|
||||
AND if IS true
|
||||
SPLIT test.things
|
||||
VERSION '2019-01-01T08:00:00Z'
|
||||
TIMEOUT 2w;
|
||||
|
||||
CREATE person SET name = 'Tobie', age += 18;
|
||||
";
|
||||
let tmp = syn::parse(sql).unwrap();
|
||||
|
||||
let enc: Vec<u8> = Vec::from(&tmp);
|
||||
let dec: Query = Query::from(enc);
|
||||
assert_eq!(tmp, dec);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parser_full() {
|
||||
let sql = std::fs::read("test.surql").unwrap();
|
||||
let sql = std::str::from_utf8(&sql).unwrap();
|
||||
let res = syn::parse(sql);
|
||||
let tmp = res.unwrap();
|
||||
|
||||
let enc: Vec<u8> = Vec::from(&tmp);
|
||||
let dec: Query = Query::from(enc);
|
||||
assert_eq!(tmp, dec);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(debug_assertions, ignore)]
|
||||
fn json_benchmark() {
|
||||
// From the top level of the repository,
|
||||
// cargo test sql::syn::tests::json_benchmark --package surrealdb --lib --release -- --nocapture --exact
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
struct Data {
|
||||
boolean: bool,
|
||||
integer: i32,
|
||||
decimal: f32,
|
||||
string: String,
|
||||
inner: Option<Box<Self>>,
|
||||
inners: Vec<Self>,
|
||||
inner_map: HashMap<String, Self>,
|
||||
}
|
||||
|
||||
let inner = Data {
|
||||
boolean: true,
|
||||
integer: -1,
|
||||
decimal: 0.5,
|
||||
string: "foo".to_owned(),
|
||||
inner: None,
|
||||
inners: Vec::new(),
|
||||
inner_map: HashMap::new(),
|
||||
};
|
||||
let inners = vec![inner.clone(); 10];
|
||||
|
||||
let data = Data {
|
||||
boolean: false,
|
||||
integer: 42,
|
||||
decimal: 9000.0,
|
||||
string: "SurrealDB".to_owned(),
|
||||
inner_map: inners.iter().enumerate().map(|(i, d)| (i.to_string(), d.clone())).collect(),
|
||||
inners,
|
||||
inner: Some(Box::new(inner)),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&data).unwrap();
|
||||
let json_pretty = serde_json::to_string_pretty(&data).unwrap();
|
||||
|
||||
let benchmark = |de: fn(&str) -> Value| {
|
||||
let time = Instant::now();
|
||||
const ITERATIONS: u32 = 32;
|
||||
for _ in 0..ITERATIONS {
|
||||
std::hint::black_box(de(std::hint::black_box(&json)));
|
||||
std::hint::black_box(de(std::hint::black_box(&json_pretty)));
|
||||
}
|
||||
time.elapsed().as_secs_f32() / (2 * ITERATIONS) as f32
|
||||
};
|
||||
|
||||
println!("sql::json took {:.10}s/iter", benchmark(|s| crate::sql::json(s).unwrap()));
|
||||
}
|
||||
|
||||
/// Try parsing a query with O(n) recursion depth and expect to fail if and only if
|
||||
/// `excessive` is true.
|
||||
fn recursive(
|
||||
prefix: &str,
|
||||
recursive_start: &str,
|
||||
base: &str,
|
||||
recursive_end: &str,
|
||||
n: usize,
|
||||
excessive: bool,
|
||||
) {
|
||||
let mut sql = String::from(prefix);
|
||||
for _ in 0..n {
|
||||
sql.push_str(recursive_start);
|
||||
}
|
||||
sql.push_str(base);
|
||||
for _ in 0..n {
|
||||
sql.push_str(recursive_end);
|
||||
}
|
||||
let start = Instant::now();
|
||||
let res = query(&sql).finish();
|
||||
let elapsed = start.elapsed();
|
||||
if excessive {
|
||||
assert!(
|
||||
matches!(res, Err(ParseError::ExcessiveDepth(_))),
|
||||
"expected computation depth exceeded, got {:?}",
|
||||
res
|
||||
);
|
||||
} else {
|
||||
res.unwrap();
|
||||
}
|
||||
// The parser can terminate faster in the excessive case.
|
||||
let cutoff = if excessive {
|
||||
500
|
||||
} else {
|
||||
1000
|
||||
};
|
||||
assert!(
|
||||
elapsed < Duration::from_millis(cutoff),
|
||||
"took {}ms, previously much faster to parse {n} in debug mode",
|
||||
elapsed.as_millis()
|
||||
)
|
||||
}
|
||||
}
|
|
@ -1,140 +0,0 @@
|
|||
use super::{
|
||||
comment::{comment, mightbespace, shouldbespace},
|
||||
operator::{assigner, binary},
|
||||
IResult,
|
||||
};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{tag, tag_no_case},
|
||||
character::complete::{char, multispace1, satisfy},
|
||||
combinator::{eof, peek, value},
|
||||
sequence::{preceded, tuple},
|
||||
};
|
||||
|
||||
pub fn number(i: &str) -> IResult<&str, ()> {
|
||||
peek(alt((
|
||||
value((), multispace1), // 1 + 1
|
||||
value((), binary), // 1+1
|
||||
value((), assigner), // 1=1
|
||||
value((), comment), // 1/*comment*/
|
||||
value((), char(')')), // (1)
|
||||
value((), char(']')), // a[1]
|
||||
value((), char('}')), // {k: 1}
|
||||
value((), char('"')), // r"foo:1"
|
||||
value((), char('\'')), // r'foo:1'
|
||||
value((), char(';')), // SET a = 1;
|
||||
value((), char(',')), // [1, 2]
|
||||
value((), char('[')), // thing:1[foo]
|
||||
value((), tag("..")), // thing:1..2
|
||||
value(
|
||||
(),
|
||||
tuple((
|
||||
char('.'),
|
||||
mightbespace,
|
||||
satisfy(|x| x.is_alphanumeric() || x == '$' || x == '*'),
|
||||
)),
|
||||
), // thing:1.foo
|
||||
value((), eof), // SET a = 1
|
||||
)))(i)
|
||||
}
|
||||
|
||||
pub fn ident(i: &str) -> IResult<&str, ()> {
|
||||
peek(alt((
|
||||
value((), multispace1), // a + 1
|
||||
value((), binary), // a+1
|
||||
value((), assigner), // a+=1
|
||||
value((), comment), // a/*comment*/
|
||||
value((), char(')')), // (a)
|
||||
value((), char(']')), // foo[a]
|
||||
value((), char('}')), // {k: a}
|
||||
value((), char(';')), // SET k = a;
|
||||
value((), char(',')), // [a, b]
|
||||
value((), char('.')), // a.k
|
||||
value((), char('…')), // a…
|
||||
value((), char('[')), // a[0]
|
||||
value((), eof), // SET k = a
|
||||
)))(i)
|
||||
}
|
||||
|
||||
/// none, false, etc.
|
||||
pub fn keyword(i: &str) -> IResult<&str, ()> {
|
||||
peek(alt((
|
||||
value((), multispace1), // false || true
|
||||
value((), binary), // false||true
|
||||
value((), comment), // false/*comment*/
|
||||
value((), char(')')), // (false)
|
||||
value((), char(']')), // [WHERE k = false]
|
||||
value((), char('}')), // {k: false}
|
||||
value((), char(';')), // SET a = false;
|
||||
value((), char(',')), // [false, true]
|
||||
value((), eof), // SET a = false
|
||||
)))(i)
|
||||
}
|
||||
|
||||
pub fn duration(i: &str) -> IResult<&str, ()> {
|
||||
peek(alt((
|
||||
value((), multispace1),
|
||||
value((), binary),
|
||||
value((), assigner),
|
||||
value((), comment),
|
||||
value((), char(')')),
|
||||
value((), char(']')),
|
||||
value((), char('}')),
|
||||
value((), char(';')),
|
||||
value((), char(',')),
|
||||
value((), char('.')),
|
||||
value((), eof),
|
||||
)))(i)
|
||||
}
|
||||
|
||||
pub fn field(i: &str) -> IResult<&str, ()> {
|
||||
peek(alt((
|
||||
value(
|
||||
(),
|
||||
preceded(
|
||||
shouldbespace,
|
||||
alt((tag_no_case("FROM"), tag_no_case("TIMEOUT"), tag_no_case("PARALLEL"))),
|
||||
),
|
||||
),
|
||||
value((), char(';')),
|
||||
value((), eof),
|
||||
)))(i)
|
||||
}
|
||||
|
||||
pub fn subquery(i: &str) -> IResult<&str, ()> {
|
||||
peek(alt((
|
||||
value((), preceded(shouldbespace, tag_no_case("THEN"))),
|
||||
value((), preceded(shouldbespace, tag_no_case("ELSE"))),
|
||||
value((), preceded(shouldbespace, tag_no_case("END"))),
|
||||
|i| {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
alt((
|
||||
value((), eof),
|
||||
value((), char(';')),
|
||||
value((), char(',')),
|
||||
value((), char('}')),
|
||||
value((), char(')')),
|
||||
value((), char(']')),
|
||||
))(i)
|
||||
},
|
||||
)))(i)
|
||||
}
|
||||
|
||||
pub fn query(i: &str) -> IResult<&str, ()> {
|
||||
peek(alt((
|
||||
value((), preceded(shouldbespace, tag_no_case("THEN"))),
|
||||
value((), preceded(shouldbespace, tag_no_case("ELSE"))),
|
||||
value((), preceded(shouldbespace, tag_no_case("END"))),
|
||||
|i| {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
alt((
|
||||
value((), eof),
|
||||
value((), char(';')),
|
||||
value((), char(',')),
|
||||
value((), char('}')),
|
||||
value((), char(')')),
|
||||
value((), char(']')),
|
||||
))(i)
|
||||
},
|
||||
)))(i)
|
||||
}
|
|
@ -1,134 +0,0 @@
|
|||
pub use crate::syn::error::ParseError;
|
||||
use nom::{bytes::complete::tag_no_case, Err, Parser};
|
||||
|
||||
pub type IResult<I, O, E = ParseError<I>> = Result<(I, O), Err<E>>;
|
||||
|
||||
pub fn expected<I, O, P>(expect: &'static str, mut parser: P) -> impl FnMut(I) -> IResult<I, O>
|
||||
where
|
||||
P: Parser<I, O, ParseError<I>>,
|
||||
{
|
||||
move |input: I| match parser.parse(input) {
|
||||
Err(Err::Error(err)) => match err {
|
||||
ParseError::Base(tried) => Err(Err::Error(ParseError::Expected {
|
||||
tried,
|
||||
expected: expect,
|
||||
})),
|
||||
ParseError::Explained {
|
||||
tried,
|
||||
explained,
|
||||
} => Err(Err::Error(ParseError::ExplainedExpected {
|
||||
tried,
|
||||
expected: expect,
|
||||
explained,
|
||||
})),
|
||||
ParseError::Expected {
|
||||
tried,
|
||||
..
|
||||
} => Err(Err::Error(ParseError::Expected {
|
||||
tried,
|
||||
expected: expect,
|
||||
})),
|
||||
x => Err(Err::Error(x)),
|
||||
},
|
||||
Err(Err::Failure(err)) => match err {
|
||||
ParseError::Base(tried) => Err(Err::Failure(ParseError::Expected {
|
||||
tried,
|
||||
expected: expect,
|
||||
})),
|
||||
ParseError::Explained {
|
||||
tried,
|
||||
explained,
|
||||
} => Err(Err::Failure(ParseError::ExplainedExpected {
|
||||
tried,
|
||||
expected: expect,
|
||||
explained,
|
||||
})),
|
||||
ParseError::Expected {
|
||||
tried: input,
|
||||
..
|
||||
} => Err(Err::Failure(ParseError::Expected {
|
||||
tried: input,
|
||||
expected: expect,
|
||||
})),
|
||||
x => Err(Err::Failure(x)),
|
||||
},
|
||||
rest => rest,
|
||||
}
|
||||
}
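A short sketch of how this wrapper is used elsewhere in the old parser (for example around the ML model version numbers): the inner parser's bare error is upgraded so the final message can state what was expected at that position. The function name is hypothetical; nom's i64 parser is used as the inner parser.

// Hypothetical example: on failure the error now reads "expected a version number".
fn version_number(i: &str) -> IResult<&str, i64> {
    expected("a version number", nom::character::complete::i64)(i)
}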
|
||||
|
||||
pub trait ExplainResultExt<I, O> {
|
||||
/// A function which adds an explanation to an error if the parser fails at a place which can
|
||||
/// be parsed with the given parser.
|
||||
fn explain<P, O1>(self, explain: &'static str, condition: P) -> Self
|
||||
where
|
||||
P: Parser<I, O1, ParseError<I>>;
|
||||
}
|
||||
|
||||
impl<I: Clone, O> ExplainResultExt<I, O> for IResult<I, O> {
|
||||
fn explain<P, O1>(self, explain: &'static str, mut condition: P) -> Self
|
||||
where
|
||||
P: Parser<I, O1, ParseError<I>>,
|
||||
{
|
||||
let error = match self {
|
||||
Ok(x) => return Ok(x),
|
||||
Err(e) => e,
|
||||
};
|
||||
|
||||
let mut was_failure = false;
|
||||
let error = match error {
|
||||
Err::Error(e) => e,
|
||||
Err::Failure(e) => {
|
||||
was_failure = true;
|
||||
e
|
||||
}
|
||||
Err::Incomplete(e) => return Err(Err::Incomplete(e)),
|
||||
};
|
||||
|
||||
let new_error = match error {
|
||||
ParseError::Base(tried) => {
|
||||
if condition.parse(tried.clone()).is_ok() {
|
||||
ParseError::Explained {
|
||||
tried,
|
||||
explained: explain,
|
||||
}
|
||||
} else {
|
||||
ParseError::Base(tried)
|
||||
}
|
||||
}
|
||||
ParseError::Expected {
|
||||
tried,
|
||||
expected,
|
||||
} => {
|
||||
if condition.parse(tried.clone()).is_ok() {
|
||||
ParseError::ExplainedExpected {
|
||||
tried,
|
||||
expected,
|
||||
explained: explain,
|
||||
}
|
||||
} else {
|
||||
ParseError::Expected {
|
||||
tried,
|
||||
expected,
|
||||
}
|
||||
}
|
||||
}
|
||||
e => e,
|
||||
};
|
||||
|
||||
if was_failure {
|
||||
Err(Err::Failure(new_error))
|
||||
} else {
|
||||
Err(Err::Error(new_error))
|
||||
}
|
||||
}
|
||||
}
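A sketch of the intended call pattern, mirroring how the idiom parsers use it: when a plain-name parser fails at a position where a graph arrow would have matched, the error gains a hint telling the user why the arrow is not accepted there. The helper names val_char and dir are assumed to come from the common and operator modules of this parser.

// Hypothetical example: explain a failure at a position that parses as a graph arrow.
fn name_no_graph(i: &str) -> IResult<&str, &str> {
    let res: IResult<&str, &str> = nom::bytes::complete::take_while1(val_char)(i);
    res.explain("graph traversals are not allowed here", dir)
}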
|
||||
|
||||
pub fn expect_tag_no_case(tag: &'static str) -> impl FnMut(&str) -> IResult<&str, &str> {
|
||||
move |input: &str| match tag_no_case(tag).parse(input) {
|
||||
Result::Err(_) => Err(Err::Failure(ParseError::Expected {
|
||||
tried: input,
|
||||
expected: tag,
|
||||
})),
|
||||
rest => rest,
|
||||
}
|
||||
}
|
|
@ -1,203 +0,0 @@
|
|||
use super::{
|
||||
block::block,
|
||||
comment::mightbespace,
|
||||
common::{closechevron, expect_delimited, openchevron},
|
||||
kind::kind,
|
||||
operator,
|
||||
value::single,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::{Cast, Expression, Future, Operator, Value};
|
||||
use nom::{bytes::complete::tag, character::complete::char, combinator::cut, sequence::delimited};
|
||||
|
||||
pub fn cast(i: &str) -> IResult<&str, Cast> {
|
||||
let (i, k) = delimited(char('<'), cut(kind), char('>'))(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, v) = cut(single)(i)?;
|
||||
Ok((i, Cast(k, v)))
|
||||
}
|
||||
|
||||
pub fn unary(i: &str) -> IResult<&str, Expression> {
|
||||
let (i, o) = operator::unary(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, v) = single(i)?;
|
||||
Ok((
|
||||
i,
|
||||
Expression::Unary {
|
||||
o,
|
||||
v,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
/// Augment an existing expression
|
||||
pub(crate) fn augment(mut this: Expression, l: Value, o: Operator) -> Expression {
|
||||
match &mut this {
|
||||
Expression::Binary {
|
||||
l: left,
|
||||
o: op,
|
||||
..
|
||||
} if o.precedence() >= op.precedence() => match left {
|
||||
Value::Expression(x) => {
|
||||
*x.as_mut() = augment(std::mem::take(x), l, o);
|
||||
this
|
||||
}
|
||||
_ => {
|
||||
*left = Expression::new(l, o, std::mem::take(left)).into();
|
||||
this
|
||||
}
|
||||
},
|
||||
e => {
|
||||
let r = Value::from(std::mem::take(e));
|
||||
Expression::new(l, o, r)
|
||||
}
|
||||
}
|
||||
}
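For example, when parsing `1 + 2 * 3` the right-hand side is first parsed as the expression `2 * 3`; since `+` binds less tightly than `*` the precedence guard fails and the fallback arm simply builds `1 + (2 * 3)`. For `1 * 2 + 3` the right-hand expression is `2 + 3`, and because `*` has higher precedence the new left operand is pushed down onto the inner expression's left side, yielding `(1 * 2) + 3`, which matches the left-associative grouping checked by the tests below.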
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn binary(i: &str) -> IResult<&str, Expression> {
|
||||
use super::depth;
|
||||
use super::value;
|
||||
|
||||
let (i, l) = single(i)?;
|
||||
let (i, o) = operator::binary(i)?;
|
||||
// Make sure to dive if the query is a right-deep binary tree.
|
||||
let _diving = depth::dive(i)?;
|
||||
let (i, r) = value::value(i)?;
|
||||
let v = match r {
|
||||
Value::Expression(r) => augment(*r, l, o),
|
||||
_ => Expression::new(l, o, r),
|
||||
};
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
pub fn future(i: &str) -> IResult<&str, Future> {
|
||||
let (i, _) = expect_delimited(openchevron, tag("future"), closechevron)(i)?;
|
||||
cut(|i| {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, v) = block(i)?;
|
||||
Ok((i, Future(v)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
use crate::sql::{Block, Kind, Number};
|
||||
|
||||
#[test]
|
||||
fn cast_int() {
|
||||
let sql = "<int>1.2345";
|
||||
let res = cast(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<int> 1.2345f", format!("{}", out));
|
||||
assert_eq!(out, Cast(Kind::Int, 1.2345.into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cast_string() {
|
||||
let sql = "<string>1.2345";
|
||||
let res = cast(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<string> 1.2345f", format!("{}", out));
|
||||
assert_eq!(out, Cast(Kind::String, 1.2345.into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_statement() {
|
||||
let sql = "true AND false";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("true AND false", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_left_opened() {
|
||||
let sql = "3 * 3 * 3 = 27";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("3 * 3 * 3 = 27", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_left_closed() {
|
||||
let sql = "(3 * 3 * 3) = 27";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("(3 * 3 * 3) = 27", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_right_opened() {
|
||||
let sql = "27 = 3 * 3 * 3";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("27 = 3 * 3 * 3", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_right_closed() {
|
||||
let sql = "27 = (3 * 3 * 3)";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("27 = (3 * 3 * 3)", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_both_opened() {
|
||||
let sql = "3 * 3 * 3 = 3 * 3 * 3";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("3 * 3 * 3 = 3 * 3 * 3", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_both_closed() {
|
||||
let sql = "(3 * 3 * 3) = (3 * 3 * 3)";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("(3 * 3 * 3) = (3 * 3 * 3)", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_unary() {
|
||||
let sql = "-a";
|
||||
let res = unary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_with_unary() {
|
||||
let sql = "-(5) + 5";
|
||||
let res = binary(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn future_expression() {
|
||||
let sql = "<future> { 5 + 10 }";
|
||||
let res = future(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<future> { 5 + 10 }", format!("{}", out));
|
||||
assert_eq!(
|
||||
out,
|
||||
Future(Block::from(Value::from(Expression::Binary {
|
||||
l: Value::Number(Number::Int(5)),
|
||||
o: Operator::Add,
|
||||
r: Value::Number(Number::Int(10))
|
||||
})))
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,395 +0,0 @@
|
|||
use super::{
|
||||
comment::{block, mightbespace, slash},
|
||||
common::{
|
||||
closechevron, closeparentheses, commas, delimited_list0, delimited_list1, expect_delimited,
|
||||
openchevron, openparentheses, val_char,
|
||||
},
|
||||
depth,
|
||||
error::{expect_tag_no_case, expected},
|
||||
value::value,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::{Function, Model, Script};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{escaped, is_not, tag, take_while1},
|
||||
character::complete::{anychar, char, i64, multispace0},
|
||||
combinator::{cut, recognize},
|
||||
multi::{many0, many1, separated_list1},
|
||||
sequence::{delimited, terminated},
|
||||
};
|
||||
|
||||
const SINGLE: char = '\'';
|
||||
const SINGLE_ESC_NUL: &str = "'\\\0";
|
||||
|
||||
const DOUBLE: char = '"';
|
||||
const DOUBLE_ESC_NUL: &str = "\"\\\0";
|
||||
|
||||
const BACKTICK: char = '`';
|
||||
const BACKTICK_ESC_NUL: &str = "`\\\0";
|
||||
|
||||
const OBJECT_BEG: char = '{';
|
||||
const OBJECT_END: char = '}';
|
||||
|
||||
pub fn defined_function(i: &str) -> IResult<&str, Function> {
|
||||
alt((custom, script))(i)
|
||||
}
|
||||
|
||||
pub fn builtin_function<'a>(name: &'a str, i: &'a str) -> IResult<&'a str, Function> {
|
||||
let (i, a) = expected(
|
||||
"function arguments",
|
||||
delimited_list0(openparentheses, commas, terminated(cut(value), mightbespace), char(')')),
|
||||
)(i)?;
|
||||
Ok((i, Function::Normal(name.to_string(), a)))
|
||||
}
|
||||
|
||||
pub fn custom(i: &str) -> IResult<&str, Function> {
|
||||
let (i, _) = tag("fn::")(i)?;
|
||||
cut(|i| {
|
||||
let (i, s) = recognize(separated_list1(tag("::"), take_while1(val_char)))(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, a) = expected(
|
||||
"function arguments",
|
||||
delimited_list0(
|
||||
cut(openparentheses),
|
||||
commas,
|
||||
terminated(cut(value), mightbespace),
|
||||
char(')'),
|
||||
),
|
||||
)(i)?;
|
||||
Ok((i, Function::Custom(s.to_string(), a)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
fn script(i: &str) -> IResult<&str, Function> {
|
||||
let (i, _) = tag("function")(i)?;
|
||||
cut(|i| {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, a) = delimited_list0(
|
||||
openparentheses,
|
||||
commas,
|
||||
terminated(cut(value), mightbespace),
|
||||
char(')'),
|
||||
)(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, _) = char('{')(i)?;
|
||||
let (i, v) = script_body(i)?;
|
||||
let (i, _) = char('}')(i)?;
|
||||
Ok((i, Function::Script(v, a)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
pub fn model(i: &str) -> IResult<&str, Model> {
|
||||
let (i, _) = tag("ml::")(i)?;
|
||||
|
||||
cut(|i| {
|
||||
let (i, name) = recognize(separated_list1(tag("::"), take_while1(val_char)))(i)?;
|
||||
|
||||
let (i, version) =
|
||||
expected("a version", expect_delimited(openchevron, version, closechevron))(i)?;
|
||||
|
||||
let (i, args) = expected(
|
||||
"model arguments",
|
||||
delimited_list1(openparentheses, commas, value, closeparentheses),
|
||||
)(i)?;
|
||||
|
||||
Ok((
|
||||
i,
|
||||
Model {
|
||||
name: name.to_owned(),
|
||||
version,
|
||||
args,
|
||||
},
|
||||
))
|
||||
})(i)
|
||||
}
|
||||
|
||||
pub fn version(i: &str) -> IResult<&str, String> {
|
||||
use std::fmt::Write;
|
||||
|
||||
let (i, major) = expected("a version number", i64)(i)?;
|
||||
let (i, _) = expect_tag_no_case(".")(i)?;
|
||||
let (i, minor) = expected("a version number", i64)(i)?;
|
||||
let (i, _) = expect_tag_no_case(".")(i)?;
|
||||
let (i, patch) = expected("a version number", i64)(i)?;
|
||||
|
||||
let mut res = String::new();
|
||||
// Writing into a string can never error.
|
||||
write!(&mut res, "{major}.{minor}.{patch}").unwrap();
|
||||
Ok((i, res))
|
||||
}
|
||||
|
||||
pub fn script_body(i: &str) -> IResult<&str, Script> {
|
||||
let (i, v) = script_body_raw(i)?;
|
||||
Ok((i, Script(String::from(v))))
|
||||
}
|
||||
|
||||
fn script_body_raw(i: &str) -> IResult<&str, &str> {
|
||||
let _diving = depth::dive(i)?;
|
||||
recognize(many0(alt((
|
||||
script_body_comment,
|
||||
script_body_object,
|
||||
script_body_string,
|
||||
script_body_maths,
|
||||
script_body_other,
|
||||
))))(i)
|
||||
}
|
||||
|
||||
fn script_body_maths(i: &str) -> IResult<&str, &str> {
|
||||
recognize(tag("/"))(i)
|
||||
}
|
||||
|
||||
fn script_body_other(i: &str) -> IResult<&str, &str> {
|
||||
recognize(many1(is_not("/{}`'\"")))(i)
|
||||
}
|
||||
|
||||
fn script_body_comment(i: &str) -> IResult<&str, &str> {
|
||||
recognize(delimited(multispace0, many1(alt((block, slash))), multispace0))(i)
|
||||
}
|
||||
|
||||
fn script_body_object(i: &str) -> IResult<&str, &str> {
|
||||
recognize(delimited(char(OBJECT_BEG), script_body_raw, char(OBJECT_END)))(i)
|
||||
}
|
||||
|
||||
fn script_body_string(i: &str) -> IResult<&str, &str> {
|
||||
recognize(alt((
|
||||
|i| {
|
||||
let (i, _) = char(SINGLE)(i)?;
|
||||
let (i, _) = char(SINGLE)(i)?;
|
||||
Ok((i, ""))
|
||||
},
|
||||
|i| {
|
||||
let (i, _) = char(DOUBLE)(i)?;
|
||||
let (i, _) = char(DOUBLE)(i)?;
|
||||
Ok((i, ""))
|
||||
},
|
||||
|i| {
|
||||
let (i, _) = char(SINGLE)(i)?;
|
||||
let (i, v) = escaped(is_not(SINGLE_ESC_NUL), '\\', anychar)(i)?;
|
||||
let (i, _) = char(SINGLE)(i)?;
|
||||
Ok((i, v))
|
||||
},
|
||||
|i| {
|
||||
let (i, _) = char(DOUBLE)(i)?;
|
||||
let (i, v) = escaped(is_not(DOUBLE_ESC_NUL), '\\', anychar)(i)?;
|
||||
let (i, _) = char(DOUBLE)(i)?;
|
||||
Ok((i, v))
|
||||
},
|
||||
|i| {
|
||||
let (i, _) = char(BACKTICK)(i)?;
|
||||
let (i, v) = escaped(is_not(BACKTICK_ESC_NUL), '\\', anychar)(i)?;
|
||||
let (i, _) = char(BACKTICK)(i)?;
|
||||
Ok((i, v))
|
||||
},
|
||||
)))(i)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::super::builtin::{builtin_name, BuiltinName};
|
||||
use super::*;
|
||||
use crate::sql::Value;
|
||||
use crate::syn::{self, Parse};
|
||||
|
||||
fn function(i: &str) -> IResult<&str, Function> {
|
||||
alt((defined_function, |i| {
|
||||
let (i, name) = builtin_name(i)?;
|
||||
let BuiltinName::Function(x) = name else {
|
||||
panic!("not a function")
|
||||
};
|
||||
builtin_function(x, i)
|
||||
}))(i)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_single() {
|
||||
let sql = "count()";
|
||||
let res = function(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("count()", format!("{}", out));
|
||||
assert_eq!(out, Function::Normal(String::from("count"), vec![]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_single_not() {
|
||||
let sql = "not(10)";
|
||||
let res = function(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("not(10)", format!("{}", out));
|
||||
assert_eq!(out, Function::Normal("not".to_owned(), vec![10.into()]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_module() {
|
||||
let sql = "rand::uuid()";
|
||||
let res = function(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("rand::uuid()", format!("{}", out));
|
||||
assert_eq!(out, Function::Normal(String::from("rand::uuid"), vec![]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_arguments() {
|
||||
let sql = "string::is::numeric(null)";
|
||||
let res = function(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("string::is::numeric(NULL)", format!("{}", out));
|
||||
assert_eq!(out, Function::Normal(String::from("string::is::numeric"), vec![Value::Null]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_simple_together() {
|
||||
let sql = "function() { return 'test'; }";
|
||||
let res = function(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("function() { return 'test'; }", format!("{}", out));
|
||||
assert_eq!(out, Function::Script(Script::parse(" return 'test'; "), vec![]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_simple_whitespace() {
|
||||
let sql = "function () { return 'test'; }";
|
||||
let res = function(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("function() { return 'test'; }", format!("{}", out));
|
||||
assert_eq!(out, Function::Script(Script::parse(" return 'test'; "), vec![]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_script_expression() {
|
||||
let sql = "function() { return this.tags.filter(t => { return t.length > 3; }); }";
|
||||
let res = function(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
"function() { return this.tags.filter(t => { return t.length > 3; }); }",
|
||||
format!("{}", out)
|
||||
);
|
||||
assert_eq!(
|
||||
out,
|
||||
Function::Script(
|
||||
Script::parse(" return this.tags.filter(t => { return t.length > 3; }); "),
|
||||
vec![]
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ml_model_example() {
|
||||
let sql = r#"ml::insurance::prediction<1.0.0>({
|
||||
age: 18,
|
||||
disposable_income: "yes",
|
||||
purchased_before: true
|
||||
})
|
||||
"#;
|
||||
let res = model(sql);
|
||||
let out = res.unwrap().1.to_string();
|
||||
assert_eq!("ml::insurance::prediction<1.0.0>({ age: 18, disposable_income: 'yes', purchased_before: true })",out);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ml_model_example_in_select() {
|
||||
let sql = r"
|
||||
SELECT
|
||||
name,
|
||||
age,
|
||||
ml::insurance::prediction<1.0.0>({
|
||||
age: age,
|
||||
disposable_income: math::round(income),
|
||||
purchased_before: array::len(->purchased->property) > 0,
|
||||
}) AS likely_to_buy FROM person:tobie;
|
||||
";
|
||||
let res = syn::parse(sql);
|
||||
let out = res.unwrap().to_string();
|
||||
assert_eq!(
|
||||
"SELECT name, age, ml::insurance::prediction<1.0.0>({ age: age, disposable_income: math::round(income), purchased_before: array::len(->purchased->property) > 0 }) AS likely_to_buy FROM person:tobie;",
|
||||
out,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ml_model_with_mutiple_arguments() {
|
||||
let sql = "ml::insurance::prediction<1.0.0>(1,2,3,4,);";
|
||||
let res = syn::parse(sql);
|
||||
let out = res.unwrap().to_string();
|
||||
assert_eq!("ml::insurance::prediction<1.0.0>(1,2,3,4);", out,);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn script_basic() {
|
||||
let sql = "return true;";
|
||||
let res = script_body(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("return true;", format!("{}", out));
|
||||
assert_eq!(out, Script::from("return true;"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn script_object() {
|
||||
let sql = "return { test: true, something: { other: true } };";
|
||||
let res = script_body(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("return { test: true, something: { other: true } };", format!("{}", out));
|
||||
assert_eq!(out, Script::from("return { test: true, something: { other: true } };"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn script_closure() {
|
||||
let sql = "return this.values.map(v => `This value is ${Number(v * 3)}`);";
|
||||
let res = script_body(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
"return this.values.map(v => `This value is ${Number(v * 3)}`);",
|
||||
format!("{}", out)
|
||||
);
|
||||
assert_eq!(
|
||||
out,
|
||||
Script::from("return this.values.map(v => `This value is ${Number(v * 3)}`);")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn script_complex() {
|
||||
let sql = r#"return { test: true, some: { object: "some text with uneven {{{ {} \" brackets", else: false } };"#;
|
||||
let res = script_body(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
r#"return { test: true, some: { object: "some text with uneven {{{ {} \" brackets", else: false } };"#,
|
||||
format!("{}", out)
|
||||
);
|
||||
assert_eq!(
|
||||
out,
|
||||
Script::from(
|
||||
r#"return { test: true, some: { object: "some text with uneven {{{ {} \" brackets", else: false } };"#
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn script_advanced() {
|
||||
let sql = r#"
|
||||
// {
|
||||
// }
|
||||
// {}
|
||||
// { }
|
||||
/* { */
|
||||
/* } */
|
||||
/* {} */
|
||||
/* { } */
|
||||
/* {{{ $ }} */
|
||||
/* /* /* /* */
|
||||
let x = {};
|
||||
let x = { };
|
||||
let x = '{';
|
||||
let x = "{";
|
||||
let x = '}';
|
||||
let x = "}";
|
||||
let x = '} } { {';
|
||||
let x = 3 / 4 * 2;
|
||||
let x = /* something */ 45 + 2;
|
||||
"#;
|
||||
let res = script_body(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out));
|
||||
assert_eq!(out, Script::from(sql));
|
||||
}
|
||||
}
|
|
@ -1,634 +0,0 @@
|
|||
use super::{
|
||||
comment::{mightbespace, shouldbespace},
|
||||
common::{
|
||||
closebracket, closeparentheses, commas, expect_delimited, openbracket, openparentheses,
|
||||
},
|
||||
ending,
|
||||
error::{expected, ExplainResultExt},
|
||||
literal::{ident, number, param, strand, table, tables},
|
||||
operator::dir,
|
||||
part::cond,
|
||||
value, IResult,
|
||||
};
|
||||
use crate::sql::{Cond, Fields, Graph, Idiom, Idioms, Part, Tables, Value};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{tag, tag_no_case},
|
||||
character::complete::char,
|
||||
combinator::{self, cut, map, not, opt, peek},
|
||||
multi::{many0, many1, separated_list1},
|
||||
sequence::{preceded, terminated},
|
||||
};
|
||||
|
||||
pub fn locals(i: &str) -> IResult<&str, Idioms> {
|
||||
let (i, v) = separated_list1(commas, local)(i)?;
|
||||
Ok((i, Idioms(v)))
|
||||
}
|
||||
|
||||
pub fn graph(i: &str) -> IResult<&str, Graph> {
|
||||
let (i, dir) = dir(i)?;
|
||||
let (i, (what, cond, alias)) = alt((simple, custom))(i)?;
|
||||
Ok((
|
||||
i,
|
||||
Graph {
|
||||
dir,
|
||||
expr: Fields::all(),
|
||||
what,
|
||||
cond,
|
||||
alias,
|
||||
split: None,
|
||||
group: None,
|
||||
order: None,
|
||||
limit: None,
|
||||
start: None,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
fn simple(i: &str) -> IResult<&str, (Tables, Option<Cond>, Option<Idiom>)> {
|
||||
let (i, w) = alt((any, one))(i)?;
|
||||
Ok((i, (w, None, None)))
|
||||
}
|
||||
|
||||
fn custom(i: &str) -> IResult<&str, (Tables, Option<Cond>, Option<Idiom>)> {
|
||||
expect_delimited(
|
||||
openparentheses,
|
||||
|i| {
|
||||
let (i, w) = alt((any, tables))(i)?;
|
||||
let (i, c) = opt(|i| {
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cond(i)?;
|
||||
Ok((i, v))
|
||||
})(i)?;
|
||||
let (i, a) = opt(|i| {
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, _) = tag_no_case("AS")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = plain(i)?;
|
||||
Ok((i, v))
|
||||
})(i)?;
|
||||
Ok((i, (w, c, a)))
|
||||
},
|
||||
closeparentheses,
|
||||
)(i)
|
||||
}
|
||||
|
||||
fn one(i: &str) -> IResult<&str, Tables> {
|
||||
let (i, v) = table(i)?;
|
||||
Ok((i, Tables::from(v)))
|
||||
}
|
||||
|
||||
fn any(i: &str) -> IResult<&str, Tables> {
|
||||
map(char('?'), |_| Tables::default())(i)
|
||||
}
|
||||
|
||||
/// Used in DEFINE FIELD and DEFINE INDEX clauses
|
||||
pub fn local(i: &str) -> IResult<&str, Idiom> {
|
||||
expected("a local idiom", |i| {
|
||||
let (i, p) = first(i).explain("graphs are not allowed in a local idioms.", dir)?;
|
||||
let (i, mut v) = many0(local_part)(i)?;
|
||||
// Flatten is only allowed at the end
|
||||
let (i, flat) = opt(flatten)(i)?;
|
||||
if let Some(p) = flat {
|
||||
v.push(p);
|
||||
}
|
||||
v.insert(0, p);
|
||||
Ok((i, Idiom::from(v)))
|
||||
})(i)
|
||||
}
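// Added note (the example idioms below are made up for illustration): `local`
// accepts a leading field followed by `.field`, `[*]` or `[<index>]` parts,
// with a single flatten (`...` / `…`) allowed only as the final part, e.g.
// `emails[*].address` or `tags[0]…`. Graph arrows such as `->likes` and `[$]`
// selectors are rejected with an explanatory error.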
|
||||
|
||||
/// Used in SPLIT, ORDER, and GROUP clauses
|
||||
///
|
||||
/// Doesn't allow flatten, computed values, or where selectors.
|
||||
pub fn basic(i: &str) -> IResult<&str, Idiom> {
|
||||
use super::depth;
|
||||
// Limit recursion depth.
|
||||
let _diving = depth::dive(i)?;
|
||||
expected("a basic idiom", |i| {
|
||||
let (i, p) = first(i).explain("graphs are not allowed in a basic idioms.", dir)?;
|
||||
let (i, mut v) = many0(basic_part)(i)?;
|
||||
v.insert(0, p);
|
||||
Ok((i, Idiom::from(v)))
|
||||
})(i)
|
||||
}
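// Added note (derived from the code above): `basic` limits recursion depth via
// `depth::dive` and refuses flattening (`..`), computed values (e.g. `['key']`
// or `[$param]`) and where selectors (`[WHERE ...]`), each with a dedicated
// explanation message.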
|
||||
|
||||
/// A simple idiom with one or more parts
|
||||
pub fn plain(i: &str) -> IResult<&str, Idiom> {
|
||||
expected("a idiom", |i| {
|
||||
let (i, p) = alt((first, map(graph, Part::Graph)))(i)?;
|
||||
let (i, mut v) = many0(part)(i)?;
|
||||
v.insert(0, p);
|
||||
Ok((i, Idiom::from(v)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
/// Reparse a value which might be part of an idiom.
|
||||
pub fn reparse_idiom_start(start: Value, i: &str) -> IResult<&str, Value> {
|
||||
if start.can_start_idiom() {
|
||||
if let (i, Some(mut parts)) = opt(many1(part))(i)? {
|
||||
let start = Part::Start(start);
|
||||
parts.insert(0, start);
|
||||
let v = Value::from(Idiom::from(parts));
|
||||
return Ok((i, v));
|
||||
}
|
||||
}
|
||||
Ok((i, start))
|
||||
}
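// For illustration (input string is made up, see the
// `idiom_start_thing_remote_traversal` test below): this is how a value such
// as the record id in `person:tobie.friend->like->person` ends up as an
// idiom; the record id is parsed as a plain Value first, and the remaining
// `.friend->like->person` parts are then attached behind a `Part::Start`.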
|
||||
|
||||
/// A complex idiom which either starts with a graph part or has multiple parts, excluding idioms which start with a value.
|
||||
pub fn multi_without_start(i: &str) -> IResult<&str, Idiom> {
|
||||
alt((
|
||||
|i| {
|
||||
let (i, p) = graph(i)?;
|
||||
let (i, mut v) = many0(part)(i)?;
|
||||
v.insert(0, Part::Graph(p));
|
||||
Ok((i, Idiom::from(v)))
|
||||
},
|
||||
|i| {
|
||||
let (i, p) = first(i)?;
|
||||
let (i, mut v) = many1(part)(i)?;
|
||||
v.insert(0, p);
|
||||
Ok((i, Idiom::from(v)))
|
||||
},
|
||||
))(i)
|
||||
}
|
||||
|
||||
/// A simple field-based idiom
|
||||
pub fn path(i: &str) -> IResult<&str, Idiom> {
|
||||
let (i, p) = first(i)?;
|
||||
let (i, mut v) = many0(part)(i)?;
|
||||
v.insert(0, p);
|
||||
Ok((i, Idiom::from(v)))
|
||||
}
|
||||
|
||||
/// A full complex idiom with any number of parts
|
||||
#[cfg(test)]
|
||||
pub fn idiom(i: &str) -> IResult<&str, Idiom> {
|
||||
use nom::combinator::fail;
|
||||
|
||||
alt((
|
||||
plain,
|
||||
alt((multi_without_start, |i| {
|
||||
let (i, v) = value::value(i)?;
|
||||
let (i, v) = reparse_idiom_start(v, i)?;
|
||||
if let Value::Idiom(x) = v {
|
||||
return Ok((i, x));
|
||||
}
|
||||
fail(i)
|
||||
})),
|
||||
))(i)
|
||||
}
|
||||
|
||||
pub fn part(i: &str) -> IResult<&str, Part> {
|
||||
alt((
|
||||
flatten,
|
||||
preceded(tag("."), cut(dot_part)),
|
||||
expect_delimited(openbracket, cut(bracketed_part), closebracket),
|
||||
map(graph, Part::Graph),
|
||||
))(i)
|
||||
}
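// Summary of the part forms parsed above (examples are illustrative):
//   `...` / `…`                -> Part::Flatten
//   `.field` or `.*`           -> Part::Field / Part::All
//   `[*]`, `[$]`, `[0]`, `[WHERE ...]`, `['key']`, `[$param]` -> bracketed parts
//   `->table` / `<-table`      -> Part::Graph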
|
||||
|
||||
pub fn flatten(i: &str) -> IResult<&str, Part> {
|
||||
combinator::value(Part::Flatten, alt((tag("..."), tag("…"))))(i)
|
||||
}
|
||||
|
||||
pub fn local_part(i: &str) -> IResult<&str, Part> {
|
||||
// Can't cut the dot part since it might be part of the flatten at the end.
|
||||
alt((
|
||||
preceded(tag("."), dot_part),
|
||||
expect_delimited(openbracket, cut(local_bracketed_part), closebracket),
|
||||
// TODO explain
|
||||
))(i)
|
||||
}
|
||||
|
||||
pub fn basic_part(i: &str) -> IResult<&str, Part> {
|
||||
alt((
|
||||
preceded(
|
||||
tag("."),
|
||||
cut(|i| dot_part(i).explain("flattening is not allowed with a basic idiom", tag(".."))),
|
||||
),
|
||||
|s| {
|
||||
let (i, _) = openbracket(s)?;
|
||||
let (i, v) = expected(
|
||||
"$, * or a number",
|
||||
cut(terminated(basic_bracketed_part, closebracket)),
|
||||
)(i)
|
||||
.explain("basic idioms don't allow computed values", bracketed_value)
|
||||
.explain("basic idioms don't allow where selectors", bracketed_where)?;
|
||||
Ok((i, v))
|
||||
},
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn dot_part(i: &str) -> IResult<&str, Part> {
|
||||
alt((
|
||||
combinator::value(Part::All, tag("*")),
|
||||
map(terminated(ident, ending::ident), Part::Field),
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn basic_bracketed_part(i: &str) -> IResult<&str, Part> {
|
||||
alt((
|
||||
combinator::value(Part::All, tag("*")),
|
||||
combinator::value(Part::Last, tag("$")),
|
||||
map(number, Part::Index),
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn local_bracketed_part(i: &str) -> IResult<&str, Part> {
|
||||
alt((combinator::value(Part::All, tag("*")), map(number, Part::Index)))(i)
|
||||
.explain("using `[$]` in a local idiom is not allowed", tag("$"))
|
||||
}
|
||||
|
||||
fn bracketed_part(i: &str) -> IResult<&str, Part> {
|
||||
alt((
|
||||
combinator::value(Part::All, tag("*")),
|
||||
combinator::value(Part::Last, terminated(tag("$"), peek(closebracket))),
|
||||
map(number, Part::Index),
|
||||
bracketed_where,
|
||||
bracketed_value,
|
||||
))(i)
|
||||
}
|
||||
|
||||
pub fn first(i: &str) -> IResult<&str, Part> {
|
||||
let (i, _) = peek(not(number))(i)?;
|
||||
let (i, v) = ident(i)?;
|
||||
let (i, _) = ending::ident(i)?;
|
||||
Ok((i, Part::Field(v)))
|
||||
}
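// A minimal sketch (not part of the original test suite; the inputs are made
// up) showing the effect of the `peek(not(number))` guard above: a bare
// numeric literal is not accepted as the start of an idiom.
#[cfg(test)]
#[test]
fn first_rejects_leading_numbers_sketch() {
	assert!(first("13.495").is_err());
	assert!(first("test").is_ok());
}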
|
||||
|
||||
pub fn bracketed_where(i: &str) -> IResult<&str, Part> {
|
||||
let (i, _) = alt((
|
||||
terminated(tag("?"), mightbespace),
|
||||
terminated(tag_no_case("WHERE"), shouldbespace),
|
||||
))(i)?;
|
||||
|
||||
let (i, v) = value::value(i)?;
|
||||
Ok((i, Part::Where(v)))
|
||||
}
|
||||
|
||||
pub fn bracketed_value(i: &str) -> IResult<&str, Part> {
|
||||
let (i, v) =
|
||||
alt((map(strand, Value::Strand), map(param, Value::Param), map(basic, Value::Idiom)))(i)?;
|
||||
Ok((i, Part::Value(v)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::sql::{Dir, Expression, Id, Number, Param, Strand, Table, Thing};
|
||||
use crate::syn::Parse;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn graph_in() {
|
||||
let sql = "<-likes";
|
||||
let res = graph(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<-likes", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn graph_out() {
|
||||
let sql = "->likes";
|
||||
let res = graph(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("->likes", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn graph_both() {
|
||||
let sql = "<->likes";
|
||||
let res = graph(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<->likes", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn graph_multiple() {
|
||||
let sql = "->(likes, follows)";
|
||||
let res = graph(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("->(likes, follows)", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn graph_aliases() {
|
||||
let sql = "->(likes, follows AS connections)";
|
||||
let res = graph(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("->(likes, follows AS connections)", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn graph_conditions() {
|
||||
let sql = "->(likes, follows WHERE influencer = true)";
|
||||
let res = graph(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("->(likes, follows WHERE influencer = true)", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn graph_conditions_aliases() {
|
||||
let sql = "->(likes, follows WHERE influencer = true AS connections)";
|
||||
let res = graph(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("->(likes, follows WHERE influencer = true AS connections)", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_number() {
|
||||
let sql = "13.495";
|
||||
let res = idiom(sql);
|
||||
assert!(res.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_normal() {
|
||||
let sql = "test";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Idiom(vec![Part::from("test")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_quoted_backtick() {
|
||||
let sql = "`test`";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Idiom(vec![Part::from("test")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_quoted_brackets() {
|
||||
let sql = "⟨test⟩";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Idiom(vec![Part::from("test")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_nested() {
|
||||
let sql = "test.temp";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test.temp", format!("{}", out));
|
||||
assert_eq!(out, Idiom(vec![Part::from("test"), Part::from("temp")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_nested_quoted() {
|
||||
let sql = "test.`some key`";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test.`some key`", format!("{}", out));
|
||||
assert_eq!(out, Idiom(vec![Part::from("test"), Part::from("some key")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_nested_array_all() {
|
||||
let sql = "test.temp[*]";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test.temp[*]", format!("{}", out));
|
||||
assert_eq!(out, Idiom(vec![Part::from("test"), Part::from("temp"), Part::All]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_nested_array_last() {
|
||||
let sql = "test.temp[$]";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test.temp[$]", format!("{}", out));
|
||||
assert_eq!(out, Idiom(vec![Part::from("test"), Part::from("temp"), Part::Last]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_nested_array_value() {
|
||||
let sql = "test.temp[*].text";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test.temp[*].text", format!("{}", out));
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![Part::from("test"), Part::from("temp"), Part::All, Part::from("text")])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_nested_array_question() {
|
||||
let sql = "test.temp[? test = true].text";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test.temp[WHERE test = true].text", format!("{}", out));
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![
|
||||
Part::from("test"),
|
||||
Part::from("temp"),
|
||||
Part::Where(Value::from(Expression::parse("test = true"))),
|
||||
Part::from("text")
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_nested_array_condition() {
|
||||
let sql = "test.temp[WHERE test = true].text";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test.temp[WHERE test = true].text", format!("{}", out));
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![
|
||||
Part::from("test"),
|
||||
Part::from("temp"),
|
||||
Part::Where(Value::from(Expression::parse("test = true"))),
|
||||
Part::from("text")
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_start_param_local_field() {
|
||||
let sql = "$test.temporary[0].embedded…";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("$test.temporary[0].embedded…", format!("{}", out));
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![
|
||||
Part::Start(Param::from("test").into()),
|
||||
Part::from("temporary"),
|
||||
Part::Index(Number::Int(0)),
|
||||
Part::from("embedded"),
|
||||
Part::Flatten,
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_start_thing_remote_traversal() {
|
||||
let sql = "person:test.friend->like->person";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("person:test.friend->like->person", format!("{}", out));
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![
|
||||
Part::Start(Thing::from(("person", "test")).into()),
|
||||
Part::from("friend"),
|
||||
Part::Graph(Graph {
|
||||
dir: Dir::Out,
|
||||
expr: Fields::all(),
|
||||
what: Table::from("like").into(),
|
||||
cond: None,
|
||||
alias: None,
|
||||
split: None,
|
||||
group: None,
|
||||
order: None,
|
||||
limit: None,
|
||||
start: None,
|
||||
}),
|
||||
Part::Graph(Graph {
|
||||
dir: Dir::Out,
|
||||
expr: Fields::all(),
|
||||
what: Table::from("person").into(),
|
||||
cond: None,
|
||||
alias: None,
|
||||
split: None,
|
||||
group: None,
|
||||
order: None,
|
||||
limit: None,
|
||||
start: None,
|
||||
}),
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_all() {
|
||||
let sql = "[*]";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("[*]", format!("{}", out));
|
||||
assert_eq!(out, Part::All);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_last() {
|
||||
let sql = "[$]";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("[$]", format!("{}", out));
|
||||
assert_eq!(out, Part::Last);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_param() {
|
||||
let sql = "[$param]";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("[$param]", format!("{}", out));
|
||||
assert_eq!(out, Part::Value(Value::Param(Param::from("param"))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_flatten() {
|
||||
let sql = "...";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("…", format!("{}", out));
|
||||
assert_eq!(out, Part::Flatten);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_flatten_ellipsis() {
|
||||
let sql = "…";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("…", format!("{}", out));
|
||||
assert_eq!(out, Part::Flatten);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_number() {
|
||||
let sql = "[0]";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("[0]", format!("{}", out));
|
||||
assert_eq!(out, Part::Index(Number::from(0)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_expression_question() {
|
||||
let sql = "[?test = true]";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("[WHERE test = true]", format!("{}", out));
|
||||
assert_eq!(out, Part::Where(Value::from(Expression::parse("test = true"))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn part_expression_condition() {
|
||||
let sql = "[WHERE test = true]";
|
||||
let res = part(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("[WHERE test = true]", format!("{}", out));
|
||||
assert_eq!(out, Part::Where(Value::from(Expression::parse("test = true"))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_thing_number() {
|
||||
let sql = "test:1.foo";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![
|
||||
Part::Start(Value::Thing(Thing {
|
||||
tb: "test".to_owned(),
|
||||
id: Id::Number(1),
|
||||
})),
|
||||
Part::from("foo"),
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_thing_index() {
|
||||
let sql = "test:1['foo']";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![
|
||||
Part::Start(Value::Thing(Thing {
|
||||
tb: "test".to_owned(),
|
||||
id: Id::Number(1),
|
||||
})),
|
||||
Part::Value(Value::Strand(Strand("foo".to_owned()))),
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idiom_thing_all() {
|
||||
let sql = "test:1.*";
|
||||
let res = idiom(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
out,
|
||||
Idiom(vec![
|
||||
Part::Start(Value::Thing(Thing {
|
||||
tb: "test".to_owned(),
|
||||
id: Id::Number(1),
|
||||
})),
|
||||
Part::All
|
||||
])
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,418 +0,0 @@
|
|||
use super::{
|
||||
comment::mightbespace,
|
||||
common::{
|
||||
closeparentheses, commas, delimited_list1, expect_terminator, openparentheses, verbar,
|
||||
},
|
||||
literal::table,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::Kind;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::tag,
|
||||
character::complete::{char, u64},
|
||||
combinator::{cut, map, opt, value},
|
||||
multi::separated_list1,
|
||||
};
|
||||
|
||||
pub fn kind(i: &str) -> IResult<&str, Kind> {
|
||||
alt((any, either, option))(i)
|
||||
}
|
||||
|
||||
pub fn any(i: &str) -> IResult<&str, Kind> {
|
||||
value(Kind::Any, tag("any"))(i)
|
||||
}
|
||||
|
||||
pub fn simple(i: &str) -> IResult<&str, Kind> {
|
||||
alt((
|
||||
value(Kind::Bool, tag("bool")),
|
||||
value(Kind::Null, tag("null")),
|
||||
value(Kind::Bytes, tag("bytes")),
|
||||
value(Kind::Datetime, tag("datetime")),
|
||||
value(Kind::Decimal, tag("decimal")),
|
||||
value(Kind::Duration, tag("duration")),
|
||||
value(Kind::Float, tag("float")),
|
||||
value(Kind::Int, tag("int")),
|
||||
value(Kind::Number, tag("number")),
|
||||
value(Kind::Object, tag("object")),
|
||||
value(Kind::Point, tag("point")),
|
||||
value(Kind::String, tag("string")),
|
||||
value(Kind::Uuid, tag("uuid")),
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn either(i: &str) -> IResult<&str, Kind> {
|
||||
let (i, mut v) = separated_list1(verbar, alt((simple, geometry, record, array, set)))(i)?;
|
||||
match v.len() {
|
||||
1 => Ok((i, v.remove(0))),
|
||||
_ => Ok((i, Kind::Either(v))),
|
||||
}
|
||||
}
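// A minimal sketch (not part of the original test suite) showing that a
// single kind is returned unwrapped, while `|` produces `Kind::Either`.
#[cfg(test)]
#[test]
fn either_unwraps_single_kind_sketch() {
	assert_eq!(either("int").unwrap().1, Kind::Int);
	assert_eq!(either("int | float").unwrap().1, Kind::Either(vec![Kind::Int, Kind::Float]));
}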
|
||||
|
||||
fn option(i: &str) -> IResult<&str, Kind> {
|
||||
let (i, _) = tag("option")(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
cut(|i| {
|
||||
let (i, s) = tag("<")(i)?;
|
||||
let (i, v) = map(alt((either, simple, geometry, record, array, set)), Box::new)(i)?;
|
||||
let (i, _) = expect_terminator(s, char('>'))(i)?;
|
||||
Ok((i, Kind::Option(v)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
fn record(i: &str) -> IResult<&str, Kind> {
|
||||
let (i, _) = tag("record")(i)?;
|
||||
let (i, v) = opt(|i| {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
alt((delimited_list1(openparentheses, commas, cut(table), closeparentheses), |i| {
|
||||
let (i, s) = tag("<")(i)?;
|
||||
let (i, v) = separated_list1(verbar, table)(i)?;
|
||||
let (i, _) = expect_terminator(s, char('>'))(i)?;
|
||||
Ok((i, v))
|
||||
}))(i)
|
||||
})(i)?;
|
||||
Ok((i, Kind::Record(v.unwrap_or_default())))
|
||||
}
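// Added note (table names are illustrative): both `record(person, animal)`
// and `record<person | animal>` are accepted by the parser above, and a bare
// `record` yields `Kind::Record(vec![])`, i.e. a record of any table.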
|
||||
|
||||
fn geometry(i: &str) -> IResult<&str, Kind> {
|
||||
let (i, _) = tag("geometry")(i)?;
|
||||
let (i, v) =
|
||||
opt(alt((delimited_list1(openparentheses, commas, cut(geo), closeparentheses), |i| {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, s) = tag("<")(i)?;
|
||||
let (i, v) = separated_list1(verbar, cut(geo))(i)?;
|
||||
let (i, _) = expect_terminator(s, char('>'))(i)?;
|
||||
Ok((i, v))
|
||||
})))(i)?;
|
||||
Ok((i, Kind::Geometry(v.unwrap_or_default())))
|
||||
}
|
||||
|
||||
fn array(i: &str) -> IResult<&str, Kind> {
|
||||
let (i, _) = tag("array")(i)?;
|
||||
let (i, v) = opt(|i| {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, s) = tag("<")(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, k) = kind(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, l) = opt(|i| {
|
||||
let (i, _) = char(',')(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, l) = u64(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, l))
|
||||
})(i)?;
|
||||
|
||||
let (i, _) = expect_terminator(s, char('>'))(i)?;
|
||||
Ok((i, (k, l)))
|
||||
})(i)?;
|
||||
Ok((
|
||||
i,
|
||||
match v {
|
||||
Some((k, l)) => Kind::Array(Box::new(k), l),
|
||||
None => Kind::Array(Box::new(Kind::Any), None),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
fn set(i: &str) -> IResult<&str, Kind> {
|
||||
let (i, _) = tag("set")(i)?;
|
||||
let (i, v) = opt(|i| {
|
||||
let (i, s) = tag("<")(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, k) = kind(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, l) = opt(|i| {
|
||||
let (i, _) = char(',')(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, l) = u64(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, l))
|
||||
})(i)?;
|
||||
let (i, _) = expect_terminator(s, char('>'))(i)?;
|
||||
Ok((i, (k, l)))
|
||||
})(i)?;
|
||||
Ok((
|
||||
i,
|
||||
match v {
|
||||
Some((k, l)) => Kind::Set(Box::new(k), l),
|
||||
None => Kind::Set(Box::new(Kind::Any), None),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
fn geo(i: &str) -> IResult<&str, String> {
|
||||
map(
|
||||
alt((
|
||||
tag("feature"),
|
||||
tag("point"),
|
||||
tag("line"),
|
||||
tag("polygon"),
|
||||
tag("multipoint"),
|
||||
tag("multiline"),
|
||||
tag("multipolygon"),
|
||||
tag("collection"),
|
||||
)),
|
||||
String::from,
|
||||
)(i)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
use crate::sql::table::Table;
|
||||
|
||||
#[test]
|
||||
fn kind_any() {
|
||||
let sql = "any";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("any", format!("{}", out));
|
||||
assert_eq!(out, Kind::Any);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_null() {
|
||||
let sql = "null";
|
||||
let res = kind(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("null", format!("{}", out));
|
||||
assert_eq!(out, Kind::Null);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_bool() {
|
||||
let sql = "bool";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("bool", format!("{}", out));
|
||||
assert_eq!(out, Kind::Bool);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_bytes() {
|
||||
let sql = "bytes";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("bytes", format!("{}", out));
|
||||
assert_eq!(out, Kind::Bytes);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_datetime() {
|
||||
let sql = "datetime";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("datetime", format!("{}", out));
|
||||
assert_eq!(out, Kind::Datetime);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_decimal() {
|
||||
let sql = "decimal";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("decimal", format!("{}", out));
|
||||
assert_eq!(out, Kind::Decimal);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_duration() {
|
||||
let sql = "duration";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("duration", format!("{}", out));
|
||||
assert_eq!(out, Kind::Duration);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_float() {
|
||||
let sql = "float";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("float", format!("{}", out));
|
||||
assert_eq!(out, Kind::Float);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_number() {
|
||||
let sql = "number";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("number", format!("{}", out));
|
||||
assert_eq!(out, Kind::Number);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_object() {
|
||||
let sql = "object";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("object", format!("{}", out));
|
||||
assert_eq!(out, Kind::Object);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_point() {
|
||||
let sql = "point";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("point", format!("{}", out));
|
||||
assert_eq!(out, Kind::Point);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_string() {
|
||||
let sql = "string";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("string", format!("{}", out));
|
||||
assert_eq!(out, Kind::String);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_uuid() {
|
||||
let sql = "uuid";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("uuid", format!("{}", out));
|
||||
assert_eq!(out, Kind::Uuid);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_either() {
|
||||
let sql = "int | float";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("int | float", format!("{}", out));
|
||||
assert_eq!(out, Kind::Either(vec![Kind::Int, Kind::Float]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_record_any() {
|
||||
let sql = "record";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("record", format!("{}", out));
|
||||
assert_eq!(out, Kind::Record(vec![]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_record_one() {
|
||||
let sql = "record<person>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("record<person>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Record(vec![Table::from("person")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_record_many() {
|
||||
let sql = "record<person | animal>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("record<person | animal>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Record(vec![Table::from("person"), Table::from("animal")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_geometry_any() {
|
||||
let sql = "geometry";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("geometry", format!("{}", out));
|
||||
assert_eq!(out, Kind::Geometry(vec![]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_geometry_one() {
|
||||
let sql = "geometry<point>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("geometry<point>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Geometry(vec![String::from("point")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_geometry_many() {
|
||||
let sql = "geometry<point | multipoint>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("geometry<point | multipoint>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Geometry(vec![String::from("point"), String::from("multipoint")]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_option_one() {
|
||||
let sql = "option<int>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("option<int>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Option(Box::new(Kind::Int)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_option_many() {
|
||||
let sql = "option<int | float>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("option<int | float>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Option(Box::new(Kind::Either(vec![Kind::Int, Kind::Float]))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_array_any() {
|
||||
let sql = "array";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("array", format!("{}", out));
|
||||
assert_eq!(out, Kind::Array(Box::new(Kind::Any), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_array_some() {
|
||||
let sql = "array<float>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("array<float>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Array(Box::new(Kind::Float), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_array_some_size() {
|
||||
let sql = "array<float, 10>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("array<float, 10>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Array(Box::new(Kind::Float), Some(10)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_set_any() {
|
||||
let sql = "set";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("set", format!("{}", out));
|
||||
assert_eq!(out, Kind::Set(Box::new(Kind::Any), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_set_some() {
|
||||
let sql = "set<float>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("set<float>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Set(Box::new(Kind::Float), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn kind_set_some_size() {
|
||||
let sql = "set<float, 10>";
|
||||
let res = kind(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("set<float, 10>", format!("{}", out));
|
||||
assert_eq!(out, Kind::Set(Box::new(Kind::Float), Some(10)));
|
||||
}
|
||||
}
|
|
@ -1,22 +0,0 @@
|
|||
use super::super::IResult;
|
||||
use crate::sql::Algorithm;
|
||||
use nom::{branch::alt, bytes::complete::tag, combinator::value};
|
||||
|
||||
pub fn algorithm(i: &str) -> IResult<&str, Algorithm> {
|
||||
alt((
|
||||
value(Algorithm::EdDSA, tag("EDDSA")),
|
||||
value(Algorithm::Es256, tag("ES256")),
|
||||
value(Algorithm::Es384, tag("ES384")),
|
||||
value(Algorithm::Es512, tag("ES512")),
|
||||
value(Algorithm::Hs256, tag("HS256")),
|
||||
value(Algorithm::Hs384, tag("HS384")),
|
||||
value(Algorithm::Hs512, tag("HS512")),
|
||||
value(Algorithm::Ps256, tag("PS256")),
|
||||
value(Algorithm::Ps384, tag("PS384")),
|
||||
value(Algorithm::Ps512, tag("PS512")),
|
||||
value(Algorithm::Rs256, tag("RS256")),
|
||||
value(Algorithm::Rs384, tag("RS384")),
|
||||
value(Algorithm::Rs512, tag("RS512")),
|
||||
value(Algorithm::Jwks, tag("JWKS")), // Not an algorithm.
|
||||
))(i)
|
||||
}
|
|
@ -1,298 +0,0 @@
|
|||
use super::super::{
|
||||
common::{take_digits, take_digits_range, take_u32_len},
|
||||
error::expected,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::Datetime;
|
||||
use chrono::{FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, Offset, TimeZone, Utc};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::tag,
|
||||
character::complete::char,
|
||||
combinator::{cut, map},
|
||||
error::ErrorKind,
|
||||
error_position,
|
||||
sequence::delimited,
|
||||
Err,
|
||||
};
|
||||
|
||||
pub fn datetime(i: &str) -> IResult<&str, Datetime> {
|
||||
expected("a datetime", alt((datetime_single, datetime_double)))(i)
|
||||
}
|
||||
|
||||
fn datetime_single(i: &str) -> IResult<&str, Datetime> {
|
||||
alt((
|
||||
delimited(tag("d\'"), cut(datetime_raw), cut(char('\''))),
|
||||
delimited(char('\''), datetime_raw, char('\'')),
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn datetime_double(i: &str) -> IResult<&str, Datetime> {
|
||||
alt((
|
||||
delimited(tag("d\""), cut(datetime_raw), cut(char('"'))),
|
||||
delimited(char('"'), datetime_raw, char('"')),
|
||||
))(i)
|
||||
}
|
||||
|
||||
pub fn datetime_all_raw(i: &str) -> IResult<&str, Datetime> {
|
||||
alt((nano, time, date))(i)
|
||||
}
|
||||
|
||||
fn datetime_raw(i: &str) -> IResult<&str, Datetime> {
|
||||
alt((nano, time))(i)
|
||||
}
|
||||
|
||||
fn date(i: &str) -> IResult<&str, Datetime> {
|
||||
let (i, year) = year(i)?;
|
||||
let (i, _) = char('-')(i)?;
|
||||
let (i, mon) = month(i)?;
|
||||
let (i, _) = char('-')(i)?;
|
||||
let (i, day) = day(i)?;
|
||||
convert(i, (year, mon, day), (0, 0, 0, 0), Utc.fix())
|
||||
}
|
||||
|
||||
fn time(i: &str) -> IResult<&str, Datetime> {
|
||||
let (i, year) = year(i)?;
|
||||
let (i, _) = char('-')(i)?;
|
||||
let (i, mon) = month(i)?;
|
||||
let (i, _) = char('-')(i)?;
|
||||
let (i, day) = day(i)?;
|
||||
let (i, _) = char('T')(i)?;
|
||||
let (i, hour) = hour(i)?;
|
||||
let (i, _) = char(':')(i)?;
|
||||
let (i, min) = minute(i)?;
|
||||
let (i, _) = char(':')(i)?;
|
||||
let (i, sec) = second(i)?;
|
||||
let (i, zone) = zone(i)?;
|
||||
convert(i, (year, mon, day), (hour, min, sec, 0), zone)
|
||||
}
|
||||
|
||||
fn nano(i: &str) -> IResult<&str, Datetime> {
|
||||
let (i, year) = year(i)?;
|
||||
let (i, _) = char('-')(i)?;
|
||||
let (i, mon) = month(i)?;
|
||||
let (i, _) = char('-')(i)?;
|
||||
let (i, day) = day(i)?;
|
||||
let (i, _) = char('T')(i)?;
|
||||
let (i, hour) = hour(i)?;
|
||||
let (i, _) = char(':')(i)?;
|
||||
let (i, min) = minute(i)?;
|
||||
let (i, _) = char(':')(i)?;
|
||||
let (i, sec) = second(i)?;
|
||||
let (i, nano) = nanosecond(i)?;
|
||||
let (i, zone) = zone(i)?;
|
||||
convert(i, (year, mon, day), (hour, min, sec, nano), zone)
|
||||
}
|
||||
|
||||
fn convert(
|
||||
i: &str,
|
||||
(year, mon, day): (i32, u32, u32),
|
||||
(hour, min, sec, nano): (u32, u32, u32, u32),
|
||||
zone: FixedOffset,
|
||||
) -> IResult<&str, Datetime> {
|
||||
// Attempt to create date
|
||||
let d = NaiveDate::from_ymd_opt(year, mon, day)
|
||||
.ok_or_else(|| Err::Error(error_position!(i, ErrorKind::Verify)))?;
|
||||
// Attempt to create time
|
||||
let t = NaiveTime::from_hms_nano_opt(hour, min, sec, nano)
|
||||
.ok_or_else(|| Err::Error(error_position!(i, ErrorKind::Verify)))?;
|
||||
// Combine the date and time into a naive datetime
|
||||
let v = NaiveDateTime::new(d, t);
|
||||
// Attempt to apply the timezone offset and convert to UTC
|
||||
let d = zone
|
||||
.from_local_datetime(&v)
|
||||
.earliest()
|
||||
.ok_or_else(|| Err::Error(error_position!(i, ErrorKind::Verify)))?
|
||||
.with_timezone(&Utc);
|
||||
// This is a valid datetime
|
||||
Ok((i, Datetime(d)))
|
||||
}
|
||||
|
||||
fn year(i: &str) -> IResult<&str, i32> {
|
||||
let (i, s) = sign(i).unwrap_or((i, 1));
|
||||
let (i, y) = take_digits(i, 4)?;
|
||||
let v = s * y as i32;
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
fn month(i: &str) -> IResult<&str, u32> {
|
||||
take_digits_range(i, 2, 1..=12)
|
||||
}
|
||||
|
||||
fn day(i: &str) -> IResult<&str, u32> {
|
||||
take_digits_range(i, 2, 1..=31)
|
||||
}
|
||||
|
||||
fn hour(i: &str) -> IResult<&str, u32> {
|
||||
take_digits_range(i, 2, 0..=24)
|
||||
}
|
||||
|
||||
fn minute(i: &str) -> IResult<&str, u32> {
|
||||
take_digits_range(i, 2, 0..=59)
|
||||
}
|
||||
|
||||
fn second(i: &str) -> IResult<&str, u32> {
|
||||
take_digits_range(i, 2, 0..=60)
|
||||
}
|
||||
|
||||
fn nanosecond(i: &str) -> IResult<&str, u32> {
|
||||
let (i, _) = char('.')(i)?;
|
||||
let (i, (v, l)) = take_u32_len(i)?;
|
||||
let v = match l {
|
||||
l if l <= 2 => v * 10000000,
|
||||
l if l <= 3 => v * 1000000,
|
||||
l if l <= 4 => v * 100000,
|
||||
l if l <= 5 => v * 10000,
|
||||
l if l <= 6 => v * 1000,
|
||||
l if l <= 7 => v * 100,
|
||||
l if l <= 8 => v * 10,
|
||||
_ => v,
|
||||
};
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
fn zone(i: &str) -> IResult<&str, FixedOffset> {
|
||||
alt((zone_utc, zone_all))(i)
|
||||
}
|
||||
|
||||
fn zone_utc(i: &str) -> IResult<&str, FixedOffset> {
|
||||
let (i, _) = char('Z')(i)?;
|
||||
Ok((i, Utc.fix()))
|
||||
}
|
||||
|
||||
fn zone_all(i: &str) -> IResult<&str, FixedOffset> {
|
||||
let (i, s) = sign(i)?;
|
||||
let (i, h) = hour(i)?;
|
||||
let (i, _) = char(':')(i)?;
|
||||
let (i, m) = minute(i)?;
|
||||
if h == 0 && m == 0 {
|
||||
Ok((i, Utc.fix()))
|
||||
} else if s < 0 {
|
||||
match FixedOffset::west_opt((h * 3600 + m * 60) as i32) {
|
||||
Some(v) => Ok((i, v)),
|
||||
None => Err(Err::Error(error_position!(i, ErrorKind::Verify))),
|
||||
}
|
||||
} else if s > 0 {
|
||||
match FixedOffset::east_opt((h * 3600 + m * 60) as i32) {
|
||||
Some(v) => Ok((i, v)),
|
||||
None => Err(Err::Error(error_position!(i, ErrorKind::Verify))),
|
||||
}
|
||||
} else {
|
||||
Ok((i, Utc.fix()))
|
||||
}
|
||||
}
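// Added note (derived from the code above): an explicit offset is converted
// to seconds as `h * 3600 + m * 60` and applied eastwards for `+` or
// westwards for `-`, while a zero offset such as `+00:00` is normalised
// straight to UTC.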
|
||||
|
||||
fn sign(i: &str) -> IResult<&str, i32> {
|
||||
map(alt((char('-'), char('+'))), |s: char| match s {
|
||||
'-' => -1,
|
||||
_ => 1,
|
||||
})(i)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
// use chrono::Date;
|
||||
|
||||
use crate::{sql::Value, syn::Parse};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn date_zone() {
|
||||
let sql = "2020-01-01T00:00:00Z";
|
||||
let res = datetime_all_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2020-01-01T00:00:00Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2020-01-01T00:00:00Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time() {
|
||||
let sql = "2012-04-23T18:25:43Z";
|
||||
let res = datetime_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2012-04-23T18:25:43Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-23T18:25:43Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_nanos() {
|
||||
let sql = "2012-04-23T18:25:43.5631Z";
|
||||
let res = datetime_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2012-04-23T18:25:43.563100Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-23T18:25:43.563100Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_timezone_utc() {
|
||||
let sql = "2012-04-23T18:25:43.0000511Z";
|
||||
let res = datetime_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2012-04-23T18:25:43.000051100Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-23T18:25:43.000051100Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_timezone_pacific() {
|
||||
let sql = "2012-04-23T18:25:43.511-08:00";
|
||||
let res = datetime_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2012-04-24T02:25:43.511Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-24T02:25:43.511Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_timezone_pacific_partial() {
|
||||
let sql = "2012-04-23T18:25:43.511-08:30";
|
||||
let res = datetime_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2012-04-24T02:55:43.511Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-24T02:55:43.511Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_timezone_utc_nanoseconds() {
|
||||
let sql = "2012-04-23T18:25:43.5110000Z";
|
||||
let res = datetime_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2012-04-23T18:25:43.511Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-23T18:25:43.511Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_timezone_utc_sub_nanoseconds() {
|
||||
let sql = "2012-04-23T18:25:43.0000511Z";
|
||||
let res = datetime_raw(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'2012-04-23T18:25:43.000051100Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-23T18:25:43.000051100Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_timezone_utc_sub_nanoseconds_from_value() {
|
||||
let sql = "'2012-04-23T18:25:43.0000511Z'";
|
||||
let res = Value::parse(sql);
|
||||
let Value::Datetime(out) = res else {
|
||||
panic!();
|
||||
};
|
||||
assert_eq!("'2012-04-23T18:25:43.000051100Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-23T18:25:43.000051100Z").unwrap());
|
||||
|
||||
let sql = "d'2012-04-23T18:25:43.0000511Z'";
|
||||
let res = Value::parse(sql);
|
||||
let Value::Datetime(out) = res else {
|
||||
panic!();
|
||||
};
|
||||
assert_eq!("'2012-04-23T18:25:43.000051100Z'", format!("{}", out));
|
||||
assert_eq!(out, Datetime::try_from("2012-04-23T18:25:43.000051100Z").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn date_time_illegal_date() {
|
||||
// Hey! There's no 31st of November!
|
||||
let sql = "2022-11-31T12:00:00.000Z";
|
||||
datetime_raw(sql).unwrap_err();
|
||||
}
|
||||
}
|
|
@ -1,156 +0,0 @@
|
|||
use super::super::{ending::duration as ending, error::expected, IResult, ParseError};
|
||||
use crate::{
|
||||
sql::{
|
||||
duration::{
|
||||
SECONDS_PER_DAY, SECONDS_PER_HOUR, SECONDS_PER_MINUTE, SECONDS_PER_WEEK,
|
||||
SECONDS_PER_YEAR,
|
||||
},
|
||||
Duration,
|
||||
},
|
||||
syn::v1::common::take_u64,
|
||||
};
|
||||
use nom::{branch::alt, bytes::complete::tag, multi::many1};
|
||||
use std::time;
|
||||
|
||||
pub fn duration(i: &str) -> IResult<&str, Duration> {
|
||||
expected("a duration", |i| {
|
||||
let (i, v) = many1(duration_raw)(i)?;
|
||||
let (i, _) = ending(i)?;
|
||||
Ok((i, v.iter().sum::<Duration>()))
|
||||
})(i)
|
||||
}
|
||||
|
||||
fn duration_raw(i: &str) -> IResult<&str, Duration> {
|
||||
let (i, v) = part(i)?;
|
||||
let (i, u) = unit(i)?;
|
||||
|
||||
let std_duration = match u {
|
||||
"ns" => Some(time::Duration::from_nanos(v)),
|
||||
"µs" => Some(time::Duration::from_micros(v)),
|
||||
"us" => Some(time::Duration::from_micros(v)),
|
||||
"ms" => Some(time::Duration::from_millis(v)),
|
||||
"s" => Some(time::Duration::from_secs(v)),
|
||||
"m" => v.checked_mul(SECONDS_PER_MINUTE).map(time::Duration::from_secs),
|
||||
"h" => v.checked_mul(SECONDS_PER_HOUR).map(time::Duration::from_secs),
|
||||
"d" => v.checked_mul(SECONDS_PER_DAY).map(time::Duration::from_secs),
|
||||
"w" => v.checked_mul(SECONDS_PER_WEEK).map(time::Duration::from_secs),
|
||||
"y" => v.checked_mul(SECONDS_PER_YEAR).map(time::Duration::from_secs),
|
||||
_ => unreachable!("shouldn't have parsed {u} as duration unit"),
|
||||
};
|
||||
|
||||
std_duration.map(|d| (i, Duration(d))).ok_or(nom::Err::Error(ParseError::Base(i)))
|
||||
}
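// Added note: units of a minute and above use `checked_mul`, so an input like
// `10000000000000000d` fails with a parse error instead of overflowing (see
// the `duration_overflow` test below).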
|
||||
|
||||
fn part(i: &str) -> IResult<&str, u64> {
|
||||
take_u64(i)
|
||||
}
|
||||
|
||||
fn unit(i: &str) -> IResult<&str, &str> {
|
||||
alt((
|
||||
tag("ns"),
|
||||
tag("µs"),
|
||||
tag("us"),
|
||||
tag("ms"),
|
||||
tag("s"),
|
||||
tag("m"),
|
||||
tag("h"),
|
||||
tag("d"),
|
||||
tag("w"),
|
||||
tag("y"),
|
||||
))(i)
|
||||
}
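// Added note (an observation, not stated in the original source): the
// multi-character tags are listed before the single-character ones that
// prefix them (e.g. `ms` before `m`), presumably so that `alt` does not
// commit to the shorter unit and leave a dangling suffix behind.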
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
use std::time::Duration;
|
||||
|
||||
#[test]
|
||||
fn duration_nil() {
|
||||
let sql = "0ns";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("0ns", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(0, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_basic() {
|
||||
let sql = "1s";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("1s", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(1, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_simple() {
|
||||
let sql = "1000ms";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("1s", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(1, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_complex() {
|
||||
let sql = "86400s";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("1d", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(86_400, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_days() {
|
||||
let sql = "5d";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("5d", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(432_000, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_weeks() {
|
||||
let sql = "4w";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("4w", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(2_419_200, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_split() {
|
||||
let sql = "129600s";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("1d12h", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(129_600, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_multi() {
|
||||
let sql = "1d12h30m";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("1d12h30m", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(131_400, 0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_milliseconds() {
|
||||
let sql = "500ms";
|
||||
let res = duration(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("500ms", format!("{}", out));
|
||||
assert_eq!(out.0, Duration::new(0, 500000000));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duration_overflow() {
|
||||
let sql = "10000000000000000d";
|
||||
let res = duration(sql);
|
||||
res.unwrap_err();
|
||||
}
|
||||
}
|
|
@ -1,67 +0,0 @@
|
|||
use super::super::{
|
||||
common::{closeparentheses, commas, openparentheses},
|
||||
literal::language::language,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::filter::Filter;
|
||||
use nom::{
|
||||
branch::alt, bytes::complete::tag_no_case, character::complete::u16, combinator::cut,
|
||||
multi::separated_list1,
|
||||
};
|
||||
|
||||
fn ascii(i: &str) -> IResult<&str, Filter> {
|
||||
let (i, _) = tag_no_case("ASCII")(i)?;
|
||||
Ok((i, Filter::Ascii))
|
||||
}
|
||||
|
||||
fn edgengram(i: &str) -> IResult<&str, Filter> {
|
||||
let (i, _) = tag_no_case("EDGENGRAM")(i)?;
|
||||
cut(|i| {
|
||||
let (i, _) = openparentheses(i)?;
|
||||
let (i, min) = u16(i)?;
|
||||
let (i, _) = commas(i)?;
|
||||
let (i, max) = u16(i)?;
|
||||
let (i, _) = closeparentheses(i)?;
|
||||
Ok((i, Filter::EdgeNgram(min, max)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
fn ngram(i: &str) -> IResult<&str, Filter> {
|
||||
let (i, _) = tag_no_case("NGRAM")(i)?;
|
||||
cut(|i| {
|
||||
let (i, _) = openparentheses(i)?;
|
||||
let (i, min) = u16(i)?;
|
||||
let (i, _) = commas(i)?;
|
||||
let (i, max) = u16(i)?;
|
||||
let (i, _) = closeparentheses(i)?;
|
||||
Ok((i, Filter::Ngram(min, max)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
fn lowercase(i: &str) -> IResult<&str, Filter> {
|
||||
let (i, _) = tag_no_case("LOWERCASE")(i)?;
|
||||
Ok((i, Filter::Lowercase))
|
||||
}
|
||||
|
||||
fn snowball(i: &str) -> IResult<&str, Filter> {
|
||||
let (i, _) = tag_no_case("SNOWBALL")(i)?;
|
||||
cut(|i| {
|
||||
let (i, _) = openparentheses(i)?;
|
||||
let (i, language) = language(i)?;
|
||||
let (i, _) = closeparentheses(i)?;
|
||||
Ok((i, Filter::Snowball(language)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
fn uppercase(i: &str) -> IResult<&str, Filter> {
|
||||
let (i, _) = tag_no_case("UPPERCASE")(i)?;
|
||||
Ok((i, Filter::Uppercase))
|
||||
}
|
||||
|
||||
fn filter(i: &str) -> IResult<&str, Filter> {
|
||||
alt((ascii, edgengram, lowercase, ngram, snowball, uppercase))(i)
|
||||
}
|
||||
|
||||
pub fn filters(i: &str) -> IResult<&str, Vec<Filter>> {
|
||||
separated_list1(commas, filter)(i)
|
||||
}
|
|
@ -1,63 +0,0 @@
|
|||
use nom::{branch::alt, bytes::complete::tag_no_case, combinator::map};
|
||||
|
||||
use crate::sql::language::Language;
|
||||
|
||||
use super::super::IResult;
|
||||
|
||||
/// Language supports the English name and also ISO 639-2 (3 characters)
|
||||
/// and ISO 639-1 (2 characters)
|
||||
pub(super) fn language(i: &str) -> IResult<&str, Language> {
|
||||
alt((
|
||||
map(alt((tag_no_case("ARABIC"), tag_no_case("ARA"), tag_no_case("AR"))), |_| {
|
||||
Language::Arabic
|
||||
}),
|
||||
map(alt((tag_no_case("DANISH"), tag_no_case("DAN"), tag_no_case("DA"))), |_| {
|
||||
Language::Danish
|
||||
}),
|
||||
map(alt((tag_no_case("DUTCH"), tag_no_case("NLD"), tag_no_case("NL"))), |_| {
|
||||
Language::Dutch
|
||||
}),
|
||||
map(alt((tag_no_case("ENGLISH"), tag_no_case("ENG"), tag_no_case("EN"))), |_| {
|
||||
Language::English
|
||||
}),
|
||||
map(alt((tag_no_case("FRENCH"), tag_no_case("FRA"), tag_no_case("FR"))), |_| {
|
||||
Language::French
|
||||
}),
|
||||
map(alt((tag_no_case("GERMAN"), tag_no_case("DEU"), tag_no_case("DE"))), |_| {
|
||||
Language::German
|
||||
}),
|
||||
map(alt((tag_no_case("GREEK"), tag_no_case("ELL"), tag_no_case("EL"))), |_| {
|
||||
Language::Greek
|
||||
}),
|
||||
map(alt((tag_no_case("HUNGARIAN"), tag_no_case("HUN"), tag_no_case("HU"))), |_| {
|
||||
Language::Hungarian
|
||||
}),
|
||||
map(alt((tag_no_case("ITALIAN"), tag_no_case("ITA"), tag_no_case("IT"))), |_| {
|
||||
Language::Italian
|
||||
}),
|
||||
map(alt((tag_no_case("NORWEGIAN"), tag_no_case("NOR"), tag_no_case("NO"))), |_| {
|
||||
Language::Norwegian
|
||||
}),
|
||||
map(alt((tag_no_case("PORTUGUESE"), tag_no_case("POR"), tag_no_case("PT"))), |_| {
|
||||
Language::Portuguese
|
||||
}),
|
||||
map(alt((tag_no_case("ROMANIAN"), tag_no_case("RON"), tag_no_case("RO"))), |_| {
|
||||
Language::Romanian
|
||||
}),
|
||||
map(alt((tag_no_case("RUSSIAN"), tag_no_case("RUS"), tag_no_case("RU"))), |_| {
|
||||
Language::Russian
|
||||
}),
|
||||
map(alt((tag_no_case("SPANISH"), tag_no_case("SPA"), tag_no_case("ES"))), |_| {
|
||||
Language::Spanish
|
||||
}),
|
||||
map(alt((tag_no_case("SWEDISH"), tag_no_case("SWE"), tag_no_case("SV"))), |_| {
|
||||
Language::Swedish
|
||||
}),
|
||||
map(alt((tag_no_case("TAMIL"), tag_no_case("TAM"), tag_no_case("TA"))), |_| {
|
||||
Language::Tamil
|
||||
}),
|
||||
map(alt((tag_no_case("TURKISH"), tag_no_case("TUR"), tag_no_case("TR"))), |_| {
|
||||
Language::Turkish
|
||||
}),
|
||||
))(i)
|
||||
}
|
|
@ -1,184 +0,0 @@
|
|||
use super::{
|
||||
common::{commas, val_char},
|
||||
error::expected,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::{Ident, Param, Table, Tables};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{escaped_transform, is_not, tag, take_while1},
|
||||
character::complete::char,
|
||||
combinator::{cut, recognize, value},
|
||||
multi::separated_list1,
|
||||
sequence::delimited,
|
||||
};
|
||||
|
||||
pub mod algorithm;
|
||||
pub mod datetime;
|
||||
pub mod duration;
|
||||
pub mod filter;
|
||||
pub mod language;
|
||||
pub mod number;
|
||||
pub mod range;
|
||||
pub mod regex;
|
||||
pub mod scoring;
|
||||
pub mod strand;
|
||||
pub mod tokenizer;
|
||||
pub mod uuid;
|
||||
|
||||
pub use self::algorithm::algorithm;
|
||||
pub use self::datetime::{datetime, datetime_all_raw};
|
||||
pub use self::duration::duration;
|
||||
pub use self::filter::filters;
|
||||
pub use self::number::number;
|
||||
pub use self::range::range;
|
||||
pub use self::regex::regex;
|
||||
pub use self::scoring::scoring;
|
||||
pub use self::strand::strand;
|
||||
pub use self::uuid::uuid;
|
||||
|
||||
const BRACKET_L: char = '⟨';
|
||||
const BRACKET_R: char = '⟩';
|
||||
const BRACKET_END_NUL: &str = "⟩\0";
|
||||
|
||||
pub fn ident(i: &str) -> IResult<&str, Ident> {
|
||||
let (i, v) = expected("an identifier", ident_raw)(i)?;
|
||||
Ok((i, Ident::from(v)))
|
||||
}
|
||||
|
||||
pub fn ident_path(i: &str) -> IResult<&str, Ident> {
|
||||
let (i, v) = recognize(separated_list1(tag("::"), take_while1(val_char)))(i)?;
|
||||
Ok((i, Ident::from(v)))
|
||||
}
|
||||
|
||||
pub fn ident_raw(i: &str) -> IResult<&str, String> {
|
||||
let (i, v) = alt((ident_default, ident_backtick, ident_brackets))(i)?;
|
||||
Ok((i, v))
|
||||
}
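// Added note (the identifiers are illustrative): an identifier can be written
// bare (`test`), backtick-quoted with escape sequences (`some key`), or
// wrapped in ⟨⟩ brackets (⟨some key⟩); the quoted forms allow characters that
// `val_char` would otherwise reject.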
|
||||
|
||||
fn ident_default(i: &str) -> IResult<&str, String> {
|
||||
let (i, v) = take_while1(val_char)(i)?;
|
||||
Ok((i, String::from(v)))
|
||||
}
|
||||
|
||||
fn ident_backtick(i: &str) -> IResult<&str, String> {
|
||||
let (i, _) = char('`')(i)?;
|
||||
let (i, v) = escaped_transform(
|
||||
is_not("`\\\0"),
|
||||
'\\',
|
||||
alt((
|
||||
value('\u{5c}', char('\\')),
|
||||
value('\u{60}', char('`')),
|
||||
value('\u{2f}', char('/')),
|
||||
value('\u{08}', char('b')),
|
||||
value('\u{0c}', char('f')),
|
||||
value('\u{0a}', char('n')),
|
||||
value('\u{0d}', char('r')),
|
||||
value('\u{09}', char('t')),
|
||||
)),
|
||||
)(i)?;
|
||||
let (i, _) = char('`')(i)?;
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
fn ident_brackets(i: &str) -> IResult<&str, String> {
|
||||
let (i, v) = delimited(char(BRACKET_L), is_not(BRACKET_END_NUL), char(BRACKET_R))(i)?;
|
||||
Ok((i, String::from(v)))
|
||||
}
|
||||
|
||||
pub fn param(i: &str) -> IResult<&str, Param> {
|
||||
let (i, _) = char('$')(i)?;
|
||||
cut(|i| {
|
||||
let (i, v) = ident(i)?;
|
||||
Ok((i, Param::from(v)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
pub fn table(i: &str) -> IResult<&str, Table> {
|
||||
let (i, v) = expected("a table name", ident_raw)(i)?;
|
||||
Ok((i, Table(v)))
|
||||
}
|
||||
|
||||
pub fn tables(i: &str) -> IResult<&str, Tables> {
|
||||
let (i, v) = separated_list1(commas, table)(i)?;
|
||||
Ok((i, Tables(v)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
use crate::syn::Parse;
|
||||
|
||||
#[test]
|
||||
fn ident_normal() {
|
||||
let sql = "test";
|
||||
let res = ident(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Ident::from("test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_quoted_backtick() {
|
||||
let sql = "`test`";
|
||||
let res = ident(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Ident::from("test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_quoted_brackets() {
|
||||
let sql = "⟨test⟩";
|
||||
let res = ident(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Ident::from("test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn param_normal() {
|
||||
let sql = "$test";
|
||||
let res = param(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("$test", format!("{}", out));
|
||||
assert_eq!(out, Param::parse("$test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn param_longer() {
|
||||
let sql = "$test_and_deliver";
|
||||
let res = param(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("$test_and_deliver", format!("{}", out));
|
||||
assert_eq!(out, Param::parse("$test_and_deliver"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn table_normal() {
|
||||
let sql = "test";
|
||||
let res = table(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Table(String::from("test")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn table_quoted_backtick() {
|
||||
let sql = "`test`";
|
||||
let res = table(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Table(String::from("test")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn table_quoted_brackets() {
|
||||
let sql = "⟨test⟩";
|
||||
let res = table(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("test", format!("{}", out));
|
||||
assert_eq!(out, Table(String::from("test")));
|
||||
}
|
||||
}
|
|
@ -1,317 +0,0 @@
|
|||
use super::super::{ending::number as ending, IResult, ParseError};
|
||||
use crate::sql::Number;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::tag,
|
||||
character::complete::i64,
|
||||
combinator::{opt, value},
|
||||
number::complete::recognize_float,
|
||||
Err,
|
||||
};
|
||||
use rust_decimal::Decimal;
|
||||
use std::str::FromStr;
|
||||
|
||||
fn not_nan(i: &str) -> IResult<&str, Number> {
|
||||
let (i, v) = match recognize_float(i) {
|
||||
Ok(x) => x,
|
||||
Err(Err::Failure(x)) | Err(Err::Error(x)) => return Err(Err::Error(x)),
|
||||
Err(x) => return Err(x),
|
||||
};
|
||||
let (i, suffix) = suffix(i)?;
|
||||
let (i, _) = ending(i)?;
|
||||
let number = match suffix {
|
||||
Suffix::None => {
|
||||
// Manually check for int or float for better parsing errors
|
||||
if v.contains(['e', 'E', '.']) {
|
||||
let float = f64::from_str(v)
|
||||
.map_err(|e| ParseError::ParseFloat {
|
||||
tried: v,
|
||||
error: e,
|
||||
})
|
||||
.map_err(Err::Failure)?;
|
||||
Number::from(float)
|
||||
} else {
|
||||
let int = i64::from_str(v)
|
||||
.map_err(|e| ParseError::ParseInt {
|
||||
tried: v,
|
||||
error: e,
|
||||
})
|
||||
.map_err(Err::Failure)?;
|
||||
Number::from(int)
|
||||
}
|
||||
}
|
||||
Suffix::Float => {
|
||||
let float = f64::from_str(v)
|
||||
.map_err(|e| ParseError::ParseFloat {
|
||||
tried: v,
|
||||
error: e,
|
||||
})
|
||||
.map_err(Err::Failure)?;
|
||||
Number::from(float)
|
||||
}
|
||||
Suffix::Decimal => Number::from(if v.contains(['e', 'E']) {
|
||||
Decimal::from_scientific(v)
|
||||
.map_err(|e| ParseError::ParseDecimal {
|
||||
tried: v,
|
||||
error: e,
|
||||
})
|
||||
.map_err(Err::Failure)?
|
||||
} else {
|
||||
Decimal::from_str(v)
|
||||
.map_err(|e| ParseError::ParseDecimal {
|
||||
tried: v,
|
||||
error: e,
|
||||
})
|
||||
.map_err(Err::Failure)?
|
||||
}),
|
||||
};
|
||||
Ok((i, number))
|
||||
}
|
||||
|
||||
pub fn number(i: &str) -> IResult<&str, Number> {
|
||||
alt((value(Number::NAN, tag("NaN")), not_nan))(i)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
|
||||
enum Suffix {
|
||||
None,
|
||||
Float,
|
||||
Decimal,
|
||||
}
|
||||
|
||||
fn suffix(i: &str) -> IResult<&str, Suffix> {
|
||||
let (i, opt_suffix) =
|
||||
opt(alt((value(Suffix::Float, tag("f")), value(Suffix::Decimal, tag("dec")))))(i)?;
|
||||
Ok((i, opt_suffix.unwrap_or(Suffix::None)))
|
||||
}
|
||||
|
||||
pub fn integer(i: &str) -> IResult<&str, i64> {
|
||||
let (i, v) = i64(i)?;
|
||||
let (i, _) = ending(i)?;
|
||||
Ok((i, v))
|
||||
}
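An unsuffixed literal above is treated as a float when it contains 'e', 'E' or '.', and as an integer otherwise, while the `f` and `dec` suffixes force the float and decimal paths. A minimal standalone sketch of that classification rule (the `classify` helper is hypothetical, not part of the crate):

fn classify(literal: &str) -> &'static str {
	// Hypothetical helper mirroring the suffix/classification rule above.
	if literal.strip_suffix("dec").is_some() {
		"decimal" // parsed with Decimal::from_str or Decimal::from_scientific
	} else if literal.strip_suffix('f').is_some() {
		"float" // parsed with f64::from_str
	} else if literal.contains(['e', 'E', '.']) {
		"float"
	} else {
		"int"
	}
}

#[test]
fn classify_sketch() {
	assert_eq!(classify("123"), "int");
	assert_eq!(classify("12345E-02"), "float");
	assert_eq!(classify("123.45f"), "float");
	assert_eq!(classify("0.0000000000000000000000000321dec"), "decimal");
}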
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use rust_decimal::prelude::FromPrimitive;
|
||||
|
||||
use super::*;
|
||||
use std::{cmp::Ordering, ops::Div};
|
||||
|
||||
#[test]
|
||||
fn number_nan() {
|
||||
let sql = "NaN";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("NaN", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_int() {
|
||||
let sql = "123";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("123", format!("{}", out));
|
||||
assert_eq!(out, Number::Int(123));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_int_neg() {
|
||||
let sql = "-123";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("-123", format!("{}", out));
|
||||
assert_eq!(out, Number::Int(-123));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_float() {
|
||||
let sql = "123.45f";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out));
|
||||
assert_eq!(out, Number::Float(123.45));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_float_neg() {
|
||||
let sql = "-123.45f";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out));
|
||||
assert_eq!(out, Number::Float(-123.45));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_scientific_lower() {
|
||||
let sql = "12345e-1";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("1234.5f", format!("{}", out));
|
||||
assert_eq!(out, Number::Float(1234.5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_scientific_lower_neg() {
|
||||
let sql = "-12345e-1";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("-1234.5f", format!("{}", out));
|
||||
assert_eq!(out, Number::Float(-1234.5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_scientific_upper() {
|
||||
let sql = "12345E-02";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("123.45f", format!("{}", out));
|
||||
assert_eq!(out, Number::Float(123.45));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_scientific_upper_neg() {
|
||||
let sql = "-12345E-02";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("-123.45f", format!("{}", out));
|
||||
assert_eq!(out, Number::Float(-123.45));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_float_keeps_precision() {
|
||||
let sql = "13.571938471938472f";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_decimal_keeps_precision() {
|
||||
let sql = "0.0000000000000000000000000321dec";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(sql, format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_scientific_upper_decimal() {
|
||||
let sql = "12345E-02dec";
|
||||
let res = number(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("123.45dec", format!("{}", out));
|
||||
assert_eq!(out, Number::Decimal(rust_decimal::Decimal::from_f64(123.45).unwrap()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_div_int() {
|
||||
let res = Number::Int(3).div(Number::Int(2));
|
||||
assert_eq!(res, Number::Int(1));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_pow_int() {
|
||||
let res = Number::Int(3).pow(Number::Int(4));
|
||||
assert_eq!(res, Number::Int(81));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_pow_int_negative() {
|
||||
let res = Number::Int(4).pow(Number::Float(-0.5));
|
||||
assert_eq!(res, Number::Float(0.5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_pow_float() {
|
||||
let res = Number::Float(2.5).pow(Number::Int(2));
|
||||
assert_eq!(res, Number::Float(6.25));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_pow_float_negative() {
|
||||
let res = Number::Int(4).pow(Number::Float(-0.5));
|
||||
assert_eq!(res, Number::Float(0.5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_pow_decimal_one() {
|
||||
let res = Number::try_from("13.5719384719384719385639856394139476937756394756")
|
||||
.unwrap()
|
||||
.pow(Number::Int(1));
|
||||
assert_eq!(
|
||||
res,
|
||||
Number::try_from("13.5719384719384719385639856394139476937756394756").unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_pow_decimal_two() {
|
||||
let res = Number::try_from("13.5719384719384719385639856394139476937756394756")
|
||||
.unwrap()
|
||||
.pow(Number::Int(2));
|
||||
assert_eq!(
|
||||
res,
|
||||
Number::try_from("184.19751388608358465578173996877942643463869043732548087725588482334195240945031617770904299536").unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ord() {
|
||||
fn assert_cmp(a: &Number, b: &Number, ord: Ordering) {
|
||||
assert_eq!(a.cmp(b), ord, "{a} {ord:?} {b}");
|
||||
assert_eq!(a == b, ord.is_eq(), "{a} {ord:?} {b}");
|
||||
}
|
||||
|
||||
let nz = -0.0f64;
|
||||
let z = 0.0f64;
|
||||
assert_ne!(nz.to_bits(), z.to_bits());
|
||||
let nzp = permutations(nz);
|
||||
let zp = permutations(z);
|
||||
for nzp in nzp.iter() {
|
||||
for zp in zp.iter() {
|
||||
assert_cmp(nzp, zp, Ordering::Equal);
|
||||
}
|
||||
}
|
||||
|
||||
let negative_nan = f64::from_bits(18444492273895866368);
|
||||
|
||||
let ordering = &[
|
||||
negative_nan,
|
||||
f64::NEG_INFINITY,
|
||||
-10.0,
|
||||
-1.0,
|
||||
-f64::MIN_POSITIVE,
|
||||
0.0,
|
||||
f64::MIN_POSITIVE,
|
||||
1.0,
|
||||
10.0,
|
||||
f64::INFINITY,
|
||||
f64::NAN,
|
||||
];
|
||||
|
||||
fn permutations(n: f64) -> Vec<Number> {
|
||||
let mut ret = Vec::new();
|
||||
ret.push(Number::Float(n));
|
||||
if n.is_finite() && (n == 0.0 || n.abs() > f64::EPSILON) {
|
||||
ret.push(Number::Decimal(n.try_into().unwrap()));
|
||||
ret.push(Number::Int(n as i64));
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
for (ai, a) in ordering.iter().enumerate() {
|
||||
let ap = permutations(*a);
|
||||
for (bi, b) in ordering.iter().enumerate() {
|
||||
let bp = permutations(*b);
|
||||
let correct = ai.cmp(&bi);
|
||||
|
||||
for a in &ap {
|
||||
for b in &bp {
|
||||
assert_cmp(a, b, correct);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,65 +0,0 @@
|
|||
use std::ops::Bound;
|
||||
|
||||
use super::{
|
||||
super::{thing::id, IResult},
|
||||
ident_raw,
|
||||
};
|
||||
use crate::sql::Range;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
character::complete::char,
|
||||
combinator::{map, opt},
|
||||
sequence::{preceded, terminated},
|
||||
};
|
||||
|
||||
pub fn range(i: &str) -> IResult<&str, Range> {
|
||||
let (i, tb) = ident_raw(i)?;
|
||||
let (i, _) = char(':')(i)?;
|
||||
let (i, beg) =
|
||||
opt(alt((map(terminated(id, char('>')), Bound::Excluded), map(id, Bound::Included))))(i)?;
|
||||
let (i, _) = char('.')(i)?;
|
||||
let (i, _) = char('.')(i)?;
|
||||
let (i, end) =
|
||||
opt(alt((map(preceded(char('='), id), Bound::Included), map(id, Bound::Excluded))))(i)?;
|
||||
Ok((
|
||||
i,
|
||||
Range {
|
||||
tb,
|
||||
beg: beg.unwrap_or(Bound::Unbounded),
|
||||
end: end.unwrap_or(Bound::Unbounded),
|
||||
},
|
||||
))
|
||||
}
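In the grammar above a `>` directly after the starting id makes the lower bound exclusive, and a `=` directly after `..` makes the upper bound inclusive; a missing id on either side falls back to `Bound::Unbounded`. A minimal sketch, assuming the `range` parser and the `Bound` import above:

#[test]
fn range_bound_kinds_sketch() {
	// person:1>..=100 -> lower bound excluded, upper bound included
	let out = range("person:1>..=100").unwrap().1;
	assert!(matches!(out.beg, Bound::Excluded(_)));
	assert!(matches!(out.end, Bound::Included(_)));
}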
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn range_int() {
|
||||
let sql = "person:1..100";
|
||||
let res = range(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(r#"person:1..100"#, format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn range_array() {
|
||||
let sql = "person:['USA', 10]..['USA', 100]";
|
||||
let res = range(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("person:['USA', 10]..['USA', 100]", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn range_object() {
|
||||
let sql = "person:{ country: 'USA', position: 10 }..{ country: 'USA', position: 100 }";
|
||||
let res = range(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
"person:{ country: 'USA', position: 10 }..{ country: 'USA', position: 100 }",
|
||||
format!("{}", out)
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
use super::super::{IResult, ParseError};
|
||||
use crate::sql::Regex;
|
||||
use nom::{
|
||||
bytes::complete::{escaped, is_not},
|
||||
character::complete::{anychar, char},
|
||||
};
|
||||
|
||||
pub fn regex(i: &str) -> IResult<&str, Regex> {
|
||||
let (i, _) = char('/')(i)?;
|
||||
let (i, v) = escaped(is_not("\\/"), '\\', anychar)(i)?;
|
||||
let (i, _) = char('/')(i)?;
|
||||
let regex = v.parse().map_err(|_| nom::Err::Error(ParseError::Base(v)))?;
|
||||
Ok((i, regex))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn regex_simple() {
|
||||
let sql = "/test/";
|
||||
let res = regex(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("/test/", format!("{}", out));
|
||||
assert_eq!(out, "test".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn regex_complex() {
|
||||
let sql = r"/(?i)test\/[a-z]+\/\s\d\w{1}.*/";
|
||||
let res = regex(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(r"/(?i)test/[a-z]+/\s\d\w{1}.*/", format!("{}", out));
|
||||
assert_eq!(out, r"(?i)test/[a-z]+/\s\d\w{1}.*".parse().unwrap());
|
||||
}
|
||||
}
|
|
@ -1,66 +0,0 @@
|
|||
use super::super::{
|
||||
common::{closeparentheses, commas, expect_delimited, openparentheses},
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::Scoring;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::tag_no_case,
|
||||
combinator::{cut, map_res, value},
|
||||
number::complete::recognize_float,
|
||||
};
|
||||
|
||||
pub fn scoring(i: &str) -> IResult<&str, Scoring> {
|
||||
alt((
|
||||
value(Scoring::Vs, tag_no_case("VS")),
|
||||
|i| {
|
||||
let (i, _) = tag_no_case("BM25")(i)?;
|
||||
expect_delimited(
|
||||
openparentheses,
|
||||
|i| {
|
||||
let (i, k1) = cut(map_res(recognize_float, |x: &str| x.parse::<f32>()))(i)?;
|
||||
let (i, _) = cut(commas)(i)?;
|
||||
let (i, b) = cut(map_res(recognize_float, |x: &str| x.parse::<f32>()))(i)?;
|
||||
Ok((
|
||||
i,
|
||||
Scoring::Bm {
|
||||
k1,
|
||||
b,
|
||||
},
|
||||
))
|
||||
},
|
||||
closeparentheses,
|
||||
)(i)
|
||||
},
|
||||
value(Scoring::bm25(), tag_no_case("BM25")),
|
||||
))(i)
|
||||
}
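The parenthesised `BM25(k1,b)` branch is listed before the bare `BM25` keyword, and the `cut` inside it means that once `BM25(` has been seen the parameters are mandatory; a bare `BM25` falls through to the `Scoring::bm25()` defaults. A minimal sketch, assuming the `scoring` parser above:

#[test]
fn scoring_branch_order_sketch() {
	// Explicit parameters are used when given...
	assert_eq!(format!("{}", scoring("BM25(1.5,0.9)").unwrap().1), "BM25(1.5,0.9)");
	// ...and the bare keyword falls back to the defaults.
	assert_eq!(format!("{}", scoring("BM25").unwrap().1), "BM25(1.2,0.75)");
	// Once the opening parenthesis is seen, missing parameters are a hard error.
	assert!(scoring("BM25(").is_err());
}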
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn scoring_bm_25_with_parameters() {
|
||||
let sql = "BM25(1.0,0.6)";
|
||||
let res = scoring(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("BM25(1,0.6)", format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scoring_bm_25_without_parameters() {
|
||||
let sql = "BM25";
|
||||
let res = scoring(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("BM25(1.2,0.75)", format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scoring_vs() {
|
||||
let sql = "VS";
|
||||
let res = scoring(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("VS", format!("{}", out))
|
||||
}
|
||||
}
|
|
@ -1,285 +0,0 @@
|
|||
use super::super::{error::expected, IResult, ParseError};
|
||||
use crate::sql::Strand;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{escaped_transform, is_not, tag, take, take_while_m_n},
|
||||
character::complete::char,
|
||||
combinator::{opt, value},
|
||||
sequence::preceded,
|
||||
Err,
|
||||
};
|
||||
use std::ops::RangeInclusive;
|
||||
|
||||
const LEADING_SURROGATES: RangeInclusive<u16> = 0xD800..=0xDBFF;
|
||||
const TRAILING_SURROGATES: RangeInclusive<u16> = 0xDC00..=0xDFFF;
|
||||
|
||||
pub fn strand(i: &str) -> IResult<&str, Strand> {
|
||||
let (i, v) = strand_raw(i)?;
|
||||
Ok((i, Strand(v)))
|
||||
}
|
||||
|
||||
pub fn strand_raw(i: &str) -> IResult<&str, String> {
|
||||
expected("a strand", alt((strand_blank, strand_single, strand_double)))(i)
|
||||
}
|
||||
|
||||
fn strand_blank(i: &str) -> IResult<&str, String> {
|
||||
alt((
|
||||
|i| {
|
||||
let (i, _) = opt(char('s'))(i)?;
|
||||
let (i, _) = char('\'')(i)?;
|
||||
let (i, _) = char('\'')(i)?;
|
||||
Ok((i, String::new()))
|
||||
},
|
||||
|i| {
|
||||
let (i, _) = opt(char('s'))(i)?;
|
||||
let (i, _) = char('\"')(i)?;
|
||||
let (i, _) = char('\"')(i)?;
|
||||
Ok((i, String::new()))
|
||||
},
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn strand_single(i: &str) -> IResult<&str, String> {
|
||||
let (i, _) = opt(char('s'))(i)?;
|
||||
let (i, _) = char('\'')(i)?;
|
||||
let (i, v) = escaped_transform(
|
||||
is_not("\'\\\0"),
|
||||
'\\',
|
||||
alt((
|
||||
char_unicode,
|
||||
value('\u{5c}', char('\\')),
|
||||
value('\u{27}', char('\'')),
|
||||
value('\u{2f}', char('/')),
|
||||
value('\u{08}', char('b')),
|
||||
value('\u{0c}', char('f')),
|
||||
value('\u{0a}', char('n')),
|
||||
value('\u{0d}', char('r')),
|
||||
value('\u{09}', char('t')),
|
||||
)),
|
||||
)(i)?;
|
||||
let (i, _) = char('\'')(i)?;
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
fn strand_double(i: &str) -> IResult<&str, String> {
|
||||
let (i, _) = opt(char('s'))(i)?;
|
||||
let (i, _) = char('\"')(i)?;
|
||||
let (i, v) = escaped_transform(
|
||||
is_not("\"\\\0"),
|
||||
'\\',
|
||||
alt((
|
||||
char_unicode,
|
||||
value('\u{5c}', char('\\')),
|
||||
value('\u{22}', char('\"')),
|
||||
value('\u{2f}', char('/')),
|
||||
value('\u{08}', char('b')),
|
||||
value('\u{0c}', char('f')),
|
||||
value('\u{0a}', char('n')),
|
||||
value('\u{0d}', char('r')),
|
||||
value('\u{09}', char('t')),
|
||||
)),
|
||||
)(i)?;
|
||||
let (i, _) = char('\"')(i)?;
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
fn char_unicode(i: &str) -> IResult<&str, char> {
|
||||
preceded(char('u'), alt((char_unicode_bracketed, char_unicode_bare)))(i)
|
||||
}
|
||||
|
||||
// \uABCD or \uDBFF\uDFFF (surrogate pair)
|
||||
fn char_unicode_bare(i: &str) -> IResult<&str, char> {
|
||||
// Take exactly 4 bytes
|
||||
let (i, v) = take(4usize)(i)?;
|
||||
// Parse them as hex, where an error indicates invalid hex digits
|
||||
let v: u16 = u16::from_str_radix(v, 16).map_err(|_| {
|
||||
Err::Failure(ParseError::InvalidUnicode {
|
||||
tried: i,
|
||||
})
|
||||
})?;
|
||||
|
||||
if LEADING_SURROGATES.contains(&v) {
|
||||
let leading = v;
|
||||
|
||||
// Read the next \u.
|
||||
let (i, _) = tag("\\u")(i)?;
|
||||
// Take exactly 4 more bytes
|
||||
let (i, v) = take(4usize)(i)?;
|
||||
// Parse them as hex, where an error indicates invalid hex digits
|
||||
let trailing = u16::from_str_radix(v, 16).map_err(|_| {
|
||||
Err::Failure(ParseError::InvalidUnicode {
|
||||
tried: i,
|
||||
})
|
||||
})?;
|
||||
if !TRAILING_SURROGATES.contains(&trailing) {
|
||||
return Err(Err::Failure(ParseError::InvalidUnicode {
|
||||
tried: i,
|
||||
}));
|
||||
}
|
||||
// Compute the codepoint.
|
||||
// https://datacadamia.com/data/type/text/surrogate#from_surrogate_to_character_code
|
||||
let codepoint = 0x10000
|
||||
+ ((leading as u32 - *LEADING_SURROGATES.start() as u32) << 10)
|
||||
+ trailing as u32
|
||||
- *TRAILING_SURROGATES.start() as u32;
|
||||
// Convert to char
|
||||
let v = char::from_u32(codepoint).ok_or(Err::Failure(ParseError::InvalidUnicode {
|
||||
tried: i,
|
||||
}))?;
|
||||
// Return the char
|
||||
Ok((i, v))
|
||||
} else {
|
||||
// Convert this to a char, or error if it is not a valid Unicode character
|
||||
let v = char::from_u32(v as u32).filter(|c| *c != 0 as char).ok_or(Err::Failure(
|
||||
ParseError::InvalidUnicode {
|
||||
tried: i,
|
||||
},
|
||||
))?;
|
||||
// Return the char
|
||||
Ok((i, v))
|
||||
}
|
||||
}
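The surrogate-pair arithmetic above can be sanity-checked against a known pair: `\uD83D\uDE00` must decode to U+1F600. A standalone sketch of the same computation (the `decode_surrogate_pair` helper is hypothetical, not part of the crate):

fn decode_surrogate_pair(leading: u16, trailing: u16) -> Option<char> {
	// Valid pairs: leading in 0xD800..=0xDBFF, trailing in 0xDC00..=0xDFFF.
	if !(0xD800..=0xDBFF).contains(&leading) || !(0xDC00..=0xDFFF).contains(&trailing) {
		return None;
	}
	let codepoint = 0x10000 + ((leading as u32 - 0xD800) << 10) + (trailing as u32 - 0xDC00);
	char::from_u32(codepoint)
}

#[test]
fn surrogate_pair_sketch() {
	// U+1F600 is encoded in UTF-16 as the surrogate pair D83D DE00.
	assert_eq!(decode_surrogate_pair(0xD83D, 0xDE00), Some('\u{1F600}'));
}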
|
||||
|
||||
// \u{10ffff}
|
||||
fn char_unicode_bracketed(i: &str) -> IResult<&str, char> {
|
||||
// Read the { character
|
||||
let (i, _) = char('{')(i)?;
|
||||
// Take up to 6 ASCII hexadecimal characters
|
||||
let (i, v) = take_while_m_n(1, 6, |c: char| c.is_ascii_hexdigit())(i)?;
|
||||
// We can convert this to u32 as the max is 0xffffff
|
||||
let v = u32::from_str_radix(v, 16).unwrap();
|
||||
// Convert this to a char, or error if it is not a valid Unicode character
|
||||
let v = char::from_u32(v).filter(|c| *c != 0 as char).ok_or(Err::Failure(
|
||||
ParseError::InvalidUnicode {
|
||||
tried: i,
|
||||
},
|
||||
))?;
|
||||
// Read the } character
|
||||
let (i, _) = char('}')(i)?;
|
||||
// Return the char
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::{sql::Value, syn::Parse};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn strand_empty() {
|
||||
let sql = r#""""#;
|
||||
let res = strand(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(r#"''"#, format!("{}", out));
|
||||
assert_eq!(out, Strand::from(""));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_single() {
|
||||
let sql = r#"'test'"#;
|
||||
let res = strand(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(r#"'test'"#, format!("{}", out));
|
||||
assert_eq!(out, Strand::from("test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_double() {
|
||||
let sql = r#""test""#;
|
||||
let res = strand(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(r#"'test'"#, format!("{}", out));
|
||||
assert_eq!(out, Strand::from("test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_quoted_single() {
|
||||
let sql = r"'te\'st'";
|
||||
let res = strand(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(r#""te'st""#, format!("{}", out));
|
||||
assert_eq!(out, Strand::from(r#"te'st"#));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_quoted_double() {
|
||||
let sql = r#""te\"st""#;
|
||||
let res = strand(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(r#"'te"st'"#, format!("{}", out));
|
||||
assert_eq!(out, Strand::from(r#"te"st"#));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_quoted_escaped() {
|
||||
let sql = r#""te\"st\n\tand\bsome\u05d9""#;
|
||||
let res = strand(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'te\"st\n\tand\u{08}some\u{05d9}'", format!("{}", out));
|
||||
assert_eq!(out, Strand::from("te\"st\n\tand\u{08}some\u{05d9}"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_nul_byte() {
|
||||
assert!(strand("'a\0b'").is_err());
|
||||
assert!(strand("'a\\u0000b'").is_err());
|
||||
assert!(strand("'a\\u{0}b'").is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_fuzz_escape() {
|
||||
for n in (1..=char::MAX as u32).step_by(101) {
|
||||
if let Some(c) = char::from_u32(n) {
|
||||
let expected = format!("a{c}b");
|
||||
|
||||
let utf32 = format!("\"a\\u{{{n:x}}}b\"");
|
||||
let (rest, s) = strand(&utf32).unwrap();
|
||||
assert_eq!(rest, "");
|
||||
assert_eq!(s.as_str(), &expected);
|
||||
|
||||
let mut utf16 = String::with_capacity(16);
|
||||
utf16 += "\"a";
|
||||
let mut buf = [0; 2];
|
||||
for &mut n in c.encode_utf16(&mut buf) {
|
||||
utf16 += &format!("\\u{n:04x}");
|
||||
}
|
||||
utf16 += "b\"";
|
||||
let (rest, s) = strand(&utf16).unwrap();
|
||||
assert_eq!(rest, "");
|
||||
assert_eq!(s.as_str(), &expected);
|
||||
}
|
||||
}
|
||||
|
||||
// Unpaired surrogate.
|
||||
assert!(strand("\"\\u{DBFF}\"").is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strand_prefix() {
|
||||
// ensure that strands which match other string-like types are actually parsed as strands
|
||||
// when prefixed.
|
||||
|
||||
let v = Value::parse("s'2012-04-23T18:25:43.000051100Z'");
|
||||
if let Value::Strand(x) = v {
|
||||
assert_eq!(x.as_str(), "2012-04-23T18:25:43.000051100Z");
|
||||
} else {
|
||||
panic!("not a strand");
|
||||
}
|
||||
|
||||
let v = Value::parse("s'a:b'");
|
||||
if let Value::Strand(x) = v {
|
||||
assert_eq!(x.as_str(), "a:b");
|
||||
} else {
|
||||
panic!("not a strand");
|
||||
}
|
||||
|
||||
let v = Value::parse("s'e72bee20-f49b-11ec-b939-0242ac120002'");
|
||||
if let Value::Strand(x) = v {
|
||||
assert_eq!(x.as_str(), "e72bee20-f49b-11ec-b939-0242ac120002");
|
||||
} else {
|
||||
panic!("not a strand");
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,17 +0,0 @@
|
|||
use super::super::{common::commas, IResult};
|
||||
use crate::sql::Tokenizer;
|
||||
use nom::{branch::alt, bytes::complete::tag_no_case, combinator::value, multi::separated_list1};
|
||||
|
||||
pub fn tokenizer(i: &str) -> IResult<&str, Tokenizer> {
|
||||
let (i, t) = alt((
|
||||
value(Tokenizer::Blank, tag_no_case("BLANK")),
|
||||
value(Tokenizer::Camel, tag_no_case("CAMEL")),
|
||||
value(Tokenizer::Class, tag_no_case("CLASS")),
|
||||
value(Tokenizer::Punct, tag_no_case("PUNCT")),
|
||||
))(i)?;
|
||||
Ok((i, t))
|
||||
}
|
||||
|
||||
pub fn tokenizers(i: &str) -> IResult<&str, Vec<Tokenizer>> {
|
||||
separated_list1(commas, tokenizer)(i)
|
||||
}
|
|
@ -1,99 +0,0 @@
|
|||
use super::super::{common::is_hex, IResult};
|
||||
use crate::sql::Uuid;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{tag, take_while_m_n},
|
||||
character::complete::char,
|
||||
combinator::{cut, recognize},
|
||||
sequence::{delimited, tuple},
|
||||
};
|
||||
|
||||
pub fn uuid(i: &str) -> IResult<&str, Uuid> {
|
||||
alt((uuid_single, uuid_double))(i)
|
||||
}
|
||||
|
||||
fn uuid_single(i: &str) -> IResult<&str, Uuid> {
|
||||
alt((
|
||||
delimited(tag("u\'"), cut(uuid_raw), cut(char('\''))),
|
||||
delimited(char('\''), uuid_raw, char('\'')),
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn uuid_double(i: &str) -> IResult<&str, Uuid> {
|
||||
alt((
|
||||
delimited(tag("u\""), cut(uuid_raw), cut(char('\"'))),
|
||||
delimited(char('\"'), uuid_raw, char('\"')),
|
||||
))(i)
|
||||
}
|
||||
|
||||
fn uuid_raw(i: &str) -> IResult<&str, Uuid> {
|
||||
let (i, v) = recognize(tuple((
|
||||
take_while_m_n(8, 8, is_hex),
|
||||
char('-'),
|
||||
take_while_m_n(4, 4, is_hex),
|
||||
char('-'),
|
||||
alt((
|
||||
char('1'),
|
||||
char('2'),
|
||||
char('3'),
|
||||
char('4'),
|
||||
char('5'),
|
||||
char('6'),
|
||||
char('7'),
|
||||
char('8'),
|
||||
)),
|
||||
take_while_m_n(3, 3, is_hex),
|
||||
char('-'),
|
||||
take_while_m_n(4, 4, is_hex),
|
||||
char('-'),
|
||||
take_while_m_n(12, 12, is_hex),
|
||||
)))(i)?;
|
||||
Ok((i, Uuid::try_from(v).unwrap()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::{sql::Value, syn::Parse};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn uuid_v1() {
|
||||
let sql = "e72bee20-f49b-11ec-b939-0242ac120002";
|
||||
let res = uuid_raw(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'e72bee20-f49b-11ec-b939-0242ac120002'", format!("{}", out));
|
||||
assert_eq!(out, Uuid::try_from("e72bee20-f49b-11ec-b939-0242ac120002").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uuid_v4() {
|
||||
let sql = "b19bc00b-aa98-486c-ae37-c8e1c54295b1";
|
||||
let res = uuid_raw(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("'b19bc00b-aa98-486c-ae37-c8e1c54295b1'", format!("{}", out));
|
||||
assert_eq!(out, Uuid::try_from("b19bc00b-aa98-486c-ae37-c8e1c54295b1").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uuid_v4_from_value() {
|
||||
let sql = "'b19bc00b-aa98-486c-ae37-c8e1c54295b1'";
|
||||
let res = Value::parse(sql);
|
||||
let Value::Uuid(out) = res else {
|
||||
panic!()
|
||||
};
|
||||
assert_eq!("'b19bc00b-aa98-486c-ae37-c8e1c54295b1'", format!("{}", out));
|
||||
assert_eq!(out, Uuid::try_from("b19bc00b-aa98-486c-ae37-c8e1c54295b1").unwrap());
|
||||
|
||||
let sql = "u'b19bc00b-aa98-486c-ae37-c8e1c54295b1'";
|
||||
let res = Value::parse(sql);
|
||||
let Value::Uuid(out) = res else {
|
||||
panic!()
|
||||
};
|
||||
assert_eq!("'b19bc00b-aa98-486c-ae37-c8e1c54295b1'", format!("{}", out));
|
||||
assert_eq!(out, Uuid::try_from("b19bc00b-aa98-486c-ae37-c8e1c54295b1").unwrap());
|
||||
}
|
||||
}
|
|
@ -1,417 +0,0 @@
|
|||
use crate::sql::{Datetime, Duration, Idiom, Query, Range, Thing, Value};
|
||||
use crate::{err::Error, sql::Subquery};
|
||||
use nom::{Err, Finish};
|
||||
|
||||
pub mod literal;
|
||||
mod part;
|
||||
mod stmt;
|
||||
|
||||
mod block;
|
||||
mod builtin;
|
||||
mod comment;
|
||||
mod common;
|
||||
mod depth;
|
||||
mod ending;
|
||||
mod error;
|
||||
mod expression;
|
||||
mod function;
|
||||
mod idiom;
|
||||
mod kind;
|
||||
mod omit;
|
||||
mod operator;
|
||||
mod special;
|
||||
mod subquery;
|
||||
mod thing;
|
||||
mod value;
|
||||
|
||||
pub use error::{IResult, ParseError};
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod test;
|
||||
|
||||
fn query(i: &str) -> IResult<&str, Query> {
|
||||
let (i, v) = stmt::statements(i)?;
|
||||
if !i.is_empty() {
|
||||
return Err(Err::Failure(ParseError::ExplainedExpected {
|
||||
tried: i,
|
||||
expected: "query to end",
|
||||
explained: "perhaps missing a semicolon on the previous statement?",
|
||||
}));
|
||||
}
|
||||
Ok((i, Query(v)))
|
||||
}
|
||||
|
||||
/// Parses a SurrealQL [`Query`]
|
||||
///
|
||||
/// During query parsing, the total depth of calls to parse values (including arrays, expressions,
|
||||
/// functions, objects, sub-queries), JavaScript values, and geometry collections counts against
|
||||
/// a computation depth limit. If the limit is reached, parsing will return
|
||||
/// [`Error::ComputationDepthExceeded`], as opposed to spending more time and potentially
|
||||
/// overflowing the call stack.
|
||||
///
|
||||
/// If you encounter this limit and believe that it should be increased,
|
||||
/// please [open an issue](https://github.com/surrealdb/surrealdb/issues)!
|
||||
#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
|
||||
pub fn parse(input: &str) -> Result<Query, Error> {
|
||||
parse_impl(input, query)
|
||||
}
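A query nested deeply enough to hit the depth limit therefore fails with an error instead of overflowing the stack; the `parse_recursion_*` tests below exercise exactly this. A minimal sketch, assuming the `parse` function above:

#[test]
fn depth_limit_sketch() {
	// Wrap a value in far more parentheses than the depth limit allows.
	let sql = format!("SELECT * FROM {}5{}", "(".repeat(10_000), ")".repeat(10_000));
	// Per the doc comment above the underlying error is the computation depth
	// limit; here we only check that parsing fails rather than overflowing the stack.
	assert!(parse(&sql).is_err());
}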
|
||||
|
||||
/// Parses a SurrealQL [`Value`].
|
||||
#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
|
||||
pub fn value(input: &str) -> Result<Value, Error> {
|
||||
parse_impl(input, value::value)
|
||||
}
|
||||
|
||||
/// Parses JSON into an inert SurrealQL [`Value`]
|
||||
#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
|
||||
pub fn json(input: &str) -> Result<Value, Error> {
|
||||
parse_impl(input, value::json)
|
||||
}
|
||||
/// Parses a SurrealQL [`Subquery`]
|
||||
#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
|
||||
pub fn subquery(input: &str) -> Result<Subquery, Error> {
|
||||
parse_impl(input, subquery::subquery)
|
||||
}
|
||||
|
||||
/// Parses a SurrealQL [`Idiom`]
|
||||
#[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
|
||||
pub fn idiom(input: &str) -> Result<Idiom, Error> {
|
||||
parse_impl(input, idiom::plain)
|
||||
}
|
||||
|
||||
pub fn datetime_raw(input: &str) -> Result<Datetime, Error> {
|
||||
parse_impl(input, literal::datetime_all_raw)
|
||||
}
|
||||
|
||||
pub fn duration(input: &str) -> Result<Duration, Error> {
|
||||
parse_impl(input, literal::duration)
|
||||
}
|
||||
|
||||
pub fn range(input: &str) -> Result<Range, Error> {
|
||||
parse_impl(input, literal::range)
|
||||
}
|
||||
|
||||
/// Parses a SurrealQL [`Thing`]
|
||||
pub fn thing(input: &str) -> Result<Thing, Error> {
|
||||
parse_impl(input, thing::thing_raw)
|
||||
}
|
||||
|
||||
fn parse_impl<O>(input: &str, parser: impl Fn(&str) -> IResult<&str, O>) -> Result<O, Error> {
|
||||
// Reset the parse depth limiter
|
||||
depth::reset();
|
||||
|
||||
// Check the length of the input
|
||||
match input.trim().len() {
|
||||
// The input query was empty
|
||||
0 => Err(Error::QueryEmpty),
|
||||
// Continue parsing the query
|
||||
_ => match parser(input).finish() {
|
||||
// The query was parsed successfully
|
||||
Ok(("", parsed)) => Ok(parsed),
|
||||
// There was unparsed SQL remaining
|
||||
Ok((_, _)) => Err(Error::QueryRemaining),
|
||||
// There was an error when parsing the query
|
||||
Err(e) => Err(Error::InvalidQuery(e.render_on(input))),
|
||||
},
|
||||
}
|
||||
}
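The three outcomes handled in `parse_impl` are easy to observe through the public `parse` entry point. A minimal sketch, assuming the functions above:

#[test]
fn parse_outcomes_sketch() {
	// Empty (or whitespace-only) input is rejected before parsing starts.
	assert!(matches!(parse("   "), Err(Error::QueryEmpty)));
	// A syntactically broken query is reported as an invalid-query error
	// rendered against the original input.
	assert!(parse("SELECT * FROM { }}").is_err());
}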
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use serde::Serialize;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn no_ending() {
|
||||
let sql = "SELECT * FROM test";
|
||||
parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_query_string() {
|
||||
let sql = "SELECT * FROM test;";
|
||||
parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trim_query_string() {
|
||||
let sql = " SELECT * FROM test ; ";
|
||||
parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_complex_rubbish() {
|
||||
let sql = " SELECT * FROM test ; /* shouldbespace */ ;;; ";
|
||||
parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_complex_failure() {
|
||||
let sql = " SELECT * FROM { }} ";
|
||||
parse(sql).unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ok_recursion() {
|
||||
let sql = "SELECT * FROM ((SELECT * FROM (5))) * 5;";
|
||||
parse(sql).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_ok_recursion_deeper() {
|
||||
let sql = "SELECT * FROM (((( SELECT * FROM ((5)) + ((5)) + ((5)) )))) * ((( function() {return 5;} )));";
|
||||
let start = Instant::now();
|
||||
parse(sql).unwrap();
|
||||
let elapsed = start.elapsed();
|
||||
assert!(
|
||||
elapsed < Duration::from_millis(2000),
|
||||
"took {}ms, previously took ~1000ms in debug",
|
||||
elapsed.as_millis()
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_cast() {
|
||||
for n in [10, 100, 500] {
|
||||
recursive("SELECT * FROM ", "<int>", "5", "", n, n > 50);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_geometry() {
|
||||
for n in [1, 50, 100] {
|
||||
recursive(
|
||||
"SELECT * FROM ",
|
||||
r#"{type: "GeometryCollection",geometries: ["#,
|
||||
r#"{type: "MultiPoint",coordinates: [[10.0, 11.2],[10.5, 11.9]]}"#,
|
||||
"]}",
|
||||
n,
|
||||
n > 25,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_javascript() {
|
||||
for n in [10, 1000] {
|
||||
recursive("SELECT * FROM ", "function() {", "return 5;", "}", n, n > 500);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_mixed() {
|
||||
for n in [3, 15, 75] {
|
||||
recursive("", "SELECT * FROM ((((", "5 * 5", ")))) * 5", n, n > 5);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_select() {
|
||||
for n in [5, 10, 100] {
|
||||
recursive("SELECT * FROM ", "(SELECT * FROM ", "5", ")", n, n > 15);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_value_subquery() {
|
||||
for p in 1..=4 {
|
||||
recursive("SELECT * FROM ", "(", "5", ")", 10usize.pow(p), p > 1);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_recursion_if_subquery() {
|
||||
for p in 1..=3 {
|
||||
recursive("SELECT * FROM ", "IF true THEN ", "5", " ELSE 4 END", 6usize.pow(p), p > 1);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parser_try() {
|
||||
let sql = "
|
||||
SELECT
|
||||
*,
|
||||
tags[$].value,
|
||||
3s as duration,
|
||||
1.345 AS number,
|
||||
test AS `some thing`,
|
||||
'2012-04-23T18:25:43.511Z' AS utctime,
|
||||
'2012-04-23T18:25:43.511-08:00' AS pacifictime,
|
||||
{ key: (3 + 1 + 2), other: 9 * 7, 'some thing': { otherkey: 'text', } } AS object
|
||||
FROM $param, test, temp, test:thingy, |test:10|, |test:1..10|
|
||||
WHERE IF true THEN 'YAY' ELSE 'OOPS' END
|
||||
AND (0.1341, 0.5719) INSIDE { type: 'Polygon', coordinates: [[[0.1341, 0.5719], [0.1341, 0.5719]]] }
|
||||
AND (3 + 3 * 4)=6
|
||||
AND 3 + 3 * 4 = 6
|
||||
AND ages CONTAINS 18
|
||||
AND if IS true
|
||||
SPLIT test.things
|
||||
VERSION '2019-01-01T08:00:00Z'
|
||||
TIMEOUT 2w;
|
||||
CREATE person SET name = 'Tobie', age += 18;
|
||||
";
|
||||
let tmp = parse(sql).unwrap();
|
||||
|
||||
let enc: Vec<u8> = Vec::from(&tmp);
|
||||
let dec: Query = Query::from(enc);
|
||||
assert_eq!(tmp, dec);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parser_full() {
|
||||
let sql = std::fs::read("test.surql").unwrap();
|
||||
let sql = std::str::from_utf8(&sql).unwrap();
|
||||
let res = parse(sql);
|
||||
let tmp = res.unwrap();
|
||||
|
||||
let enc: Vec<u8> = Vec::from(&tmp);
|
||||
let dec: Query = Query::from(enc);
|
||||
assert_eq!(tmp, dec);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(debug_assertions, ignore)]
|
||||
fn json_benchmark() {
|
||||
// From the top level of the repository,
|
||||
// cargo test sql::parser::tests::json_benchmark --package surrealdb --lib --release -- --nocapture --exact
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
struct Data {
|
||||
boolean: bool,
|
||||
integer: i32,
|
||||
decimal: f32,
|
||||
string: String,
|
||||
inner: Option<Box<Self>>,
|
||||
inners: Vec<Self>,
|
||||
inner_map: HashMap<String, Self>,
|
||||
}
|
||||
|
||||
let inner = Data {
|
||||
boolean: true,
|
||||
integer: -1,
|
||||
decimal: 0.5,
|
||||
string: "foo".to_owned(),
|
||||
inner: None,
|
||||
inners: Vec::new(),
|
||||
inner_map: HashMap::new(),
|
||||
};
|
||||
let inners = vec![inner.clone(); 10];
|
||||
|
||||
let data = Data {
|
||||
boolean: false,
|
||||
integer: 42,
|
||||
decimal: 9000.0,
|
||||
string: "SurrealDB".to_owned(),
|
||||
inner_map: inners.iter().enumerate().map(|(i, d)| (i.to_string(), d.clone())).collect(),
|
||||
inners,
|
||||
inner: Some(Box::new(inner)),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&data).unwrap();
|
||||
let json_pretty = serde_json::to_string_pretty(&data).unwrap();
|
||||
|
||||
let benchmark = |de: fn(&str) -> Value| {
|
||||
let time = Instant::now();
|
||||
const ITERATIONS: u32 = 32;
|
||||
for _ in 0..ITERATIONS {
|
||||
std::hint::black_box(de(std::hint::black_box(&json)));
|
||||
std::hint::black_box(de(std::hint::black_box(&json_pretty)));
|
||||
}
|
||||
time.elapsed().as_secs_f32() / (2 * ITERATIONS) as f32
|
||||
};
|
||||
|
||||
println!("sql::json took {:.10}s/iter", benchmark(|s| crate::sql::json(s).unwrap()));
|
||||
}
|
||||
|
||||
/// Try parsing a query with O(n) recursion depth and expect to fail if and only if
|
||||
/// `excessive` is true.
|
||||
fn recursive(
|
||||
prefix: &str,
|
||||
recursive_start: &str,
|
||||
base: &str,
|
||||
recursive_end: &str,
|
||||
n: usize,
|
||||
excessive: bool,
|
||||
) {
|
||||
let mut sql = String::from(prefix);
|
||||
for _ in 0..n {
|
||||
sql.push_str(recursive_start);
|
||||
}
|
||||
sql.push_str(base);
|
||||
for _ in 0..n {
|
||||
sql.push_str(recursive_end);
|
||||
}
|
||||
let start = Instant::now();
|
||||
let res = query(&sql).finish();
|
||||
let elapsed = start.elapsed();
|
||||
if excessive {
|
||||
assert!(
|
||||
matches!(res, Err(ParseError::ExcessiveDepth(_))),
|
||||
"expected computation depth exceeded, got {:?}",
|
||||
res
|
||||
);
|
||||
} else {
|
||||
res.unwrap();
|
||||
}
|
||||
// The parser can terminate faster in the excessive case.
|
||||
let cutoff = if excessive {
|
||||
500
|
||||
} else {
|
||||
1000
|
||||
};
|
||||
assert!(
|
||||
elapsed < Duration::from_millis(cutoff),
|
||||
"took {}ms, previously much faster to parse {n} in debug mode",
|
||||
elapsed.as_millis()
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn single_query() {
|
||||
let sql = "CREATE test";
|
||||
let res = query(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("CREATE test;", format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_query() {
|
||||
let sql = "CREATE test; CREATE temp;";
|
||||
let res = query(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("CREATE test;\nCREATE temp;", format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_query_semicolons() {
|
||||
let sql = "CREATE test;;;CREATE temp;;;";
|
||||
let res = query(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("CREATE test;\nCREATE temp;", format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_query_semicolons_comments() {
|
||||
let sql = "CREATE test;;;CREATE temp;;;/* some comment */";
|
||||
let res = query(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("CREATE test;\nCREATE temp;", format!("{}", out))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_query_semicolons_multi_comments() {
|
||||
let sql = "CREATE test;;;CREATE temp;;;/* some comment */;;;/* other comment */";
|
||||
let res = query(sql);
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("CREATE test;\nCREATE temp;", format!("{}", out))
|
||||
}
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
use super::{comment::shouldbespace, error::IResult, idiom::locals as idioms};
|
||||
use crate::sql::Idioms;
|
||||
use nom::{bytes::complete::tag_no_case, combinator::cut};
|
||||
|
||||
pub fn omit(i: &str) -> IResult<&str, Idioms> {
|
||||
let (i, _) = tag_no_case("OMIT")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cut(idioms)(i)?;
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn omit_statement() {
|
||||
let sql = "OMIT field";
|
||||
let res = omit(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("field", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn omit_statement_multiple() {
|
||||
let sql = "OMIT field, other.field";
|
||||
let res = omit(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("field, other.field", format!("{}", out));
|
||||
}
|
||||
}
|
|
@ -1,266 +0,0 @@
|
|||
use super::{
|
||||
comment::{mightbespace, shouldbespace},
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::index::Distance;
|
||||
use crate::sql::{Dir, Operator};
|
||||
use crate::syn::v1::part::index::minkowski;
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::{tag, tag_no_case},
|
||||
character::complete::{char, u32, u8},
|
||||
combinator::{cut, map, opt, value},
|
||||
};
|
||||
|
||||
pub fn assigner(i: &str) -> IResult<&str, Operator> {
|
||||
alt((
|
||||
value(Operator::Equal, char('=')),
|
||||
value(Operator::Inc, tag("+=")),
|
||||
value(Operator::Dec, tag("-=")),
|
||||
value(Operator::Ext, tag("+?=")),
|
||||
))(i)
|
||||
}
|
||||
|
||||
pub fn unary(i: &str) -> IResult<&str, Operator> {
|
||||
unary_symbols(i)
|
||||
}
|
||||
|
||||
pub fn unary_symbols(i: &str) -> IResult<&str, Operator> {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, v) = alt((value(Operator::Neg, tag("-")), value(Operator::Not, tag("!"))))(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
pub fn binary(i: &str) -> IResult<&str, Operator> {
|
||||
alt((binary_symbols, binary_phrases))(i)
|
||||
}
|
||||
|
||||
pub fn binary_symbols(i: &str) -> IResult<&str, Operator> {
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, v) = alt((
|
||||
alt((
|
||||
value(Operator::Or, tag("||")),
|
||||
value(Operator::And, tag("&&")),
|
||||
value(Operator::Tco, tag("?:")),
|
||||
value(Operator::Nco, tag("??")),
|
||||
)),
|
||||
alt((
|
||||
value(Operator::Exact, tag("==")),
|
||||
value(Operator::NotEqual, tag("!=")),
|
||||
value(Operator::AllEqual, tag("*=")),
|
||||
value(Operator::AnyEqual, tag("?=")),
|
||||
value(Operator::Equal, char('=')),
|
||||
)),
|
||||
alt((
|
||||
value(Operator::NotLike, tag("!~")),
|
||||
value(Operator::AllLike, tag("*~")),
|
||||
value(Operator::AnyLike, tag("?~")),
|
||||
value(Operator::Like, char('~')),
|
||||
matches,
|
||||
knn,
|
||||
)),
|
||||
alt((
|
||||
value(Operator::LessThanOrEqual, tag("<=")),
|
||||
value(Operator::LessThan, char('<')),
|
||||
value(Operator::MoreThanOrEqual, tag(">=")),
|
||||
value(Operator::MoreThan, char('>')),
|
||||
knn,
|
||||
)),
|
||||
alt((
|
||||
value(Operator::Pow, tag("**")),
|
||||
value(Operator::Add, char('+')),
|
||||
value(Operator::Sub, char('-')),
|
||||
value(Operator::Mul, char('*')),
|
||||
value(Operator::Mul, char('×')),
|
||||
value(Operator::Mul, char('∙')),
|
||||
value(Operator::Div, char('/')),
|
||||
value(Operator::Div, char('÷')),
|
||||
value(Operator::Rem, char('%')),
|
||||
)),
|
||||
alt((
|
||||
value(Operator::Contain, char('∋')),
|
||||
value(Operator::NotContain, char('∌')),
|
||||
value(Operator::Inside, char('∈')),
|
||||
value(Operator::NotInside, char('∉')),
|
||||
value(Operator::ContainAll, char('⊇')),
|
||||
value(Operator::ContainAny, char('⊃')),
|
||||
value(Operator::ContainNone, char('⊅')),
|
||||
value(Operator::AllInside, char('⊆')),
|
||||
value(Operator::AnyInside, char('⊂')),
|
||||
value(Operator::NoneInside, char('⊄')),
|
||||
)),
|
||||
))(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
Ok((i, v))
|
||||
}
|
||||
|
||||
pub fn binary_phrases(i: &str) -> IResult<&str, Operator> {
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = alt((
|
||||
alt((
|
||||
value(Operator::Or, tag_no_case("OR")),
|
||||
value(Operator::And, tag_no_case("AND")),
|
||||
value(Operator::NotEqual, tag_no_case("IS NOT")),
|
||||
value(Operator::Equal, tag_no_case("IS")),
|
||||
)),
|
||||
alt((
|
||||
value(Operator::ContainAll, tag_no_case("CONTAINSALL")),
|
||||
value(Operator::ContainAny, tag_no_case("CONTAINSANY")),
|
||||
value(Operator::ContainNone, tag_no_case("CONTAINSNONE")),
|
||||
value(Operator::NotContain, tag_no_case("CONTAINSNOT")),
|
||||
value(Operator::Contain, tag_no_case("CONTAINS")),
|
||||
value(Operator::AllInside, tag_no_case("ALLINSIDE")),
|
||||
value(Operator::AnyInside, tag_no_case("ANYINSIDE")),
|
||||
value(Operator::NoneInside, tag_no_case("NONEINSIDE")),
|
||||
value(Operator::NotInside, tag_no_case("NOTINSIDE")),
|
||||
value(Operator::Inside, tag_no_case("INSIDE")),
|
||||
value(Operator::Outside, tag_no_case("OUTSIDE")),
|
||||
value(Operator::Intersects, tag_no_case("INTERSECTS")),
|
||||
value(Operator::NotInside, tag_no_case("NOT IN")),
|
||||
value(Operator::Inside, tag_no_case("IN")),
|
||||
)),
|
||||
))(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
Ok((i, v))
|
||||
}
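The symbol and keyword spellings resolve to the same `Operator` variants; the only difference is that the keyword forms demand surrounding whitespace (`shouldbespace`) while the symbol forms merely allow it. A minimal sketch, assuming the parsers above:

#[test]
fn operator_spellings_sketch() {
	// "∋" and " CONTAINS " are two spellings of the same operator.
	assert_eq!(binary("∋").unwrap().1, Operator::Contain);
	assert_eq!(binary(" CONTAINS ").unwrap().1, Operator::Contain);
	// The keyword form without surrounding whitespace is rejected.
	assert!(binary_phrases("CONTAINS").is_err());
}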
|
||||
|
||||
pub fn matches(i: &str) -> IResult<&str, Operator> {
|
||||
let (i, _) = char('@')(i)?;
|
||||
cut(|i| {
|
||||
let (i, reference) = opt(u8)(i)?;
|
||||
let (i, _) = char('@')(i)?;
|
||||
Ok((i, Operator::Matches(reference)))
|
||||
})(i)
|
||||
}
|
||||
|
||||
pub fn knn_distance(i: &str) -> IResult<&str, Distance> {
|
||||
let (i, _) = char(',')(i)?;
|
||||
alt((
|
||||
map(tag_no_case("CHEBYSHEV"), |_| Distance::Chebyshev),
|
||||
map(tag_no_case("COSINE"), |_| Distance::Cosine),
|
||||
map(tag_no_case("EUCLIDEAN"), |_| Distance::Euclidean),
|
||||
map(tag_no_case("HAMMING"), |_| Distance::Hamming),
|
||||
map(tag_no_case("JACCARD"), |_| Distance::Jaccard),
|
||||
map(tag_no_case("MANHATTAN"), |_| Distance::Manhattan),
|
||||
minkowski,
|
||||
map(tag_no_case("PEARSON"), |_| Distance::Pearson),
|
||||
))(i)
|
||||
}
|
||||
|
||||
pub fn knn(i: &str) -> IResult<&str, Operator> {
|
||||
alt((
|
||||
|i| {
|
||||
let (i, _) = opt(tag_no_case("knn"))(i)?;
|
||||
let (i, _) = char('<')(i)?;
|
||||
let (i, k) = u32(i)?;
|
||||
let (i, dist) = opt(knn_distance)(i)?;
|
||||
let (i, _) = char('>')(i)?;
|
||||
Ok((i, Operator::Knn(k, dist)))
|
||||
},
|
||||
|i| {
|
||||
let (i, _) = tag("<|")(i)?;
|
||||
cut(|i| {
|
||||
let (i, k) = u32(i)?;
|
||||
let (i, dist) = opt(knn_distance)(i)?;
|
||||
let (i, _) = tag("|>")(i)?;
|
||||
Ok((i, Operator::Knn(k, dist)))
|
||||
})(i)
|
||||
},
|
||||
))(i)
|
||||
}
|
||||
|
||||
pub fn dir(i: &str) -> IResult<&str, Dir> {
|
||||
alt((value(Dir::Both, tag("<->")), value(Dir::In, tag("<-")), value(Dir::Out, tag("->"))))(i)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn dir_in() {
|
||||
let sql = "<-";
|
||||
let res = dir(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<-", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dir_out() {
|
||||
let sql = "->";
|
||||
let res = dir(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("->", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dir_both() {
|
||||
let sql = "<->";
|
||||
let res = dir(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<->", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn matches_without_reference() {
|
||||
let res = matches("@@");
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("@@", format!("{}", out));
|
||||
assert_eq!(out, Operator::Matches(None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn matches_with_reference() {
|
||||
let res = matches("@12@");
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("@12@", format!("{}", out));
|
||||
assert_eq!(out, Operator::Matches(Some(12u8)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn matches_with_invalid_reference() {
|
||||
let res = matches("@256@");
|
||||
res.unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_knn() {
|
||||
let res = knn("<5>");
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<|5|>", format!("{}", out));
|
||||
assert_eq!(out, Operator::Knn(5, None));
|
||||
|
||||
let res = knn("<|5|>");
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<|5|>", format!("{}", out));
|
||||
assert_eq!(out, Operator::Knn(5, None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_knn_with_distance() {
|
||||
let res = knn("<3,EUCLIDEAN>");
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<|3,EUCLIDEAN|>", format!("{}", out));
|
||||
assert_eq!(out, Operator::Knn(3, Some(Distance::Euclidean)));
|
||||
|
||||
let res = knn("<|3,EUCLIDEAN|>");
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<|3,EUCLIDEAN|>", format!("{}", out));
|
||||
assert_eq!(out, Operator::Knn(3, Some(Distance::Euclidean)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_knn_with_prefix() {
|
||||
let res = knn("<|5|>");
|
||||
assert!(res.is_ok());
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("<|5|>", format!("{}", out));
|
||||
assert_eq!(out, Operator::Knn(5, None));
|
||||
}
|
||||
}
|
|
@ -1,194 +0,0 @@
|
|||
use super::super::{
|
||||
comment::{mightbespace, shouldbespace},
|
||||
common::commas,
|
||||
idiom::plain,
|
||||
operator::assigner,
|
||||
value::value,
|
||||
IResult,
|
||||
};
|
||||
use crate::sql::Data;
|
||||
use nom::{branch::alt, bytes::complete::tag_no_case, combinator::cut, multi::separated_list1};
|
||||
|
||||
pub fn data(i: &str) -> IResult<&str, Data> {
|
||||
alt((set, unset, patch, merge, replace, content))(i)
|
||||
}
|
||||
|
||||
fn set(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("SET")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cut(separated_list1(
|
||||
commas,
|
||||
cut(|i| {
|
||||
let (i, l) = plain(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, o) = assigner(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, r) = value(i)?;
|
||||
Ok((i, (l, o, r)))
|
||||
}),
|
||||
))(i)?;
|
||||
Ok((i, Data::SetExpression(v)))
|
||||
}
|
||||
|
||||
fn unset(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("UNSET")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cut(separated_list1(commas, plain))(i)?;
|
||||
Ok((i, Data::UnsetExpression(v)))
|
||||
}
|
||||
|
||||
fn patch(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("PATCH")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cut(value)(i)?;
|
||||
Ok((i, Data::PatchExpression(v)))
|
||||
}
|
||||
|
||||
fn merge(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("MERGE")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cut(value)(i)?;
|
||||
Ok((i, Data::MergeExpression(v)))
|
||||
}
|
||||
|
||||
fn replace(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("REPLACE")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cut(value)(i)?;
|
||||
Ok((i, Data::ReplaceExpression(v)))
|
||||
}
|
||||
|
||||
fn content(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("CONTENT")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = cut(value)(i)?;
|
||||
Ok((i, Data::ContentExpression(v)))
|
||||
}
|
||||
|
||||
pub fn single(i: &str) -> IResult<&str, Data> {
|
||||
let (i, v) = value(i)?;
|
||||
Ok((i, Data::SingleExpression(v)))
|
||||
}
|
||||
|
||||
pub fn values(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("(")(i)?;
|
||||
let (i, fields) = separated_list1(commas, plain)(i)?;
|
||||
let (i, _) = tag_no_case(")")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, _) = tag_no_case("VALUES")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, values) = separated_list1(commas, |i| {
|
||||
let (i, _) = tag_no_case("(")(i)?;
|
||||
let (i, v) = separated_list1(commas, value)(i)?;
|
||||
let (i, _) = tag_no_case(")")(i)?;
|
||||
Ok((i, v))
|
||||
})(i)?;
|
||||
Ok((
|
||||
i,
|
||||
Data::ValuesExpression(
|
||||
values.into_iter().map(|row| fields.iter().cloned().zip(row).collect()).collect(),
|
||||
),
|
||||
))
|
||||
}
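The closing `collect` pairs every declared field with the value in the same position of each row, so each row becomes its own list of `(idiom, value)` assignments. A minimal sketch, assuming the `values` parser and the `Data` import above:

#[test]
fn data_values_rows_sketch() {
	let out = values("(a, b) VALUES (1, 2), (3, 4)").unwrap().1;
	let Data::ValuesExpression(rows) = out else {
		panic!("expected a VALUES expression")
	};
	// Two rows, each zipped against the two declared fields.
	assert_eq!(rows.len(), 2);
	assert!(rows.iter().all(|row| row.len() == 2));
}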
|
||||
|
||||
pub fn update(i: &str) -> IResult<&str, Data> {
|
||||
let (i, _) = tag_no_case("ON DUPLICATE KEY UPDATE")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
let (i, v) = separated_list1(commas, |i| {
|
||||
let (i, l) = plain(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, o) = assigner(i)?;
|
||||
let (i, _) = mightbespace(i)?;
|
||||
let (i, r) = value(i)?;
|
||||
Ok((i, (l, o, r)))
|
||||
})(i)?;
|
||||
Ok((i, Data::UpdateExpression(v)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn data_set_statement() {
|
||||
let sql = "SET field = true";
|
||||
let res = data(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("SET field = true", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_set_statement_multiple() {
|
||||
let sql = "SET field = true, other.field = false";
|
||||
let res = data(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("SET field = true, other.field = false", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_unset_statement() {
|
||||
let sql = "UNSET field";
|
||||
let res = data(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("UNSET field", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_unset_statement_multiple_fields() {
|
||||
let sql = "UNSET field, other.field";
|
||||
let res = data(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("UNSET field, other.field", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_patch_statement() {
|
||||
let sql = "PATCH [{ field: true }]";
|
||||
let res = patch(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("PATCH [{ field: true }]", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_merge_statement() {
|
||||
let sql = "MERGE { field: true }";
|
||||
let res = data(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("MERGE { field: true }", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_content_statement() {
|
||||
let sql = "CONTENT { field: true }";
|
||||
let res = data(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("CONTENT { field: true }", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_replace_statement() {
|
||||
let sql = "REPLACE { field: true }";
|
||||
let res = data(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("REPLACE { field: true }", format!("{}", out));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_values_statement() {
|
||||
let sql = "(one, two, three) VALUES ($param, true, [1, 2, 3]), ($param, false, [4, 5, 6])";
|
||||
let res = values(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!(
|
||||
"(one, two, three) VALUES ($param, true, [1, 2, 3]), ($param, false, [4, 5, 6])",
|
||||
format!("{}", out)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn data_update_statement() {
|
||||
let sql = "ON DUPLICATE KEY UPDATE field = true, other.field = false";
|
||||
let res = update(sql);
|
||||
let out = res.unwrap().1;
|
||||
assert_eq!("ON DUPLICATE KEY UPDATE field = true, other.field = false", format!("{}", out));
|
||||
}
|
||||
}
|
|
@ -1,58 +0,0 @@
|
|||
use super::super::{common::commas, ending::field as ending, idiom::plain, value::value, IResult};
|
||||
use crate::{
|
||||
sql::{Field, Fields},
|
||||
syn::v1::comment::shouldbespace,
|
||||
};
|
||||
use nom::{
|
||||
branch::alt,
|
||||
bytes::complete::tag_no_case,
|
||||
combinator::{cut, opt},
|
||||
multi::separated_list1,
|
||||
sequence::delimited,
|
||||
};
|
||||
|
||||
pub fn fields(i: &str) -> IResult<&str, Fields> {
|
||||
alt((field_one, field_many))(i)
|
||||
}
|
||||
|
||||
fn field_one(i: &str) -> IResult<&str, Fields> {
|
||||
let (i, _) = tag_no_case("VALUE")(i)?;
|
||||
let (i, _) = shouldbespace(i)?;
|
||||
cut(|i| {
|
||||
let (i, f) = alone(i)?;
|
||||
let (i, _) = ending(i)?;
|
||||
Ok((i, Fields(vec![f], true)))
|
||||
})(i)
|
||||
}
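`VALUE <expr>` produces a `Fields` whose boolean flag is set, which is what later makes `SELECT VALUE` return bare values instead of objects; an ordinary projection list takes the `field_many` branch and leaves the flag unset. A minimal sketch, assuming the `fields` parser above:

#[test]
fn fields_value_flag_sketch() {
	// The VALUE form sets the single-value flag...
	let out = fields("VALUE name FROM test").unwrap().1;
	assert!(out.1);
	// ...while an ordinary projection list does not.
	let out = fields("name, age FROM test").unwrap().1;
	assert!(!out.1);
	assert_eq!(out.0.len(), 2);
}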
|
||||
|
||||
pub fn field(i: &str) -> IResult<&str, Field> {
|
||||
alt((all, alone))(i)
|
||||
}
|
||||
|
||||
fn field_many(i: &str) -> IResult<&str, Fields> {
|
||||
let (i, v) = separated_list1(commas, field)(i)?;
|
||||
Ok((i, Fields(v, false)))
|
||||
}
|
||||
|
||||
pub fn all(i: &str) -> IResult<&str, Field> {
|
||||
let (i, _) = tag_no_case("*")(i)?;
|
||||
Ok((i, Field::All))
|
||||
}
|
||||
|
||||
pub fn alone(i: &str) -> IResult<&str, Field> {
|
||||
let (i, expr) = value(i)?;
|
||||
let (i, alias) =
|
||||
if let (i, Some(_)) = opt(delimited(shouldbespace, tag_no_case("AS"), shouldbespace))(i)? {
|
||||
let (i, alias) = cut(plain)(i)?;
|
||||
(i, Some(alias))
|
||||
} else {
|
||||
(i, None)
|
||||
};
|
||||
Ok((
|
||||
i,
|
||||
Field::Single {
|
||||
expr,
|
||||
alias,
|
||||
},
|
||||
))
|
||||
}
|
|

@ -1,192 +0,0 @@

use super::super::{
	comment::{mightbespace, shouldbespace},
	literal::{ident, scoring},
	IResult,
};
use crate::sql::{
	index::{Distance, Distance1, MTreeParams, SearchParams, VectorType},
	Ident, Index,
};

use nom::{
	branch::alt,
	bytes::complete::{tag, tag_no_case},
	character::complete::{u16, u32},
	combinator::{cut, map, opt},
};

pub fn index(i: &str) -> IResult<&str, Index> {
	alt((unique, search, mtree))(i)
}

pub fn unique(i: &str) -> IResult<&str, Index> {
	let (i, _) = tag_no_case("UNIQUE")(i)?;
	Ok((i, Index::Uniq))
}

pub fn analyzer(i: &str) -> IResult<&str, Ident> {
	let (i, _) = mightbespace(i)?;
	let (i, _) = tag_no_case("ANALYZER")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, analyzer) = cut(ident)(i)?;
	Ok((i, analyzer))
}

fn order<'a>(label: &'static str, i: &'a str) -> IResult<&'a str, u32> {
	let (i, _) = mightbespace(i)?;
	let (i, _) = tag_no_case(label)(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, order) = cut(u32)(i)?;
	Ok((i, order))
}

pub fn doc_ids_order(i: &str) -> IResult<&str, u32> {
	order("DOC_IDS_ORDER", i)
}

pub fn doc_ids_cache(i: &str) -> IResult<&str, u32> {
	order("DOC_IDS_CACHE", i)
}

pub fn doc_lengths_order(i: &str) -> IResult<&str, u32> {
	order("DOC_LENGTHS_ORDER", i)
}

pub fn doc_lengths_cache(i: &str) -> IResult<&str, u32> {
	order("DOC_LENGTHS_CACHE", i)
}

pub fn postings_order(i: &str) -> IResult<&str, u32> {
	order("POSTINGS_ORDER", i)
}

pub fn postings_cache(i: &str) -> IResult<&str, u32> {
	order("POSTINGS_CACHE", i)
}

pub fn terms_order(i: &str) -> IResult<&str, u32> {
	order("TERMS_ORDER", i)
}

pub fn terms_cache(i: &str) -> IResult<&str, u32> {
	order("TERMS_CACHE", i)
}

pub fn highlights(i: &str) -> IResult<&str, bool> {
	let (i, _) = mightbespace(i)?;
	map(opt(tag("HIGHLIGHTS")), |x| x.is_some())(i)
}

pub fn search(i: &str) -> IResult<&str, Index> {
	let (i, _) = tag_no_case("SEARCH")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(|i| {
		let (i, az) = opt(analyzer)(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, sc) = scoring(i)?;
		let (i, o1) = opt(doc_ids_order)(i)?;
		let (i, o2) = opt(doc_lengths_order)(i)?;
		let (i, o3) = opt(postings_order)(i)?;
		let (i, o4) = opt(terms_order)(i)?;
		let (i, c1) = opt(doc_ids_cache)(i)?;
		let (i, c2) = opt(doc_lengths_cache)(i)?;
		let (i, c3) = opt(postings_cache)(i)?;
		let (i, c4) = opt(terms_cache)(i)?;
		let (i, hl) = highlights(i)?;
		Ok((
			i,
			Index::Search(SearchParams {
				az: az.unwrap_or_else(|| Ident::from("like")),
				sc,
				hl,
				doc_ids_order: o1.unwrap_or(100),
				doc_lengths_order: o2.unwrap_or(100),
				postings_order: o3.unwrap_or(100),
				terms_order: o4.unwrap_or(100),
				doc_ids_cache: c1.unwrap_or(100),
				doc_lengths_cache: c2.unwrap_or(100),
				postings_cache: c3.unwrap_or(100),
				terms_cache: c4.unwrap_or(100),
			}),
		))
	})(i)
}

pub fn mtree_distance(i: &str) -> IResult<&str, Distance> {
	let (i, _) = mightbespace(i)?;
	let (i, _) = tag_no_case("DIST")(i)?;
	let (i, _) = shouldbespace(i)?;
	alt((
		map(tag_no_case("EUCLIDEAN"), |_| Distance::Euclidean),
		map(tag_no_case("COSINE"), |_| Distance::Cosine),
		map(tag_no_case("MANHATTAN"), |_| Distance::Manhattan),
		minkowski,
	))(i)
}

pub fn minkowski(i: &str) -> IResult<&str, Distance> {
	let (i, _) = tag_no_case("MINKOWSKI")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, order) = u32(i)?;
	Ok((i, Distance::Minkowski(order.into())))
}

pub fn vector_type(i: &str) -> IResult<&str, VectorType> {
	let (i, _) = mightbespace(i)?;
	let (i, _) = tag_no_case("TYPE")(i)?;
	let (i, _) = shouldbespace(i)?;
	alt((
		map(tag_no_case("F64"), |_| VectorType::F64),
		map(tag_no_case("F32"), |_| VectorType::F32),
		map(tag_no_case("I64"), |_| VectorType::I64),
		map(tag_no_case("I32"), |_| VectorType::I32),
		map(tag_no_case("I16"), |_| VectorType::I16),
	))(i)
}

pub fn dimension(i: &str) -> IResult<&str, u16> {
	let (i, _) = mightbespace(i)?;
	let (i, _) = tag_no_case("DIMENSION")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, dim) = u16(i)?;
	Ok((i, dim))
}

pub fn capacity(i: &str) -> IResult<&str, u16> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("CAPACITY")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, capacity) = u16(i)?;
	Ok((i, capacity))
}

pub fn mtree_cache(i: &str) -> IResult<&str, u32> {
	order("MTREE_CACHE", i)
}

pub fn mtree(i: &str) -> IResult<&str, Index> {
	let (i, _) = tag_no_case("MTREE")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(|i| {
		let (i, dimension) = dimension(i)?;
		let (i, distance) = opt(mtree_distance)(i)?;
		let (i, vector_type) = opt(vector_type)(i)?;
		let (i, capacity) = opt(capacity)(i)?;
		let (i, doc_ids_order) = opt(doc_ids_order)(i)?;
		let (i, doc_ids_cache) = opt(doc_ids_cache)(i)?;
		let (i, mtree_cache) = opt(mtree_cache)(i)?;
		Ok((
			i,
			Index::MTree(MTreeParams {
				dimension,
				_distance: Distance1::Euclidean, // TODO remove once 1.0 && 1.1 are EOL
				distance: distance.unwrap_or(Distance::Euclidean),
				vector_type: vector_type.unwrap_or(VectorType::F64),
				capacity: capacity.unwrap_or(40),
				doc_ids_order: doc_ids_order.unwrap_or(100),
				doc_ids_cache: doc_ids_cache.unwrap_or(100),
				mtree_cache: mtree_cache.unwrap_or(100),
			}),
		))
	})(i)
}

@ -1,665 +0,0 @@

use super::{
	comment::shouldbespace,
	common::{closeparentheses, commas, openparentheses},
	error::expected,
	idiom::{basic, plain},
	literal::{datetime, duration, ident, table, tables},
	operator::dir,
	thing::thing,
	value::value,
	IResult,
};
use crate::sql::{
	Base, ChangeFeed, Cond, Edges, Explain, Fetch, Fetchs, Group, Groups, Limit, Order, Orders,
	Output, Tables, Version,
};
use nom::{
	branch::alt,
	bytes::complete::tag_no_case,
	character::complete::char,
	combinator::{cut, into, map, opt, value as map_value},
	multi::separated_list1,
	sequence::{terminated, tuple},
};

pub mod data;
pub mod field;
pub mod index;
pub mod permission;
pub mod split;
pub mod start;
pub mod timeout;
pub mod view;
pub mod with;

pub use data::data;
pub use field::fields;
pub use split::split;
pub use start::start;
pub use timeout::timeout;
pub use view::view;
pub use with::with;

pub fn base(i: &str) -> IResult<&str, Base> {
	expected(
		"a base, one of NAMESPACE, DATABASE, ROOT or KV",
		alt((
			map_value(Base::Ns, tag_no_case("NAMESPACE")),
			map_value(Base::Db, tag_no_case("DATABASE")),
			map_value(Base::Root, tag_no_case("ROOT")),
			map_value(Base::Ns, tag_no_case("NS")),
			map_value(Base::Db, tag_no_case("DB")),
			map_value(Base::Root, tag_no_case("KV")),
		)),
	)(i)
}

pub fn base_or_scope(i: &str) -> IResult<&str, Base> {
	alt((base, |i| {
		let (i, _) = tag_no_case("SCOPE")(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, v) = cut(ident)(i)?;
		Ok((i, Base::Sc(v)))
	}))(i)
}

pub fn changefeed(i: &str) -> IResult<&str, ChangeFeed> {
	let (i, _) = tag_no_case("CHANGEFEED")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(duration)(i)?;

	let (i, store_original) = opt(|i| {
		let (i, _) = shouldbespace(i)?;
		let (i, _): (&str, &str) = tag_no_case("INCLUDE")(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, b): (&str, &str) = tag_no_case("ORIGINAL")(i)?;
		Ok((i, b))
	})(i)?;

	Ok((
		i,
		ChangeFeed {
			expiry: v.0,
			store_original: store_original.is_some(),
		},
	))
}

pub fn cond(i: &str) -> IResult<&str, Cond> {
	let (i, _) = tag_no_case("WHERE")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(value)(i)?;
	Ok((i, Cond(v)))
}

pub fn edges(i: &str) -> IResult<&str, Edges> {
	let (i, from) = thing(i)?;
	let (i, dir) = dir(i)?;
	let (i, what) = alt((simple, custom))(i)?;
	Ok((
		i,
		Edges {
			dir,
			from,
			what,
		},
	))
}

fn simple(i: &str) -> IResult<&str, Tables> {
	alt((any, one))(i)
}

fn custom(i: &str) -> IResult<&str, Tables> {
	let (i, _) = openparentheses(i)?;
	let (i, w) = alt((any, tables))(i)?;
	let (i, _) = cut(closeparentheses)(i)?;
	Ok((i, w))
}

fn one(i: &str) -> IResult<&str, Tables> {
	into(table)(i)
}

fn any(i: &str) -> IResult<&str, Tables> {
	map(char('?'), |_| Tables::default())(i)
}

pub fn explain(i: &str) -> IResult<&str, Explain> {
	let (i, _) = tag_no_case("EXPLAIN")(i)?;
	let (i, full) = opt(tuple((shouldbespace, tag_no_case("FULL"))))(i)?;
	Ok((i, Explain(full.is_some())))
}

pub fn fetch(i: &str) -> IResult<&str, Fetchs> {
	let (i, _) = tag_no_case("FETCH")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(separated_list1(commas, fetch_raw))(i)?;
	Ok((i, Fetchs(v)))
}

fn fetch_raw(i: &str) -> IResult<&str, Fetch> {
	let (i, v) = plain(i)?;
	Ok((i, Fetch(v)))
}

pub fn group(i: &str) -> IResult<&str, Groups> {
	let (i, _) = tag_no_case("GROUP")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(alt((group_all, group_any)))(i)
}

fn group_all(i: &str) -> IResult<&str, Groups> {
	let (i, _) = tag_no_case("ALL")(i)?;
	Ok((i, Groups(vec![])))
}

fn group_any(i: &str) -> IResult<&str, Groups> {
	let (i, _) = opt(terminated(tag_no_case("BY"), shouldbespace))(i)?;
	let (i, v) = separated_list1(commas, group_raw)(i)?;
	Ok((i, Groups(v)))
}

fn group_raw(i: &str) -> IResult<&str, Group> {
	let (i, v) = basic(i)?;
	Ok((i, Group(v)))
}

pub fn limit(i: &str) -> IResult<&str, Limit> {
	let (i, _) = tag_no_case("LIMIT")(i)?;
	cut(|i| {
		let (i, _) = opt(tuple((shouldbespace, tag_no_case("BY"))))(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, v) = value(i)?;
		Ok((i, Limit(v)))
	})(i)
}

pub fn order(i: &str) -> IResult<&str, Orders> {
	let (i, _) = tag_no_case("ORDER")(i)?;
	cut(|i| {
		let (i, _) = opt(tuple((shouldbespace, tag_no_case("BY"))))(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, v) = alt((order_rand, separated_list1(commas, order_raw)))(i)?;
		Ok((i, Orders(v)))
	})(i)
}

fn order_rand(i: &str) -> IResult<&str, Vec<Order>> {
	let (i, _) = tag_no_case("RAND()")(i)?;
	Ok((
		i,
		vec![Order {
			order: Default::default(),
			random: true,
			collate: false,
			numeric: false,
			direction: true,
		}],
	))
}

fn order_raw(i: &str) -> IResult<&str, Order> {
	let (i, v) = basic(i)?;
	let (i, c) = opt(tuple((shouldbespace, tag_no_case("COLLATE"))))(i)?;
	let (i, n) = opt(tuple((shouldbespace, tag_no_case("NUMERIC"))))(i)?;
	let (i, d) = opt(alt((
		map_value(true, tuple((shouldbespace, tag_no_case("ASC")))),
		map_value(false, tuple((shouldbespace, tag_no_case("DESC")))),
	)))(i)?;
	Ok((
		i,
		Order {
			order: v,
			random: false,
			collate: c.is_some(),
			numeric: n.is_some(),
			direction: d.unwrap_or(true),
		},
	))
}

pub fn output(i: &str) -> IResult<&str, Output> {
	let (i, _) = tag_no_case("RETURN")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(|i| {
		let (i, v) = alt((
			map_value(Output::None, tag_no_case("NONE")),
			map_value(Output::Null, tag_no_case("NULL")),
			map_value(Output::Diff, tag_no_case("DIFF")),
			map_value(Output::After, tag_no_case("AFTER")),
			map_value(Output::Before, tag_no_case("BEFORE")),
			map(fields, Output::Fields),
		))(i)?;
		Ok((i, v))
	})(i)
}

pub fn version(i: &str) -> IResult<&str, Version> {
	let (i, _) = tag_no_case("VERSION")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(datetime)(i)?;
	Ok((i, Version(v)))
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::sql::{Datetime, Idiom, Value};
	use crate::syn::Parse;
	use std::time;

	#[test]
	fn changefeed_missing() {
		let sql: &str = "";
		let res = changefeed(sql);
		assert!(res.is_err());
	}

	#[test]
	fn changefeed_enabled() {
		let sql = "CHANGEFEED 1h";
		let res = changefeed(sql);
		let out = res.unwrap().1;
		assert_eq!("CHANGEFEED 1h", format!("{}", out));
		assert_eq!(
			out,
			ChangeFeed {
				expiry: time::Duration::from_secs(3600),
				store_original: false,
			}
		);
	}

	#[test]
	fn changefeed_include_original() {
		let sql = "CHANGEFEED 1h INCLUDE ORIGINAL";
		let res = changefeed(sql);
		let out = res.unwrap().1;
		assert_eq!("CHANGEFEED 1h INCLUDE ORIGINAL", format!("{}", out));
		assert_eq!(
			out,
			ChangeFeed {
				expiry: time::Duration::from_secs(3600),
				store_original: true,
			}
		);
	}

	#[test]
	fn cond_statement() {
		let sql = "WHERE field = true";
		let res = cond(sql);
		let out = res.unwrap().1;
		assert_eq!("WHERE field = true", format!("{}", out));
	}

	#[test]
	fn cond_statement_multiple() {
		let sql = "WHERE field = true AND other.field = false";
		let res = cond(sql);
		let out = res.unwrap().1;
		assert_eq!("WHERE field = true AND other.field = false", format!("{}", out));
	}

	#[test]
	fn edges_in() {
		let sql = "person:test<-likes";
		let res = edges(sql);
		let out = res.unwrap().1;
		assert_eq!("person:test<-likes", format!("{}", out));
	}

	#[test]
	fn edges_out() {
		let sql = "person:test->likes";
		let res = edges(sql);
		let out = res.unwrap().1;
		assert_eq!("person:test->likes", format!("{}", out));
	}

	#[test]
	fn edges_both() {
		let sql = "person:test<->likes";
		let res = edges(sql);
		let out = res.unwrap().1;
		assert_eq!("person:test<->likes", format!("{}", out));
	}

	#[test]
	fn edges_multiple() {
		let sql = "person:test->(likes, follows)";
		let res = edges(sql);
		let out = res.unwrap().1;
		assert_eq!("person:test->(likes, follows)", format!("{}", out));
	}

	#[test]
	fn explain_statement() {
		let sql = "EXPLAIN";
		let res = explain(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Explain(false));
		assert_eq!("EXPLAIN", format!("{}", out));
	}

	#[test]
	fn explain_full_statement() {
		let sql = "EXPLAIN FULL";
		let res = explain(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Explain(true));
		assert_eq!("EXPLAIN FULL", format!("{}", out));
	}

	#[test]
	fn fetch_statement() {
		let sql = "FETCH field";
		let res = fetch(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Fetchs(vec![Fetch(Idiom::parse("field"))]));
		assert_eq!("FETCH field", format!("{}", out));
	}

	#[test]
	fn fetch_statement_multiple() {
		let sql = "FETCH field, other.field";
		let res = fetch(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Fetchs(vec![Fetch(Idiom::parse("field")), Fetch(Idiom::parse("other.field"))])
		);
		assert_eq!("FETCH field, other.field", format!("{}", out));
	}

	#[test]
	fn field_all() {
		let sql = "*";
		let res = fields(sql);
		let out = res.unwrap().1;
		assert_eq!("*", format!("{}", out));
	}

	#[test]
	fn field_one() {
		let sql = "field";
		let res = fields(sql);
		let out = res.unwrap().1;
		assert_eq!("field", format!("{}", out));
	}

	#[test]
	fn field_value() {
		let sql = "VALUE field";
		let res = fields(sql);
		let out = res.unwrap().1;
		assert_eq!("VALUE field", format!("{}", out));
	}

	#[test]
	fn field_alias() {
		let sql = "field AS one";
		let res = fields(sql);
		let out = res.unwrap().1;
		assert_eq!("field AS one", format!("{}", out));
	}

	#[test]
	fn field_value_alias() {
		let sql = "VALUE field AS one";
		let res = fields(sql);
		let out = res.unwrap().1;
		assert_eq!("VALUE field AS one", format!("{}", out));
	}

	#[test]
	fn field_multiple() {
		let sql = "field, other.field";
		let res = fields(sql);
		let out = res.unwrap().1;
		assert_eq!("field, other.field", format!("{}", out));
	}

	#[test]
	fn field_aliases() {
		let sql = "field AS one, other.field AS two";
		let res = fields(sql);
		let out = res.unwrap().1;
		assert_eq!("field AS one, other.field AS two", format!("{}", out));
	}

	#[test]
	fn field_value_only_one() {
		let sql = "VALUE field, other.field";
		fields(sql).unwrap_err();
	}

	#[test]
	fn group_statement() {
		let sql = "GROUP field";
		let res = group(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Groups(vec![Group(Idiom::parse("field"))]));
		assert_eq!("GROUP BY field", format!("{}", out));
	}

	#[test]
	fn group_statement_by() {
		let sql = "GROUP BY field";
		let res = group(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Groups(vec![Group(Idiom::parse("field"))]));
		assert_eq!("GROUP BY field", format!("{}", out));
	}

	#[test]
	fn group_statement_multiple() {
		let sql = "GROUP field, other.field";
		let res = group(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Groups(vec![Group(Idiom::parse("field")), Group(Idiom::parse("other.field"))])
		);
		assert_eq!("GROUP BY field, other.field", format!("{}", out));
	}

	#[test]
	fn group_statement_all() {
		let sql = "GROUP ALL";
		let out = group(sql).unwrap().1;
		assert_eq!(out, Groups(Vec::new()));
		assert_eq!(sql, out.to_string());
	}

	#[test]
	fn limit_statement() {
		let sql = "LIMIT 100";
		let res = limit(sql);
		assert!(res.is_ok());
		let out = res.unwrap().1;
		assert_eq!(out, Limit(Value::from(100)));
		assert_eq!("LIMIT 100", format!("{}", out));
	}

	#[test]
	fn limit_statement_by() {
		let sql = "LIMIT BY 100";
		let res = limit(sql);
		assert!(res.is_ok());
		let out = res.unwrap().1;
		assert_eq!(out, Limit(Value::from(100)));
		assert_eq!("LIMIT 100", format!("{}", out));
	}

	#[test]
	fn order_statement() {
		let sql = "ORDER field";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![Order {
				order: Idiom::parse("field"),
				random: false,
				collate: false,
				numeric: false,
				direction: true,
			}])
		);
		assert_eq!("ORDER BY field", format!("{}", out));
	}

	#[test]
	fn order_statement_by() {
		let sql = "ORDER BY field";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![Order {
				order: Idiom::parse("field"),
				random: false,
				collate: false,
				numeric: false,
				direction: true,
			}])
		);
		assert_eq!("ORDER BY field", format!("{}", out));
	}

	#[test]
	fn order_statement_random() {
		let sql = "ORDER RAND()";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![Order {
				order: Default::default(),
				random: true,
				collate: false,
				numeric: false,
				direction: true,
			}])
		);
		assert_eq!("ORDER BY RAND()", format!("{}", out));
	}

	#[test]
	fn order_statement_multiple() {
		let sql = "ORDER field, other.field";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![
				Order {
					order: Idiom::parse("field"),
					random: false,
					collate: false,
					numeric: false,
					direction: true,
				},
				Order {
					order: Idiom::parse("other.field"),
					random: false,
					collate: false,
					numeric: false,
					direction: true,
				},
			])
		);
		assert_eq!("ORDER BY field, other.field", format!("{}", out));
	}

	#[test]
	fn order_statement_collate() {
		let sql = "ORDER field COLLATE";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![Order {
				order: Idiom::parse("field"),
				random: false,
				collate: true,
				numeric: false,
				direction: true,
			}])
		);
		assert_eq!("ORDER BY field COLLATE", format!("{}", out));
	}

	#[test]
	fn order_statement_numeric() {
		let sql = "ORDER field NUMERIC";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![Order {
				order: Idiom::parse("field"),
				random: false,
				collate: false,
				numeric: true,
				direction: true,
			}])
		);
		assert_eq!("ORDER BY field NUMERIC", format!("{}", out));
	}

	#[test]
	fn order_statement_direction() {
		let sql = "ORDER field DESC";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![Order {
				order: Idiom::parse("field"),
				random: false,
				collate: false,
				numeric: false,
				direction: false,
			}])
		);
		assert_eq!("ORDER BY field DESC", format!("{}", out));
	}

	#[test]
	fn order_statement_all() {
		let sql = "ORDER field COLLATE NUMERIC DESC";
		let res = order(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Orders(vec![Order {
				order: Idiom::parse("field"),
				random: false,
				collate: true,
				numeric: true,
				direction: false,
			}])
		);
		assert_eq!("ORDER BY field COLLATE NUMERIC DESC", format!("{}", out));
	}

	#[test]
	fn output_statement() {
		let sql = "RETURN field, other.field";
		let res = output(sql);
		let out = res.unwrap().1;
		assert_eq!("RETURN field, other.field", format!("{}", out));
	}

	#[test]
	fn version_statement() {
		let sql = "VERSION '2020-01-01T00:00:00Z'";
		let res = version(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Version(Datetime::try_from("2020-01-01T00:00:00Z").unwrap()));
		assert_eq!("VERSION '2020-01-01T00:00:00Z'", format!("{}", out));
	}
}

@ -1,173 +0,0 @@

use super::super::{
	comment::shouldbespace,
	common::{commas, commasorspace},
	error::expected,
	value::value,
	IResult,
};
use crate::sql::{
	permission::{Permission, PermissionKind},
	Permissions,
};
use nom::{
	branch::alt,
	bytes::complete::tag_no_case,
	combinator::{self, cut, map},
	multi::{separated_list0, separated_list1},
	sequence::tuple,
};

pub fn permissions(i: &str, default: Permission) -> IResult<&str, Permissions> {
	let (i, _) = tag_no_case("PERMISSIONS")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(alt((none, full, specific(default))))(i)
}

fn none(i: &str) -> IResult<&str, Permissions> {
	let (i, _) = tag_no_case("NONE")(i)?;
	Ok((i, Permissions::none()))
}

fn full(i: &str) -> IResult<&str, Permissions> {
	let (i, _) = tag_no_case("FULL")(i)?;
	Ok((i, Permissions::full()))
}

fn specific(default: Permission) -> impl Fn(&str) -> IResult<&str, Permissions> {
	move |i: &str| -> IResult<&str, Permissions> {
		let (i, perms) = separated_list1(commasorspace, rule)(i)?;
		Ok((
			i,
			Permissions {
				select: perms
					.iter()
					.find_map(|x| {
						x.iter().find_map(|y| match y {
							(PermissionKind::Select, ref v) => Some(v.to_owned()),
							_ => None,
						})
					})
					.unwrap_or(default.to_owned()),
				create: perms
					.iter()
					.find_map(|x| {
						x.iter().find_map(|y| match y {
							(PermissionKind::Create, ref v) => Some(v.to_owned()),
							_ => None,
						})
					})
					.unwrap_or(default.to_owned()),
				update: perms
					.iter()
					.find_map(|x| {
						x.iter().find_map(|y| match y {
							(PermissionKind::Update, ref v) => Some(v.to_owned()),
							_ => None,
						})
					})
					.unwrap_or(default.to_owned()),
				delete: perms
					.iter()
					.find_map(|x| {
						x.iter().find_map(|y| match y {
							(PermissionKind::Delete, ref v) => Some(v.to_owned()),
							_ => None,
						})
					})
					.unwrap_or(default.to_owned()),
			},
		))
	}
}

pub fn permission(i: &str) -> IResult<&str, Permission> {
	expected(
		"a permission",
		alt((
			combinator::value(Permission::None, tag_no_case("NONE")),
			combinator::value(Permission::Full, tag_no_case("FULL")),
			map(tuple((tag_no_case("WHERE"), shouldbespace, value)), |(_, _, v)| {
				Permission::Specific(v)
			}),
		)),
	)(i)
}

fn rule(i: &str) -> IResult<&str, Vec<(PermissionKind, Permission)>> {
	let (i, _) = tag_no_case("FOR")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(|i| {
		let (i, kind) = separated_list0(
			commas,
			alt((
				combinator::value(PermissionKind::Select, tag_no_case("SELECT")),
				combinator::value(PermissionKind::Create, tag_no_case("CREATE")),
				combinator::value(PermissionKind::Update, tag_no_case("UPDATE")),
				combinator::value(PermissionKind::Delete, tag_no_case("DELETE")),
			)),
		)(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, expr) = alt((
			combinator::value(Permission::None, tag_no_case("NONE")),
			combinator::value(Permission::Full, tag_no_case("FULL")),
			map(tuple((tag_no_case("WHERE"), shouldbespace, value)), |(_, _, v)| {
				Permission::Specific(v)
			}),
		))(i)?;
		Ok((i, kind.into_iter().map(|k| (k, expr.clone())).collect()))
	})(i)
}

#[cfg(test)]
mod test {
	use crate::sql::{Expression, Value};
	use crate::syn::Parse;

	use super::*;

	#[test]
	fn permissions_none() {
		let sql = "PERMISSIONS NONE";
		let res = permissions(sql, Permission::Full);
		let out = res.unwrap().1;
		assert_eq!("PERMISSIONS NONE", format!("{}", out));
		assert_eq!(out, Permissions::none());
	}

	#[test]
	fn permissions_full() {
		let sql = "PERMISSIONS FULL";
		let res = permissions(sql, Permission::None);
		let out = res.unwrap().1;
		assert_eq!("PERMISSIONS FULL", format!("{}", out));
		assert_eq!(out, Permissions::full());
	}

	#[test]
	fn permissions_specific() {
		let sql =
			"PERMISSIONS FOR select FULL, FOR create, update WHERE public = true, FOR delete NONE";
		let res = permissions(sql, Permission::None);
		let out = res.unwrap().1;
		assert_eq!(
			"PERMISSIONS FOR select FULL, FOR create, update WHERE public = true, FOR delete NONE",
			format!("{}", out)
		);
		assert_eq!(
			out,
			Permissions {
				select: Permission::Full,
				create: Permission::Specific(Value::from(Expression::parse("public = true"))),
				update: Permission::Specific(Value::from(Expression::parse("public = true"))),
				delete: Permission::None,
			}
		);
	}

	#[test]
	fn no_empty_permissions() {
		// This was previously allowed.
		let sql = "PERMISSION ";
		permission(sql).unwrap_err();
	}
}

@ -1,58 +0,0 @@

use super::super::{comment::shouldbespace, common::commas, idiom::basic, IResult};
use crate::sql::{Split, Splits};
use nom::{
	bytes::complete::tag_no_case,
	combinator::{cut, opt},
	multi::separated_list1,
	sequence::terminated,
};

pub fn split(i: &str) -> IResult<&str, Splits> {
	let (i, _) = tag_no_case("SPLIT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, _) = opt(terminated(tag_no_case("ON"), shouldbespace))(i)?;
	let (i, v) = cut(separated_list1(commas, split_raw))(i)?;
	Ok((i, Splits(v)))
}

fn split_raw(i: &str) -> IResult<&str, Split> {
	let (i, v) = basic(i)?;
	Ok((i, Split(v)))
}

#[cfg(test)]
mod tests {

	use super::*;
	use crate::{sql::Idiom, syn::Parse};

	#[test]
	fn split_statement() {
		let sql = "SPLIT field";
		let res = split(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Splits(vec![Split(Idiom::parse("field"))]),);
		assert_eq!("SPLIT ON field", format!("{}", out));
	}

	#[test]
	fn split_statement_on() {
		let sql = "SPLIT ON field";
		let res = split(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Splits(vec![Split(Idiom::parse("field"))]),);
		assert_eq!("SPLIT ON field", format!("{}", out));
	}

	#[test]
	fn split_statement_multiple() {
		let sql = "SPLIT field, other.field";
		let res = split(sql);
		let out = res.unwrap().1;
		assert_eq!(
			out,
			Splits(vec![Split(Idiom::parse("field")), Split(Idiom::parse("other.field")),])
		);
		assert_eq!("SPLIT ON field, other.field", format!("{}", out));
	}
}

@ -1,41 +0,0 @@

use super::super::{comment::shouldbespace, value::value, IResult};
use crate::sql::Start;
use nom::{
	bytes::complete::tag_no_case,
	combinator::{cut, opt},
	sequence::terminated,
};

pub fn start(i: &str) -> IResult<&str, Start> {
	let (i, _) = tag_no_case("START")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(|i| {
		let (i, _) = opt(terminated(tag_no_case("AT"), shouldbespace))(i)?;
		let (i, v) = value(i)?;
		Ok((i, Start(v)))
	})(i)
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::sql::Value;

	#[test]
	fn start_statement() {
		let sql = "START 100";
		let res = start(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Start(Value::from(100)));
		assert_eq!("START 100", format!("{}", out));
	}

	#[test]
	fn start_statement_at() {
		let sql = "START AT 100";
		let res = start(sql);
		let out = res.unwrap().1;
		assert_eq!(out, Start(Value::from(100)));
		assert_eq!("START 100", format!("{}", out));
	}
}

@ -1,28 +0,0 @@

use super::{
	super::{comment::shouldbespace, IResult},
	duration::duration,
};
use crate::sql::Timeout;
use nom::{bytes::complete::tag_no_case, combinator::cut};

pub fn timeout(i: &str) -> IResult<&str, Timeout> {
	let (i, _) = tag_no_case("TIMEOUT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(duration)(i)?;
	Ok((i, Timeout(v)))
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::sql::Duration;

	#[test]
	fn timeout_statement() {
		let sql = "TIMEOUT 5s";
		let res = timeout(sql);
		let out = res.unwrap().1;
		assert_eq!("TIMEOUT 5s", format!("{}", out));
		assert_eq!(out, Timeout(Duration::try_from("5s").unwrap()));
	}
}

@ -1,102 +0,0 @@

use super::{
	super::{comment::shouldbespace, error::expect_tag_no_case, literal::tables, IResult},
	cond,
	field::fields,
	group,
};
use crate::{sql::View, syn::v1::error::expected};
use nom::{
	branch::alt,
	bytes::complete::{tag, tag_no_case},
	combinator::{cut, opt},
	sequence::preceded,
};

pub fn view(i: &str) -> IResult<&str, View> {
	let select_view = |i| {
		let (i, _) = tag_no_case("SELECT")(i)?;
		cut(|i| {
			let (i, _) = shouldbespace(i)?;
			let (i, expr) = fields(i)?;
			let (i, _) = shouldbespace(i)?;
			let (i, _) = expect_tag_no_case("FROM")(i)?;
			let (i, _) = shouldbespace(i)?;
			let (i, what) = tables(i)?;
			let (i, cond) = opt(preceded(shouldbespace, cond))(i)?;
			let (i, group) = opt(preceded(shouldbespace, group))(i)?;
			Ok((i, (expr, what, cond, group)))
		})(i)
	};

	let select_view_delimited = |i| {
		let (i, _) = tag("(")(i)?;
		cut(|i| {
			let (i, res) = select_view(i)?;
			let (i, _) = tag(")")(i)?;
			Ok((i, res))
		})(i)
	};

	let (i, _) = tag_no_case("AS")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, (expr, what, cond, group)) =
		expected("SELECT or `(`", cut(alt((select_view, select_view_delimited))))(i)?;
	Ok((
		i,
		View {
			expr,
			what,
			cond,
			group,
		},
	))
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn view_simple() {
		let sql = "AS SELECT * FROM test";
		let res = view(sql);
		let out = res.unwrap().1;
		assert_eq!("AS SELECT * FROM test", format!("{}", out))
	}

	#[test]
	fn view_brackets() {
		let sql = "AS (SELECT * FROM test)";
		let res = view(sql);
		let out = res.unwrap().1;
		assert_eq!("AS SELECT * FROM test", format!("{}", out))
	}

	#[test]
	fn view_brackets_where() {
		let sql = "AS (SELECT temp FROM test WHERE temp IS NOT NONE)";
		let res = view(sql);
		let out = res.unwrap().1;
		assert_eq!("AS SELECT temp FROM test WHERE temp != NONE", format!("{}", out))
	}

	#[test]
	fn view_brackets_group() {
		let sql = "AS (SELECT temp FROM test WHERE temp IS NOT NONE GROUP BY temp)";
		let res = view(sql);
		let out = res.unwrap().1;
		assert_eq!("AS SELECT temp FROM test WHERE temp != NONE GROUP BY temp", format!("{}", out))
	}

	#[test]
	fn view_disallow_unbalanced_brackets() {
		let sql = "AS (SELECT temp FROM test WHERE temp IS NOT NONE GROUP BY temp";
		view(sql).unwrap_err();
		let sql = "AS SELECT temp FROM test WHERE temp IS NOT NONE GROUP BY temp)";
		let (i, _) = view(sql).unwrap();
		// The above test won't return an error since the trailing ) might be part of another
		// pair.
		assert_eq!(i, ")");
	}
}

@ -1,45 +0,0 @@

use super::super::{comment::shouldbespace, common::commas, literal::ident_raw, IResult};
use crate::sql::With;
use nom::{branch::alt, bytes::complete::tag_no_case, combinator::cut, multi::separated_list1};

fn no_index(i: &str) -> IResult<&str, With> {
	let (i, _) = tag_no_case("NOINDEX")(i)?;
	Ok((i, With::NoIndex))
}

fn index(i: &str) -> IResult<&str, With> {
	let (i, _) = tag_no_case("INDEX")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(separated_list1(commas, ident_raw))(i)?;
	Ok((i, With::Index(v)))
}

pub fn with(i: &str) -> IResult<&str, With> {
	let (i, _) = tag_no_case("WITH")(i)?;
	let (i, _) = shouldbespace(i)?;
	cut(alt((no_index, index)))(i)
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn with_no_index() {
		let sql = "WITH NOINDEX";
		let res = with(sql);
		let out = res.unwrap().1;
		assert_eq!(out, With::NoIndex);
		assert_eq!("WITH NOINDEX", format!("{}", out));
	}

	#[test]
	fn with_index() {
		let sql = "WITH INDEX idx,uniq";
		let res = with(sql);
		let out = res.unwrap().1;
		assert_eq!(out, With::Index(vec!["idx".to_string(), "uniq".to_string()]));
		assert_eq!("WITH INDEX idx,uniq", format!("{}", out));
	}
}

@ -1,128 +0,0 @@

use super::ParseError;
use crate::sql::{Field, Fields, Groups, Idiom, Orders, Splits, Value};
use nom::Err;
use nom::Err::Failure;

/// Check to see whether the expression is in the SELECT clause
fn contains_idiom(fields: &Fields, idiom: &Idiom) -> bool {
	fields.iter().any(|field| {
		match field {
			// There is a SELECT * expression, so presume everything is ok
			Field::All => true,
			// Check each field
			Field::Single {
				expr,
				alias,
			} => {
				if let Some(i) = alias {
					// This field is aliased, so check the alias name
					i.as_ref() == idiom.as_ref()
				} else {
					// This field is not aliased, so check the field value
					match expr {
						// Use raw idiom (TODO: should this use `simplify`?)
						Value::Idiom(i) => i.as_ref() == idiom.as_ref(),
						// Check the expression
						v => v.to_idiom().as_ref() == idiom.as_ref(),
					}
				}
			}
		}
	})
}

pub fn check_split_on_fields<'a>(
	i: &'a str,
	fields: &Fields,
	splits: &Option<Splits>,
) -> Result<(), Err<ParseError<&'a str>>> {
	// Check to see if a SPLIT ON clause has been defined
	if let Some(splits) = splits {
		// Loop over each of the expressions in the SPLIT ON clause
		for split in splits.iter() {
			if !contains_idiom(fields, &split.0) {
				// If the expression isn't specified in the SELECT clause, then error
				return Err(Failure(ParseError::Split(i, split.to_string())));
			}
		}
	}
	// This query is ok to run
	Ok(())
}

pub fn check_order_by_fields<'a>(
	i: &'a str,
	fields: &Fields,
	orders: &Option<Orders>,
) -> Result<(), Err<ParseError<&'a str>>> {
	// Check to see if an ORDER BY clause has been defined
	if let Some(orders) = orders {
		// Loop over each of the expressions in the ORDER BY clause
		for order in orders.iter() {
			if order.random {
				// Don't check for a field if the order is random.
				continue;
			}
			if !contains_idiom(fields, order) {
				// If the expression isn't specified in the SELECT clause, then error
				return Err(Failure(ParseError::Order(i, order.to_string())));
			}
		}
	}
	// This query is ok to run
	Ok(())
}

pub fn check_group_by_fields<'a>(
	i: &'a str,
	fields: &Fields,
	groups: &Option<Groups>,
) -> Result<(), Err<ParseError<&'a str>>> {
	// Check to see if a GROUP BY clause has been defined
	if let Some(groups) = groups {
		// Loop over each of the expressions in the GROUP BY clause
		for group in groups.iter() {
			if !contains_idiom(fields, &group.0) {
				// If the expression isn't specified in the SELECT clause, then error
				return Err(Failure(ParseError::Group(i, group.to_string())));
			}
		}
		// Check if this is a GROUP ALL clause or a GROUP BY clause
		if !groups.is_empty() {
			// Loop over each of the expressions in the SELECT clause
			'outer: for field in fields.iter() {
				// Loop over each of the expressions in the GROUP BY clause
				for group in groups.iter() {
					// Check to see whether the expression is in the GROUP BY clause or is an aggregate
					if let Field::Single {
						expr,
						alias,
					} = field
					{
						if alias.as_ref().map(|i| i.as_ref() == group.as_ref()).unwrap_or(false) {
							// This field is aliased, and the alias name matched
							continue 'outer;
						} else {
							match expr {
								// If the expression in the SELECT clause is a field, check to see if it exists in the GROUP BY
								Value::Idiom(i) if i == &group.0 => continue 'outer,
								// If the expression in the SELECT clause is a function, check to see if it is an aggregate function
								Value::Function(f) if f.is_aggregate() => continue 'outer,
								// Otherwise check if the expression itself exists in the GROUP BY clause
								v if v.to_idiom() == group.0 => continue 'outer,
								// Check if this is a static value which can be used in the GROUP BY clause
								v if v.is_static() => continue 'outer,
								// If not, then this query should fail
								_ => (),
							}
						}
					}
				}
				// If the expression isn't an aggregate function and isn't specified in the GROUP BY clause, then error
				return Err(Failure(ParseError::Field(i, field.to_string())));
			}
		}
	}
	// This query is ok to run
	Ok(())
}

@ -1,34 +0,0 @@

use super::super::{comment::shouldbespace, literal::ident, IResult};
use crate::sql::statements::AnalyzeStatement;
use nom::{bytes::complete::tag_no_case, combinator::cut};

pub fn analyze(i: &str) -> IResult<&str, AnalyzeStatement> {
	let (i, _) = tag_no_case("ANALYZE")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("INDEX")(i)?;
	cut(|i| {
		let (i, _) = shouldbespace(i)?;
		let (i, idx) = ident(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, _) = tag_no_case("ON")(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, tb) = ident(i)?;
		Ok((i, AnalyzeStatement::Idx(tb, idx)))
	})(i)
}

#[cfg(test)]
mod tests {

	use super::*;
	use crate::sql::Ident;

	#[test]
	fn analyze_index() {
		let sql = "ANALYZE INDEX my_index ON my_table";
		let res = analyze(sql);
		let out = res.unwrap().1;
		assert_eq!(out, AnalyzeStatement::Idx(Ident::from("my_table"), Ident::from("my_index")));
		assert_eq!("ANALYZE INDEX my_index ON my_table", format!("{}", out));
	}
}

@ -1,31 +0,0 @@

use super::super::{comment::shouldbespace, IResult};
use crate::sql::statements::BeginStatement;
use nom::{bytes::complete::tag_no_case, combinator::opt, sequence::tuple};

pub fn begin(i: &str) -> IResult<&str, BeginStatement> {
	let (i, _) = tag_no_case("BEGIN")(i)?;
	let (i, _) = opt(tuple((shouldbespace, tag_no_case("TRANSACTION"))))(i)?;
	Ok((i, BeginStatement))
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn begin_basic() {
		let sql = "BEGIN";
		let res = begin(sql);
		let out = res.unwrap().1;
		assert_eq!("BEGIN TRANSACTION", format!("{}", out))
	}

	#[test]
	fn begin_query() {
		let sql = "BEGIN TRANSACTION";
		let res = begin(sql);
		let out = res.unwrap().1;
		assert_eq!("BEGIN TRANSACTION", format!("{}", out))
	}
}

@ -1,31 +0,0 @@

use super::super::{comment::shouldbespace, IResult};
use crate::sql::statements::CancelStatement;
use nom::{bytes::complete::tag_no_case, combinator::opt, sequence::tuple};

pub fn cancel(i: &str) -> IResult<&str, CancelStatement> {
	let (i, _) = tag_no_case("CANCEL")(i)?;
	let (i, _) = opt(tuple((shouldbespace, tag_no_case("TRANSACTION"))))(i)?;
	Ok((i, CancelStatement))
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn cancel_basic() {
		let sql = "CANCEL";
		let res = cancel(sql);
		let out = res.unwrap().1;
		assert_eq!("CANCEL TRANSACTION", format!("{}", out))
	}

	#[test]
	fn cancel_query() {
		let sql = "CANCEL TRANSACTION";
		let res = cancel(sql);
		let out = res.unwrap().1;
		assert_eq!("CANCEL TRANSACTION", format!("{}", out))
	}
}

@ -1,31 +0,0 @@

use super::super::{comment::shouldbespace, IResult};
use crate::sql::statements::CommitStatement;
use nom::{bytes::complete::tag_no_case, combinator::opt, sequence::tuple};

pub fn commit(i: &str) -> IResult<&str, CommitStatement> {
	let (i, _) = tag_no_case("COMMIT")(i)?;
	let (i, _) = opt(tuple((shouldbespace, tag_no_case("TRANSACTION"))))(i)?;
	Ok((i, CommitStatement))
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn commit_basic() {
		let sql = "COMMIT";
		let res = commit(sql);
		let out = res.unwrap().1;
		assert_eq!("COMMIT TRANSACTION", format!("{}", out))
	}

	#[test]
	fn commit_query() {
		let sql = "COMMIT TRANSACTION";
		let res = commit(sql);
		let out = res.unwrap().1;
		assert_eq!("COMMIT TRANSACTION", format!("{}", out))
	}
}

@ -1,51 +0,0 @@

use super::super::{
	comment::shouldbespace,
	part::{data, output, timeout},
	value::whats,
	IResult,
};
use crate::sql::statements::CreateStatement;
use nom::{
	bytes::complete::tag_no_case,
	combinator::{cut, opt},
	sequence::preceded,
};

pub fn create(i: &str) -> IResult<&str, CreateStatement> {
	let (i, _) = tag_no_case("CREATE")(i)?;
	let (i, only) = opt(preceded(shouldbespace, tag_no_case("ONLY")))(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, what) = whats(i)?;
	let (i, (data, output, timeout, parallel)) = cut(|i| {
		let (i, data) = opt(preceded(shouldbespace, data))(i)?;
		let (i, output) = opt(preceded(shouldbespace, output))(i)?;
		let (i, timeout) = opt(preceded(shouldbespace, timeout))(i)?;
		let (i, parallel) = opt(preceded(shouldbespace, tag_no_case("PARALLEL")))(i)?;
		Ok((i, (data, output, timeout, parallel)))
	})(i)?;
	Ok((
		i,
		CreateStatement {
			only: only.is_some(),
			what,
			data,
			output,
			timeout,
			parallel: parallel.is_some(),
		},
	))
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn create_statement() {
		let sql = "CREATE test";
		let res = create(sql);
		let out = res.unwrap().1;
		assert_eq!("CREATE test", format!("{}", out))
	}
}

@ -1,93 +0,0 @@

use super::super::super::{
	comment::shouldbespace,
	ending,
	error::expected,
	literal::{filters, ident, strand, tokenizer::tokenizers},
	IResult,
};
use crate::sql::Ident;
use crate::sql::{filter::Filter, statements::DefineAnalyzerStatement, Strand, Tokenizer};
use nom::{branch::alt, bytes::complete::tag_no_case, combinator::cut, multi::many0};
use nom::{bytes::complete::tag, combinator::opt, sequence::tuple};

pub fn analyzer(i: &str) -> IResult<&str, DefineAnalyzerStatement> {
	let (i, _) = tag_no_case("ANALYZER")(i)?;
	let (i, if_not_exists) = opt(tuple((
		shouldbespace,
		tag_no_case("IF"),
		cut(tuple((shouldbespace, tag_no_case("NOT"), shouldbespace, tag_no_case("EXISTS")))),
	)))(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, name) = cut(ident)(i)?;
	let (i, opts) = many0(analyzer_opts)(i)?;
	let (i, _) = expected("one of FUNCTION, FILTERS, TOKENIZERS, or COMMENT", ending::query)(i)?;
	// Create the base statement
	let mut res = DefineAnalyzerStatement {
		name,
		if_not_exists: if_not_exists.is_some(),
		..Default::default()
	};
	// Assign any defined options
	for opt in opts {
		match opt {
			DefineAnalyzerOption::Function(i) => {
				res.function = Some(i);
			}
			DefineAnalyzerOption::Comment(v) => {
				res.comment = Some(v);
			}
			DefineAnalyzerOption::Filters(v) => {
				res.filters = Some(v);
			}
			DefineAnalyzerOption::Tokenizers(v) => {
				res.tokenizers = Some(v);
			}
		}
	}
	// Return the statement
	Ok((i, res))
}

enum DefineAnalyzerOption {
	Function(Ident),
	Comment(Strand),
	Filters(Vec<Filter>),
	Tokenizers(Vec<Tokenizer>),
}

fn analyzer_opts(i: &str) -> IResult<&str, DefineAnalyzerOption> {
	alt((analyzer_function, analyzer_comment, analyzer_filters, analyzer_tokenizers))(i)
}

fn analyzer_function(i: &str) -> IResult<&str, DefineAnalyzerOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("FUNCTION")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag("fn::")(i)?;
	let (i, name) = ident(i)?;
	Ok((i, DefineAnalyzerOption::Function(name)))
}

fn analyzer_comment(i: &str) -> IResult<&str, DefineAnalyzerOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("COMMENT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(strand)(i)?;
	Ok((i, DefineAnalyzerOption::Comment(v)))
}

fn analyzer_filters(i: &str) -> IResult<&str, DefineAnalyzerOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("FILTERS")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(filters)(i)?;
	Ok((i, DefineAnalyzerOption::Filters(v)))
}

fn analyzer_tokenizers(i: &str) -> IResult<&str, DefineAnalyzerOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("TOKENIZERS")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(tokenizers)(i)?;
	Ok((i, DefineAnalyzerOption::Tokenizers(v)))
}

@ -1,85 +0,0 @@

use super::super::super::{
	comment::shouldbespace,
	ending,
	error::expected,
	literal::{ident, strand},
	part::changefeed,
	IResult,
};
use crate::sql::{statements::DefineDatabaseStatement, ChangeFeed, Strand};
use nom::{branch::alt, bytes::complete::tag_no_case, combinator::cut, multi::many0};
use nom::{combinator::opt, sequence::tuple};

pub fn database(i: &str) -> IResult<&str, DefineDatabaseStatement> {
	let (i, _) = alt((tag_no_case("DB"), tag_no_case("DATABASE")))(i)?;
	let (i, if_not_exists) = opt(tuple((
		shouldbespace,
		tag_no_case("IF"),
		cut(tuple((shouldbespace, tag_no_case("NOT"), shouldbespace, tag_no_case("EXISTS")))),
	)))(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, name) = cut(ident)(i)?;
	let (i, opts) = many0(database_opts)(i)?;
	let (i, _) = expected("COMMENT or CHANGEFEED", ending::query)(i)?;

	// Create the base statement
	let mut res = DefineDatabaseStatement {
		name,
		if_not_exists: if_not_exists.is_some(),
		..Default::default()
	};
	// Assign any defined options
	for opt in opts {
		match opt {
			DefineDatabaseOption::Comment(v) => {
				res.comment = Some(v);
			}
			DefineDatabaseOption::ChangeFeed(v) => {
				res.changefeed = Some(v);
			}
		}
	}
	// Return the statement
	Ok((i, res))
}

enum DefineDatabaseOption {
	Comment(Strand),
	ChangeFeed(ChangeFeed),
}

fn database_opts(i: &str) -> IResult<&str, DefineDatabaseOption> {
	alt((database_comment, database_changefeed))(i)
}

fn database_comment(i: &str) -> IResult<&str, DefineDatabaseOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("COMMENT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(strand)(i)?;
	Ok((i, DefineDatabaseOption::Comment(v)))
}

fn database_changefeed(i: &str) -> IResult<&str, DefineDatabaseOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, v) = changefeed(i)?;
	Ok((i, DefineDatabaseOption::ChangeFeed(v)))
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn define_database_with_changefeed() {
		let sql = "DATABASE mydatabase CHANGEFEED 1h";
		let res = database(sql);
		let out = res.unwrap().1;
		assert_eq!(format!("DEFINE {sql}"), format!("{}", out));

		let serialized: Vec<u8> = (&out).into();
		let deserialized = DefineDatabaseStatement::from(&serialized);
		assert_eq!(out, deserialized);
	}
}

@ -1,118 +0,0 @@

use super::super::super::{
	comment::shouldbespace,
	ending,
	error::{expect_tag_no_case, expected},
	literal::{ident, strand},
	value::{value, values},
	IResult, ParseError,
};
use crate::sql::{statements::DefineEventStatement, Strand, Value, Values};
use nom::{
	branch::alt,
	bytes::complete::tag_no_case,
	combinator::{cut, opt},
	multi::many0,
	sequence::tuple,
	Err,
};

pub fn event(i: &str) -> IResult<&str, DefineEventStatement> {
	let (i, _) = tag_no_case("EVENT")(i)?;
	let (i, if_not_exists) = opt(tuple((
		shouldbespace,
		tag_no_case("IF"),
		cut(tuple((shouldbespace, tag_no_case("NOT"), shouldbespace, tag_no_case("EXISTS")))),
	)))(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, (name, what, opts)) = cut(|i| {
		let (i, name) = ident(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, _) = expect_tag_no_case("ON")(i)?;
		let (i, _) = opt(tuple((shouldbespace, tag_no_case("TABLE"))))(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, what) = ident(i)?;
		let (i, opts) = many0(event_opts)(i)?;
		let (i, _) = expected("WHEN, THEN, or COMMENT", ending::query)(i)?;
		Ok((i, (name, what, opts)))
	})(i)?;
	// Create the base statement
	let mut res = DefineEventStatement {
		name,
		what,
		when: Value::Bool(true),
		if_not_exists: if_not_exists.is_some(),
		..Default::default()
	};
	// Assign any defined options
	for opt in opts {
		match opt {
			DefineEventOption::When(v) => {
				res.when = v;
			}
			DefineEventOption::Then(v) => {
				res.then = v;
			}
			DefineEventOption::Comment(v) => {
				res.comment = Some(v);
			}
		}
	}
	// Check necessary options
	if res.then.is_empty() {
		return Err(Err::Failure(ParseError::ExplainedExpected {
			tried: i,
			expected: "a THEN clause",
			explained: "An event requires a THEN clause to be defined.",
		}));
	}
	// Return the statement
	Ok((i, res))
}

enum DefineEventOption {
	When(Value),
	Then(Values),
	Comment(Strand),
}

fn event_opts(i: &str) -> IResult<&str, DefineEventOption> {
	alt((event_when, event_then, event_comment))(i)
}

fn event_when(i: &str) -> IResult<&str, DefineEventOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("WHEN")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(value)(i)?;
	Ok((i, DefineEventOption::When(v)))
}

fn event_then(i: &str) -> IResult<&str, DefineEventOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("THEN")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(values)(i)?;
	Ok((i, DefineEventOption::Then(v)))
}

fn event_comment(i: &str) -> IResult<&str, DefineEventOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("COMMENT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(strand)(i)?;
	Ok((i, DefineEventOption::Comment(v)))
}

#[cfg(test)]
mod tests {

	use super::*;

	#[test]
	fn define_event_without_then_clause() {
		let sql = "EVENT test ON test";
		let res = event(sql);

		assert!(res.is_err())
	}
}
@ -1,178 +0,0 @@
|
|||
use super::super::super::{
	comment::shouldbespace,
	ending,
	error::{expect_tag_no_case, expected},
	idiom::{self},
	kind::kind,
	literal::{ident, strand},
	part::permission::permissions,
	value::value,
	IResult,
};
use crate::sql::{statements::DefineFieldStatement, Kind, Permission, Permissions, Strand, Value};
use nom::{
	branch::alt,
	bytes::complete::tag_no_case,
	combinator::{cut, opt},
	multi::many0,
	sequence::tuple,
};

pub fn field(i: &str) -> IResult<&str, DefineFieldStatement> {
	let (i, _) = tag_no_case("FIELD")(i)?;
	let (i, if_not_exists) = opt(tuple((
		shouldbespace,
		tag_no_case("IF"),
		cut(tuple((shouldbespace, tag_no_case("NOT"), shouldbespace, tag_no_case("EXISTS")))),
	)))(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, (name, what, opts)) = cut(|i| {
		let (i, name) = idiom::local(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, _) = expect_tag_no_case("ON")(i)?;
		let (i, _) = opt(tuple((shouldbespace, tag_no_case("TABLE"))))(i)?;
		let (i, _) = shouldbespace(i)?;
		let (i, what) = ident(i)?;
		let (i, opts) = many0(field_opts)(i)?;
		let one_of = "one of FLEX(IBLE), TYPE, READONLY, VALUE, ASSERT, DEFAULT, or COMMENT";
		let (i, _) = expected(one_of, cut(ending::query))(i)?;
		Ok((i, (name, what, opts)))
	})(i)?;
	// Create the base statement
	let mut res = DefineFieldStatement {
		name,
		what,
		if_not_exists: if_not_exists.is_some(),
		..Default::default()
	};
	// Assign any defined options
	for opt in opts {
		match opt {
			DefineFieldOption::Flex => {
				res.flex = true;
			}
			DefineFieldOption::Kind(v) => {
				res.kind = Some(v);
			}
			DefineFieldOption::ReadOnly => {
				res.readonly = true;
			}
			DefineFieldOption::Value(v) => {
				res.value = Some(v);
			}
			DefineFieldOption::Assert(v) => {
				res.assert = Some(v);
			}
			DefineFieldOption::Default(v) => {
				res.default = Some(v);
			}
			DefineFieldOption::Comment(v) => {
				res.comment = Some(v);
			}
			DefineFieldOption::Permissions(v) => {
				res.permissions = v;
			}
		}
	}
	// Return the statement
	Ok((i, res))
}

enum DefineFieldOption {
	Flex,
	Kind(Kind),
	ReadOnly,
	Value(Value),
	Assert(Value),
	Default(Value),
	Comment(Strand),
	Permissions(Permissions),
}

fn field_opts(i: &str) -> IResult<&str, DefineFieldOption> {
	alt((
		field_flex,
		field_kind,
		field_readonly,
		field_value,
		field_assert,
		field_default,
		field_comment,
		field_permissions,
	))(i)
}

fn field_flex(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = alt((tag_no_case("FLEXIBLE"), tag_no_case("FLEXI"), tag_no_case("FLEX")))(i)?;
	Ok((i, DefineFieldOption::Flex))
}

fn field_kind(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("TYPE")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(kind)(i)?;
	Ok((i, DefineFieldOption::Kind(v)))
}

fn field_readonly(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("READONLY")(i)?;
	Ok((i, DefineFieldOption::ReadOnly))
}

fn field_value(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("VALUE")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(value)(i)?;
	Ok((i, DefineFieldOption::Value(v)))
}

fn field_assert(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("ASSERT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(value)(i)?;
	Ok((i, DefineFieldOption::Assert(v)))
}

fn field_default(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("DEFAULT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = cut(value)(i)?;
	Ok((i, DefineFieldOption::Default(v)))
}

fn field_comment(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, _) = tag_no_case("COMMENT")(i)?;
	let (i, _) = shouldbespace(i)?;
	let (i, v) = strand(i)?;
	Ok((i, DefineFieldOption::Comment(v)))
}

fn field_permissions(i: &str) -> IResult<&str, DefineFieldOption> {
	let (i, _) = shouldbespace(i)?;
	let (i, v) = permissions(i, Permission::Full)?;
	Ok((i, DefineFieldOption::Permissions(v)))
}

#[cfg(test)]
mod test {
	use super::field;

	fn assert_parsable(sql: &str) {
		let res = field(sql);
		assert!(res.is_ok());
		let (_, out) = res.unwrap();
		assert_eq!(format!("DEFINE {}", sql), format!("{}", out))
	}

	#[test]
	fn define_field_record_type_permissions() {
		assert_parsable("FIELD attributes[*] ON listing TYPE record PERMISSIONS FULL")
	}
}
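Similarly, a hedged sketch of the removed field parser handling several options in one statement, in the style of the test module above (the field name, table name, and DEFAULT/ASSERT expressions are illustrative, not from this commit); it only asserts that parsing succeeds rather than round-tripping the formatted output:

	#[test]
	fn define_field_with_multiple_options() {
		// Hypothetical DEFINE FIELD body exercising the TYPE, DEFAULT, and ASSERT options.
		let sql = "FIELD age ON person TYPE number DEFAULT 0 ASSERT $value >= 0";
		let res = field(sql);
		assert!(res.is_ok())
	}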