Implement reblessive into the parser to prevent any overflows. (#3591)
parent 0985b1e18d
commit 567832825a

57 changed files with 921 additions and 537 deletions
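The core of the change: the SurrealQL parser's recursive-descent methods become async and re-enter themselves through reblessive's Stk handle, so recursion depth is bounded by heap memory instead of the OS thread stack. A minimal, self-contained sketch of the pattern used throughout this diff (the nested function is a hypothetical stand-in for the parser's recursive methods; the Stack/Stk API is the one the diff itself uses):

    use reblessive::{Stack, Stk};

    // Recursive descent rewritten in reblessive style: instead of calling itself
    // directly (and growing the machine stack), the function schedules the nested
    // call on a heap-allocated stack via `Stk::run`.
    async fn nested(stk: &mut Stk, depth: usize) -> usize {
        if depth == 0 {
            return 0;
        }
        1 + stk.run(|stk| nested(stk, depth - 1)).await
    }

    fn main() {
        let mut stack = Stack::new();
        // `enter` starts the root computation, `finish` drives it to completion;
        // a recursion this deep would overflow a native call stack.
        let result = stack.enter(|stk| nested(stk, 1_000_000)).finish();
        assert_eq!(result, 1_000_000);
    }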
.github/workflows/bench.yml (vendored, 4 changes)

@@ -32,7 +32,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
@@ -120,7 +120,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Setup cache
        uses: Swatinem/rust-cache@v2
.github/workflows/ci.yml (vendored, 44 changes)

@@ -22,7 +22,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable
          components: rustfmt

      - name: Checkout sources
@@ -46,7 +46,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -73,7 +73,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable
          components: rustfmt

      - name: Checkout sources
@@ -114,7 +114,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable
          targets: wasm32-unknown-unknown

      - name: Checkout sources
@@ -138,7 +138,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable
          components: clippy

      - name: Checkout sources
@@ -166,7 +166,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -202,7 +202,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -229,7 +229,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -256,7 +256,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -283,7 +283,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -322,7 +322,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -362,7 +362,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -398,7 +398,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -434,7 +434,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -470,7 +470,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -493,7 +493,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -516,7 +516,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -539,7 +539,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -562,7 +562,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -605,7 +605,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -642,7 +642,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
@@ -675,7 +675,7 @@ jobs:
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
-         toolchain: 1.75.0
+         toolchain: stable

      - name: Checkout sources
        uses: actions/checkout@v4
Cargo.lock (generated, 11 changes)

@@ -4489,6 +4489,16 @@ dependencies = [
  "yasna",
 ]

+[[package]]
+name = "reblessive"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fa873c785a072f2d26c6c1e56e5ea60ff2763bd6c3560227198efd8b9204707"
+dependencies = [
+ "pin-project-lite",
+ "pin-utils",
+]
+
 [[package]]
 name = "redox_syscall"
 version = "0.4.1"
@@ -5937,6 +5947,7 @@ dependencies = [
  "quick_cache",
  "radix_trie",
  "rand 0.8.5",
+ "reblessive",
  "regex",
  "regex-syntax 0.8.2",
  "reqwest",
@@ -1131,6 +1131,9 @@ allow_unsafe = true
 [pkg.rquickjs-core]
 allow_unsafe = true

+[pkg.reblessive]
+allow_unsafe = true
+
 [pkg.tungstenite]
 allow_unsafe = true
@@ -148,6 +148,7 @@ unicase = { version = "2.7.0", optional = true }
 arbitrary = { version = "1.3.2", features = ["derive"], optional = true }
 regex-syntax = { version = "0.8.2", optional = true, features = ["arbitrary"] }
 geo-types = { version = "0.7.12", features = ["arbitrary"] }
+reblessive = { version = "0.3.0" }

 [dev-dependencies]
 criterion = { version = "0.5.1", features = ["async_tokio"] }
@@ -119,8 +119,8 @@ pub async fn sc(
    let vars = Some(vars.0);
    // Setup the system session for finding the signin record
    let mut sess = Session::editor().with_ns(&ns).with_db(&db);
-   sess.ip = session.ip.clone();
-   sess.or = session.or.clone();
+   sess.ip.clone_from(&session.ip);
+   sess.or.clone_from(&session.or);
    // Compute the value with the params
    match kvs.evaluate(val, &sess, vars).await {
        // The signin value succeeded
@@ -59,8 +59,8 @@ pub async fn sc(
    let vars = Some(vars.0);
    // Setup the system session for creating the signup record
    let mut sess = Session::editor().with_ns(&ns).with_db(&db);
-   sess.ip = session.ip.clone();
-   sess.or = session.or.clone();
+   sess.ip.clone_from(&session.ip);
+   sess.or.clone_from(&session.or);
    // Compute the value with the params
    match kvs.evaluate(val, &sess, vars).await {
        // The signin value succeeded
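Several assignments in this commit switch from `a = b.clone()` to `a.clone_from(&b)`. `Clone::clone_from` is the standard-library hook that lets a type reuse the destination's existing allocation instead of building a fresh clone and dropping the old value; for `String` and `Vec` buffers this can avoid a reallocation, and newer clippy on the now-stable toolchain suggests this form. A small illustrative sketch (not taken from the diff):

    fn main() {
        let source = String::from("surrealdb");
        let mut target = String::with_capacity(32);

        // Allocates a brand-new String, then drops target's old buffer.
        target = source.clone();

        // Copies the bytes into target's existing buffer when capacity allows.
        target.clone_from(&source);

        assert_eq!(target, source);
    }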
@@ -262,17 +262,16 @@ mod tests {
        panic!()
    };
    let a: Analyzer = az.into();
-   let tokens = a
-       .generate_tokens(
-           &Context::default(),
-           &Options::default(),
-           &txn,
-           FilteringStage::Indexing,
-           input.to_string(),
-       )
-       .await
-       .unwrap();
-   tokens
+   a.generate_tokens(
+       &Context::default(),
+       &Options::default(),
+       &txn,
+       FilteringStage::Indexing,
+       input.to_string(),
+   )
+   .await
+   .unwrap()
 }

 pub(super) async fn test_analyzer(def: &str, input: &str, expected: &[&str]) {
@@ -5,7 +5,6 @@ use std::collections::btree_map::Entry as BEntry;
 use std::collections::hash_map::Entry as HEntry;
 use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::default::Default;

 pub(super) struct Highlighter {
    prefix: Vec<char>,
@@ -202,7 +202,7 @@ impl IndexRangeThingIterator {
            .await?;
        let res = res.values;
        if let Some((key, _)) = res.last() {
-           self.r.beg = key.clone();
+           self.r.beg.clone_from(key);
            self.r.beg.push(0x00);
        }
        let mut r = Vec::with_capacity(res.len());
@@ -24,7 +24,7 @@ impl RemoveAnalyzerStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Analyzer, &Base::Db)?;
            // Claim transaction
@@ -40,8 +40,8 @@ impl RemoveAnalyzerStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::AzNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
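Every Remove*Statement::compute method below gets the same mechanical refactor: instead of matching directly on an awaited async block, the awaited result is bound to a variable first and matched afterwards. Behaviour is unchanged; the two-step form simply reads more clearly than `match async { ... }.await { ... }`. A schematic stand-in showing only the shape of the change (the error type and `if_exists` flag here are hypothetical):

    #[derive(Debug)]
    enum Error {
        NotFound,
    }

    async fn compute(if_exists: bool) -> Result<(), Error> {
        // Previously written as: match async { ... }.await { ... }
        let future = async {
            // ... the actual removal work would happen here ...
            Err(Error::NotFound)
        }
        .await;
        match future {
            Err(Error::NotFound) if if_exists => Ok(()),
            other => other,
        }
    }

    fn main() {
        // Driving the future needs an executor (tokio, futures::executor, ...);
        // omitted here since only the control-flow shape is the point.
        let _ = compute(true);
    }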
@@ -25,7 +25,7 @@ impl RemoveDatabaseStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Database, &Base::Ns)?;
            // Claim transaction
@@ -43,8 +43,8 @@ impl RemoveDatabaseStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::DbNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -26,7 +26,7 @@ impl RemoveEventStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Event, &Base::Db)?;
            // Claim transaction
@@ -44,8 +44,8 @@ impl RemoveEventStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::EvNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -26,7 +26,7 @@ impl RemoveFieldStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Field, &Base::Db)?;
            // Claim transaction
@@ -46,8 +46,8 @@ impl RemoveFieldStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::FdNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -25,7 +25,7 @@ impl RemoveFunctionStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Function, &Base::Db)?;
            // Claim transaction
@@ -40,8 +40,8 @@ impl RemoveFunctionStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::FcNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -26,7 +26,7 @@ impl RemoveIndexStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Index, &Base::Db)?;
            // Claim transaction
@@ -47,8 +47,8 @@ impl RemoveIndexStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::IxNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -26,7 +26,7 @@ impl RemoveModelStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Model, &Base::Db)?;
            // Claim transaction
@@ -41,8 +41,8 @@ impl RemoveModelStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::MlNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -25,7 +25,7 @@ impl RemoveNamespaceStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Namespace, &Base::Root)?;
            // Claim transaction
@@ -44,8 +44,8 @@ impl RemoveNamespaceStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::NsNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -25,7 +25,7 @@ impl RemoveParamStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Parameter, &Base::Db)?;
            // Claim transaction
@@ -40,8 +40,8 @@ impl RemoveParamStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::PaNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -25,7 +25,7 @@ impl RemoveScopeStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Scope, &Base::Db)?;
            // Claim transaction
@@ -43,8 +43,8 @@ impl RemoveScopeStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::ScNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -26,7 +26,7 @@ impl RemoveTableStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Table, &Base::Db)?;
            // Claim transaction
@@ -55,8 +55,8 @@ impl RemoveTableStatement {
            // Ok all good
            Ok(Value::None)
        }
-       .await
-       {
+       .await;
+       match future {
            Err(Error::TbNotFound {
                ..
            }) if self.if_exists => Ok(Value::None),
@@ -26,7 +26,7 @@ impl RemoveTokenStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Actor, &self.base)?;

@@ -73,8 +73,8 @@ impl RemoveTokenStatement {
            _ => Err(Error::InvalidLevel(self.base.to_string())),
            }
        }
-       .await
-       {
+       .await;
+       match future {
            Err(e) if self.if_exists => match e {
                Error::NtNotFound {
                    ..
@@ -26,7 +26,7 @@ impl RemoveUserStatement {
        opt: &Options,
        txn: &Transaction,
    ) -> Result<Value, Error> {
-       match async {
+       let future = async {
            // Allowed to run?
            opt.is_allowed(Action::Edit, ResourceKind::Actor, &self.base)?;

@@ -73,8 +73,8 @@ impl RemoveUserStatement {
            _ => Err(Error::InvalidLevel(self.base.to_string())),
            }
        }
-       .await
-       {
+       .await;
+       match future {
            Err(e) if self.if_exists => match e {
                Error::UserRootNotFound {
                    ..
@@ -70,7 +70,6 @@ impl serde::ser::SerializeStruct for SerializeKillStatement {
 #[cfg(test)]
 mod tests {
    use super::*;
    use crate::sql::statements::kill::KillStatement;

    #[test]
    fn default() {
@@ -2855,16 +2855,16 @@ mod tests {

    #[test]
    fn check_size() {
-       assert_eq!(64, std::mem::size_of::<Value>());
-       assert_eq!(104, std::mem::size_of::<Error>());
-       assert_eq!(104, std::mem::size_of::<Result<Value, Error>>());
+       assert!(64 >= std::mem::size_of::<Value>(), "size of value too big");
+       assert_eq!(112, std::mem::size_of::<Error>());
+       assert_eq!(112, std::mem::size_of::<Result<Value, Error>>());
        assert_eq!(24, std::mem::size_of::<crate::sql::number::Number>());
        assert_eq!(24, std::mem::size_of::<crate::sql::strand::Strand>());
        assert_eq!(16, std::mem::size_of::<crate::sql::duration::Duration>());
        assert_eq!(12, std::mem::size_of::<crate::sql::datetime::Datetime>());
        assert_eq!(24, std::mem::size_of::<crate::sql::array::Array>());
        assert_eq!(24, std::mem::size_of::<crate::sql::object::Object>());
-       assert_eq!(56, std::mem::size_of::<crate::sql::geometry::Geometry>());
+       assert_eq!(48, std::mem::size_of::<crate::sql::geometry::Geometry>());
        assert_eq!(24, std::mem::size_of::<crate::sql::param::Param>());
        assert_eq!(24, std::mem::size_of::<crate::sql::idiom::Idiom>());
        assert_eq!(24, std::mem::size_of::<crate::sql::table::Table>());
@@ -2880,19 +2880,19 @@ mod tests {

    #[test]
    fn check_serialize() {
-       let enc: Vec<u8> = Value::None.try_into().unwrap();
+       let enc: Vec<u8> = Value::None.into();
        assert_eq!(2, enc.len());
-       let enc: Vec<u8> = Value::Null.try_into().unwrap();
+       let enc: Vec<u8> = Value::Null.into();
        assert_eq!(2, enc.len());
-       let enc: Vec<u8> = Value::Bool(true).try_into().unwrap();
+       let enc: Vec<u8> = Value::Bool(true).into();
        assert_eq!(3, enc.len());
-       let enc: Vec<u8> = Value::Bool(false).try_into().unwrap();
+       let enc: Vec<u8> = Value::Bool(false).into();
        assert_eq!(3, enc.len());
-       let enc: Vec<u8> = Value::from("test").try_into().unwrap();
+       let enc: Vec<u8> = Value::from("test").into();
        assert_eq!(8, enc.len());
-       let enc: Vec<u8> = Value::parse("{ hello: 'world' }").try_into().unwrap();
+       let enc: Vec<u8> = Value::parse("{ hello: 'world' }").into();
        assert_eq!(19, enc.len());
-       let enc: Vec<u8> = Value::parse("{ compact: true, schema: 0 }").try_into().unwrap();
+       let enc: Vec<u8> = Value::parse("{ compact: true, schema: 0 }").into();
        assert_eq!(27, enc.len());
    }

@@ -2904,8 +2904,8 @@ mod tests {
        let res = Value::parse(
            "{ test: { something: [1, 'two', null, test:tobie, { trueee: false, noneee: nulll }] } }",
        );
-       let enc: Vec<u8> = val.try_into().unwrap();
-       let dec: Value = enc.try_into().unwrap();
+       let enc: Vec<u8> = val.into();
+       let dec: Value = enc.into();
        assert_eq!(res, dec);
    }
 }
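The check_size test now asserts an upper bound for Value instead of an exact size, and the exact sizes of Error and Geometry move to 112 and 48 bytes here. Pinning type sizes in a test is a cheap guard against accidentally growing hot enum types; a minimal stand-alone version of the pattern (the Value enum below is a hypothetical example, not SurrealDB's):

    use std::mem::size_of;

    // Hypothetical enum; the point is the size guard, not the type itself.
    enum Value {
        None,
        Bool(bool),
        Number(f64),
        Text(String),
    }

    fn main() {
        // Fails the moment a new variant pushes the enum past its budget,
        // catching memory-footprint regressions early.
        assert!(size_of::<Value>() <= 32, "Value grew past its size budget");
        println!("Value is {} bytes", size_of::<Value>());
    }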
@@ -14,6 +14,7 @@ mod test;

 use lexer::Lexer;
 use parser::{ParseError, ParseErrorKind, Parser};
+use reblessive::Stack;

 /// Takes a string and returns if it could be a reserved keyword in certain contexts.
 pub fn could_be_reserved_keyword(s: &str) -> bool {
@@ -34,7 +35,12 @@ pub fn could_be_reserved_keyword(s: &str) -> bool {
 pub fn parse(input: &str) -> Result<Query, Error> {
    debug!("parsing query, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
-   parser.parse_query().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   let mut stack = Stack::new();
+   stack
+       .enter(|stk| parser.parse_query(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }

 /// Parses a SurrealQL [`Value`].
@@ -42,7 +48,12 @@ pub fn parse(input: &str) -> Result<Query, Error> {
 pub fn value(input: &str) -> Result<Value, Error> {
    debug!("parsing value, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
-   parser.parse_value_field().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   let mut stack = Stack::new();
+   stack
+       .enter(|stk| parser.parse_value_field(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }

 /// Parses a SurrealQL [`Value`].
@@ -50,8 +61,13 @@ pub fn value(input: &str) -> Result<Value, Error> {
 pub fn value_legacy_strand(input: &str) -> Result<Value, Error> {
    debug!("parsing value, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
+   let mut stack = Stack::new();
    parser.allow_legacy_strand(true);
-   parser.parse_value().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   stack
+       .enter(|stk| parser.parse_value(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }

 /// Parses JSON into an inert SurrealQL [`Value`]
@@ -59,7 +75,12 @@ pub fn value_legacy_strand(input: &str) -> Result<Value, Error> {
 pub fn json(input: &str) -> Result<Value, Error> {
    debug!("parsing json, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
-   parser.parse_json().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   let mut stack = Stack::new();
+   stack
+       .enter(|stk| parser.parse_json(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }

 /// Parses JSON into an inert SurrealQL [`Value`]
@@ -67,15 +88,25 @@ pub fn json(input: &str) -> Result<Value, Error> {
 pub fn json_legacy_strand(input: &str) -> Result<Value, Error> {
    debug!("parsing json, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
+   let mut stack = Stack::new();
    parser.allow_legacy_strand(true);
-   parser.parse_json().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   stack
+       .enter(|stk| parser.parse_json(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }
 /// Parses a SurrealQL Subquery [`Subquery`]
 #[instrument(level = "debug", name = "parser", skip_all, fields(length = input.len()))]
 pub fn subquery(input: &str) -> Result<Subquery, Error> {
    debug!("parsing subquery, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
-   parser.parse_full_subquery().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   let mut stack = Stack::new();
+   stack
+       .enter(|stk| parser.parse_full_subquery(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }

 /// Parses a SurrealQL [`Idiom`]
@@ -83,7 +114,12 @@ pub fn subquery(input: &str) -> Result<Subquery, Error> {
 pub fn idiom(input: &str) -> Result<Idiom, Error> {
    debug!("parsing idiom, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
-   parser.parse_plain_idiom().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   let mut stack = Stack::new();
+   stack
+       .enter(|stk| parser.parse_plain_idiom(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }

 /// Parse a datetime without enclosing delimiters from a string.
@@ -117,12 +153,22 @@ pub fn duration(input: &str) -> Result<Duration, Error> {
 pub fn range(input: &str) -> Result<Range, Error> {
    debug!("parsing range, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
-   parser.parse_range().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   let mut stack = Stack::new();
+   stack
+       .enter(|stk| parser.parse_range(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }

 /// Parse a record id.
 pub fn thing(input: &str) -> Result<Thing, Error> {
    debug!("parsing thing, input = {input}");
    let mut parser = Parser::new(input.as_bytes());
-   parser.parse_thing().map_err(|e| e.render_on(input)).map_err(Error::InvalidQuery)
+   let mut stack = Stack::new();
+   stack
+       .enter(|stk| parser.parse_thing(stk))
+       .finish()
+       .map_err(|e| e.render_on(input))
+       .map_err(Error::InvalidQuery)
 }
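With every public entry point now routed through Stack::enter(..).finish(), pathological nesting depth can no longer exhaust the thread stack; memory use grows on the heap instead, bounded further by the parser's own recursion limits. A hedged sketch of the kind of input this protects against (the commented call uses the parse function shown above; the module path is crate-internal and approximate, and the depth at which the old parser overflowed is not stated in this diff):

    // Build a value with very deep nesting, e.g. "[[[[ ... 1 ... ]]]]".
    fn deeply_nested(depth: usize) -> String {
        let mut q = String::new();
        q.push_str(&"[".repeat(depth));
        q.push('1');
        q.push_str(&"]".repeat(depth));
        q
    }

    fn main() {
        let input = format!("RETURN {}", deeply_nested(10_000));
        // Before this change a recursive-descent call chain this deep could
        // overflow the native stack; now recursion runs on reblessive's heap stack.
        // let query = syn::v2::parse(&input);
        println!("{} bytes of input built", input.len());
    }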
@@ -7,6 +7,7 @@ use crate::{
    },
 };
 use phf::phf_map;
+use reblessive::Stk;
 use unicase::UniCase;

 const MAX_LEVENSTHEIN_CUT_OFF: u8 = 4;
@@ -408,7 +409,7 @@ pub(crate) static PATHS: phf::Map<UniCase<&'static str>, PathKind> = phf_map! {

 impl Parser<'_> {
    /// Parse a builtin path.
-   pub fn parse_builtin(&mut self, start: Span) -> ParseResult<Value> {
+   pub async fn parse_builtin(&mut self, stk: &mut Stk, start: Span) -> ParseResult<Value> {
        let mut last_span = start;
        while self.eat(t!("::")) {
            self.next_token_value::<Ident>()?;
@@ -423,8 +424,9 @@ impl Parser<'_> {

        match PATHS.get_entry(&UniCase::ascii(str)) {
            Some((_, PathKind::Constant(x))) => Ok(Value::Constant(x.clone())),
-           Some((k, PathKind::Function)) => self
-               .parse_builtin_function(k.into_inner().to_owned())
+           Some((k, PathKind::Function)) => stk
+               .run(|ctx| self.parse_builtin_function(ctx, k.into_inner().to_owned()))
+               .await
                .map(|x| Value::Function(Box::new(x))),
            None => {
                // Generate an suggestion.
@@ -463,7 +465,11 @@ impl Parser<'_> {
    }

    /// Parse a call to a builtin function.
-   pub fn parse_builtin_function(&mut self, name: String) -> ParseResult<Function> {
+   pub async fn parse_builtin_function(
+       &mut self,
+       stk: &mut Stk,
+       name: String,
+   ) -> ParseResult<Function> {
        let start = expected!(self, t!("(")).span;
        let mut args = Vec::new();
        loop {
@@ -471,7 +477,8 @@ impl Parser<'_> {
            break;
        }

-       args.push(self.parse_value_field()?);
+       let arg = stk.run(|ctx| self.parse_value_field(ctx)).await?;
+       args.push(arg);

        if !self.eat(t!(",")) {
            self.expect_closing_delimiter(t!(")"), start)?;
@@ -1,5 +1,7 @@
 //! This module defines the pratt parser for operators.

+use reblessive::Stk;
+
 use super::mac::unexpected;
 use super::ParseError;
 use crate::sql::{value::TryNeg, Cast, Expression, Number, Operator, Value};
@@ -15,10 +17,10 @@ impl Parser<'_> {
    /// A generic loose ident like `foo` in for example `foo.bar` can be two different values
    /// depending on context: a table or a field the current document. This function parses loose
    /// idents as a table, see [`parse_value_field`] for parsing loose idents as fields
-   pub fn parse_value(&mut self) -> ParseResult<Value> {
+   pub async fn parse_value(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
        let old = self.table_as_field;
        self.table_as_field = false;
-       let res = self.pratt_parse_expr(0);
+       let res = self.pratt_parse_expr(ctx, 0).await;
        self.table_as_field = old;
        res
    }
@@ -28,10 +30,10 @@ impl Parser<'_> {
    /// A generic loose ident like `foo` in for example `foo.bar` can be two different values
    /// depending on context: a table or a field the current document. This function parses loose
    /// idents as a field, see [`parse_value`] for parsing loose idents as table
-   pub fn parse_value_field(&mut self) -> ParseResult<Value> {
+   pub async fn parse_value_field(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
        let old = self.table_as_field;
        self.table_as_field = true;
-       let res = self.pratt_parse_expr(0);
+       let res = self.pratt_parse_expr(ctx, 0).await;
        self.table_as_field = old;
        res
    }
@@ -126,7 +128,7 @@ impl Parser<'_> {
        }
    }

-   fn parse_prefix_op(&mut self, min_bp: u8) -> ParseResult<Value> {
+   async fn parse_prefix_op(&mut self, ctx: &mut Stk, min_bp: u8) -> ParseResult<Value> {
        const I64_ABS_MAX: u64 = 9223372036854775808;

        let token = self.next();
@@ -135,8 +137,8 @@ impl Parser<'_> {
            t!("-") => Operator::Neg,
            t!("!") => Operator::Not,
            t!("<") => {
-               let kind = self.parse_kind(token.span)?;
-               let value = self.pratt_parse_expr(min_bp)?;
+               let kind = self.parse_kind(ctx, token.span).await?;
+               let value = ctx.run(|ctx| self.pratt_parse_expr(ctx, min_bp)).await?;
                let cast = Cast(kind, value);
                return Ok(Value::Cast(Box::new(cast)));
            }
@@ -167,7 +169,7 @@ impl Parser<'_> {
            }
        }

-       let v = self.pratt_parse_expr(min_bp)?;
+       let v = ctx.run(|ctx| self.pratt_parse_expr(ctx, min_bp)).await?;

        // HACK: For compatiblity with the old parser apply + and - operator immediately if the
        // left value is a number.
@@ -193,7 +195,12 @@ impl Parser<'_> {
        }
    }

-   fn parse_infix_op(&mut self, min_bp: u8, lhs: Value) -> ParseResult<Value> {
+   async fn parse_infix_op(
+       &mut self,
+       ctx: &mut Stk,
+       min_bp: u8,
+       lhs: Value,
+   ) -> ParseResult<Value> {
        let token = self.next();
        let operator = match token.kind {
            // TODO: change operator name?
@@ -263,7 +270,7 @@ impl Parser<'_> {
            // should be unreachable as we previously check if the token was a prefix op.
            x => unreachable!("found non-operator token {x:?}"),
        };
-       let rhs = self.pratt_parse_expr(min_bp)?;
+       let rhs = ctx.run(|ctx| self.pratt_parse_expr(ctx, min_bp)).await?;
        Ok(Value::Expression(Box::new(Expression::Binary {
            l: lhs,
            o: operator,
@@ -273,12 +280,12 @@ impl Parser<'_> {

    /// The pratt parsing loop.
    /// Parses expression according to binding power.
-   fn pratt_parse_expr(&mut self, min_bp: u8) -> ParseResult<Value> {
+   async fn pratt_parse_expr(&mut self, ctx: &mut Stk, min_bp: u8) -> ParseResult<Value> {
        let peek = self.peek();
        let mut lhs = if let Some(((), r_bp)) = self.prefix_binding_power(peek.kind) {
-           self.parse_prefix_op(r_bp)?
+           self.parse_prefix_op(ctx, r_bp).await?
        } else {
-           self.parse_idiom_expression()?
+           self.parse_idiom_expression(ctx).await?
        };

        loop {
@@ -302,7 +309,7 @@ impl Parser<'_> {
            break;
        }

-       lhs = self.parse_infix_op(r_bp, lhs)?;
+       lhs = self.parse_infix_op(ctx, r_bp, lhs).await?;
    }

    Ok(lhs)
@@ -1,3 +1,5 @@
+use reblessive::Stk;
+
 use crate::{
    sql::{Function, Ident, Model},
    syn::v2::{
@@ -12,7 +14,7 @@ impl Parser<'_> {
    /// Parse a custom function function call
    ///
    /// Expects `fn` to already be called.
-   pub fn parse_custom_function(&mut self) -> ParseResult<Function> {
+   pub async fn parse_custom_function(&mut self, ctx: &mut Stk) -> ParseResult<Function> {
        expected!(self, t!("::"));
        let mut name = self.next_token_value::<Ident>()?.0;
        while self.eat(t!("::")) {
@@ -26,7 +28,8 @@ impl Parser<'_> {
            break;
        }

-       args.push(self.parse_value_field()?);
+       let arg = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
+       args.push(arg);

        if !self.eat(t!(",")) {
            self.expect_closing_delimiter(t!(")"), start)?;
@@ -40,7 +43,7 @@ impl Parser<'_> {
    /// Parse a model invocation
    ///
    /// Expects `ml` to already be called.
-   pub fn parse_model(&mut self) -> ParseResult<Model> {
+   pub async fn parse_model(&mut self, ctx: &mut Stk) -> ParseResult<Model> {
        expected!(self, t!("::"));
        let mut name = self.next_token_value::<Ident>()?.0;
        while self.eat(t!("::")) {
@@ -80,7 +83,8 @@ impl Parser<'_> {
            break;
        }

-       args.push(self.parse_value_field()?);
+       let arg = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
+       args.push(arg);

        if !self.eat(t!(",")) {
            self.expect_closing_delimiter(t!(")"), start)?;
@@ -1,3 +1,5 @@
+use reblessive::Stk;
+
 use crate::{
    sql::{Dir, Edges, Field, Fields, Graph, Ident, Idiom, Part, Table, Tables, Value},
    syn::v2::token::{t, Span, TokenKind},
@@ -10,10 +12,14 @@ impl Parser<'_> {
    ///
    /// # Parser State
    /// Expects the next tokens to be of a field set.
-   pub fn parse_fields(&mut self) -> ParseResult<Fields> {
+   pub async fn parse_fields(&mut self, ctx: &mut Stk) -> ParseResult<Fields> {
        if self.eat(t!("VALUE")) {
-           let expr = self.parse_value_field()?;
-           let alias = self.eat(t!("AS")).then(|| self.parse_plain_idiom()).transpose()?;
+           let expr = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
+           let alias = if self.eat(t!("AS")) {
+               Some(self.parse_plain_idiom(ctx).await?)
+           } else {
+               None
+           };
            Ok(Fields(
                vec![Field::Single {
                    expr,
@@ -27,8 +33,12 @@ impl Parser<'_> {
            let field = if self.eat(t!("*")) {
                Field::All
            } else {
-               let expr = self.parse_value_field()?;
-               let alias = self.eat(t!("AS")).then(|| self.parse_plain_idiom()).transpose()?;
+               let expr = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
+               let alias = if self.eat(t!("AS")) {
+                   Some(self.parse_plain_idiom(ctx).await?)
+               } else {
+                   None
+               };
                Field::Single {
                    expr,
                    alias,
@@ -44,10 +54,10 @@ impl Parser<'_> {
    }

    /// Parses a list of idioms seperated by a `,`
-   pub fn parse_idiom_list(&mut self) -> ParseResult<Vec<Idiom>> {
-       let mut res = vec![self.parse_plain_idiom()?];
+   pub async fn parse_idiom_list(&mut self, ctx: &mut Stk) -> ParseResult<Vec<Idiom>> {
+       let mut res = vec![self.parse_plain_idiom(ctx).await?];
        while self.eat(t!(",")) {
-           res.push(self.parse_plain_idiom()?);
+           res.push(self.parse_plain_idiom(ctx).await?);
        }
        Ok(res)
    }
@@ -56,7 +66,11 @@ impl Parser<'_> {
    ///
    /// This function differes from [`Parser::parse_remaining_value_idiom`] in how it handles graph
    /// parsing. Graphs inside a plain idioms will remain a normal graph production.
-   pub(crate) fn parse_remaining_idiom(&mut self, start: Vec<Part>) -> ParseResult<Idiom> {
+   pub(crate) async fn parse_remaining_idiom(
+       &mut self,
+       stk: &mut Stk,
+       start: Vec<Part>,
+   ) -> ParseResult<Idiom> {
        let mut res = start;
        loop {
            match self.peek_kind() {
@@ -70,19 +84,23 @@ impl Parser<'_> {
                }
                t!("[") => {
                    let span = self.pop_peek().span;
-                   res.push(self.parse_bracket_part(span)?)
+                   let part = self.parse_bracket_part(stk, span).await?;
+                   res.push(part)
                }
                t!("->") => {
                    self.pop_peek();
-                   res.push(Part::Graph(self.parse_graph(Dir::Out)?))
+                   let graph = stk.run(|stk| self.parse_graph(stk, Dir::Out)).await?;
+                   res.push(Part::Graph(graph))
                }
                t!("<->") => {
                    self.pop_peek();
-                   res.push(Part::Graph(self.parse_graph(Dir::Both)?))
+                   let graph = stk.run(|stk| self.parse_graph(stk, Dir::Both)).await?;
+                   res.push(Part::Graph(graph))
                }
                t!("<-") => {
                    self.pop_peek();
-                   res.push(Part::Graph(self.parse_graph(Dir::In)?))
+                   let graph = stk.run(|stk| self.parse_graph(stk, Dir::In)).await?;
+                   res.push(Part::Graph(graph))
                }
                t!("..") => {
                    return Err(ParseError::new(
@@ -106,7 +124,11 @@ impl Parser<'_> {
    /// This function differes from [`Parser::parse_remaining_value_idiom`] in how it handles graph
    /// parsing. When parsing a idiom like production which can be a value, the initial start value
    /// might need to be changed to a Edge depending on what is parsed next.
-   pub(crate) fn parse_remaining_value_idiom(&mut self, start: Vec<Part>) -> ParseResult<Value> {
+   pub(crate) async fn parse_remaining_value_idiom(
+       &mut self,
+       ctx: &mut Stk,
+       start: Vec<Part>,
+   ) -> ParseResult<Value> {
        let mut res = start;
        loop {
            match self.peek_kind() {
@@ -120,23 +142,24 @@ impl Parser<'_> {
                }
                t!("[") => {
                    let span = self.pop_peek().span;
-                   res.push(self.parse_bracket_part(span)?)
+                   let part = self.parse_bracket_part(ctx, span).await?;
+                   res.push(part)
                }
                t!("->") => {
                    self.pop_peek();
-                   if let Some(x) = self.parse_graph_idiom(&mut res, Dir::Out)? {
+                   if let Some(x) = self.parse_graph_idiom(ctx, &mut res, Dir::Out).await? {
                        return Ok(x);
                    }
                }
                t!("<->") => {
                    self.pop_peek();
-                   if let Some(x) = self.parse_graph_idiom(&mut res, Dir::Out)? {
+                   if let Some(x) = self.parse_graph_idiom(ctx, &mut res, Dir::Both).await? {
                        return Ok(x);
                    }
                }
                t!("<-") => {
                    self.pop_peek();
-                   if let Some(x) = self.parse_graph_idiom(&mut res, Dir::Out)? {
+                   if let Some(x) = self.parse_graph_idiom(ctx, &mut res, Dir::In).await? {
                        return Ok(x);
                    }
                }
@@ -158,8 +181,13 @@ impl Parser<'_> {

    /// Parse a graph idiom and possibly rewrite the starting value to be an edge whenever the
    /// parsed production matches `Thing -> Ident`.
-   fn parse_graph_idiom(&mut self, res: &mut Vec<Part>, dir: Dir) -> ParseResult<Option<Value>> {
-       let graph = self.parse_graph(dir)?;
+   async fn parse_graph_idiom(
+       &mut self,
+       ctx: &mut Stk,
+       res: &mut Vec<Part>,
+       dir: Dir,
+   ) -> ParseResult<Option<Value>> {
+       let graph = ctx.run(|ctx| self.parse_graph(ctx, dir)).await?;
        // the production `Thing Graph` is reparsed as an edge if the graph does not contain an
        // alias or a condition.
        if res.len() == 1 && graph.alias.is_none() && graph.cond.is_none() {
@@ -194,24 +222,27 @@ impl Parser<'_> {

    /// Parse a idiom which can only start with a graph or an identifier.
    /// Other expressions are not allowed as start of this idiom
-   pub fn parse_plain_idiom(&mut self) -> ParseResult<Idiom> {
+   pub async fn parse_plain_idiom(&mut self, ctx: &mut Stk) -> ParseResult<Idiom> {
        let start = match self.peek_kind() {
            t!("->") => {
                self.pop_peek();
-               Part::Graph(self.parse_graph(Dir::Out)?)
+               let graph = ctx.run(|ctx| self.parse_graph(ctx, Dir::Out)).await?;
+               Part::Graph(graph)
            }
            t!("<->") => {
                self.pop_peek();
-               Part::Graph(self.parse_graph(Dir::Both)?)
+               let graph = ctx.run(|ctx| self.parse_graph(ctx, Dir::Both)).await?;
+               Part::Graph(graph)
            }
            t!("<-") => {
                self.pop_peek();
-               Part::Graph(self.parse_graph(Dir::In)?)
+               let graph = ctx.run(|ctx| self.parse_graph(ctx, Dir::In)).await?;
+               Part::Graph(graph)
            }
            _ => Part::Field(self.next_token_value()?),
        };
        let start = vec![start];
-       self.parse_remaining_idiom(start)
+       self.parse_remaining_idiom(ctx, start).await
    }

    /// Parse the part after the `.` in a idiom
@@ -226,7 +257,7 @@ impl Parser<'_> {
        Ok(res)
    }
    /// Parse the part after the `[` in a idiom
-   pub fn parse_bracket_part(&mut self, start: Span) -> ParseResult<Part> {
+   pub async fn parse_bracket_part(&mut self, ctx: &mut Stk, start: Span) -> ParseResult<Part> {
        let res = match self.peek_kind() {
            t!("*") => {
                self.pop_peek();
@@ -239,7 +270,8 @@ impl Parser<'_> {
            t!("123") => Part::Index(self.next_token_value()?),
            t!("?") | t!("WHERE") => {
                self.pop_peek();
-               Part::Where(self.parse_value_field()?)
+               let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
+               Part::Where(value)
            }
            t!("$param") => Part::Value(Value::Param(self.next_token_value()?)),
            TokenKind::Strand => Part::Value(Value::Strand(self.next_token_value()?)),
@@ -361,10 +393,10 @@ impl Parser<'_> {
    ///
    /// # Parser state
    /// Expects to be at the start of a what list.
-   pub fn parse_what_list(&mut self) -> ParseResult<Vec<Value>> {
-       let mut res = vec![self.parse_what_value()?];
+   pub async fn parse_what_list(&mut self, ctx: &mut Stk) -> ParseResult<Vec<Value>> {
+       let mut res = vec![self.parse_what_value(ctx).await?];
        while self.eat(t!(",")) {
-           res.push(self.parse_what_value()?)
+           res.push(self.parse_what_value(ctx).await?)
        }
        Ok(res)
    }
@@ -373,8 +405,8 @@ impl Parser<'_> {
    ///
    /// # Parser state
    /// Expects to be at the start of a what value
-   pub fn parse_what_value(&mut self) -> ParseResult<Value> {
-       let start = self.parse_what_primary()?;
+   pub async fn parse_what_value(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
+       let start = self.parse_what_primary(ctx).await?;
        if start.can_start_idiom() && Self::continues_idiom(self.peek_kind()) {
            let start = match start {
                Value::Table(Table(x)) => vec![Part::Field(Ident(x))],
@@ -382,7 +414,7 @@ impl Parser<'_> {
                x => vec![Part::Start(x)],
            };

-           let idiom = self.parse_remaining_value_idiom(start)?;
+           let idiom = self.parse_remaining_value_idiom(ctx, start).await?;
            Ok(idiom)
        } else {
            Ok(start)
@@ -394,7 +426,7 @@ impl Parser<'_> {
    /// # Parser state
    /// Expects to just have eaten a direction (e.g. <-, <->, or ->) and be at the field like part
    /// of the graph
-   pub fn parse_graph(&mut self, dir: Dir) -> ParseResult<Graph> {
+   pub async fn parse_graph(&mut self, ctx: &mut Stk, dir: Dir) -> ParseResult<Graph> {
        match self.peek_kind() {
            t!("?") => {
                self.pop_peek();
@@ -423,8 +455,12 @@ impl Parser<'_> {
            x => unexpected!(self, x, "`?` or an identifier"),
        };

-       let cond = self.try_parse_condition()?;
-       let alias = self.eat(t!("AS")).then(|| self.parse_plain_idiom()).transpose()?;
+       let cond = self.try_parse_condition(ctx).await?;
+       let alias = if self.eat(t!("AS")) {
+           Some(self.parse_plain_idiom(ctx).await?)
+       } else {
+           None
+       };

        self.expect_closing_delimiter(t!(")"), span)?;
@ -1,5 +1,7 @@
|
|||
use std::collections::BTreeMap;
|
||||
|
||||
use reblessive::Stk;
|
||||
|
||||
use crate::{
|
||||
sql::{Array, Ident, Object, Strand, Value},
|
||||
syn::v2::{
|
||||
|
@ -11,19 +13,19 @@ use crate::{
|
|||
use super::{ParseResult, Parser};
|
||||
|
||||
impl Parser<'_> {
|
||||
pub fn parse_json(&mut self) -> ParseResult<Value> {
|
||||
pub async fn parse_json(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
|
||||
let token = self.next();
|
||||
match token.kind {
|
||||
t!("NULL") => Ok(Value::Null),
|
||||
t!("true") => Ok(Value::Bool(true)),
|
||||
t!("false") => Ok(Value::Bool(false)),
|
||||
t!("{") => self.parse_json_object(token.span).map(Value::Object),
|
||||
t!("[") => self.parse_json_array(token.span).map(Value::Array),
|
||||
t!("{") => self.parse_json_object(ctx, token.span).await.map(Value::Object),
|
||||
t!("[") => self.parse_json_array(ctx, token.span).await.map(Value::Array),
|
||||
TokenKind::Duration => self.token_value(token).map(Value::Duration),
|
||||
TokenKind::DateTime => self.token_value(token).map(Value::Datetime),
|
||||
TokenKind::Strand => {
|
||||
if self.legacy_strands {
|
||||
self.parse_legacy_strand()
|
||||
self.parse_legacy_strand(ctx).await
|
||||
} else {
|
||||
Ok(Value::Strand(Strand(self.lexer.string.take().unwrap())))
|
||||
}
|
||||
|
@ -32,12 +34,12 @@ impl Parser<'_> {
|
|||
TokenKind::Uuid => self.token_value(token).map(Value::Uuid),
|
||||
_ => {
|
||||
let ident = self.token_value::<Ident>(token)?.0;
|
||||
self.parse_thing_from_ident(ident).map(Value::Thing)
|
||||
self.parse_thing_from_ident(ctx, ident).await.map(Value::Thing)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_json_object(&mut self, start: Span) -> ParseResult<Object> {
|
||||
async fn parse_json_object(&mut self, ctx: &mut Stk, start: Span) -> ParseResult<Object> {
|
||||
let mut obj = BTreeMap::new();
|
||||
loop {
|
||||
if self.eat(t!("}")) {
|
||||
|
@ -45,7 +47,7 @@ impl Parser<'_> {
|
|||
}
|
||||
let key = self.parse_object_key()?;
|
||||
expected!(self, t!(":"));
|
||||
let value = self.parse_json()?;
|
||||
let value = ctx.run(|ctx| self.parse_json(ctx)).await?;
|
||||
obj.insert(key, value);
|
||||
|
||||
if !self.eat(t!(",")) {
|
||||
|
@ -55,13 +57,13 @@ impl Parser<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_json_array(&mut self, start: Span) -> ParseResult<Array> {
|
||||
async fn parse_json_array(&mut self, ctx: &mut Stk, start: Span) -> ParseResult<Array> {
|
||||
let mut array = Vec::new();
|
||||
loop {
|
||||
if self.eat(t!("]")) {
|
||||
return Ok(Array(array));
|
||||
}
|
||||
let value = self.parse_json()?;
|
||||
let value = ctx.run(|ctx| self.parse_json(ctx)).await?;
|
||||
array.push(value);
|
||||
|
||||
if !self.eat(t!(",")) {
|
||||
|
|
@@ -1,3 +1,5 @@
+use reblessive::Stk;
+
 use crate::{
    sql::Kind,
    syn::v2::{
@@ -13,14 +15,14 @@ impl Parser<'_> {
    ///
    /// # Parser State
    /// expects the first `<` to already be eaten
-   pub fn parse_kind(&mut self, delim: Span) -> ParseResult<Kind> {
-       let kind = self.parse_inner_kind()?;
+   pub async fn parse_kind(&mut self, ctx: &mut Stk, delim: Span) -> ParseResult<Kind> {
+       let kind = self.parse_inner_kind(ctx).await?;
        self.expect_closing_delimiter(t!(">"), delim)?;
        Ok(kind)
    }

    /// Parse an inner kind, a kind without enclosing `<` `>`.
-   pub fn parse_inner_kind(&mut self) -> ParseResult<Kind> {
+   pub async fn parse_inner_kind(&mut self, ctx: &mut Stk) -> ParseResult<Kind> {
        match self.peek_kind() {
            t!("ANY") => {
                self.pop_peek();
@@ -30,11 +32,11 @@ impl Parser<'_> {
                self.pop_peek();

                let delim = expected!(self, t!("<")).span;
-               let mut first = self.parse_concrete_kind()?;
+               let mut first = ctx.run(|ctx| self.parse_concrete_kind(ctx)).await?;
                if self.peek_kind() == t!("|") {
                    let mut kind = vec![first];
                    while self.eat(t!("|")) {
-                       kind.push(self.parse_concrete_kind()?);
+                       kind.push(ctx.run(|ctx| self.parse_concrete_kind(ctx)).await?);
                    }
                    first = Kind::Either(kind);
                }
@@ -42,11 +44,11 @@ impl Parser<'_> {
                Ok(Kind::Option(Box::new(first)))
            }
            _ => {
-               let first = self.parse_concrete_kind()?;
+               let first = ctx.run(|ctx| self.parse_concrete_kind(ctx)).await?;
                if self.peek_kind() == t!("|") {
                    let mut kind = vec![first];
                    while self.eat(t!("|")) {
-                       kind.push(self.parse_concrete_kind()?);
+                       kind.push(ctx.run(|ctx| self.parse_concrete_kind(ctx)).await?);
                    }
                    Ok(Kind::Either(kind))
                } else {
@@ -57,7 +59,7 @@ impl Parser<'_> {
    }

    /// Parse a single kind which is not any, option, or either.
-   fn parse_concrete_kind(&mut self) -> ParseResult<Kind> {
+   async fn parse_concrete_kind(&mut self, ctx: &mut Stk) -> ParseResult<Kind> {
        match self.next().kind {
            t!("BOOL") => Ok(Kind::Bool),
            t!("NULL") => Ok(Kind::Null),
@@ -132,7 +134,7 @@ impl Parser<'_> {
            t!("ARRAY") => {
                let span = self.peek().span;
                if self.eat(t!("<")) {
-                   let kind = self.parse_inner_kind()?;
+                   let kind = ctx.run(|ctx| self.parse_inner_kind(ctx)).await?;
                    let size = self.eat(t!(",")).then(|| self.next_token_value()).transpose()?;
                    self.expect_closing_delimiter(t!(">"), span)?;
                    Ok(Kind::Array(Box::new(kind), size))
@@ -143,7 +145,7 @@ impl Parser<'_> {
            t!("SET") => {
                let span = self.peek().span;
                if self.eat(t!("<")) {
-                   let kind = self.parse_inner_kind()?;
+                   let kind = ctx.run(|ctx| self.parse_inner_kind(ctx)).await?;
                    let size = self.eat(t!(",")).then(|| self.next_token_value()).transpose()?;
                    self.expect_closing_delimiter(t!(">"), span)?;
                    Ok(Kind::Set(Box::new(kind), size))
@@ -175,12 +177,15 @@ impl Parser<'_> {

 #[cfg(test)]
 mod tests {
+   use reblessive::Stack;
+
    use super::*;
    use crate::sql::table::Table;

    fn kind(i: &str) -> ParseResult<Kind> {
        let mut parser = Parser::new(i.as_bytes());
-       parser.parse_inner_kind()
+       let mut stack = Stack::new();
+       stack.enter(|ctx| parser.parse_inner_kind(ctx)).finish()
    }

    #[test]
@@ -69,7 +69,8 @@ macro_rules! expected {
 macro_rules! test_parse {
    ($func:ident$( ( $($e:expr),* $(,)? ))? , $t:literal) => {{
        let mut parser = $crate::syn::v2::parser::Parser::new($t.as_bytes());
-       parser.$func($($($e),*)*)
+       let mut stack = reblessive::Stack::new();
+       stack.enter(|ctx| parser.$func(ctx,$($($e),*)*)).finish()
    }};
 }
@@ -44,6 +44,7 @@ mod token_buffer;
 pub mod test;

 pub use error::{IntErrorKind, ParseError, ParseErrorKind};
+use reblessive::Stk;

 /// The result returned by most parser function.
 pub type ParseResult<T> = Result<T, ParseError>;
@@ -76,6 +77,8 @@ pub struct Parser<'a> {
    token_buffer: TokenBuffer<4>,
    table_as_field: bool,
    legacy_strands: bool,
+   object_recursion: usize,
+   query_recursion: usize,
 }

 impl<'a> Parser<'a> {
@@ -87,9 +90,26 @@ impl<'a> Parser<'a> {
            token_buffer: TokenBuffer::new(),
            table_as_field: false,
            legacy_strands: false,
+           object_recursion: 100,
+           query_recursion: 20,
        }
    }

+   pub fn with_object_recursion_limit(mut self, limit: usize) -> Self {
+       self.object_recursion = limit;
+       self
+   }
+
+   pub fn with_query_recursion_limit(mut self, limit: usize) -> Self {
+       self.query_recursion = limit;
+       self
+   }
+
+   pub fn with_allow_legacy_strand(mut self, value: bool) -> Self {
+       self.legacy_strands = value;
+       self
+   }
+
    /// Set whether to parse strands as legacy strands.
    pub fn allow_legacy_strand(&mut self, value: bool) {
        self.legacy_strands = value;
@@ -111,6 +131,8 @@ impl<'a> Parser<'a> {
            token_buffer: TokenBuffer::new(),
            legacy_strands: self.legacy_strands,
            table_as_field: false,
+           object_recursion: self.object_recursion,
+           query_recursion: self.query_recursion,
        }
    }

@@ -217,24 +239,27 @@ impl<'a> Parser<'a> {
    /// Parse a full query.
    ///
    /// This is the primary entry point of the parser.
-   pub fn parse_query(&mut self) -> ParseResult<sql::Query> {
-       let statements = self.parse_stmt_list()?;
+   pub async fn parse_query(&mut self, ctx: &mut Stk) -> ParseResult<sql::Query> {
+       let statements = self.parse_stmt_list(ctx).await?;
        Ok(sql::Query(statements))
    }

    /// Parse a single statement.
-   pub fn parse_statement(&mut self) -> ParseResult<sql::Statement> {
-       self.parse_stmt()
+   pub async fn parse_statement(&mut self, ctx: &mut Stk) -> ParseResult<sql::Statement> {
+       self.parse_stmt(ctx).await
    }

    /// Parse a possibly partial statement.
    ///
    /// This will try to parse a statement if a full statement can be parsed from the buffer parser
    /// is operating on.
-   pub fn parse_partial_statement(&mut self) -> PartialResult<sql::Statement> {
+   pub async fn parse_partial_statement(
+       &mut self,
+       ctx: &mut Stk,
+   ) -> PartialResult<sql::Statement> {
        while self.eat(t!(";")) {}

-       let res = self.parse_stmt();
+       let res = ctx.run(|ctx| self.parse_stmt(ctx)).await;
        match res {
            Err(ParseError {
                kind: ParseErrorKind::UnexpectedEof {
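Even with recursion moved to the heap, the parser keeps explicit depth limits (object_recursion, defaulting to 100, and query_recursion, defaulting to 20) so hostile input cannot consume unbounded memory either, and the new builder methods make those limits configurable per parser instance. A hedged usage sketch of the builder (crate-internal API; the helper function and input string are illustrative only):

    fn parse_with_custom_limits(input: &str) -> ParseResult<sql::Query> {
        // Tighter limits than the defaults shown in the diff above.
        let mut parser = Parser::new(input.as_bytes())
            .with_query_recursion_limit(10)
            .with_object_recursion_limit(50);
        // Drive the async entry point to completion on a reblessive stack.
        let mut stack = reblessive::Stack::new();
        stack.enter(|stk| parser.parse_query(stk)).finish()
    }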
|
@ -1,6 +1,7 @@
|
|||
use std::collections::BTreeMap;
|
||||
|
||||
use geo_types::{LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon};
|
||||
use reblessive::Stk;
|
||||
|
||||
use crate::{
|
||||
sql::{Block, Geometry, Object, Strand, Value},
|
||||
|
@ -16,7 +17,11 @@ impl Parser<'_> {
|
|||
/// Parse an production which starts with an `{`
|
||||
///
|
||||
/// Either a block statemnt, a object or geometry.
|
||||
pub(super) fn parse_object_like(&mut self, start: Span) -> ParseResult<Value> {
|
||||
pub(super) async fn parse_object_like(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
start: Span,
|
||||
) -> ParseResult<Value> {
|
||||
if self.eat(t!("}")) {
|
||||
// empty object, just return
|
||||
return Ok(Value::Object(Object::default()));
|
||||
|
@ -24,18 +29,18 @@ impl Parser<'_> {
|
|||
|
||||
// Check first if it can be an object.
|
||||
if self.peek_token_at(1).kind == t!(":") {
|
||||
return self.parse_object_or_geometry(start);
|
||||
return self.parse_object_or_geometry(ctx, start).await;
|
||||
}
|
||||
|
||||
// not an object so instead parse as a block.
|
||||
self.parse_block(start).map(Box::new).map(Value::Block)
|
||||
self.parse_block(ctx, start).await.map(Box::new).map(Value::Block)
|
||||
}
|
||||
|
||||
/// Parse a production starting with an `{` as either an object or a geometry.
|
||||
///
|
||||
/// This function tries to match an object to an geometry like object and if it is unable
|
||||
/// fallsback to parsing normal objects.
|
||||
fn parse_object_or_geometry(&mut self, start: Span) -> ParseResult<Value> {
|
||||
async fn parse_object_or_geometry(&mut self, ctx: &mut Stk, start: Span) -> ParseResult<Value> {
|
||||
// empty object was already matched previously so next must be a key.
|
||||
let key = self.parse_object_key()?;
|
||||
expected!(self, t!(":"));
|
||||
|
@ -55,48 +60,70 @@ impl Parser<'_> {
|
|||
//
|
||||
// we can unwrap strand since we just matched it to not be an err.
|
||||
self.parse_geometry_after_type(
|
||||
ctx,
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_point,
|
||||
|x| Value::Geometry(Geometry::Point(x)),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Ok("LineString") => {
|
||||
self.parse_geometry_after_type(
|
||||
ctx,
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_line,
|
||||
|x| Value::Geometry(Geometry::Line(x)),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Ok("Polygon") => {
|
||||
self.parse_geometry_after_type(
|
||||
ctx,
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_polygon,
|
||||
|x| Value::Geometry(Geometry::Polygon(x)),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Ok("MultiPoint") => {
|
||||
self.parse_geometry_after_type(
|
||||
ctx,
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_multipoint,
|
||||
|x| Value::Geometry(Geometry::MultiPoint(x)),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Ok("MultiLineString") => {
|
||||
self.parse_geometry_after_type(
|
||||
ctx,
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_multiline,
|
||||
|x| Value::Geometry(Geometry::MultiLine(x)),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Ok("MultiPolygon") => {
|
||||
self.parse_geometry_after_type(
|
||||
ctx,
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_multipolygon,
|
||||
|x| Value::Geometry(Geometry::MultiPolygon(x)),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Ok("LineString") => self.parse_geometry_after_type(
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_line,
|
||||
|x| Value::Geometry(Geometry::Line(x)),
|
||||
),
|
||||
Ok("Polygon") => self.parse_geometry_after_type(
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_polygon,
|
||||
|x| Value::Geometry(Geometry::Polygon(x)),
|
||||
),
|
||||
Ok("MultiPoint") => self.parse_geometry_after_type(
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_multipoint,
|
||||
|x| Value::Geometry(Geometry::MultiPoint(x)),
|
||||
),
|
||||
Ok("MultiLineString") => self.parse_geometry_after_type(
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_multiline,
|
||||
|x| Value::Geometry(Geometry::MultiLine(x)),
|
||||
),
|
||||
Ok("MultiPolygon") => self.parse_geometry_after_type(
|
||||
start,
|
||||
key,
|
||||
strand.unwrap(),
|
||||
Self::to_multipolygon,
|
||||
|x| Value::Geometry(Geometry::MultiPolygon(x)),
|
||||
),
|
||||
Ok("GeometryCollection") => {
|
||||
self.next();
|
||||
let strand = strand.unwrap();
|
||||
|
@ -104,9 +131,11 @@ impl Parser<'_> {
|
|||
// missing next field, not a geometry.
|
||||
return self
|
||||
.parse_object_from_map(
|
||||
ctx,
|
||||
BTreeMap::from([(key, Value::Strand(strand))]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object);
|
||||
}
|
||||
let coord_key = self.parse_object_key()?;
|
||||
|
@ -115,13 +144,15 @@ impl Parser<'_> {
|
|||
// invalid field key, not a Geometry
|
||||
return self
|
||||
.parse_object_from_key(
|
||||
ctx,
|
||||
coord_key,
|
||||
BTreeMap::from([(key, Value::Strand(strand))]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object);
|
||||
}
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
let comma = self.eat(t!(","));
|
||||
if !self.eat(t!("}")) {
|
||||
if !comma {
|
||||
|
@ -138,12 +169,14 @@ impl Parser<'_> {
|
|||
// A comma and then no brace: more than two fields, not a geometry.
|
||||
return self
|
||||
.parse_object_from_map(
|
||||
ctx,
|
||||
BTreeMap::from([
|
||||
(key, Value::Strand(strand)),
|
||||
(coord_key, value),
|
||||
]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object);
|
||||
}
|
||||
|
||||
|
@ -185,19 +218,24 @@ impl Parser<'_> {
|
|||
)]))))
|
||||
} else {
|
||||
self.parse_object_from_map(
|
||||
ctx,
|
||||
BTreeMap::from([(key, Value::Strand(strand.unwrap()))]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object)
|
||||
}
|
||||
}
|
||||
_ => self.parse_object_from_key(key, BTreeMap::new(), start).map(Value::Object),
|
||||
_ => self
|
||||
.parse_object_from_key(ctx, key, BTreeMap::new(), start)
|
||||
.await
|
||||
.map(Value::Object),
|
||||
}
|
||||
}
|
||||
"coordinates" => {
|
||||
// Found a coordinates field; the next value must be a coordinates value, but we don't know
// which kind until we match the type.
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
if !self.eat(t!(",")) {
|
||||
// no comma, so the object must end early.
|
||||
self.expect_closing_delimiter(t!("}"), start)?;
|
||||
|
@ -214,7 +252,8 @@ impl Parser<'_> {
|
|||
if type_key != "type" {
|
||||
// not the right field, return object.
|
||||
return self
|
||||
.parse_object_from_key(type_key, BTreeMap::from([(key, value)]), start)
|
||||
.parse_object_from_key(ctx, type_key, BTreeMap::from([(key, value)]), start)
|
||||
.await
|
||||
.map(Value::Object);
|
||||
}
|
||||
let peek = self.peek();
|
||||
|
@ -285,7 +324,7 @@ impl Parser<'_> {
|
|||
(ate_comma, Value::Strand(strand.unwrap()))
|
||||
}
|
||||
_ => {
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
(self.eat(t!(",")), value)
|
||||
}
|
||||
};
|
||||
|
@ -300,13 +339,15 @@ impl Parser<'_> {
|
|||
]))));
|
||||
}
|
||||
self.parse_object_from_map(
|
||||
ctx,
|
||||
BTreeMap::from([(key, value), (type_key, type_value)]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object)
|
||||
}
|
||||
"geometries" => {
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
if !self.eat(t!(",")) {
|
||||
self.expect_closing_delimiter(t!("}"), start)?;
|
||||
return Ok(Value::Object(Object(BTreeMap::from([(key, value)]))));
|
||||
|
@ -315,7 +356,8 @@ impl Parser<'_> {
|
|||
expected!(self, t!(":"));
|
||||
if type_key != "type" {
|
||||
return self
|
||||
.parse_object_from_key(type_key, BTreeMap::from([(key, value)]), start)
|
||||
.parse_object_from_key(ctx, type_key, BTreeMap::from([(key, value)]), start)
|
||||
.await
|
||||
.map(Value::Object);
|
||||
}
|
||||
let peek = self.peek();
|
||||
|
@ -346,7 +388,7 @@ impl Parser<'_> {
|
|||
}
|
||||
(ate_comma, Value::Strand(strand.unwrap()))
|
||||
} else {
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
(self.eat(t!(",")), value)
|
||||
};
|
||||
|
||||
|
@ -358,17 +400,23 @@ impl Parser<'_> {
|
|||
]))));
|
||||
}
|
||||
self.parse_object_from_map(
|
||||
ctx,
|
||||
BTreeMap::from([(key, value), (type_key, type_value)]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object)
|
||||
}
|
||||
_ => self.parse_object_from_key(key, BTreeMap::new(), start).map(Value::Object),
|
||||
_ => self
|
||||
.parse_object_from_key(ctx, key, BTreeMap::new(), start)
|
||||
.await
|
||||
.map(Value::Object),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_geometry_after_type<F, Fm, R>(
|
||||
async fn parse_geometry_after_type<F, Fm, R>(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
start: Span,
|
||||
key: String,
|
||||
strand: Strand,
|
||||
|
@ -392,13 +440,15 @@ impl Parser<'_> {
|
|||
// next field was not correct, fall back to parsing a plain object.
|
||||
return self
|
||||
.parse_object_from_key(
|
||||
ctx,
|
||||
coord_key,
|
||||
BTreeMap::from([(key, Value::Strand(strand))]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object);
|
||||
}
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
let comma = self.eat(t!(","));
|
||||
if !self.eat(t!("}")) {
|
||||
// the object didn't end, either an error or not a geometry.
|
||||
|
@ -414,9 +464,11 @@ impl Parser<'_> {
|
|||
|
||||
return self
|
||||
.parse_object_from_map(
|
||||
ctx,
|
||||
BTreeMap::from([(key, Value::Strand(strand)), (coord_key, value)]),
|
||||
start,
|
||||
)
|
||||
.await
|
||||
.map(Value::Object);
|
||||
}
|
||||
|
||||
|
@ -507,19 +559,20 @@ impl Parser<'_> {
|
|||
Some(Point::from((a.clone().try_into().ok()?, b.clone().try_into().ok()?)))
|
||||
}
|
||||
|
||||
fn parse_object_from_key(
|
||||
async fn parse_object_from_key(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
key: String,
|
||||
mut map: BTreeMap<String, Value>,
|
||||
start: Span,
|
||||
) -> ParseResult<Object> {
|
||||
let v = self.parse_value_field()?;
|
||||
let v = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
map.insert(key, v);
|
||||
if !self.eat(t!(",")) {
|
||||
self.expect_closing_delimiter(t!("}"), start)?;
|
||||
return Ok(Object(map));
|
||||
}
|
||||
self.parse_object_from_map(map, start)
|
||||
self.parse_object_from_map(ctx, map, start).await
|
||||
}
|
||||
|
||||
/// Parses an object.
|
||||
|
@ -528,12 +581,13 @@ impl Parser<'_> {
|
|||
///
|
||||
/// # Parser state
|
||||
/// Expects the first `{` to already have been eaten.
|
||||
pub(super) fn parse_object(&mut self, start: Span) -> ParseResult<Object> {
|
||||
self.parse_object_from_map(BTreeMap::new(), start)
|
||||
pub(super) async fn parse_object(&mut self, ctx: &mut Stk, start: Span) -> ParseResult<Object> {
|
||||
self.parse_object_from_map(ctx, BTreeMap::new(), start).await
|
||||
}
|
||||
|
||||
fn parse_object_from_map(
|
||||
async fn parse_object_from_map(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
mut map: BTreeMap<String, Value>,
|
||||
start: Span,
|
||||
) -> ParseResult<Object> {
|
||||
|
@ -542,7 +596,7 @@ impl Parser<'_> {
|
|||
return Ok(Object(map));
|
||||
}
|
||||
|
||||
let (key, value) = self.parse_object_entry()?;
|
||||
let (key, value) = self.parse_object_entry(ctx).await?;
|
||||
// TODO: Error on duplicate key?
|
||||
map.insert(key, value);
|
||||
|
||||
|
@ -558,7 +612,7 @@ impl Parser<'_> {
|
|||
/// # Parser State
|
||||
/// Expects the starting `{` to have already been eaten and its span to be handed to this
/// function as the `start` parameter.
|
||||
pub(super) fn parse_block(&mut self, start: Span) -> ParseResult<Block> {
|
||||
pub(super) async fn parse_block(&mut self, ctx: &mut Stk, start: Span) -> ParseResult<Block> {
|
||||
let mut statements = Vec::new();
|
||||
loop {
|
||||
while self.eat(t!(";")) {}
|
||||
|
@ -566,7 +620,7 @@ impl Parser<'_> {
|
|||
break;
|
||||
}
|
||||
|
||||
let stmt = self.parse_entry()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_entry(ctx)).await?;
|
||||
statements.push(stmt);
|
||||
if !self.eat(t!(";")) {
|
||||
self.expect_closing_delimiter(t!("}"), start)?;
|
||||
|
@ -578,10 +632,10 @@ impl Parser<'_> {
|
|||
|
||||
/// Parse a single entry in the object, i.e. `field: value + 1` in the object `{ field: value +
|
||||
/// 1 }`
|
||||
fn parse_object_entry(&mut self) -> ParseResult<(String, Value)> {
|
||||
async fn parse_object_entry(&mut self, ctx: &mut Stk) -> ParseResult<(String, Value)> {
|
||||
let text = self.parse_object_key()?;
|
||||
expected!(self, t!(":"));
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
Ok((text, value))
|
||||
}
|
||||
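Objects, blocks, and arrays are the productions that nest through each other via parse_value_field, so they were the ones driving native stack depth before this change. A purely illustrative generator for that kind of input (the query text is hypothetical; only std is used):

// Builds `{ a: { a: { ... 1 ... } } }` nested `depth` levels deep. Previously
// each `{` cost a native stack frame through
// parse_object_like -> parse_value_field -> parse_object_like; with the
// ctx-threaded version above the same input only grows reblessive's heap stack.
fn deeply_nested_object(depth: usize) -> String {
    let mut out = String::new();
    out.push_str(&"{ a: ".repeat(depth));
    out.push('1');
    out.push_str(&" }".repeat(depth));
    out
}

fn main() {
    let query = format!("RETURN {};", deeply_nested_object(50_000));
    println!("{} bytes of pathological input", query.len());
}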
|
||||
@ -1,4 +1,5 @@
|
|||
use geo::Point;
|
||||
use reblessive::Stk;
|
||||
|
||||
use super::{ParseResult, Parser};
|
||||
use crate::{
|
||||
|
@ -20,7 +21,7 @@ impl Parser<'_> {
|
|||
/// Parse a what primary.
|
||||
///
|
||||
/// A what is a value which is more restricted in which expressions it can contain.
|
||||
pub fn parse_what_primary(&mut self) -> ParseResult<Value> {
|
||||
pub async fn parse_what_primary(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
|
||||
match self.peek_kind() {
|
||||
TokenKind::Duration => {
|
||||
let duration = self.next_token_value()?;
|
||||
|
@ -32,11 +33,13 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("r\"") => {
|
||||
self.pop_peek();
|
||||
Ok(Value::Thing(self.parse_record_string(true)?))
|
||||
let thing = self.parse_record_string(ctx, true).await?;
|
||||
Ok(Value::Thing(thing))
|
||||
}
|
||||
t!("r'") => {
|
||||
self.pop_peek();
|
||||
Ok(Value::Thing(self.parse_record_string(false)?))
|
||||
let thing = self.parse_record_string(ctx, false).await?;
|
||||
Ok(Value::Thing(thing))
|
||||
}
|
||||
t!("$param") => {
|
||||
let param = self.next_token_value()?;
|
||||
|
@ -44,22 +47,25 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("FUNCTION") => {
|
||||
self.pop_peek();
|
||||
Ok(Value::Function(Box::new(self.parse_script()?)))
|
||||
let func = self.parse_script(ctx).await?;
|
||||
Ok(Value::Function(Box::new(func)))
|
||||
}
|
||||
t!("IF") => {
|
||||
let stmt = self.parse_if_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_if_stmt(ctx)).await?;
|
||||
Ok(Value::Subquery(Box::new(Subquery::Ifelse(stmt))))
|
||||
}
|
||||
t!("(") => {
|
||||
let token = self.pop_peek();
|
||||
self.parse_inner_subquery(Some(token.span)).map(|x| Value::Subquery(Box::new(x)))
|
||||
self.parse_inner_subquery(ctx, Some(token.span))
|
||||
.await
|
||||
.map(|x| Value::Subquery(Box::new(x)))
|
||||
}
|
||||
t!("<") => {
|
||||
self.pop_peek();
|
||||
expected!(self, t!("FUTURE"));
|
||||
expected!(self, t!(">"));
|
||||
let start = expected!(self, t!("{")).span;
|
||||
let block = self.parse_block(start)?;
|
||||
let block = self.parse_block(ctx, start).await?;
|
||||
Ok(Value::Future(Box::new(crate::sql::Future(block))))
|
||||
}
|
||||
t!("|") => {
|
||||
|
@ -78,9 +84,11 @@ impl Parser<'_> {
|
|||
| t!("DELETE")
|
||||
| t!("RELATE")
|
||||
| t!("DEFINE")
|
||||
| t!("REMOVE") => self.parse_inner_subquery(None).map(|x| Value::Subquery(Box::new(x))),
|
||||
t!("fn") => self.parse_custom_function().map(|x| Value::Function(Box::new(x))),
|
||||
t!("ml") => self.parse_model().map(|x| Value::Model(Box::new(x))),
|
||||
| t!("REMOVE") => {
|
||||
self.parse_inner_subquery(ctx, None).await.map(|x| Value::Subquery(Box::new(x)))
|
||||
}
|
||||
t!("fn") => self.parse_custom_function(ctx).await.map(|x| Value::Function(Box::new(x))),
|
||||
t!("ml") => self.parse_model(ctx).await.map(|x| Value::Model(Box::new(x))),
|
||||
x => {
|
||||
if !self.peek_can_be_ident() {
|
||||
unexpected!(self, x, "a value")
|
||||
|
@ -88,10 +96,10 @@ impl Parser<'_> {
|
|||
|
||||
let token = self.next();
|
||||
match self.peek_kind() {
|
||||
t!("::") | t!("(") => self.parse_builtin(token.span),
|
||||
t!("::") | t!("(") => self.parse_builtin(ctx, token.span).await,
|
||||
t!(":") => {
|
||||
let str = self.token_value::<Ident>(token)?.0;
|
||||
self.parse_thing_or_range(str)
|
||||
self.parse_thing_or_range(ctx, str).await
|
||||
}
|
||||
x => {
|
||||
if x.has_data() {
|
||||
|
@ -108,7 +116,7 @@ impl Parser<'_> {
|
|||
}
|
||||
|
||||
/// Parse an expression.
|
||||
pub fn parse_idiom_expression(&mut self) -> ParseResult<Value> {
|
||||
pub async fn parse_idiom_expression(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
|
||||
let token = self.peek();
|
||||
let value = match token.kind {
|
||||
t!("NONE") => {
|
||||
|
@ -133,13 +141,13 @@ impl Parser<'_> {
|
|||
expected!(self, t!("FUTURE"));
|
||||
self.expect_closing_delimiter(t!(">"), token.span)?;
|
||||
let next = expected!(self, t!("{")).span;
|
||||
let block = self.parse_block(next)?;
|
||||
let block = self.parse_block(ctx, next).await?;
|
||||
return Ok(Value::Future(Box::new(crate::sql::Future(block))));
|
||||
}
|
||||
TokenKind::Strand => {
|
||||
self.pop_peek();
|
||||
if self.legacy_strands {
|
||||
return self.parse_legacy_strand();
|
||||
return self.parse_legacy_strand(ctx).await;
|
||||
} else {
|
||||
let strand = self.token_value(token)?;
|
||||
return Ok(Value::Strand(strand));
|
||||
|
@ -167,11 +175,13 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("r\"") => {
|
||||
self.pop_peek();
|
||||
Value::Thing(self.parse_record_string(true)?)
|
||||
let thing = self.parse_record_string(ctx, true).await?;
|
||||
Value::Thing(thing)
|
||||
}
|
||||
t!("r'") => {
|
||||
self.pop_peek();
|
||||
Value::Thing(self.parse_record_string(false)?)
|
||||
let thing = self.parse_record_string(ctx, false).await?;
|
||||
Value::Thing(thing)
|
||||
}
|
||||
t!("$param") => {
|
||||
self.pop_peek();
|
||||
|
@ -180,30 +190,31 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("FUNCTION") => {
|
||||
self.pop_peek();
|
||||
Value::Function(Box::new(self.parse_script()?))
|
||||
let script = self.parse_script(ctx).await?;
|
||||
Value::Function(Box::new(script))
|
||||
}
|
||||
t!("->") => {
|
||||
self.pop_peek();
|
||||
let graph = self.parse_graph(Dir::Out)?;
|
||||
let graph = ctx.run(|ctx| self.parse_graph(ctx, Dir::Out)).await?;
|
||||
Value::Idiom(Idiom(vec![Part::Graph(graph)]))
|
||||
}
|
||||
t!("<->") => {
|
||||
self.pop_peek();
|
||||
let graph = self.parse_graph(Dir::Both)?;
|
||||
let graph = ctx.run(|ctx| self.parse_graph(ctx, Dir::Both)).await?;
|
||||
Value::Idiom(Idiom(vec![Part::Graph(graph)]))
|
||||
}
|
||||
t!("<-") => {
|
||||
self.pop_peek();
|
||||
let graph = self.parse_graph(Dir::In)?;
|
||||
let graph = ctx.run(|ctx| self.parse_graph(ctx, Dir::In)).await?;
|
||||
Value::Idiom(Idiom(vec![Part::Graph(graph)]))
|
||||
}
|
||||
t!("[") => {
|
||||
self.pop_peek();
|
||||
self.parse_array(token.span).map(Value::Array)?
|
||||
self.parse_array(ctx, token.span).await.map(Value::Array)?
|
||||
}
|
||||
t!("{") => {
|
||||
self.pop_peek();
|
||||
self.parse_object_like(token.span)?
|
||||
self.parse_object_like(ctx, token.span).await?
|
||||
}
|
||||
t!("|") => {
|
||||
self.pop_peek();
|
||||
|
@ -211,12 +222,12 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("IF") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_if_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_if_stmt(ctx)).await?;
|
||||
Value::Subquery(Box::new(Subquery::Ifelse(stmt)))
|
||||
}
|
||||
t!("(") => {
|
||||
self.pop_peek();
|
||||
self.parse_inner_subquery_or_coordinate(token.span)?
|
||||
self.parse_inner_subquery_or_coordinate(ctx, token.span).await?
|
||||
}
|
||||
t!("/") => {
|
||||
self.pop_peek();
|
||||
|
@ -230,22 +241,24 @@ impl Parser<'_> {
|
|||
| t!("DELETE")
|
||||
| t!("RELATE")
|
||||
| t!("DEFINE")
|
||||
| t!("REMOVE") => self.parse_inner_subquery(None).map(|x| Value::Subquery(Box::new(x)))?,
|
||||
| t!("REMOVE") => {
|
||||
self.parse_inner_subquery(ctx, None).await.map(|x| Value::Subquery(Box::new(x)))?
|
||||
}
|
||||
t!("fn") => {
|
||||
self.pop_peek();
|
||||
self.parse_custom_function().map(|x| Value::Function(Box::new(x)))?
|
||||
self.parse_custom_function(ctx).await.map(|x| Value::Function(Box::new(x)))?
|
||||
}
|
||||
t!("ml") => {
|
||||
self.pop_peek();
|
||||
self.parse_model().map(|x| Value::Model(Box::new(x)))?
|
||||
self.parse_model(ctx).await.map(|x| Value::Model(Box::new(x)))?
|
||||
}
|
||||
_ => {
|
||||
self.pop_peek();
|
||||
match self.peek_kind() {
|
||||
t!("::") | t!("(") => self.parse_builtin(token.span)?,
|
||||
t!("::") | t!("(") => self.parse_builtin(ctx, token.span).await?,
|
||||
t!(":") => {
|
||||
let str = self.token_value::<Ident>(token)?.0;
|
||||
self.parse_thing_or_range(str)?
|
||||
self.parse_thing_or_range(ctx, str).await?
|
||||
}
|
||||
x => {
|
||||
if x.has_data() {
|
||||
|
@ -268,11 +281,11 @@ impl Parser<'_> {
|
|||
| Value::Bool(_)
|
||||
| Value::Future(_)
|
||||
| Value::Strand(_) => unreachable!(),
|
||||
Value::Idiom(Idiom(x)) => self.parse_remaining_value_idiom(x),
|
||||
Value::Idiom(Idiom(x)) => self.parse_remaining_value_idiom(ctx, x).await,
|
||||
Value::Table(Table(x)) => {
|
||||
self.parse_remaining_value_idiom(vec![Part::Field(Ident(x))])
|
||||
self.parse_remaining_value_idiom(ctx, vec![Part::Field(Ident(x))]).await
|
||||
}
|
||||
x => self.parse_remaining_value_idiom(vec![Part::Start(x)]),
|
||||
x => self.parse_remaining_value_idiom(ctx, vec![Part::Start(x)]).await,
|
||||
}
|
||||
} else {
|
||||
Ok(value)
|
||||
|
@ -283,13 +296,15 @@ impl Parser<'_> {
|
|||
///
|
||||
/// # Parser state
|
||||
/// Expects the starting `[` to already be eaten and its span passed as an argument.
|
||||
pub fn parse_array(&mut self, start: Span) -> ParseResult<Array> {
|
||||
pub async fn parse_array(&mut self, ctx: &mut Stk, start: Span) -> ParseResult<Array> {
|
||||
let mut values = Vec::new();
|
||||
loop {
|
||||
if self.eat(t!("]")) {
|
||||
break;
|
||||
}
|
||||
values.push(self.parse_value_field()?);
|
||||
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
values.push(value);
|
||||
|
||||
if !self.eat(t!(",")) {
|
||||
self.expect_closing_delimiter(t!("]"), start)?;
|
||||
|
@ -317,59 +332,63 @@ impl Parser<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn parse_full_subquery(&mut self) -> ParseResult<Subquery> {
|
||||
pub async fn parse_full_subquery(&mut self, ctx: &mut Stk) -> ParseResult<Subquery> {
|
||||
let peek = self.peek();
|
||||
match peek.kind {
|
||||
t!("(") => {
|
||||
self.pop_peek();
|
||||
dbg!("called");
|
||||
self.parse_inner_subquery(Some(peek.span))
|
||||
self.parse_inner_subquery(ctx, Some(peek.span)).await
|
||||
}
|
||||
t!("IF") => {
|
||||
self.pop_peek();
|
||||
let if_stmt = self.parse_if_stmt()?;
|
||||
let if_stmt = ctx.run(|ctx| self.parse_if_stmt(ctx)).await?;
|
||||
Ok(Subquery::Ifelse(if_stmt))
|
||||
}
|
||||
_ => self.parse_inner_subquery(None),
|
||||
_ => self.parse_inner_subquery(ctx, None).await,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_inner_subquery_or_coordinate(&mut self, start: Span) -> ParseResult<Value> {
|
||||
pub async fn parse_inner_subquery_or_coordinate(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
start: Span,
|
||||
) -> ParseResult<Value> {
|
||||
let peek = self.peek();
|
||||
let res = match peek.kind {
|
||||
t!("RETURN") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_return_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_return_stmt(ctx)).await?;
|
||||
Subquery::Output(stmt)
|
||||
}
|
||||
t!("SELECT") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_select_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_select_stmt(ctx)).await?;
|
||||
Subquery::Select(stmt)
|
||||
}
|
||||
t!("CREATE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_create_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_create_stmt(ctx)).await?;
|
||||
Subquery::Create(stmt)
|
||||
}
|
||||
t!("UPDATE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_update_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_update_stmt(ctx)).await?;
|
||||
Subquery::Update(stmt)
|
||||
}
|
||||
t!("DELETE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_delete_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_delete_stmt(ctx)).await?;
|
||||
Subquery::Delete(stmt)
|
||||
}
|
||||
t!("RELATE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_relate_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_relate_stmt(ctx)).await?;
|
||||
Subquery::Relate(stmt)
|
||||
}
|
||||
t!("DEFINE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_define_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_define_stmt(ctx)).await?;
|
||||
Subquery::Define(stmt)
|
||||
}
|
||||
t!("REMOVE") => {
|
||||
|
@ -407,7 +426,7 @@ impl Parser<'_> {
|
|||
}
|
||||
self.lexer.string = Some(number_value);
|
||||
}
|
||||
Subquery::Value(self.parse_value_field()?)
|
||||
Subquery::Value(ctx.run(|ctx| self.parse_value_field(ctx)).await?)
|
||||
}
|
||||
TokenKind::Number(kind) => {
|
||||
// handle possible coordinate in the shape of ([-+]?number,[-+]?number)
|
||||
|
@ -439,10 +458,10 @@ impl Parser<'_> {
|
|||
return Ok(Value::Geometry(Geometry::Point(Point::from((a, b)))));
|
||||
}
|
||||
self.lexer.string = Some(number_value);
|
||||
Subquery::Value(self.parse_value_field()?)
|
||||
Subquery::Value(ctx.run(|ctx| self.parse_value_field(ctx)).await?)
|
||||
}
|
||||
_ => {
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
Subquery::Value(value)
|
||||
}
|
||||
};
|
||||
|
@ -466,42 +485,46 @@ impl Parser<'_> {
|
|||
Ok(Value::Subquery(Box::new(res)))
|
||||
}
|
||||
|
||||
pub fn parse_inner_subquery(&mut self, start: Option<Span>) -> ParseResult<Subquery> {
|
||||
pub async fn parse_inner_subquery(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
start: Option<Span>,
|
||||
) -> ParseResult<Subquery> {
|
||||
let peek = self.peek();
|
||||
let res = match peek.kind {
|
||||
t!("RETURN") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_return_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_return_stmt(ctx)).await?;
|
||||
Subquery::Output(stmt)
|
||||
}
|
||||
t!("SELECT") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_select_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_select_stmt(ctx)).await?;
|
||||
Subquery::Select(stmt)
|
||||
}
|
||||
t!("CREATE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_create_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_create_stmt(ctx)).await?;
|
||||
Subquery::Create(stmt)
|
||||
}
|
||||
t!("UPDATE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_update_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_update_stmt(ctx)).await?;
|
||||
Subquery::Update(stmt)
|
||||
}
|
||||
t!("DELETE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_delete_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_delete_stmt(ctx)).await?;
|
||||
Subquery::Delete(stmt)
|
||||
}
|
||||
t!("RELATE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_relate_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_relate_stmt(ctx)).await?;
|
||||
Subquery::Relate(stmt)
|
||||
}
|
||||
t!("DEFINE") => {
|
||||
self.pop_peek();
|
||||
let stmt = self.parse_define_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_define_stmt(ctx)).await?;
|
||||
Subquery::Define(stmt)
|
||||
}
|
||||
t!("REMOVE") => {
|
||||
|
@ -510,7 +533,7 @@ impl Parser<'_> {
|
|||
Subquery::Remove(stmt)
|
||||
}
|
||||
_ => {
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
Subquery::Value(value)
|
||||
}
|
||||
};
|
||||
|
@ -555,9 +578,9 @@ impl Parser<'_> {
|
|||
|
||||
/// Parses a strand with legacy rules, parsing to a record id, datetime or uuid if the string
|
||||
/// matches.
|
||||
pub fn parse_legacy_strand(&mut self) -> ParseResult<Value> {
|
||||
pub async fn parse_legacy_strand(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
|
||||
let text = self.lexer.string.take().unwrap();
|
||||
if let Ok(x) = Parser::new(text.as_bytes()).parse_thing() {
|
||||
if let Ok(x) = Parser::new(text.as_bytes()).parse_thing(ctx).await {
|
||||
return Ok(Value::Thing(x));
|
||||
}
|
||||
if let Ok(x) = Lexer::new(text.as_bytes()).lex_only_datetime() {
|
||||
|
@ -569,7 +592,7 @@ impl Parser<'_> {
|
|||
Ok(Value::Strand(Strand(text)))
|
||||
}
|
||||
|
||||
fn parse_script(&mut self) -> ParseResult<Function> {
|
||||
async fn parse_script(&mut self, ctx: &mut Stk) -> ParseResult<Function> {
|
||||
let start = expected!(self, t!("(")).span;
|
||||
let mut args = Vec::new();
|
||||
loop {
|
||||
|
@ -577,7 +600,8 @@ impl Parser<'_> {
|
|||
break;
|
||||
}
|
||||
|
||||
args.push(self.parse_value_field()?);
|
||||
let arg = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
args.push(arg);
|
||||
|
||||
if !self.eat(t!(",")) {
|
||||
self.expect_closing_delimiter(t!(")"), start)?;
|
@ -1,3 +1,5 @@
|
|||
use reblessive::Stk;
|
||||
|
||||
use crate::{
|
||||
sql::{statements::CreateStatement, Values},
|
||||
syn::v2::{
|
||||
|
@ -7,11 +9,11 @@ use crate::{
|
|||
};
|
||||
|
||||
impl Parser<'_> {
|
||||
pub fn parse_create_stmt(&mut self) -> ParseResult<CreateStatement> {
|
||||
pub async fn parse_create_stmt(&mut self, ctx: &mut Stk) -> ParseResult<CreateStatement> {
|
||||
let only = self.eat(t!("ONLY"));
|
||||
let what = Values(self.parse_what_list()?);
|
||||
let data = self.try_parse_data()?;
|
||||
let output = self.try_parse_output()?;
|
||||
let what = Values(self.parse_what_list(ctx).await?);
|
||||
let data = self.try_parse_data(ctx).await?;
|
||||
let output = self.try_parse_output(ctx).await?;
|
||||
let timeout = self.try_parse_timeout()?;
|
||||
let parallel = self.eat(t!("PARALLEL"));
|
||||
|
||||
@ -1,4 +1,5 @@
|
|||
use crate::sql::{table_type, TableType};
|
||||
use reblessive::Stk;
|
||||
|
||||
use crate::{
|
||||
sql::{
|
||||
filter::Filter,
|
||||
|
@ -9,8 +10,9 @@ use crate::{
|
|||
DefineNamespaceStatement, DefineParamStatement, DefineScopeStatement, DefineStatement,
|
||||
DefineTableStatement, DefineTokenStatement, DefineUserStatement,
|
||||
},
|
||||
table_type,
|
||||
tokenizer::Tokenizer,
|
||||
Ident, Idioms, Index, Kind, Param, Permissions, Scoring, Strand, Values,
|
||||
Ident, Idioms, Index, Kind, Param, Permissions, Scoring, Strand, TableType, Values,
|
||||
},
|
||||
syn::v2::{
|
||||
parser::{
|
||||
|
@ -22,18 +24,22 @@ use crate::{
|
|||
};
|
||||
|
||||
impl Parser<'_> {
|
||||
pub fn parse_define_stmt(&mut self) -> ParseResult<DefineStatement> {
|
||||
pub async fn parse_define_stmt(&mut self, ctx: &mut Stk) -> ParseResult<DefineStatement> {
|
||||
match self.next().kind {
|
||||
t!("NAMESPACE") => self.parse_define_namespace().map(DefineStatement::Namespace),
|
||||
t!("DATABASE") => self.parse_define_database().map(DefineStatement::Database),
|
||||
t!("FUNCTION") => self.parse_define_function().map(DefineStatement::Function),
|
||||
t!("FUNCTION") => self.parse_define_function(ctx).await.map(DefineStatement::Function),
|
||||
t!("USER") => self.parse_define_user().map(DefineStatement::User),
|
||||
t!("TOKEN") => self.parse_define_token().map(DefineStatement::Token),
|
||||
t!("SCOPE") => self.parse_define_scope().map(DefineStatement::Scope),
|
||||
t!("PARAM") => self.parse_define_param().map(DefineStatement::Param),
|
||||
t!("TABLE") => self.parse_define_table().map(DefineStatement::Table),
|
||||
t!("EVENT") => self.parse_define_event().map(DefineStatement::Event),
|
||||
t!("FIELD") => self.parse_define_field().map(DefineStatement::Field),
|
||||
t!("SCOPE") => self.parse_define_scope(ctx).await.map(DefineStatement::Scope),
|
||||
t!("PARAM") => self.parse_define_param(ctx).await.map(DefineStatement::Param),
|
||||
t!("TABLE") => self.parse_define_table(ctx).await.map(DefineStatement::Table),
|
||||
t!("EVENT") => {
|
||||
ctx.run(|ctx| self.parse_define_event(ctx)).await.map(DefineStatement::Event)
|
||||
}
|
||||
t!("FIELD") => {
|
||||
ctx.run(|ctx| self.parse_define_field(ctx)).await.map(DefineStatement::Field)
|
||||
}
|
||||
t!("INDEX") => self.parse_define_index().map(DefineStatement::Index),
|
||||
t!("ANALYZER") => self.parse_define_analyzer().map(DefineStatement::Analyzer),
|
||||
x => unexpected!(self, x, "a define statement keyword"),
|
||||
|
@ -56,14 +62,9 @@ impl Parser<'_> {
|
|||
..Default::default()
|
||||
};
|
||||
|
||||
loop {
|
||||
match self.peek_kind() {
|
||||
t!("COMMENT") => {
|
||||
self.pop_peek();
|
||||
res.comment = Some(self.next_token_value()?);
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
while let t!("COMMENT") = self.peek_kind() {
|
||||
self.pop_peek();
|
||||
res.comment = Some(self.next_token_value()?);
|
||||
}
|
||||
|
||||
Ok(res)
|
||||
|
@ -100,7 +101,10 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn parse_define_function(&mut self) -> ParseResult<DefineFunctionStatement> {
|
||||
pub async fn parse_define_function(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
) -> ParseResult<DefineFunctionStatement> {
|
||||
let if_not_exists = if self.eat(t!("IF")) {
|
||||
expected!(self, t!("NOT"));
|
||||
expected!(self, t!("EXISTS"));
|
||||
|
@ -118,7 +122,7 @@ impl Parser<'_> {
|
|||
|
||||
let param = self.next_token_value::<Param>()?.0;
|
||||
expected!(self, t!(":"));
|
||||
let kind = self.parse_inner_kind()?;
|
||||
let kind = ctx.run(|ctx| self.parse_inner_kind(ctx)).await?;
|
||||
|
||||
args.push((param, kind));
|
||||
|
||||
|
@ -129,7 +133,7 @@ impl Parser<'_> {
|
|||
}
|
||||
|
||||
let next = expected!(self, t!("{")).span;
|
||||
let block = self.parse_block(next)?;
|
||||
let block = self.parse_block(ctx, next).await?;
|
||||
|
||||
let mut res = DefineFunctionStatement {
|
||||
name,
|
||||
|
@ -147,7 +151,7 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("PERMISSIONS") => {
|
||||
self.pop_peek();
|
||||
res.permissions = self.parse_permission_value()?;
|
||||
res.permissions = ctx.run(|ctx| self.parse_permission_value(ctx)).await?;
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
|
@ -251,7 +255,7 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn parse_define_scope(&mut self) -> ParseResult<DefineScopeStatement> {
|
||||
pub async fn parse_define_scope(&mut self, stk: &mut Stk) -> ParseResult<DefineScopeStatement> {
|
||||
let if_not_exists = if self.eat(t!("IF")) {
|
||||
expected!(self, t!("NOT"));
|
||||
expected!(self, t!("EXISTS"));
|
||||
|
@ -279,11 +283,11 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("SIGNUP") => {
|
||||
self.pop_peek();
|
||||
res.signup = Some(self.parse_value()?);
|
||||
res.signup = Some(stk.run(|stk| self.parse_value(stk)).await?);
|
||||
}
|
||||
t!("SIGNIN") => {
|
||||
self.pop_peek();
|
||||
res.signin = Some(self.parse_value()?);
|
||||
res.signin = Some(stk.run(|stk| self.parse_value(stk)).await?);
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
|
@ -292,7 +296,7 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn parse_define_param(&mut self) -> ParseResult<DefineParamStatement> {
|
||||
pub async fn parse_define_param(&mut self, ctx: &mut Stk) -> ParseResult<DefineParamStatement> {
|
||||
let if_not_exists = if self.eat(t!("IF")) {
|
||||
expected!(self, t!("NOT"));
|
||||
expected!(self, t!("EXISTS"));
|
||||
|
@ -312,7 +316,7 @@ impl Parser<'_> {
|
|||
match self.peek_kind() {
|
||||
t!("VALUE") => {
|
||||
self.pop_peek();
|
||||
res.value = self.parse_value()?;
|
||||
res.value = ctx.run(|ctx| self.parse_value(ctx)).await?;
|
||||
}
|
||||
t!("COMMENT") => {
|
||||
self.pop_peek();
|
||||
|
@ -320,7 +324,7 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("PERMISSIONS") => {
|
||||
self.pop_peek();
|
||||
res.permissions = self.parse_permission_value()?;
|
||||
res.permissions = ctx.run(|ctx| self.parse_permission_value(ctx)).await?;
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
|
@ -328,7 +332,7 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn parse_define_table(&mut self) -> ParseResult<DefineTableStatement> {
|
||||
pub async fn parse_define_table(&mut self, ctx: &mut Stk) -> ParseResult<DefineTableStatement> {
|
||||
let if_not_exists = if self.eat(t!("IF")) {
|
||||
expected!(self, t!("NOT"));
|
||||
expected!(self, t!("EXISTS"));
|
||||
|
@ -382,7 +386,7 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("PERMISSIONS") => {
|
||||
self.pop_peek();
|
||||
res.permissions = self.parse_permission(false)?;
|
||||
res.permissions = ctx.run(|ctx| self.parse_permission(ctx, false)).await?;
|
||||
}
|
||||
t!("CHANGEFEED") => {
|
||||
self.pop_peek();
|
||||
|
@ -393,11 +397,11 @@ impl Parser<'_> {
|
|||
match self.peek_kind() {
|
||||
t!("(") => {
|
||||
let open = self.pop_peek().span;
|
||||
res.view = Some(self.parse_view()?);
|
||||
res.view = Some(self.parse_view(ctx).await?);
|
||||
self.expect_closing_delimiter(t!(")"), open)?;
|
||||
}
|
||||
t!("SELECT") => {
|
||||
res.view = Some(self.parse_view()?);
|
||||
res.view = Some(self.parse_view(ctx).await?);
|
||||
}
|
||||
x => unexpected!(self, x, "`SELECT`"),
|
||||
}
|
||||
|
@ -409,7 +413,7 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn parse_define_event(&mut self) -> ParseResult<DefineEventStatement> {
|
||||
pub async fn parse_define_event(&mut self, ctx: &mut Stk) -> ParseResult<DefineEventStatement> {
|
||||
let if_not_exists = if self.eat(t!("IF")) {
|
||||
expected!(self, t!("NOT"));
|
||||
expected!(self, t!("EXISTS"));
|
||||
|
@ -433,13 +437,13 @@ impl Parser<'_> {
|
|||
match self.peek_kind() {
|
||||
t!("WHEN") => {
|
||||
self.pop_peek();
|
||||
res.when = self.parse_value()?;
|
||||
res.when = ctx.run(|ctx| self.parse_value(ctx)).await?;
|
||||
}
|
||||
t!("THEN") => {
|
||||
self.pop_peek();
|
||||
res.then = Values(vec![self.parse_value()?]);
|
||||
res.then = Values(vec![ctx.run(|ctx| self.parse_value(ctx)).await?]);
|
||||
while self.eat(t!(",")) {
|
||||
res.then.0.push(self.parse_value()?)
|
||||
res.then.0.push(ctx.run(|ctx| self.parse_value(ctx)).await?)
|
||||
}
|
||||
}
|
||||
t!("COMMENT") => {
|
||||
|
@ -452,7 +456,7 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn parse_define_field(&mut self) -> ParseResult<DefineFieldStatement> {
|
||||
pub async fn parse_define_field(&mut self, ctx: &mut Stk) -> ParseResult<DefineFieldStatement> {
|
||||
let if_not_exists = if self.eat(t!("IF")) {
|
||||
expected!(self, t!("NOT"));
|
||||
expected!(self, t!("EXISTS"));
|
||||
|
@ -481,7 +485,7 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("TYPE") => {
|
||||
self.pop_peek();
|
||||
res.kind = Some(self.parse_inner_kind()?);
|
||||
res.kind = Some(ctx.run(|ctx| self.parse_inner_kind(ctx)).await?);
|
||||
}
|
||||
t!("READONLY") => {
|
||||
self.pop_peek();
|
||||
|
@ -489,19 +493,19 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("VALUE") => {
|
||||
self.pop_peek();
|
||||
res.value = Some(self.parse_value()?);
|
||||
res.value = Some(ctx.run(|ctx| self.parse_value(ctx)).await?);
|
||||
}
|
||||
t!("ASSERT") => {
|
||||
self.pop_peek();
|
||||
res.assert = Some(self.parse_value()?);
|
||||
res.assert = Some(ctx.run(|ctx| self.parse_value(ctx)).await?);
|
||||
}
|
||||
t!("DEFAULT") => {
|
||||
self.pop_peek();
|
||||
res.default = Some(self.parse_value()?);
|
||||
res.default = Some(ctx.run(|ctx| self.parse_value(ctx)).await?);
|
||||
}
|
||||
t!("PERMISSIONS") => {
|
||||
self.pop_peek();
|
||||
res.permissions = self.parse_permission(true)?;
|
||||
res.permissions = ctx.run(|ctx| self.parse_permission(ctx, true)).await?;
|
||||
}
|
||||
t!("COMMENT") => {
|
||||
self.pop_peek();
|
||||
|
@ -530,6 +534,7 @@ impl Parser<'_> {
|
|||
let mut res = DefineIndexStatement {
|
||||
name,
|
||||
what,
|
||||
|
||||
if_not_exists,
|
||||
..Default::default()
|
||||
};
|
||||
|
@ -692,10 +697,12 @@ impl Parser<'_> {
|
|||
let name = self.next_token_value()?;
|
||||
let mut res = DefineAnalyzerStatement {
|
||||
name,
|
||||
|
||||
function: None,
|
||||
tokenizers: None,
|
||||
filters: None,
|
||||
comment: None,
|
||||
|
||||
if_not_exists,
|
||||
};
|
||||
loop {
|
||||
|
@ -763,6 +770,7 @@ impl Parser<'_> {
|
|||
}
|
||||
res.tokenizers = Some(tokenizers);
|
||||
}
|
||||
|
||||
t!("FUNCTION") => {
|
||||
self.pop_peek();
|
||||
expected!(self, t!("fn"));
|
||||
@ -1,3 +1,5 @@
|
|||
use reblessive::Stk;
|
||||
|
||||
use crate::{
|
||||
sql::{statements::DeleteStatement, Values},
|
||||
syn::v2::{
|
||||
|
@ -7,12 +9,12 @@ use crate::{
|
|||
};
|
||||
|
||||
impl Parser<'_> {
|
||||
pub fn parse_delete_stmt(&mut self) -> ParseResult<DeleteStatement> {
|
||||
pub async fn parse_delete_stmt(&mut self, ctx: &mut Stk) -> ParseResult<DeleteStatement> {
|
||||
self.eat(t!("FROM"));
|
||||
let only = self.eat(t!("ONLY"));
|
||||
let what = Values(self.parse_what_list()?);
|
||||
let cond = self.try_parse_condition()?;
|
||||
let output = self.try_parse_output()?;
|
||||
let what = Values(self.parse_what_list(ctx).await?);
|
||||
let cond = self.try_parse_condition(ctx).await?;
|
||||
let output = self.try_parse_output(ctx).await?;
|
||||
let timeout = self.try_parse_timeout()?;
|
||||
let parallel = self.eat(t!("PARALLEL"));
|
||||
|
||||
@ -1,3 +1,5 @@
|
|||
use reblessive::Stk;
|
||||
|
||||
use crate::{
|
||||
sql::statements::IfelseStatement,
|
||||
syn::v2::{
|
||||
|
@ -10,8 +12,8 @@ use crate::{
|
|||
};
|
||||
|
||||
impl Parser<'_> {
|
||||
pub fn parse_if_stmt(&mut self) -> ParseResult<IfelseStatement> {
|
||||
let condition = self.parse_value_field()?;
|
||||
pub async fn parse_if_stmt(&mut self, ctx: &mut Stk) -> ParseResult<IfelseStatement> {
|
||||
let condition = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
|
||||
let mut res = IfelseStatement {
|
||||
exprs: Vec::new(),
|
||||
|
@ -21,15 +23,15 @@ impl Parser<'_> {
|
|||
let next = self.next();
|
||||
match next.kind {
|
||||
t!("THEN") => {
|
||||
let body = self.parse_value_field()?;
|
||||
let body = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
self.eat(t!(";"));
|
||||
res.exprs.push((condition, body));
|
||||
self.parse_worded_tail(&mut res)?;
|
||||
self.parse_worded_tail(ctx, &mut res).await?;
|
||||
}
|
||||
t!("{") => {
|
||||
let body = self.parse_block(next.span)?;
|
||||
let body = self.parse_block(ctx, next.span).await?;
|
||||
res.exprs.push((condition, body.into()));
|
||||
self.parse_bracketed_tail(&mut res)?;
|
||||
self.parse_bracketed_tail(ctx, &mut res).await?;
|
||||
}
|
||||
x => unexpected!(self, x, "THEN or '{'"),
|
||||
}
|
||||
|
@ -37,19 +39,23 @@ impl Parser<'_> {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
fn parse_worded_tail(&mut self, res: &mut IfelseStatement) -> ParseResult<()> {
|
||||
async fn parse_worded_tail(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
res: &mut IfelseStatement,
|
||||
) -> ParseResult<()> {
|
||||
loop {
|
||||
match self.next().kind {
|
||||
t!("END") => return Ok(()),
|
||||
t!("ELSE") => {
|
||||
if self.eat(t!("IF")) {
|
||||
let condition = self.parse_value_field()?;
|
||||
let condition = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
expected!(self, t!("THEN"));
|
||||
let body = self.parse_value_field()?;
|
||||
let body = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
self.eat(t!(";"));
|
||||
res.exprs.push((condition, body));
|
||||
} else {
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
self.eat(t!(";"));
|
||||
expected!(self, t!("END"));
|
||||
res.close = Some(value);
|
||||
|
@ -61,19 +67,23 @@ impl Parser<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_bracketed_tail(&mut self, res: &mut IfelseStatement) -> ParseResult<()> {
|
||||
async fn parse_bracketed_tail(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
res: &mut IfelseStatement,
|
||||
) -> ParseResult<()> {
|
||||
loop {
|
||||
match self.peek_kind() {
|
||||
t!("ELSE") => {
|
||||
self.pop_peek();
|
||||
if self.eat(t!("IF")) {
|
||||
let condition = self.parse_value_field()?;
|
||||
let condition = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
let span = expected!(self, t!("{")).span;
|
||||
let body = self.parse_block(span)?;
|
||||
let body = self.parse_block(ctx, span).await?;
|
||||
res.exprs.push((condition, body.into()));
|
||||
} else {
|
||||
let span = expected!(self, t!("{")).span;
|
||||
let value = self.parse_block(span)?;
|
||||
let value = self.parse_block(ctx, span).await?;
|
||||
res.close = Some(value.into());
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -1,3 +1,5 @@
|
|||
use reblessive::Stk;
|
||||
|
||||
use crate::{
|
||||
sql::{statements::InsertStatement, Data, Value},
|
||||
syn::v2::{
|
||||
|
@ -7,7 +9,10 @@ use crate::{
|
|||
};
|
||||
|
||||
impl Parser<'_> {
|
||||
pub(crate) fn parse_insert_stmt(&mut self) -> ParseResult<InsertStatement> {
|
||||
pub(crate) async fn parse_insert_stmt(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
) -> ParseResult<InsertStatement> {
|
||||
let ignore = self.eat(t!("IGNORE"));
|
||||
expected!(self, t!("INTO"));
|
||||
let next = self.next();
|
||||
|
@ -26,23 +31,23 @@ impl Parser<'_> {
|
|||
let data = match self.peek_kind() {
|
||||
t!("(") => {
|
||||
let start = self.pop_peek().span;
|
||||
let fields = self.parse_idiom_list()?;
|
||||
let fields = self.parse_idiom_list(ctx).await?;
|
||||
self.expect_closing_delimiter(t!(")"), start)?;
|
||||
expected!(self, t!("VALUES"));
|
||||
|
||||
let start = expected!(self, t!("(")).span;
|
||||
let mut values = vec![self.parse_value()?];
|
||||
let mut values = vec![ctx.run(|ctx| self.parse_value(ctx)).await?];
|
||||
while self.eat(t!(",")) {
|
||||
values.push(self.parse_value()?);
|
||||
values.push(ctx.run(|ctx| self.parse_value(ctx)).await?);
|
||||
}
|
||||
self.expect_closing_delimiter(t!(")"), start)?;
|
||||
|
||||
let mut values = vec![values];
|
||||
while self.eat(t!(",")) {
|
||||
let start = expected!(self, t!("(")).span;
|
||||
let mut inner_values = vec![self.parse_value()?];
|
||||
let mut inner_values = vec![ctx.run(|ctx| self.parse_value(ctx)).await?];
|
||||
while self.eat(t!(",")) {
|
||||
inner_values.push(self.parse_value()?);
|
||||
inner_values.push(ctx.run(|ctx| self.parse_value(ctx)).await?);
|
||||
}
|
||||
values.push(inner_values);
|
||||
self.expect_closing_delimiter(t!(")"), start)?;
|
||||
|
@ -56,13 +61,17 @@ impl Parser<'_> {
|
|||
)
|
||||
}
|
||||
_ => {
|
||||
let value = self.parse_value()?;
|
||||
let value = ctx.run(|ctx| self.parse_value(ctx)).await?;
|
||||
Data::SingleExpression(value)
|
||||
}
|
||||
};
|
||||
|
||||
let update = self.eat(t!("ON")).then(|| self.parse_insert_update()).transpose()?;
|
||||
let output = self.try_parse_output()?;
|
||||
let update = if self.eat(t!("ON")) {
|
||||
Some(self.parse_insert_update(ctx).await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let output = self.try_parse_output(ctx).await?;
|
||||
let timeout = self.try_parse_timeout()?;
|
||||
let parallel = self.eat(t!("PARALLEL"));
|
||||
Ok(InsertStatement {
|
||||
|
@ -76,19 +85,19 @@ impl Parser<'_> {
|
|||
})
|
||||
}
|
||||
|
||||
fn parse_insert_update(&mut self) -> ParseResult<Data> {
|
||||
async fn parse_insert_update(&mut self, ctx: &mut Stk) -> ParseResult<Data> {
|
||||
expected!(self, t!("DUPLICATE"));
|
||||
expected!(self, t!("KEY"));
|
||||
expected!(self, t!("UPDATE"));
|
||||
let l = self.parse_plain_idiom()?;
|
||||
let l = self.parse_plain_idiom(ctx).await?;
|
||||
let o = self.parse_assigner()?;
|
||||
let r = self.parse_value()?;
|
||||
let r = ctx.run(|ctx| self.parse_value(ctx)).await?;
|
||||
let mut data = vec![(l, o, r)];
|
||||
|
||||
while self.eat(t!(",")) {
|
||||
let l = self.parse_plain_idiom()?;
|
||||
let l = self.parse_plain_idiom(ctx).await?;
|
||||
let o = self.parse_assigner()?;
|
||||
let r = self.parse_value()?;
|
||||
let r = ctx.run(|ctx| self.parse_value(ctx)).await?;
|
||||
data.push((l, o, r))
|
||||
}
|
||||
|
||||
@ -1,3 +1,5 @@
|
|||
use reblessive::Stk;
|
||||
|
||||
use crate::sql::block::Entry;
|
||||
use crate::sql::statements::show::{ShowSince, ShowStatement};
|
||||
use crate::sql::statements::sleep::SleepStatement;
|
||||
|
@ -33,7 +35,7 @@ mod select;
|
|||
mod update;
|
||||
|
||||
impl Parser<'_> {
|
||||
pub fn parse_stmt_list(&mut self) -> ParseResult<Statements> {
|
||||
pub async fn parse_stmt_list(&mut self, ctx: &mut Stk) -> ParseResult<Statements> {
|
||||
let mut res = Vec::new();
|
||||
loop {
|
||||
match self.peek_kind() {
|
||||
|
@ -41,7 +43,7 @@ impl Parser<'_> {
|
|||
t!(";") => continue,
|
||||
t!("eof") => break,
|
||||
_ => {
|
||||
let stmt = self.parse_stmt()?;
|
||||
let stmt = ctx.run(|ctx| self.parse_stmt(ctx)).await?;
|
||||
res.push(stmt);
|
||||
if !self.eat(t!(";")) {
|
||||
if self.eat(t!("eof")) {
|
||||
|
@ -89,7 +91,7 @@ impl Parser<'_> {
|
|||
)
|
||||
}
|
||||
|
||||
pub(super) fn parse_stmt(&mut self) -> ParseResult<Statement> {
|
||||
pub(super) async fn parse_stmt(&mut self, ctx: &mut Stk) -> ParseResult<Statement> {
|
||||
let token = self.peek();
|
||||
match token.kind {
|
||||
t!("ANALYZE") => {
|
||||
|
@ -118,23 +120,23 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("CREATE") => {
|
||||
self.pop_peek();
|
||||
self.parse_create_stmt().map(Statement::Create)
|
||||
ctx.run(|ctx| self.parse_create_stmt(ctx)).await.map(Statement::Create)
|
||||
}
|
||||
t!("DEFINE") => {
|
||||
self.pop_peek();
|
||||
self.parse_define_stmt().map(Statement::Define)
|
||||
ctx.run(|ctx| self.parse_define_stmt(ctx)).await.map(Statement::Define)
|
||||
}
|
||||
t!("DELETE") => {
|
||||
self.pop_peek();
|
||||
self.parse_delete_stmt().map(Statement::Delete)
|
||||
ctx.run(|ctx| self.parse_delete_stmt(ctx)).await.map(Statement::Delete)
|
||||
}
|
||||
t!("FOR") => {
|
||||
self.pop_peek();
|
||||
self.parse_for_stmt().map(Statement::Foreach)
|
||||
ctx.run(|ctx| self.parse_for_stmt(ctx)).await.map(Statement::Foreach)
|
||||
}
|
||||
t!("IF") => {
|
||||
self.pop_peek();
|
||||
self.parse_if_stmt().map(Statement::Ifelse)
|
||||
ctx.run(|ctx| self.parse_if_stmt(ctx)).await.map(Statement::Ifelse)
|
||||
}
|
||||
t!("INFO") => {
|
||||
self.pop_peek();
|
||||
|
@ -142,7 +144,7 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("INSERT") => {
|
||||
self.pop_peek();
|
||||
self.parse_insert_stmt().map(Statement::Insert)
|
||||
ctx.run(|ctx| self.parse_insert_stmt(ctx)).await.map(Statement::Insert)
|
||||
}
|
||||
t!("KILL") => {
|
||||
self.pop_peek();
|
||||
|
@ -150,7 +152,7 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("LIVE") => {
|
||||
self.pop_peek();
|
||||
self.parse_live_stmt().map(Statement::Live)
|
||||
ctx.run(|ctx| self.parse_live_stmt(ctx)).await.map(Statement::Live)
|
||||
}
|
||||
t!("OPTION") => {
|
||||
self.pop_peek();
|
||||
|
@ -158,11 +160,11 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("RETURN") => {
|
||||
self.pop_peek();
|
||||
self.parse_return_stmt().map(Statement::Output)
|
||||
ctx.run(|ctx| self.parse_return_stmt(ctx)).await.map(Statement::Output)
|
||||
}
|
||||
t!("RELATE") => {
|
||||
self.pop_peek();
|
||||
self.parse_relate_stmt().map(Statement::Relate)
|
||||
ctx.run(|ctx| self.parse_relate_stmt(ctx)).await.map(Statement::Relate)
|
||||
}
|
||||
t!("REMOVE") => {
|
||||
self.pop_peek();
|
||||
|
@ -170,11 +172,11 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("SELECT") => {
|
||||
self.pop_peek();
|
||||
self.parse_select_stmt().map(Statement::Select)
|
||||
ctx.run(|ctx| self.parse_select_stmt(ctx)).await.map(Statement::Select)
|
||||
}
|
||||
t!("LET") => {
|
||||
self.pop_peek();
|
||||
self.parse_let_stmt().map(Statement::Set)
|
||||
ctx.run(|ctx| self.parse_let_stmt(ctx)).await.map(Statement::Set)
|
||||
}
|
||||
t!("SHOW") => {
|
||||
self.pop_peek();
|
||||
|
@ -186,11 +188,11 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("THROW") => {
|
||||
self.pop_peek();
|
||||
self.parse_throw_stmt().map(Statement::Throw)
|
||||
ctx.run(|ctx| self.parse_throw_stmt(ctx)).await.map(Statement::Throw)
|
||||
}
|
||||
t!("UPDATE") => {
|
||||
self.pop_peek();
|
||||
self.parse_update_stmt().map(Statement::Update)
|
||||
ctx.run(|ctx| self.parse_update_stmt(ctx)).await.map(Statement::Update)
|
||||
}
|
||||
t!("USE") => {
|
||||
self.pop_peek();
|
||||
|
@ -198,13 +200,13 @@ impl Parser<'_> {
|
|||
}
|
||||
_ => {
|
||||
// TODO: Provide information about keywords.
|
||||
let value = self.parse_value_field()?;
|
||||
let value = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
Ok(Self::refine_stmt_value(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn parse_entry(&mut self) -> ParseResult<Entry> {
|
||||
pub(super) async fn parse_entry(&mut self, ctx: &mut Stk) -> ParseResult<Entry> {
|
||||
let token = self.peek();
|
||||
match token.kind {
|
||||
t!("BREAK") => {
|
||||
|
@ -217,35 +219,35 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("CREATE") => {
|
||||
self.pop_peek();
|
||||
self.parse_create_stmt().map(Entry::Create)
|
||||
self.parse_create_stmt(ctx).await.map(Entry::Create)
|
||||
}
|
||||
t!("DEFINE") => {
|
||||
self.pop_peek();
|
||||
self.parse_define_stmt().map(Entry::Define)
|
||||
self.parse_define_stmt(ctx).await.map(Entry::Define)
|
||||
}
|
||||
t!("DELETE") => {
|
||||
self.pop_peek();
|
||||
self.parse_delete_stmt().map(Entry::Delete)
|
||||
self.parse_delete_stmt(ctx).await.map(Entry::Delete)
|
||||
}
|
||||
t!("FOR") => {
|
||||
self.pop_peek();
|
||||
self.parse_for_stmt().map(Entry::Foreach)
|
||||
self.parse_for_stmt(ctx).await.map(Entry::Foreach)
|
||||
}
|
||||
t!("IF") => {
|
||||
self.pop_peek();
|
||||
self.parse_if_stmt().map(Entry::Ifelse)
|
||||
self.parse_if_stmt(ctx).await.map(Entry::Ifelse)
|
||||
}
|
||||
t!("INSERT") => {
|
||||
self.pop_peek();
|
||||
self.parse_insert_stmt().map(Entry::Insert)
|
||||
self.parse_insert_stmt(ctx).await.map(Entry::Insert)
|
||||
}
|
||||
t!("RETURN") => {
|
||||
self.pop_peek();
|
||||
self.parse_return_stmt().map(Entry::Output)
|
||||
self.parse_return_stmt(ctx).await.map(Entry::Output)
|
||||
}
|
||||
t!("RELATE") => {
|
||||
self.pop_peek();
|
||||
self.parse_relate_stmt().map(Entry::Relate)
|
||||
self.parse_relate_stmt(ctx).await.map(Entry::Relate)
|
||||
}
|
||||
t!("REMOVE") => {
|
||||
self.pop_peek();
|
||||
|
@ -253,23 +255,23 @@ impl Parser<'_> {
|
|||
}
|
||||
t!("SELECT") => {
|
||||
self.pop_peek();
|
||||
self.parse_select_stmt().map(Entry::Select)
|
||||
self.parse_select_stmt(ctx).await.map(Entry::Select)
|
||||
}
|
||||
t!("LET") => {
|
||||
self.pop_peek();
|
||||
self.parse_let_stmt().map(Entry::Set)
|
||||
self.parse_let_stmt(ctx).await.map(Entry::Set)
|
||||
}
|
||||
t!("THROW") => {
|
||||
self.pop_peek();
|
||||
self.parse_throw_stmt().map(Entry::Throw)
|
||||
self.parse_throw_stmt(ctx).await.map(Entry::Throw)
|
||||
}
|
||||
t!("UPDATE") => {
|
||||
self.pop_peek();
|
||||
self.parse_update_stmt().map(Entry::Update)
|
||||
self.parse_update_stmt(ctx).await.map(Entry::Update)
|
||||
}
|
||||
_ => {
|
||||
// TODO: Provide information about keywords.
|
||||
let v = self.parse_value_field()?;
|
||||
let v = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
Ok(Self::refine_entry_value(v))
|
||||
}
|
||||
}
|
||||
|
@ -390,13 +392,13 @@ impl Parser<'_> {
|
|||
///
|
||||
/// # Parser State
|
||||
/// Expects `FOR` to already be consumed.
|
||||
pub fn parse_for_stmt(&mut self) -> ParseResult<ForeachStatement> {
|
||||
pub async fn parse_for_stmt(&mut self, stk: &mut Stk) -> ParseResult<ForeachStatement> {
|
||||
let param = self.next_token_value()?;
|
||||
expected!(self, t!("IN"));
|
||||
let range = self.parse_value()?;
|
||||
let range = stk.run(|stk| self.parse_value(stk)).await?;
|
||||
|
||||
let span = expected!(self, t!("{")).span;
|
||||
let block = self.parse_block(span)?;
|
||||
let block = self.parse_block(stk, span).await?;
|
||||
Ok(ForeachStatement {
|
||||
param,
|
||||
range,
|
||||
|
@ -455,7 +457,7 @@ impl Parser<'_> {
|
|||
///
|
||||
/// # Parser State
|
||||
/// Expects `LIVE` to already be consumed.
|
||||
pub(crate) fn parse_live_stmt(&mut self) -> ParseResult<LiveStatement> {
|
||||
pub(crate) async fn parse_live_stmt(&mut self, stk: &mut Stk) -> ParseResult<LiveStatement> {
|
||||
expected!(self, t!("SELECT"));
|
||||
|
||||
let expr = match self.peek_kind() {
|
||||
|
@ -463,15 +465,15 @@ impl Parser<'_> {
|
|||
self.pop_peek();
|
||||
Fields::default()
|
||||
}
|
||||
_ => self.parse_fields()?,
|
||||
_ => self.parse_fields(stk).await?,
|
||||
};
|
||||
expected!(self, t!("FROM"));
|
||||
let what = match self.peek().kind {
|
||||
t!("$param") => Value::Param(self.next_token_value()?),
|
||||
_ => Value::Table(self.next_token_value()?),
|
||||
};
|
||||
let cond = self.try_parse_condition()?;
|
||||
let fetch = self.try_parse_fetch()?;
|
||||
let cond = self.try_parse_condition(stk).await?;
|
||||
let fetch = self.try_parse_fetch(stk).await?;
|
||||
|
||||
Ok(LiveStatement::from_source_parts(expr, what, cond, fetch))
|
||||
}
|
||||
|
@ -501,9 +503,12 @@ impl Parser<'_> {
|
|||
///
|
||||
/// # Parser State
|
||||
/// Expects `RETURN` to already be consumed.
|
||||
pub(crate) fn parse_return_stmt(&mut self) -> ParseResult<OutputStatement> {
|
||||
let what = self.parse_value_field()?;
|
||||
let fetch = self.try_parse_fetch()?;
|
||||
pub(crate) async fn parse_return_stmt(
|
||||
&mut self,
|
||||
ctx: &mut Stk,
|
||||
) -> ParseResult<OutputStatement> {
|
||||
let what = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
|
||||
let fetch = self.try_parse_fetch(ctx).await?;
|
||||
Ok(OutputStatement {
|
||||
what,
|
||||
fetch,
|
||||
|
@ -519,10 +524,10 @@ impl Parser<'_> {
|
|||
///
|
||||
/// # Parser State
|
||||
/// Expects `LET` to already be consumed.
|
||||
pub(crate) fn parse_let_stmt(&mut self) -> ParseResult<SetStatement> {
|
||||
pub(crate) async fn parse_let_stmt(&mut self, ctx: &mut Stk) -> ParseResult<SetStatement> {
|
||||
let name = self.next_token_value::<Param>()?.0 .0;
|
||||
expected!(self, t!("="));
|
||||
let what = self.parse_value()?;
|
||||
let what = self.parse_value(ctx).await?;
|
||||
Ok(SetStatement {
|
||||
name,
|
||||
what,
|
||||
|
@ -579,8 +584,8 @@ impl Parser<'_> {
|
|||
///
|
||||
/// # Parser State
|
||||
/// Expects `THROW` to already be consumed.
|
||||
pub(crate) fn parse_throw_stmt(&mut self) -> ParseResult<ThrowStatement> {
|
||||
let error = self.parse_value_field()?;
|
||||
pub(crate) async fn parse_throw_stmt(&mut self, ctx: &mut Stk) -> ParseResult<ThrowStatement> {
|
||||
let error = self.parse_value_field(ctx).await?;
|
||||
Ok(ThrowStatement {
|
||||
error,
|
||||
})
|
||||
|
@@ -1,11 +1,12 @@
//! Contains parsing code for smaller common parts of statements.
use crate::sql::change_feed_include::ChangeFeedInclude;
use reblessive::Stk;
use crate::{
sql::{
changefeed::ChangeFeed, index::Distance, Base, Cond, Data, Duration, Fetch, Fetchs, Field,
Fields, Group, Groups, Ident, Idiom, Output, Permission, Permissions, Tables, Timeout,
Value, View,
change_feed_include::ChangeFeedInclude, changefeed::ChangeFeed, index::Distance, Base,
Cond, Data, Duration, Fetch, Fetchs, Field, Fields, Group, Groups, Ident, Idiom, Output,
Permission, Permissions, Tables, Timeout, Value, View,
},
syn::v2::{
parser::{

@@ -20,15 +21,15 @@ use crate::{
impl Parser<'_> {
/// Parses a data production if the next token is a data keyword.
/// Otherwise returns None
pub fn try_parse_data(&mut self) -> ParseResult<Option<Data>> {
pub async fn try_parse_data(&mut self, ctx: &mut Stk) -> ParseResult<Option<Data>> {
let res = match self.peek().kind {
t!("SET") => {
self.pop_peek();
let mut set_list = Vec::new();
loop {
let idiom = self.parse_plain_idiom()?;
let idiom = self.parse_plain_idiom(ctx).await?;
let operator = self.parse_assigner()?;
let value = self.parse_value()?;
let value = ctx.run(|ctx| self.parse_value(ctx)).await?;
set_list.push((idiom, operator, value));
if !self.eat(t!(",")) {
break;

@@ -38,24 +39,24 @@ impl Parser<'_> {
}
t!("UNSET") => {
self.pop_peek();
let idiom_list = self.parse_idiom_list()?;
let idiom_list = self.parse_idiom_list(ctx).await?;
Data::UnsetExpression(idiom_list)
}
t!("PATCH") => {
self.pop_peek();
Data::PatchExpression(self.parse_value()?)
Data::PatchExpression(ctx.run(|ctx| self.parse_value(ctx)).await?)
}
t!("MERGE") => {
self.pop_peek();
Data::MergeExpression(self.parse_value()?)
Data::MergeExpression(ctx.run(|ctx| self.parse_value(ctx)).await?)
}
t!("REPLACE") => {
self.pop_peek();
Data::ReplaceExpression(self.parse_value()?)
Data::ReplaceExpression(ctx.run(|ctx| self.parse_value(ctx)).await?)
}
t!("CONTENT") => {
self.pop_peek();
Data::ContentExpression(self.parse_value()?)
Data::ContentExpression(ctx.run(|ctx| self.parse_value(ctx)).await?)
}
_ => return Ok(None),
};

@@ -63,7 +64,7 @@ impl Parser<'_> {
}
/// Parses a statement output if the next token is `return`.
pub fn try_parse_output(&mut self) -> ParseResult<Option<Output>> {
pub async fn try_parse_output(&mut self, ctx: &mut Stk) -> ParseResult<Option<Output>> {
if !self.eat(t!("RETURN")) {
return Ok(None);
}

@@ -88,7 +89,7 @@ impl Parser<'_> {
self.pop_peek();
Output::Before
}
_ => Output::Fields(self.parse_fields()?),
_ => Output::Fields(self.parse_fields(ctx).await?),
};
Ok(Some(res))
}

@@ -102,19 +103,19 @@ impl Parser<'_> {
Ok(Some(Timeout(duration)))
}
pub fn try_parse_fetch(&mut self) -> ParseResult<Option<Fetchs>> {
pub async fn try_parse_fetch(&mut self, ctx: &mut Stk) -> ParseResult<Option<Fetchs>> {
if !self.eat(t!("FETCH")) {
return Ok(None);
}
let v = self.parse_idiom_list()?.into_iter().map(Fetch).collect();
let v = self.parse_idiom_list(ctx).await?.into_iter().map(Fetch).collect();
Ok(Some(Fetchs(v)))
}
pub fn try_parse_condition(&mut self) -> ParseResult<Option<Cond>> {
pub async fn try_parse_condition(&mut self, ctx: &mut Stk) -> ParseResult<Option<Cond>> {
if !self.eat(t!("WHERE")) {
return Ok(None);
}
let v = self.parse_value_field()?;
let v = ctx.run(|ctx| self.parse_value_field(ctx)).await?;
Ok(Some(Cond(v)))
}

@@ -212,7 +213,11 @@ impl Parser<'_> {
///
/// # Parser State
/// Expects the parser to have just eaten the `PERMISSIONS` keyword.
pub fn parse_permission(&mut self, permissive: bool) -> ParseResult<Permissions> {
pub async fn parse_permission(
&mut self,
stk: &mut Stk,
permissive: bool,
) -> ParseResult<Permissions> {
match self.next().kind {
t!("NONE") => Ok(Permissions::none()),
t!("FULL") => Ok(Permissions::full()),

@@ -222,10 +227,10 @@ impl Parser<'_> {
} else {
Permissions::none()
};
self.parse_specific_permission(&mut permission)?;
stk.run(|stk| self.parse_specific_permission(stk, &mut permission)).await?;
self.eat(t!(","));
while self.eat(t!("FOR")) {
self.parse_specific_permission(&mut permission)?;
stk.run(|stk| self.parse_specific_permission(stk, &mut permission)).await?;
self.eat(t!(","));
}
Ok(permission)

@@ -240,7 +245,11 @@ impl Parser<'_> {
///
/// # Parser State
/// Expects the parser to just have eaten the `FOR` keyword.
pub fn parse_specific_permission(&mut self, permissions: &mut Permissions) -> ParseResult<()> {
pub async fn parse_specific_permission(
&mut self,
stk: &mut Stk,
permissions: &mut Permissions,
) -> ParseResult<()> {
let mut select = false;
let mut create = false;
let mut update = false;

@@ -267,7 +276,7 @@ impl Parser<'_> {
}
}
let permission_value = self.parse_permission_value()?;
let permission_value = self.parse_permission_value(stk).await?;
if select {
permissions.select = permission_value.clone();
}

@@ -289,11 +298,11 @@ impl Parser<'_> {
/// # Parser State
///
/// Expects the parser to just have eaten either `SELECT`, `CREATE`, `UPDATE` or `DELETE`.
pub fn parse_permission_value(&mut self) -> ParseResult<Permission> {
pub async fn parse_permission_value(&mut self, stk: &mut Stk) -> ParseResult<Permission> {
match self.next().kind {
t!("NONE") => Ok(Permission::None),
t!("FULL") => Ok(Permission::Full),
t!("WHERE") => Ok(Permission::Specific(self.parse_value_field()?)),
t!("WHERE") => Ok(Permission::Specific(self.parse_value_field(stk).await?)),
x => unexpected!(self, x, "'NONE', 'FULL', or 'WHERE'"),
}
}

@@ -350,10 +359,10 @@ impl Parser<'_> {
/// # Parse State
/// Expects the parser to have already eaten the possible `(` if the view was wrapped in
/// parens. Expects the next keyword to be `SELECT`.
pub fn parse_view(&mut self) -> ParseResult<View> {
pub async fn parse_view(&mut self, stk: &mut Stk) -> ParseResult<View> {
expected!(self, t!("SELECT"));
let before_fields = self.peek().span;
let fields = self.parse_fields()?;
let fields = self.parse_fields(stk).await?;
let fields_span = before_fields.covers(self.recent_span());
expected!(self, t!("FROM"));
let mut from = vec![self.next_token_value()?];

@@ -361,7 +370,7 @@ impl Parser<'_> {
from.push(self.next_token_value()?);
}
let cond = self.try_parse_condition()?;
let cond = self.try_parse_condition(stk).await?;
let group = self.try_parse_group(&fields, fields_span)?;
Ok(View {

@@ -378,9 +387,9 @@ impl Parser<'_> {
DistanceKind::Chebyshev => Distance::Chebyshev,
DistanceKind::Cosine => Distance::Cosine,
DistanceKind::Euclidean => Distance::Euclidean,
DistanceKind::Manhattan => Distance::Manhattan,
DistanceKind::Hamming => Distance::Hamming,
DistanceKind::Jaccard => Distance::Jaccard,
DistanceKind::Manhattan => Distance::Manhattan,
DistanceKind::Minkowski => {
let distance = self.next_token_value()?;
Distance::Minkowski(distance)
@@ -1,3 +1,5 @@
use reblessive::Stk;
use crate::{
sql::{statements::RelateStatement, Subquery, Value},
syn::v2::{

@@ -10,13 +12,13 @@ use crate::{
};
impl Parser<'_> {
pub fn parse_relate_stmt(&mut self) -> ParseResult<RelateStatement> {
pub async fn parse_relate_stmt(&mut self, stk: &mut Stk) -> ParseResult<RelateStatement> {
let only = self.eat(t!("ONLY"));
let (kind, from, with) = self.parse_relation()?;
let (kind, from, with) = stk.run(|stk| self.parse_relation(stk)).await?;
let uniq = self.eat(t!("UNIQUE"));
let data = self.try_parse_data()?;
let output = self.try_parse_output()?;
let data = self.try_parse_data(stk).await?;
let output = self.try_parse_output(stk).await?;
let timeout = self.try_parse_timeout()?;
let parallel = self.eat(t!("PARALLEL"));
Ok(RelateStatement {

@@ -32,20 +34,20 @@ impl Parser<'_> {
})
}
pub fn parse_relation(&mut self) -> ParseResult<(Value, Value, Value)> {
let first = self.parse_relate_value()?;
pub async fn parse_relation(&mut self, stk: &mut Stk) -> ParseResult<(Value, Value, Value)> {
let first = self.parse_relate_value(stk).await?;
let is_o = match self.next().kind {
t!("->") => true,
t!("<-") => false,
x => unexpected!(self, x, "a relation arrow"),
};
let kind = self.parse_thing_or_table()?;
let kind = self.parse_thing_or_table(stk).await?;
if is_o {
expected!(self, t!("->"))
} else {
expected!(self, t!("<-"))
};
let second = self.parse_relate_value()?;
let second = self.parse_relate_value(stk).await?;
if is_o {
Ok((kind, first, second))
} else {

@@ -53,11 +55,11 @@ impl Parser<'_> {
}
}
pub fn parse_relate_value(&mut self) -> ParseResult<Value> {
pub async fn parse_relate_value(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
match self.peek_kind() {
t!("[") => {
let start = self.pop_peek().span;
self.parse_array(start).map(Value::Array)
self.parse_array(ctx, start).await.map(Value::Array)
}
t!("$param") => self.next_token_value().map(Value::Param),
t!("RETURN")

@@ -67,24 +69,30 @@ impl Parser<'_> {
| t!("DELETE")
| t!("RELATE")
| t!("DEFINE")
| t!("REMOVE") => self.parse_inner_subquery(None).map(|x| Value::Subquery(Box::new(x))),
| t!("REMOVE") => {
self.parse_inner_subquery(ctx, None).await.map(|x| Value::Subquery(Box::new(x)))
}
t!("IF") => {
self.pop_peek();
self.parse_if_stmt().map(|x| Value::Subquery(Box::new(Subquery::Ifelse(x))))
ctx.run(|ctx| self.parse_if_stmt(ctx))
.await
.map(|x| Value::Subquery(Box::new(Subquery::Ifelse(x))))
}
t!("(") => {
let span = self.pop_peek().span;
let res =
self.parse_inner_subquery(Some(span)).map(|x| Value::Subquery(Box::new(x)))?;
let res = self
.parse_inner_subquery(ctx, Some(span))
.await
.map(|x| Value::Subquery(Box::new(x)))?;
Ok(res)
}
_ => self.parse_thing().map(Value::Thing),
_ => self.parse_thing(ctx).await.map(Value::Thing),
}
}
pub fn parse_thing_or_table(&mut self) -> ParseResult<Value> {
pub async fn parse_thing_or_table(&mut self, ctx: &mut Stk) -> ParseResult<Value> {
if self.peek_token_at(1).kind == t!(":") {
self.parse_thing().map(Value::Thing)
self.parse_thing(ctx).await.map(Value::Thing)
} else {
self.next_token_value().map(Value::Table)
}
@@ -1,3 +1,5 @@
use reblessive::Stk;
use crate::{
sql::{
statements::SelectStatement, Explain, Field, Fields, Ident, Idioms, Limit, Order, Orders,

@@ -14,38 +16,45 @@ use crate::{
};
impl Parser<'_> {
pub(crate) fn parse_select_stmt(&mut self) -> ParseResult<SelectStatement> {
pub(crate) async fn parse_select_stmt(
&mut self,
stk: &mut Stk,
) -> ParseResult<SelectStatement> {
let before = self.peek().span;
let expr = self.parse_fields()?;
let expr = self.parse_fields(stk).await?;
let fields_span = before.covers(self.last_span());
let omit = self.eat(t!("OMIT")).then(|| self.parse_idiom_list()).transpose()?.map(Idioms);
let omit = if self.eat(t!("OMIT")) {
Some(Idioms(self.parse_idiom_list(stk).await?))
} else {
None
};
expected!(self, t!("FROM"));
let only = self.eat(t!("ONLY"));
let mut what = vec![self.parse_value()?];
let mut what = vec![stk.run(|ctx| self.parse_value(ctx)).await?];
while self.eat(t!(",")) {
what.push(self.parse_value()?);
what.push(stk.run(|ctx| self.parse_value(ctx)).await?);
}
let what = Values(what);
let with = self.try_parse_with()?;
let cond = self.try_parse_condition()?;
let cond = self.try_parse_condition(stk).await?;
let split = self.try_parse_split(&expr, fields_span)?;
let group = self.try_parse_group(&expr, fields_span)?;
let order = self.try_parse_orders(&expr, fields_span)?;
let (limit, start) = if let t!("START") = self.peek_kind() {
let start = self.try_parse_start()?;
let limit = self.try_parse_limit()?;
let start = self.try_parse_start(stk).await?;
let limit = self.try_parse_limit(stk).await?;
(limit, start)
} else {
let limit = self.try_parse_limit()?;
let start = self.try_parse_start()?;
let limit = self.try_parse_limit(stk).await?;
let start = self.try_parse_start(stk).await?;
(limit, start)
};
let fetch = self.try_parse_fetch()?;
let fetch = self.try_parse_fetch(stk).await?;
let version = self.try_parse_version()?;
let timeout = self.try_parse_timeout()?;
let parallel = self.eat(t!("PARALLEL"));

@@ -197,21 +206,21 @@ impl Parser<'_> {
})
}
fn try_parse_limit(&mut self) -> ParseResult<Option<Limit>> {
async fn try_parse_limit(&mut self, ctx: &mut Stk) -> ParseResult<Option<Limit>> {
if !self.eat(t!("LIMIT")) {
return Ok(None);
}
self.eat(t!("BY"));
let value = self.parse_value()?;
let value = ctx.run(|ctx| self.parse_value(ctx)).await?;
Ok(Some(Limit(value)))
}
fn try_parse_start(&mut self) -> ParseResult<Option<Start>> {
async fn try_parse_start(&mut self, ctx: &mut Stk) -> ParseResult<Option<Start>> {
if !self.eat(t!("START")) {
return Ok(None);
}
self.eat(t!("AT"));
let value = self.parse_value()?;
let value = ctx.run(|ctx| self.parse_value(ctx)).await?;
Ok(Some(Start(value)))
}
@@ -1,3 +1,5 @@
use reblessive::Stk;
use crate::{
sql::{statements::UpdateStatement, Values},
syn::v2::{

@@ -7,12 +9,12 @@ use crate::{
};
impl Parser<'_> {
pub fn parse_update_stmt(&mut self) -> ParseResult<UpdateStatement> {
pub async fn parse_update_stmt(&mut self, stk: &mut Stk) -> ParseResult<UpdateStatement> {
let only = self.eat(t!("ONLY"));
let what = Values(self.parse_what_list()?);
let data = self.try_parse_data()?;
let cond = self.try_parse_condition()?;
let output = self.try_parse_output()?;
let what = Values(self.parse_what_list(stk).await?);
let data = self.try_parse_data(stk).await?;
let cond = self.try_parse_condition(stk).await?;
let output = self.try_parse_output(stk).await?;
let timeout = self.try_parse_timeout()?;
let parallel = self.eat(t!("PARALLEL"));
@@ -26,6 +26,7 @@ use crate::{
syn::v2::parser::{Parser, PartialResult},
};
use chrono::{offset::TimeZone, NaiveDate, Offset, Utc};
use reblessive::Stack;
static SOURCE: &str = r#"
ANALYZE INDEX b on a;

@@ -323,8 +324,8 @@ fn statements() -> Vec<Statement> {
cols: Idioms(vec![Idiom(vec![Part::Field(Ident("a".to_owned()))])]),
index: Index::MTree(MTreeParams {
dimension: 4,
_distance: Default::default(),
distance: Distance::Minkowski(Number::Int(5)),
_distance: Default::default(),
capacity: 6,
doc_ids_order: 7,
doc_ids_cache: 8,

@@ -659,6 +660,7 @@ fn test_streaming() {
let source_bytes = SOURCE.as_bytes();
let mut source_start = 0;
let mut parser = Parser::new(&[]);
let mut stack = Stack::new();
for i in 0..source_bytes.len() {
let partial_source = &source_bytes[source_start..i];

@@ -666,7 +668,7 @@ fn test_streaming() {
//println!("{}:{}", i, src);
parser = parser.change_source(partial_source);
parser.reset();
match parser.parse_partial_statement() {
match stack.enter(|stk| parser.parse_partial_statement(stk)).finish() {
PartialResult::Pending {
..
} => {

@@ -696,6 +698,6 @@ fn test_streaming() {
"failed to parse at {}\nAt statement {}\n\n{:?}",
src,
expected[current_stmt],
parser.parse_partial_statement()
stack.enter(|stk| parser.parse_partial_statement(stk)).finish()
);
}
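Since every entry point is now async, synchronous callers drive the parser to completion with a reblessive `Stack`, as the updated streaming test above does. A crate-internal sketch of that calling convention (the `use` paths mirror the imports shown in these test hunks, so this only compiles inside the crate; it is an illustration, not part of the commit):

    use reblessive::Stack;

    // Crate-internal paths, taken from the test imports above.
    use crate::sql::Query;
    use crate::syn::v2::parser::Parser;

    fn parse_one(src: &str) -> Query {
        let mut parser = Parser::new(src.as_bytes());
        // The Stack owns the heap-backed call stack; `enter(..).finish()` polls the
        // async parser to completion without needing an async runtime.
        let mut stack = Stack::new();
        stack.enter(|stk| parser.parse_query(stk)).finish().unwrap()
    }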
@@ -1,10 +1,81 @@
use std::collections::BTreeMap;
use reblessive::Stack;
use crate::{
sql::{Array, Constant, Id, Number, Object, Strand, Thing, Value},
syn::v2::parser::mac::test_parse,
sql::{
Array, Constant, Id, Number, Object, Query, Statement, Statements, Strand, Thing, Value,
},
syn::v2::parser::{mac::test_parse, Parser},
};

#[test]
fn parse_large_depth_object() {
let mut text = String::new();
let start = r#" { foo: "#;
let middle = r#" {bar: 1} "#;
let end = r#" } "#;
for _ in 0..1000 {
text.push_str(start);
}
text.push_str(middle);
for _ in 0..1000 {
text.push_str(end);
}
let mut parser = Parser::new(text.as_bytes())
.with_query_recursion_limit(100000)
.with_object_recursion_limit(100000);
let mut stack = Stack::new();
let query = stack.enter(|stk| parser.parse_query(stk)).finish().unwrap();
let Query(Statements(stmts)) = query;
let Statement::Value(Value::Object(ref object)) = stmts[0] else {
panic!()
};
let mut object = object;
for _ in 0..999 {
let Some(Value::Object(ref new_object)) = object.get("foo") else {
panic!()
};
object = new_object
}
}

#[test]
fn parse_large_depth_record_id() {
let mut text = String::new();
let start = r#" r"a:[ "#;
let middle = r#" b:{c: 1} "#;
let end = r#" ]" "#;
for _ in 0..1000 {
text.push_str(start);
}
text.push_str(middle);
for _ in 0..1000 {
text.push_str(end);
}
let mut parser = Parser::new(text.as_bytes())
.with_query_recursion_limit(100000)
.with_object_recursion_limit(100000);
let mut stack = Stack::new();
let query = stack.enter(|stk| parser.parse_query(stk)).finish().unwrap();
let Query(Statements(stmts)) = query;
let Statement::Value(Value::Thing(ref thing)) = stmts[0] else {
panic!()
};
let mut thing = thing;
for _ in 0..999 {
let Id::Array(ref x) = thing.id else {
panic!()
};
let Value::Thing(ref new_thing) = x[0] else {
panic!()
};
thing = new_thing
}
}

#[test]
fn parse_recursive_record_string() {
let res = test_parse!(parse_value, r#" r"a:[r"b:{c: r"d:1"}"]" "#).unwrap();
@@ -1,3 +1,5 @@
use reblessive::Stk;
use super::{ParseResult, Parser};
use crate::{
sql::{id::Gen, Id, Ident, Range, Thing, Value},

@@ -12,8 +14,8 @@ use crate::{
use std::{cmp::Ordering, ops::Bound};
impl Parser<'_> {
pub fn parse_record_string(&mut self, double: bool) -> ParseResult<Thing> {
let thing = self.parse_thing()?;
pub async fn parse_record_string(&mut self, ctx: &mut Stk, double: bool) -> ParseResult<Thing> {
let thing = self.parse_thing(ctx).await?;
// can't have any tokens in the buffer, since the next token must be produced by a specific
// call.
debug_assert_eq!(self.token_buffer.len(), 0);

@@ -35,7 +37,11 @@ impl Parser<'_> {
Ok(thing)
}
pub fn parse_thing_or_range(&mut self, ident: String) -> ParseResult<Value> {
pub async fn parse_thing_or_range(
&mut self,
stk: &mut Stk,
ident: String,
) -> ParseResult<Value> {
expected!(self, t!(":"));
self.peek();

@@ -44,12 +50,14 @@ impl Parser<'_> {
if self.eat(t!("..")) {
let end = if self.eat(t!("=")) {
self.no_whitespace()?;
Bound::Included(self.parse_id()?)
let id = stk.run(|stk| self.parse_id(stk)).await?;
Bound::Included(id)
} else if self.peek_can_be_ident()
|| matches!(self.peek_kind(), TokenKind::Number(_) | t!("{") | t!("["))
{
self.no_whitespace()?;
Bound::Excluded(self.parse_id()?)
let id = stk.run(|stk| self.parse_id(stk)).await?;
Bound::Excluded(id)
} else {
Bound::Unbounded
};

@@ -63,7 +71,7 @@ impl Parser<'_> {
let beg = if self.peek_can_be_ident()
|| matches!(self.peek_kind(), TokenKind::Number(_) | t!("{") | t!("["))
{
let id = self.parse_id()?;
let id = stk.run(|ctx| self.parse_id(ctx)).await?;
if self.eat(t!(">")) {
self.no_whitespace()?;

@@ -78,12 +86,14 @@ impl Parser<'_> {
if self.eat(t!("..")) {
let end = if self.eat(t!("=")) {
self.no_whitespace()?;
Bound::Included(self.parse_id()?)
let id = stk.run(|ctx| self.parse_id(ctx)).await?;
Bound::Included(id)
} else if self.peek_can_be_ident()
|| matches!(self.peek_kind(), TokenKind::Number(_) | t!("{") | t!("["))
{
self.no_whitespace()?;
Bound::Excluded(self.parse_id()?)
let id = stk.run(|ctx| self.parse_id(ctx)).await?;
Bound::Excluded(id)
} else {
Bound::Unbounded
};

@@ -122,7 +132,7 @@ impl Parser<'_> {
}
}
pub fn parse_range(&mut self) -> ParseResult<Range> {
pub async fn parse_range(&mut self, ctx: &mut Stk) -> ParseResult<Range> {
let tb = self.next_token_value::<Ident>()?.0;
expected!(self, t!(":"));

@@ -134,7 +144,7 @@ impl Parser<'_> {
self.peek();
self.no_whitespace()?;
let id = self.parse_id()?;
let id = ctx.run(|ctx| self.parse_id(ctx)).await?;
self.peek();
self.no_whitespace()?;

@@ -162,7 +172,7 @@ impl Parser<'_> {
self.no_whitespace()?;
let end = if self.peek_can_be_ident() {
let id = self.parse_id()?;
let id = ctx.run(|ctx| self.parse_id(ctx)).await?;
if inclusive {
Bound::Included(id)
} else {

@@ -179,33 +189,37 @@ impl Parser<'_> {
})
}
pub fn parse_thing(&mut self) -> ParseResult<Thing> {
pub async fn parse_thing(&mut self, ctx: &mut Stk) -> ParseResult<Thing> {
let ident = self.next_token_value::<Ident>()?.0;
self.parse_thing_from_ident(ident)
self.parse_thing_from_ident(ctx, ident).await
}
pub fn parse_thing_from_ident(&mut self, ident: String) -> ParseResult<Thing> {
pub async fn parse_thing_from_ident(
&mut self,
ctx: &mut Stk,
ident: String,
) -> ParseResult<Thing> {
expected!(self, t!(":"));
self.peek();
self.no_whitespace()?;
let id = self.parse_id()?;
let id = ctx.run(|ctx| self.parse_id(ctx)).await?;
Ok(Thing {
tb: ident,
id,
})
}
pub fn parse_id(&mut self) -> ParseResult<Id> {
pub async fn parse_id(&mut self, stk: &mut Stk) -> ParseResult<Id> {
let token = self.next();
match token.kind {
t!("{") => {
let object = self.parse_object(token.span)?;
let object = self.parse_object(stk, token.span).await?;
Ok(Id::Object(object))
}
t!("[") => {
let array = self.parse_array(token.span)?;
let array = self.parse_array(stk, token.span).await?;
Ok(Id::Array(array))
}
t!("+") => {

@@ -272,6 +286,8 @@ impl Parser<'_> {
#[cfg(test)]
mod tests {
use reblessive::Stack;
use super::*;
use crate::sql::array::Array;
use crate::sql::object::Object;

@@ -279,7 +295,8 @@ mod tests {
fn thing(i: &str) -> ParseResult<Thing> {
let mut parser = Parser::new(i.as_bytes());
parser.parse_thing()
let mut stack = Stack::new();
stack.enter(|ctx| async move { parser.parse_thing(ctx).await }).finish()
}
#[test]
@@ -1,3 +1,5 @@
use reblessive::Stack;
use super::super::Parse;
use super::lexer::Lexer;
use super::parser::Parser;

@@ -13,9 +15,10 @@ impl Parse<Self> for Value {
impl Parse<Self> for Array {
fn parse(val: &str) -> Self {
let mut parser = Parser::new(val.as_bytes());
let mut stack = Stack::new();
let start = parser.peek().span;
assert!(parser.eat(t!("[")));
parser.parse_array(start).unwrap()
stack.enter(|ctx| async move { parser.parse_array(ctx, start).await }).finish().unwrap()
}
}

@@ -49,7 +52,8 @@ impl Parse<Self> for Thing {
impl Parse<Self> for Expression {
fn parse(val: &str) -> Self {
let mut parser = Parser::new(val.as_bytes());
let value = parser.parse_value_field().unwrap();
let mut stack = Stack::new();
let value = stack.enter(|ctx| parser.parse_value_field(ctx)).finish().unwrap();
if let Value::Expression(x) = value {
return *x;
}
@@ -1,7 +1,6 @@
use rocket::http::Status;
use rocket::response::{self, Responder, Response};
use rocket::Request;
use serde_json::json;
use thiserror::Error;
#[derive(Error, Debug)]
@@ -72,8 +72,8 @@ impl Config {
/// Set the default user
pub fn user(mut self, user: crate::opt::auth::Root<'_>) -> Self {
self.auth = Level::Root;
self.username = user.username.to_owned();
self.password = user.password.to_owned();
user.username.clone_into(&mut self.username);
user.password.clone_into(&mut self.password);
self
}
@@ -14,7 +14,7 @@ impl IntoEndpoint<Mem> for () {
let url = Url::parse(protocol)
.unwrap_or_else(|_| unreachable!("`{protocol}` should be static and valid"));
let mut endpoint = Endpoint::new(url);
endpoint.path = "memory".to_owned();
"memory".clone_into(&mut endpoint.path);
Ok(endpoint)
}
}
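The two hunks above swap `field = value.to_owned()` for `value.clone_into(&mut field)`. My reading (not stated in the commit) is that this follows the newer clippy guidance on assigning clones: `ToOwned::clone_into` can reuse the destination's existing buffer instead of allocating a fresh `String` and dropping the old one. A tiny standalone sketch:

    fn main() {
        let src = "memory";
        let mut dst = String::with_capacity(16);
        // `dst = src.to_owned();` would build a brand-new String and discard dst's buffer.
        // `clone_into` copies the bytes into the buffer dst already owns when it is big enough.
        src.clone_into(&mut dst);
        assert_eq!(dst, "memory");
    }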
@@ -113,7 +113,7 @@ async fn database_change_feeds() -> Result<(), Error> {
dbs: &Datastore,
sql2: &str,
ses: &Session,
cf_val_arr: &Vec<Value>,
cf_val_arr: &[Value],
) -> Result<(), String> {
let res = &mut dbs.execute(sql2, ses, None).await?;
assert_eq!(res.len(), 3);

@@ -722,7 +722,7 @@ async fn changefeed_with_original() -> Result<(), Error> {
assert_eq!(array.len(), 2);
assert_eq!(
array.get(0).unwrap(),
array.first().unwrap(),
&surrealdb::sql::value(
r#"{
"changes": [{
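Two small lint-driven cleanups appear above: the helper now takes `&[Value]` instead of `&Vec<Value>`, and `array.get(0)` becomes `array.first()`. My understanding (an assumption, the commit does not say) is that the slice parameter follows clippy's `ptr_arg` advice, since a slice accepts more callers and does not force them to hold a `Vec`. A small illustrative sketch with made-up names:

    // Hypothetical helper: taking `&[String]` instead of `&Vec<String>` lets callers
    // pass vectors, arrays, or sub-slices without any conversion.
    fn total_len(items: &[String]) -> usize {
        items.iter().map(|s| s.len()).sum()
    }

    fn main() {
        let owned = vec!["a".to_owned(), "bc".to_owned()];
        assert_eq!(total_len(&owned), 3);       // a &Vec<String> coerces to &[String]
        assert_eq!(total_len(&owned[..1]), 1);  // a borrowed sub-slice works too
        assert_eq!(owned.first(), Some(&"a".to_owned())); // and .first() replaces .get(0)
    }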
@@ -205,7 +205,7 @@ async fn select_where_matches_partial_highlight() -> Result<(), Error> {
";
let dbs = new_ds().await?;
let ses = Session::owner().with_ns("test").with_db("test");
let res = &mut dbs.execute(&sql, &ses, None).await?;
let res = &mut dbs.execute(sql, &ses, None).await?;
assert_eq!(res.len(), 9);
//
for _ in 0..3 {

@@ -292,7 +292,7 @@ async fn select_where_matches_partial_highlight_ngram() -> Result<(), Error> {
";
let dbs = new_ds().await?;
let ses = Session::owner().with_ns("test").with_db("test");
let res = &mut dbs.execute(&sql, &ses, None).await?;
let res = &mut dbs.execute(sql, &ses, None).await?;
assert_eq!(res.len(), 10);
//
for _ in 0..3 {
@@ -1419,7 +1419,7 @@ async fn select_with_in_operator() -> Result<(), Error> {
SELECT * FROM user WHERE email IN ['a@b', 'e@f'];
SELECT * FROM user WHERE email INSIDE ['a@b', 'e@f'];
";
let mut res = dbs.execute(&sql, &ses, None).await?;
let mut res = dbs.execute(sql, &ses, None).await?;
assert_eq!(res.len(), 7);
skip_ok(&mut res, 3)?;

@@ -1480,7 +1480,7 @@ async fn select_with_in_operator_uniq_index() -> Result<(), Error> {
SELECT apprenantUid FROM apprenants WHERE apprenantUid IN ["99999999-aaaa-1111-8888-abcdef012345", "00013483-fedd-43e3-a94e-80728d896f6e", "99999999-aaaa-1111-8888-abcdef012345"];
SELECT apprenantUid FROM apprenants WHERE apprenantUid IN ["00013483-fedd-43e3-a94e-80728d896f6e"] EXPLAIN;
"#;
let mut res = dbs.execute(&sql, &ses, None).await?;
let mut res = dbs.execute(sql, &ses, None).await?;
assert_eq!(res.len(), 8);
skip_ok(&mut res, 2)?;
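Several test hunks in this commit, including the four above, drop a `&` in `dbs.execute(&sql, ..)`. My reading (an assumption; the commit does not explain it) is that `sql` is already a `&str` in these tests, so `&sql` was a redundant `&&str` that only compiled thanks to deref coercion. A tiny standalone sketch:

    fn run(query: &str) -> usize {
        query.len()
    }

    fn main() {
        let sql: &str = "SELECT * FROM person;";
        // Passing `sql` directly is enough; `&sql` is a `&&str` that clippy's
        // needless_borrow lint flags, even though deref coercion makes it compile.
        assert_eq!(run(sql), run(&sql));
    }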
@@ -1735,6 +1735,10 @@ criteria = "safe-to-deploy"
version = "0.21.0"
criteria = "safe-to-deploy"

[[exemptions.reblessive]]
version = "0.3.0"
criteria = "safe-to-deploy"

[[exemptions.rquickjs]]
version = "0.5.1"
criteria = "safe-to-deploy"