Fix warnings (#3634)

parent 1784a1202e
commit 87da9c0b01

65 changed files with 171 additions and 212 deletions
@@ -160,49 +160,49 @@ impl Options {
 self
 }

-///
+/// Specify wether tables/events should re-run
 pub fn with_force(mut self, force: bool) -> Self {
 self.force = force;
 self
 }

-///
+/// Sepecify if we should error when a table does not exist
 pub fn with_strict(mut self, strict: bool) -> Self {
 self.strict = strict;
 self
 }

-///
+/// Specify if we should process fields
 pub fn with_fields(mut self, fields: bool) -> Self {
 self.fields = fields;
 self
 }

-///
+/// Specify if we should process event queries
 pub fn with_events(mut self, events: bool) -> Self {
 self.events = events;
 self
 }

-///
+/// Specify if we should process table queries
 pub fn with_tables(mut self, tables: bool) -> Self {
 self.tables = tables;
 self
 }

-///
+/// Specify if we should process index queries
 pub fn with_indexes(mut self, indexes: bool) -> Self {
 self.indexes = indexes;
 self
 }

-///
+/// Specify if we should process futures
 pub fn with_futures(mut self, futures: bool) -> Self {
 self.futures = futures;
 self
 }

-///
+/// Specify if we should process field projections
 pub fn with_projections(mut self, projections: bool) -> Self {
 self.projections = projections;
 self
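
Note: these setters follow the standard chainable-builder idiom (each one takes `mut self` and returns `Self`). A minimal self-contained sketch of the same pattern, not SurrealDB's actual `Options` type:

    #[derive(Default)]
    struct Opts {
        force: bool,
        strict: bool,
    }

    impl Opts {
        /// Specify whether operations should be forced
        fn with_force(mut self, force: bool) -> Self {
            self.force = force;
            self
        }
        /// Specify whether a missing table should be an error
        fn with_strict(mut self, strict: bool) -> Self {
            self.strict = strict;
            self
        }
    }

    fn main() {
        // Calls chain because each setter consumes and returns the builder.
        let opts = Opts::default().with_force(true).with_strict(false);
        assert!(opts.force && !opts.strict);
    }
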
@@ -116,7 +116,6 @@ pub mod bcrypt {
 use crate::err::Error;
 use crate::fnc::crypto::COST_ALLOWANCE;
 use crate::sql::value::Value;
-use bcrypt;
 use bcrypt::HashParts;
 use std::str::FromStr;

@@ -3,7 +3,7 @@ use std::string::String as StdString;
 use js::{
 class::{Trace, Tracer},
 prelude::*,
-Class, Ctx, Exception, FromJs, Object, Result, Value,
+Class, Ctx, Exception, Object, Result, Value,
 };

 use crate::fnc::script::fetch::{classes::Headers, util};
@@ -586,7 +586,7 @@ pub async fn verify_creds_legacy(
 #[cfg(test)]
 mod tests {
 use super::*;
-use crate::{iam::token::Claims, iam::token::HEADER, iam::verify::token, kvs::Datastore};
+use crate::iam::token::HEADER;
 use argon2::password_hash::{PasswordHasher, SaltString};
 use chrono::Duration;
 use jsonwebtoken::{encode, EncodingKey};
@@ -1185,7 +1185,7 @@ mod tests {
 // Test with custom user numeric identifiers of varying sizes
 //
 {
-let ids = vec!["1", "2", "100", "10000000"];
+let ids = ["1", "2", "100", "10000000"];
 for id in ids.iter() {
 let resource_id = format!("user:{id}");
 // Prepare the claims object
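
Note: this vec!-to-array change (repeated in the next two hunks) is the usual fix for clippy's useless_vec lint: when a literal collection is only iterated and never grown, a fixed-size array avoids the heap allocation. A small sketch with hypothetical values:

    fn main() {
        // was: let ids = vec!["1", "2", "100"]; — flagged because the Vec is never resized
        let ids = ["1", "2", "100"];
        for id in ids.iter() {
            // Identical iteration code works on the array.
            println!("user:{id}");
        }
    }
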
@@ -1212,7 +1212,7 @@ mod tests {
 // Test with custom user string identifiers of varying lengths
 //
 {
-let ids = vec!["username", "username1", "username10", "username100"];
+let ids = ["username", "username1", "username10", "username100"];
 for id in ids.iter() {
 let resource_id = format!("user:{id}");
 // Prepare the claims object
@@ -1239,7 +1239,7 @@ mod tests {
 // Test with custom user string identifiers of varying lengths with special characters
 //
 {
-let ids = vec!["user.name", "user.name1", "user.name10", "user.name100"];
+let ids = ["user.name", "user.name1", "user.name10", "user.name100"];
 for id in ids.iter() {
 // Enclose special characters in "⟨brackets⟩"
 let resource_id = format!("user:⟨{id}⟩");
@@ -1372,7 +1372,7 @@ mod tests {
 let string_claim = tk.get("string_claim").unwrap();
 assert_eq!(*string_claim, Value::Strand("test".into()));
 let bool_claim = tk.get("bool_claim").unwrap();
-assert_eq!(*bool_claim, Value::Bool(true.into()));
+assert_eq!(*bool_claim, Value::Bool(true));
 let int_claim = tk.get("int_claim").unwrap();
 assert_eq!(*int_claim, Value::Number(123456.into()));
 let float_claim = tk.get("float_claim").unwrap();
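
Note: dropping `.into()` here removes a bool-to-bool identity conversion (the pattern clippy reports as useless_conversion); the adjacent `123456.into()` stays because it genuinely converts an integer into a `Number`. A tiny sketch of the distinction:

    fn main() {
        // was: true.into() with a bool target — an identity conversion, so clippy flags it
        let direct: bool = true;
        let converted: bool = true.into();
        assert_eq!(direct, converted);

        // A conversion that actually changes type (i32 -> i64) is not flagged.
        let widened: i64 = 123456.into();
        assert_eq!(widened, 123456i64);
    }
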
@@ -862,9 +862,9 @@ mod tests {
 content: &Value,
 ) {
 let (ctx, opt, txn, mut fti) =
-tx_fti(ds, TransactionType::Write, &az, btree_order, false).await;
-fti.remove_document(&txn, &rid).await.unwrap();
-fti.index_document(&ctx, &opt, &txn, &rid, vec![content.clone()]).await.unwrap();
+tx_fti(ds, TransactionType::Write, az, btree_order, false).await;
+fti.remove_document(&txn, rid).await.unwrap();
+fti.index_document(&ctx, &opt, &txn, rid, vec![content.clone()]).await.unwrap();
 finish(&txn, fti).await;
 }

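
Note: dropping the extra `&` in these calls is the typical needless_borrow fix: `az` and `rid` are already references here, so taking another `&` only produces a double reference that is immediately auto-dereferenced. A minimal sketch:

    fn word_count(s: &str) -> usize {
        s.split_whitespace().count()
    }

    fn main() {
        let text: &str = "a needless borrow example";
        // was: word_count(&text) — `text` is already a &str, so the extra borrow
        // (&&str) adds nothing; passing it directly is equivalent.
        assert_eq!(word_count(text), 4);
    }
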
@ -1047,7 +1047,7 @@ mod tests {
|
||||||
|
|
||||||
async fn check_insertions<F, BK>(
|
async fn check_insertions<F, BK>(
|
||||||
mut tx: Transaction,
|
mut tx: Transaction,
|
||||||
mut st: BTreeStore<BK>,
|
st: BTreeStore<BK>,
|
||||||
t: &mut BTree<BK>,
|
t: &mut BTree<BK>,
|
||||||
samples_size: usize,
|
samples_size: usize,
|
||||||
sample_provider: F,
|
sample_provider: F,
|
||||||
|
@ -1057,7 +1057,7 @@ mod tests {
|
||||||
{
|
{
|
||||||
for i in 0..samples_size {
|
for i in 0..samples_size {
|
||||||
let (key, payload) = sample_provider(i);
|
let (key, payload) = sample_provider(i);
|
||||||
assert_eq!(t.search(&mut tx, &mut st, &key).await.unwrap(), Some(payload));
|
assert_eq!(t.search(&mut tx, &st, &key).await.unwrap(), Some(payload));
|
||||||
}
|
}
|
||||||
tx.cancel().await.unwrap();
|
tx.cancel().await.unwrap();
|
||||||
}
|
}
|
||||||
|
@ -1117,9 +1117,9 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
t.statistics(&mut tx, &mut st).await.unwrap(),
|
t.statistics(&mut tx, &st).await.unwrap(),
|
||||||
BStatistics {
|
BStatistics {
|
||||||
keys_count: 100,
|
keys_count: 100,
|
||||||
max_depth: 3,
|
max_depth: 3,
|
||||||
|
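
Note: the recurring `let (mut tx, mut st)` → `let (mut tx, st)` edits in these B-tree tests (and the matching `&mut st` → `&st` call sites) remove `mut` bindings that are never mutated, which rustc reports via its unused_mut warning. A compact sketch:

    fn statistics(tx: &mut Vec<u8>, cache: &[u8]) -> usize {
        tx.push(1);
        cache.len() + tx.len()
    }

    fn main() {
        // was: let (mut tx, mut st) = ...; — only `tx` is ever mutated
        let (mut tx, st) = (Vec::new(), vec![0u8; 4]);
        assert_eq!(statistics(&mut tx, &st), 5);
    }
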
@ -1147,9 +1147,9 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
t.statistics(&mut tx, &mut st).await.unwrap(),
|
t.statistics(&mut tx, &st).await.unwrap(),
|
||||||
BStatistics {
|
BStatistics {
|
||||||
keys_count: 100,
|
keys_count: 100,
|
||||||
max_depth: 3,
|
max_depth: 3,
|
||||||
|
@ -1181,8 +1181,8 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
||||||
let s = t.statistics(&mut tx, &mut st).await.unwrap();
|
let s = t.statistics(&mut tx, &st).await.unwrap();
|
||||||
assert_eq!(s.keys_count, 100);
|
assert_eq!(s.keys_count, 100);
|
||||||
tx.cancel().await.unwrap();
|
tx.cancel().await.unwrap();
|
||||||
}
|
}
|
||||||
|
@ -1208,8 +1208,8 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
let s = t.statistics(&mut tx, &mut st).await.unwrap();
|
let s = t.statistics(&mut tx, &st).await.unwrap();
|
||||||
assert_eq!(s.keys_count, 100);
|
assert_eq!(s.keys_count, 100);
|
||||||
tx.cancel().await.unwrap();
|
tx.cancel().await.unwrap();
|
||||||
}
|
}
|
||||||
|
@ -1231,9 +1231,9 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
t.statistics(&mut tx, &mut st).await.unwrap(),
|
t.statistics(&mut tx, &st).await.unwrap(),
|
||||||
BStatistics {
|
BStatistics {
|
||||||
keys_count: 10000,
|
keys_count: 10000,
|
||||||
max_depth: 3,
|
max_depth: 3,
|
||||||
|
@ -1260,10 +1260,9 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) =
|
let (mut tx, st) = new_operation_trie(&ds, &t, TransactionType::Read, cache_size).await;
|
||||||
new_operation_trie(&ds, &t, TransactionType::Read, cache_size).await;
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
t.statistics(&mut tx, &mut st).await.unwrap(),
|
t.statistics(&mut tx, &st).await.unwrap(),
|
||||||
BStatistics {
|
BStatistics {
|
||||||
keys_count: 10000,
|
keys_count: 10000,
|
||||||
max_depth: 3,
|
max_depth: 3,
|
||||||
|
@ -1303,8 +1302,8 @@ mod tests {
|
||||||
.await;
|
.await;
|
||||||
}
|
}
|
||||||
|
|
||||||
let (mut tx, mut st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_fst(&ds, &t, TransactionType::Read, 20).await;
|
||||||
let statistics = t.statistics(&mut tx, &mut st).await.unwrap();
|
let statistics = t.statistics(&mut tx, &st).await.unwrap();
|
||||||
tx.cancel().await.unwrap();
|
tx.cancel().await.unwrap();
|
||||||
statistics
|
statistics
|
||||||
}
|
}
|
||||||
|
@ -1321,8 +1320,8 @@ mod tests {
|
||||||
.await;
|
.await;
|
||||||
}
|
}
|
||||||
|
|
||||||
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
let statistics = t.statistics(&mut tx, &mut st).await.unwrap();
|
let statistics = t.statistics(&mut tx, &st).await.unwrap();
|
||||||
tx.cancel().await.unwrap();
|
tx.cancel().await.unwrap();
|
||||||
|
|
||||||
statistics
|
statistics
|
||||||
|
@ -1421,7 +1420,7 @@ mod tests {
|
||||||
|
|
||||||
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
|
|
||||||
let s = t.statistics(&mut tx, &mut st).await.unwrap();
|
let s = t.statistics(&mut tx, &st).await.unwrap();
|
||||||
assert_eq!(s.keys_count, 23);
|
assert_eq!(s.keys_count, 23);
|
||||||
assert_eq!(s.max_depth, 3);
|
assert_eq!(s.max_depth, 3);
|
||||||
assert_eq!(s.nodes_count, 10);
|
assert_eq!(s.nodes_count, 10);
|
||||||
|
@@ -1530,7 +1529,7 @@ mod tests {
 st,
 tx,
 check_generation,
-format!("Insert CLRS example"),
+"Insert CLRS example".to_string(),
 )
 .await?;
 }
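
Note: `format!` with a bare literal and no arguments just allocates a `String`, which clippy flags as useless_format; `.to_string()` (or `String::from`) says the same thing without the macro. Sketch:

    fn main() {
        // was: format!("Insert CLRS example") — no formatting is actually happening
        let a = "Insert CLRS example".to_string();
        let b = String::from("Insert CLRS example");
        assert_eq!(a, b);
    }
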
@ -1554,9 +1553,8 @@ mod tests {
|
||||||
}
|
}
|
||||||
key_count -= 1;
|
key_count -= 1;
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) =
|
let (mut tx, st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let s = t.statistics(&mut tx, &st).await?;
|
||||||
let s = t.statistics(&mut tx, &mut st).await?;
|
|
||||||
assert_eq!(s.keys_count, key_count);
|
assert_eq!(s.keys_count, key_count);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1564,7 +1562,7 @@ mod tests {
|
||||||
|
|
||||||
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
|
|
||||||
let s = t.statistics(&mut tx, &mut st).await.unwrap();
|
let s = t.statistics(&mut tx, &st).await.unwrap();
|
||||||
assert_eq!(s.keys_count, 18);
|
assert_eq!(s.keys_count, 18);
|
||||||
assert_eq!(s.max_depth, 2);
|
assert_eq!(s.max_depth, 2);
|
||||||
assert_eq!(s.nodes_count, 7);
|
assert_eq!(s.nodes_count, 7);
|
||||||
|
@ -1646,7 +1644,7 @@ mod tests {
|
||||||
st,
|
st,
|
||||||
tx,
|
tx,
|
||||||
check_generation,
|
check_generation,
|
||||||
format!("Insert CLRS example"),
|
"Insert CLRS example".to_string(),
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
}
|
}
|
||||||
|
@ -1663,7 +1661,7 @@ mod tests {
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) =
|
let (mut tx, mut st) =
|
||||||
new_operation_trie(&ds, &t, TransactionType::Write, 20).await;
|
new_operation_trie(&ds, &t, TransactionType::Write, 20).await;
|
||||||
assert!(t.delete(&mut tx, &mut &mut st, key.into()).await?.is_some());
|
assert!(t.delete(&mut tx, &mut st, key.into()).await?.is_some());
|
||||||
expected_keys.remove(key);
|
expected_keys.remove(key);
|
||||||
let (_, tree_keys) = check_btree_properties(&t, &mut tx, &mut st).await?;
|
let (_, tree_keys) = check_btree_properties(&t, &mut tx, &mut st).await?;
|
||||||
assert_eq!(expected_keys, tree_keys);
|
assert_eq!(expected_keys, tree_keys);
|
||||||
|
@ -1679,10 +1677,10 @@ mod tests {
|
||||||
|
|
||||||
// Check that every expected keys are still found in the tree
|
// Check that every expected keys are still found in the tree
|
||||||
{
|
{
|
||||||
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
for (key, payload) in &expected_keys {
|
for (key, payload) in &expected_keys {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
t.search(&mut tx, &mut st, &key.as_str().into()).await?,
|
t.search(&mut tx, &st, &key.as_str().into()).await?,
|
||||||
Some(*payload),
|
Some(*payload),
|
||||||
"Can't find: {key}",
|
"Can't find: {key}",
|
||||||
)
|
)
|
||||||
|
@ -1691,8 +1689,8 @@ mod tests {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let (mut tx, mut st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
let (mut tx, st) = new_operation_trie(&ds, &t, TransactionType::Read, 20).await;
|
||||||
let s = t.statistics(&mut tx, &mut st).await?;
|
let s = t.statistics(&mut tx, &st).await?;
|
||||||
assert_eq!(s.keys_count, 0);
|
assert_eq!(s.keys_count, 0);
|
||||||
assert_eq!(s.max_depth, 0);
|
assert_eq!(s.max_depth, 0);
|
||||||
assert_eq!(s.nodes_count, 0);
|
assert_eq!(s.nodes_count, 0);
|
||||||
|
@@ -2007,9 +2005,8 @@ mod tests {
 let stored_node = st.get_node_mut(tx, node_id).await?;
 if let BTreeNode::Internal(keys, children) = &stored_node.n {
 let depth = depth + 1;
-let mut idx = 0;
 let mut child_right_key = None;
-for child_id in children {
+for (idx, child_id) in children.iter().enumerate() {
 let child_left_key = child_right_key;
 child_right_key = keys.get_key(idx);
 if let Some(crk) = &child_left_key {
@@ -2042,7 +2039,6 @@ mod tests {
 child_left_key.clone(),
 child_right_key.clone(),
 ));
-idx += 1;
 }
 }
 inspect_func(count, depth, node_id, &stored_node);
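
Note: these two hunks replace a hand-maintained counter (`let mut idx = 0; ... idx += 1;`) with `iter().enumerate()`, which yields the index alongside each item and lets the mutable counter disappear (the pattern clippy reports as explicit_counter_loop). A small sketch:

    fn main() {
        let children = ["left", "middle", "right"];
        // was:
        //   let mut idx = 0;
        //   for child in children { ...; idx += 1; }
        for (idx, child) in children.iter().enumerate() {
            println!("{idx}: {child}");
        }
    }
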
@ -1683,8 +1683,8 @@ mod tests {
|
||||||
let vec1 = new_vec(1, VectorType::F64, 1);
|
let vec1 = new_vec(1, VectorType::F64, 1);
|
||||||
// First the index is empty
|
// First the index is empty
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec1, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec1, 10).await?;
|
||||||
check_knn(&res.docs, vec![]);
|
check_knn(&res.docs, vec![]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 0);
|
assert_eq!(res.visited_nodes.len(), 0);
|
||||||
|
@ -1692,9 +1692,9 @@ mod tests {
|
||||||
// Insert single element
|
// Insert single element
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec1.clone(), 1).await?;
|
t.insert(&mut tx, &mut st, vec1.clone(), 1).await?;
|
||||||
assert_eq!(t.state.root, Some(0));
|
assert_eq!(t.state.root, Some(0));
|
||||||
check_leaf_write(&mut tx, &mut &mut st, 0, |m| {
|
check_leaf_write(&mut tx, &mut st, 0, |m| {
|
||||||
assert_eq!(m.len(), 1);
|
assert_eq!(m.len(), 1);
|
||||||
check_leaf_vec(m, &vec1, 0.0, &[1]);
|
check_leaf_vec(m, &vec1, 0.0, &[1]);
|
||||||
})
|
})
|
||||||
|
@ -1704,7 +1704,7 @@ mod tests {
|
||||||
// Check KNN
|
// Check KNN
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec1, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec1, 10).await?;
|
||||||
check_knn(&res.docs, vec![1]);
|
check_knn(&res.docs, vec![1]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 1);
|
assert_eq!(res.visited_nodes.len(), 1);
|
||||||
|
@ -1715,13 +1715,13 @@ mod tests {
|
||||||
let vec2 = new_vec(2, VectorType::F64, 1);
|
let vec2 = new_vec(2, VectorType::F64, 1);
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec2.clone(), 2).await?;
|
t.insert(&mut tx, &mut st, vec2.clone(), 2).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
// vec1 knn
|
// vec1 knn
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec1, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec1, 10).await?;
|
||||||
check_knn(&res.docs, vec![1, 2]);
|
check_knn(&res.docs, vec![1, 2]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 1);
|
assert_eq!(res.visited_nodes.len(), 1);
|
||||||
|
@ -1736,8 +1736,8 @@ mod tests {
|
||||||
}
|
}
|
||||||
// vec2 knn
|
// vec2 knn
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec2, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec2, 10).await?;
|
||||||
check_knn(&res.docs, vec![2, 1]);
|
check_knn(&res.docs, vec![2, 1]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 1);
|
assert_eq!(res.visited_nodes.len(), 1);
|
||||||
|
@ -1746,13 +1746,13 @@ mod tests {
|
||||||
// insert new doc to existing vector
|
// insert new doc to existing vector
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec2.clone(), 3).await?;
|
t.insert(&mut tx, &mut st, vec2.clone(), 3).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
// vec2 knn
|
// vec2 knn
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec2, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec2, 10).await?;
|
||||||
check_knn(&res.docs, vec![2, 3, 1]);
|
check_knn(&res.docs, vec![2, 3, 1]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 1);
|
assert_eq!(res.visited_nodes.len(), 1);
|
||||||
|
@ -1770,13 +1770,13 @@ mod tests {
|
||||||
let vec3 = new_vec(3, VectorType::F64, 1);
|
let vec3 = new_vec(3, VectorType::F64, 1);
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec3.clone(), 3).await?;
|
t.insert(&mut tx, &mut st, vec3.clone(), 3).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
// vec3 knn
|
// vec3 knn
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec3, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec3, 10).await?;
|
||||||
check_knn(&res.docs, vec![3, 2, 3, 1]);
|
check_knn(&res.docs, vec![3, 2, 3, 1]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 1);
|
assert_eq!(res.visited_nodes.len(), 1);
|
||||||
|
@ -1795,13 +1795,13 @@ mod tests {
|
||||||
let vec4 = new_vec(4, VectorType::F64, 1);
|
let vec4 = new_vec(4, VectorType::F64, 1);
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec4.clone(), 4).await?;
|
t.insert(&mut tx, &mut st, vec4.clone(), 4).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
// vec4 knn
|
// vec4 knn
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec4, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec4, 10).await?;
|
||||||
check_knn(&res.docs, vec![4, 3, 2, 3, 1]);
|
check_knn(&res.docs, vec![4, 3, 2, 3, 1]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 3);
|
assert_eq!(res.visited_nodes.len(), 3);
|
||||||
|
@ -1831,13 +1831,13 @@ mod tests {
|
||||||
let vec6 = new_vec(6, VectorType::F64, 1);
|
let vec6 = new_vec(6, VectorType::F64, 1);
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec6.clone(), 6).await?;
|
t.insert(&mut tx, &mut st, vec6.clone(), 6).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
// vec6 knn
|
// vec6 knn
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec6, 10).await?;
|
let res = t.knn_search(&mut tx, &st, &vec6, 10).await?;
|
||||||
check_knn(&res.docs, vec![6, 4, 3, 2, 3, 1]);
|
check_knn(&res.docs, vec![6, 4, 3, 2, 3, 1]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 3);
|
assert_eq!(res.visited_nodes.len(), 3);
|
||||||
|
@ -1870,7 +1870,7 @@ mod tests {
|
||||||
let vec8 = new_vec(8, VectorType::F64, 1);
|
let vec8 = new_vec(8, VectorType::F64, 1);
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec8.clone(), 8).await?;
|
t.insert(&mut tx, &mut st, vec8.clone(), 8).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
|
@ -1909,7 +1909,7 @@ mod tests {
|
||||||
let vec9 = new_vec(9, VectorType::F64, 1);
|
let vec9 = new_vec(9, VectorType::F64, 1);
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec9.clone(), 9).await?;
|
t.insert(&mut tx, &mut st, vec9.clone(), 9).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
|
@ -1949,7 +1949,7 @@ mod tests {
|
||||||
let vec10 = new_vec(10, VectorType::F64, 1);
|
let vec10 = new_vec(10, VectorType::F64, 1);
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Write, CACHE_SIZE).await;
|
||||||
t.insert(&mut tx, &mut &mut st, vec10.clone(), 10).await?;
|
t.insert(&mut tx, &mut st, vec10.clone(), 10).await?;
|
||||||
finish_operation(&mut t, tx, st, true).await?;
|
finish_operation(&mut t, tx, st, true).await?;
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
|
@ -2005,16 +2005,16 @@ mod tests {
|
||||||
|
|
||||||
// vec8 knn
|
// vec8 knn
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec8, 20).await?;
|
let res = t.knn_search(&mut tx, &st, &vec8, 20).await?;
|
||||||
check_knn(&res.docs, vec![8, 9, 6, 10, 4, 3, 2, 3, 1]);
|
check_knn(&res.docs, vec![8, 9, 6, 10, 4, 3, 2, 3, 1]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 7);
|
assert_eq!(res.visited_nodes.len(), 7);
|
||||||
}
|
}
|
||||||
// vec4 knn(2)
|
// vec4 knn(2)
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec4, 2).await?;
|
let res = t.knn_search(&mut tx, &st, &vec4, 2).await?;
|
||||||
check_knn(&res.docs, vec![4, 3]);
|
check_knn(&res.docs, vec![4, 3]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 6);
|
assert_eq!(res.visited_nodes.len(), 6);
|
||||||
|
@ -2022,8 +2022,8 @@ mod tests {
|
||||||
|
|
||||||
// vec10 knn(2)
|
// vec10 knn(2)
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
let (st, mut tx) = new_operation(&ds, &t, TransactionType::Read, CACHE_SIZE).await;
|
||||||
let res = t.knn_search(&mut tx, &mut st, &vec10, 2).await?;
|
let res = t.knn_search(&mut tx, &st, &vec10, 2).await?;
|
||||||
check_knn(&res.docs, vec![10, 9]);
|
check_knn(&res.docs, vec![10, 9]);
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
assert_eq!(res.visited_nodes.len(), 5);
|
assert_eq!(res.visited_nodes.len(), 5);
|
||||||
|
@ -2043,7 +2043,7 @@ mod tests {
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) =
|
let (mut st, mut tx) =
|
||||||
new_operation(ds, t, TransactionType::Write, cache_size).await;
|
new_operation(ds, t, TransactionType::Write, cache_size).await;
|
||||||
t.insert(&mut tx, &mut &mut st, obj.clone(), *doc_id).await?;
|
t.insert(&mut tx, &mut st, obj.clone(), *doc_id).await?;
|
||||||
finish_operation(t, tx, st, true).await?;
|
finish_operation(t, tx, st, true).await?;
|
||||||
map.insert(*doc_id, obj.clone());
|
map.insert(*doc_id, obj.clone());
|
||||||
}
|
}
|
||||||
|
@ -2068,7 +2068,7 @@ mod tests {
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) = new_operation(ds, t, TransactionType::Write, cache_size).await;
|
let (mut st, mut tx) = new_operation(ds, t, TransactionType::Write, cache_size).await;
|
||||||
for (doc_id, obj) in collection.as_ref() {
|
for (doc_id, obj) in collection.as_ref() {
|
||||||
t.insert(&mut tx, &mut &mut st, obj.clone(), *doc_id).await?;
|
t.insert(&mut tx, &mut st, obj.clone(), *doc_id).await?;
|
||||||
map.insert(*doc_id, obj.clone());
|
map.insert(*doc_id, obj.clone());
|
||||||
}
|
}
|
||||||
finish_operation(t, tx, st, true).await?;
|
finish_operation(t, tx, st, true).await?;
|
||||||
|
@ -2091,16 +2091,15 @@ mod tests {
|
||||||
let deleted = {
|
let deleted = {
|
||||||
debug!("### Remove {} {:?}", doc_id, obj);
|
debug!("### Remove {} {:?}", doc_id, obj);
|
||||||
let (mut st, mut tx) =
|
let (mut st, mut tx) =
|
||||||
new_operation(&ds, t, TransactionType::Write, cache_size).await;
|
new_operation(ds, t, TransactionType::Write, cache_size).await;
|
||||||
let deleted = t.delete(&mut tx, &mut &mut st, obj.clone(), *doc_id).await?;
|
let deleted = t.delete(&mut tx, &mut st, obj.clone(), *doc_id).await?;
|
||||||
finish_operation(t, tx, st, true).await?;
|
finish_operation(t, tx, st, true).await?;
|
||||||
deleted
|
deleted
|
||||||
};
|
};
|
||||||
all_deleted = all_deleted && deleted;
|
all_deleted = all_deleted && deleted;
|
||||||
if deleted {
|
if deleted {
|
||||||
let (mut st, mut tx) =
|
let (st, mut tx) = new_operation(ds, t, TransactionType::Read, cache_size).await;
|
||||||
new_operation(&ds, t, TransactionType::Read, cache_size).await;
|
let res = t.knn_search(&mut tx, &st, obj, 1).await?;
|
||||||
let res = t.knn_search(&mut tx, &mut st, obj, 1).await?;
|
|
||||||
assert!(!res.docs.contains(doc_id), "Found: {} {:?}", doc_id, obj);
|
assert!(!res.docs.contains(doc_id), "Found: {} {:?}", doc_id, obj);
|
||||||
} else {
|
} else {
|
||||||
// In v1.2.x deletion is experimental. Will be fixed in 1.3
|
// In v1.2.x deletion is experimental. Will be fixed in 1.3
|
||||||
|
@ -2108,7 +2107,7 @@ mod tests {
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let (mut st, mut tx) =
|
let (mut st, mut tx) =
|
||||||
new_operation(&ds, t, TransactionType::Read, cache_size).await;
|
new_operation(ds, t, TransactionType::Read, cache_size).await;
|
||||||
check_tree_properties(&mut tx, &mut st, t).await?;
|
check_tree_properties(&mut tx, &mut st, t).await?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2130,7 +2129,7 @@ mod tests {
|
||||||
let max_knn = 20.max(collection.as_ref().len());
|
let max_knn = 20.max(collection.as_ref().len());
|
||||||
for (doc_id, obj) in collection.as_ref() {
|
for (doc_id, obj) in collection.as_ref() {
|
||||||
for knn in 1..max_knn {
|
for knn in 1..max_knn {
|
||||||
let res = t.knn_search(&mut tx, &mut st, obj, knn).await?;
|
let res = t.knn_search(&mut tx, &st, obj, knn).await?;
|
||||||
if collection.is_unique() {
|
if collection.is_unique() {
|
||||||
assert!(
|
assert!(
|
||||||
res.docs.contains(doc_id),
|
res.docs.contains(doc_id),
|
||||||
|
@ -2165,9 +2164,9 @@ mod tests {
|
||||||
map: &HashMap<DocId, SharedVector>,
|
map: &HashMap<DocId, SharedVector>,
|
||||||
cache_size: usize,
|
cache_size: usize,
|
||||||
) -> Result<(), Error> {
|
) -> Result<(), Error> {
|
||||||
let (mut st, mut tx) = new_operation(ds, t, TransactionType::Read, cache_size).await;
|
let (st, mut tx) = new_operation(ds, t, TransactionType::Read, cache_size).await;
|
||||||
for obj in map.values() {
|
for obj in map.values() {
|
||||||
let res = t.knn_search(&mut tx, &mut st, obj, map.len()).await?;
|
let res = t.knn_search(&mut tx, &st, obj, map.len()).await?;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
map.len(),
|
map.len(),
|
||||||
res.docs.len(),
|
res.docs.len(),
|
||||||
|
@ -2657,7 +2656,7 @@ mod tests {
|
||||||
panic!("Leaf object already exists: {:?}", o);
|
panic!("Leaf object already exists: {:?}", o);
|
||||||
}
|
}
|
||||||
if let Some(center) = center.as_ref() {
|
if let Some(center) = center.as_ref() {
|
||||||
let pd = t.calculate_distance(center, &o)?;
|
let pd = t.calculate_distance(center, o)?;
|
||||||
debug!("calc_dist: {:?} {:?} = {}", center, &o, pd);
|
debug!("calc_dist: {:?} {:?} = {}", center, &o, pd);
|
||||||
assert_eq!(pd, p.parent_dist, "Invalid parent distance ({}): {} - Expected: {} - Node Id: {} - Obj: {:?} - Center: {:?}", p.parent_dist, t.distance, pd, node_id, o, center);
|
assert_eq!(pd, p.parent_dist, "Invalid parent distance ({}): {} - Expected: {} - Node Id: {} - Obj: {:?} - Center: {:?}", p.parent_dist, t.distance, pd, node_id, o, center);
|
||||||
}
|
}
|
||||||
|
|
|
@@ -126,22 +126,22 @@ impl Expression {
 let l = l.compute(ctx, opt, txn, doc).await?;
 match o {
 Operator::Or => {
-if let true = l.is_truthy() {
+if l.is_truthy() {
 return Ok(l);
 }
 }
 Operator::And => {
-if let false = l.is_truthy() {
+if !l.is_truthy() {
 return Ok(l);
 }
 }
 Operator::Tco => {
-if let true = l.is_truthy() {
+if l.is_truthy() {
 return Ok(l);
 }
 }
 Operator::Nco => {
-if let true = l.is_some() {
+if l.is_some() {
 return Ok(l);
 }
 }
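
Note: matching a boolean against the literal patterns `true`/`false` with `if let` is a roundabout boolean test; rewriting it as `if cond` / `if !cond` is the direct form this hunk switches to. Sketch:

    fn is_truthy(v: i64) -> bool {
        v != 0
    }

    fn main() {
        let l = 3;
        // was: if let true = is_truthy(l) { ... }
        if is_truthy(l) {
            println!("truthy");
        }
        // was: if let false = is_truthy(0) { ... }
        if !is_truthy(0) {
            println!("falsy");
        }
    }
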
@@ -153,11 +153,11 @@ thread_local! {
 // `thread_local!` so all accesses can use `Ordering::Relaxed`.

 /// Whether pretty-printing.
-static PRETTY: AtomicBool = AtomicBool::new(false);
+static PRETTY: AtomicBool = const { AtomicBool::new(false) };
 /// The current level of indentation, in units of tabs.
-static INDENT: AtomicU32 = AtomicU32::new(0);
+static INDENT: AtomicU32 = const { AtomicU32::new(0) };
 /// Whether the next formatting action should be preceded by a newline and indentation.
-static NEW_LINE: AtomicBool = AtomicBool::new(false);
+static NEW_LINE: AtomicBool = const{ AtomicBool::new(false) };
 }

 /// An adapter that, if enabled, adds pretty print formatting.
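
Note: wrapping a `thread_local!` initializer in a `const { ... }` block lets the value be computed at compile time, so each access skips the lazy-initialization check; clippy suggests this via its thread_local_initializer_can_be_made_const lint (named here on the assumption that this is the warning being silenced). Sketch:

    use std::sync::atomic::{AtomicU32, Ordering};

    thread_local! {
        // const-block initializer: evaluated at compile time, cheaper per access
        static COUNTER: AtomicU32 = const { AtomicU32::new(0) };
    }

    fn main() {
        COUNTER.with(|c| c.fetch_add(1, Ordering::Relaxed));
        COUNTER.with(|c| assert_eq!(c.load(Ordering::Relaxed), 1));
    }
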
@ -10,7 +10,7 @@ use geo_types::{MultiLineString, MultiPoint, MultiPolygon};
|
||||||
use revision::revisioned;
|
use revision::revisioned;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
use std::iter::{once, FromIterator};
|
use std::iter::once;
|
||||||
use std::{fmt, hash};
|
use std::{fmt, hash};
|
||||||
|
|
||||||
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Geometry";
|
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Geometry";
|
||||||
|
|
|
@ -10,7 +10,6 @@ use std::hash;
|
||||||
use std::iter::Product;
|
use std::iter::Product;
|
||||||
use std::iter::Sum;
|
use std::iter::Sum;
|
||||||
use std::ops::{self, Add, Div, Mul, Neg, Rem, Sub};
|
use std::ops::{self, Add, Div, Mul, Neg, Rem, Sub};
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Number";
|
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Number";
|
||||||
|
|
||||||
|
|
|
@@ -2,9 +2,9 @@ use bincode::Options;
 use bincode::Result;
 use serde::{Deserialize, Serialize};

-pub fn serialize<T: ?Sized>(value: &T) -> Result<Vec<u8>>
+pub fn serialize<T>(value: &T) -> Result<Vec<u8>>
 where
-T: Serialize,
+T: Serialize + ?Sized,
 {
 bincode::options()
 .with_no_limit()
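
Note: this change (repeated for the `collect_str` and `serialize_key`/`serialize_value` impls below) moves the `?Sized` relaxation off the type parameter and into the `where` clause, so all bounds on `T` live in one place; the behaviour is identical. A sketch of the resulting shape:

    use std::fmt::Display;

    // All bounds on T, including the ?Sized relaxation, stated once in the where clause.
    fn render<T>(value: &T) -> String
    where
        T: Display + ?Sized,
    {
        value.to_string()
    }

    fn main() {
        // str is unsized, so ?Sized is what makes this call possible.
        assert_eq!(render("hello"), "hello");
        assert_eq!(render(&42), "42");
    }
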
@@ -119,7 +119,7 @@ mod tests {
 name: Ident::from("test"),
 ..Default::default()
 });
-let enc: Vec<u8> = stm.try_into().unwrap();
+let enc: Vec<u8> = stm.into();
 assert_eq!(11, enc.len());
 }
 }
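
Note: several test hunks in this commit swap `.try_into().unwrap()` for a plain `.into()`, which implies the conversion is (or became) infallible, i.e. exposed through `From` rather than `TryFrom`. A minimal sketch of that API shape, using a made-up `Payload` type rather than the statement types above:

    struct Payload(u8);

    // An infallible conversion: From gives .into() for free, no unwrap() at call sites.
    impl From<Payload> for Vec<u8> {
        fn from(p: Payload) -> Self {
            vec![0x01, p.0]
        }
    }

    fn main() {
        let enc: Vec<u8> = Payload(7).into();
        assert_eq!(enc.len(), 2);
    }
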
@ -51,7 +51,6 @@ impl fmt::Display for SleepStatement {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::dbs::test::mock;
|
use crate::dbs::test::mock;
|
||||||
use crate::sql::{Duration, Value};
|
|
||||||
use std::time::{self, SystemTime};
|
use std::time::{self, SystemTime};
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
|
|
|
@ -58,7 +58,6 @@ impl Value {
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::idiom::Idiom;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -62,7 +62,6 @@ impl Value {
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::idiom::Idiom;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -247,9 +247,7 @@ mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::dbs::test::mock;
|
use crate::dbs::test::mock;
|
||||||
use crate::sql::id::Id;
|
|
||||||
use crate::sql::idiom::Idiom;
|
use crate::sql::idiom::Idiom;
|
||||||
use crate::sql::thing::Thing;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
|
|
|
@ -24,9 +24,9 @@ impl ser::Serializer for Serializer {
|
||||||
const EXPECTED: &'static str = "a struct `DateTime<Utc>`";
|
const EXPECTED: &'static str = "a struct `DateTime<Utc>`";
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||||
where
|
where
|
||||||
T: Display,
|
T: Display + ?Sized,
|
||||||
{
|
{
|
||||||
value.to_string().parse().map_err(Error::custom)
|
value.to_string().parse().map_err(Error::custom)
|
||||||
}
|
}
|
||||||
|
@ -48,7 +48,6 @@ impl ser::Serializer for Serializer {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use ser::Serializer as _;
|
use ser::Serializer as _;
|
||||||
use serde::Serialize;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn now() {
|
fn now() {
|
||||||
|
|
|
@ -66,7 +66,6 @@ impl ser::Serializer for Serializer {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::value::serde::ser::Serializer;
|
use crate::sql::value::serde::ser::Serializer;
|
||||||
use serde::Serialize;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn distance_euclidean() {
|
fn distance_euclidean() {
|
||||||
|
|
|
@ -262,9 +262,9 @@ trait Serializer: Sized {
|
||||||
Err(Self::unexpected("struct variant", Some(name)))
|
Err(Self::unexpected("struct variant", Some(name)))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||||
where
|
where
|
||||||
T: Display,
|
T: Display + ?Sized,
|
||||||
{
|
{
|
||||||
self.serialize_str(&value.to_string())
|
self.serialize_str(&value.to_string())
|
||||||
}
|
}
|
||||||
|
@ -487,9 +487,9 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||||
where
|
where
|
||||||
T: Display,
|
T: Display + ?Sized,
|
||||||
{
|
{
|
||||||
self.0.collect_str(value)
|
self.0.collect_str(value)
|
||||||
}
|
}
|
||||||
|
|
|
@ -53,7 +53,6 @@ impl ser::Serializer for Serializer {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use ser::Serializer as _;
|
use ser::Serializer as _;
|
||||||
use serde::Serialize;
|
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -72,7 +72,6 @@ impl serde::ser::SerializeTuple for SerializeCompactUuidTuple {
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use serde::Serialize;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn nil() {
|
fn nil() {
|
||||||
|
|
|
@ -50,17 +50,17 @@ impl serde::ser::SerializeMap for SerializeValueMap {
|
||||||
type Ok = BTreeMap<String, Value>;
|
type Ok = BTreeMap<String, Value>;
|
||||||
type Error = Error;
|
type Error = Error;
|
||||||
|
|
||||||
fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
|
fn serialize_key<T>(&mut self, key: &T) -> Result<(), Self::Error>
|
||||||
where
|
where
|
||||||
T: Serialize,
|
T: Serialize + ?Sized,
|
||||||
{
|
{
|
||||||
self.next_key = Some(key.serialize(ser::string::Serializer.wrap())?);
|
self.next_key = Some(key.serialize(ser::string::Serializer.wrap())?);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
|
fn serialize_value<T>(&mut self, value: &T) -> Result<(), Self::Error>
|
||||||
where
|
where
|
||||||
T: Serialize,
|
T: Serialize + ?Sized,
|
||||||
{
|
{
|
||||||
match self.next_key.take() {
|
match self.next_key.take() {
|
||||||
Some(key) => {
|
Some(key) => {
|
||||||
|
|
|
@ -612,9 +612,7 @@ impl serde::ser::SerializeStructVariant for SerializeStructVariant {
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql;
|
|
||||||
use crate::sql::block::Entry;
|
use crate::sql::block::Entry;
|
||||||
use crate::sql::constant::Constant;
|
|
||||||
use crate::sql::statements::CreateStatement;
|
use crate::sql::statements::CreateStatement;
|
||||||
use crate::sql::*;
|
use crate::sql::*;
|
||||||
use ::serde::Serialize;
|
use ::serde::Serialize;
|
||||||
|
|
|
@ -27,7 +27,6 @@ use std::collections::BTreeMap;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::fmt::{self, Display, Formatter, Write};
|
use std::fmt::{self, Display, Formatter, Write};
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Value";
|
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Value";
|
||||||
|
|
||||||
|
@ -2785,7 +2784,6 @@ impl TryNeg for Value {
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::uuid::Uuid;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -2886,19 +2884,19 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn check_serialize() {
|
fn check_serialize() {
|
||||||
let enc: Vec<u8> = Value::None.try_into().unwrap();
|
let enc: Vec<u8> = Value::None.into();
|
||||||
assert_eq!(2, enc.len());
|
assert_eq!(2, enc.len());
|
||||||
let enc: Vec<u8> = Value::Null.try_into().unwrap();
|
let enc: Vec<u8> = Value::Null.into();
|
||||||
assert_eq!(2, enc.len());
|
assert_eq!(2, enc.len());
|
||||||
let enc: Vec<u8> = Value::Bool(true).try_into().unwrap();
|
let enc: Vec<u8> = Value::Bool(true).into();
|
||||||
assert_eq!(3, enc.len());
|
assert_eq!(3, enc.len());
|
||||||
let enc: Vec<u8> = Value::Bool(false).try_into().unwrap();
|
let enc: Vec<u8> = Value::Bool(false).into();
|
||||||
assert_eq!(3, enc.len());
|
assert_eq!(3, enc.len());
|
||||||
let enc: Vec<u8> = Value::from("test").try_into().unwrap();
|
let enc: Vec<u8> = Value::from("test").into();
|
||||||
assert_eq!(8, enc.len());
|
assert_eq!(8, enc.len());
|
||||||
let enc: Vec<u8> = Value::parse("{ hello: 'world' }").try_into().unwrap();
|
let enc: Vec<u8> = Value::parse("{ hello: 'world' }").into();
|
||||||
assert_eq!(19, enc.len());
|
assert_eq!(19, enc.len());
|
||||||
let enc: Vec<u8> = Value::parse("{ compact: true, schema: 0 }").try_into().unwrap();
|
let enc: Vec<u8> = Value::parse("{ compact: true, schema: 0 }").into();
|
||||||
assert_eq!(27, enc.len());
|
assert_eq!(27, enc.len());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2910,8 +2908,8 @@ mod tests {
|
||||||
let res = Value::parse(
|
let res = Value::parse(
|
||||||
"{ test: { something: [1, 'two', null, test:tobie, { trueee: false, noneee: nulll }] } }",
|
"{ test: { something: [1, 'two', null, test:tobie, { trueee: false, noneee: nulll }] } }",
|
||||||
);
|
);
|
||||||
let enc: Vec<u8> = val.try_into().unwrap();
|
let enc: Vec<u8> = val.into();
|
||||||
let dec: Value = enc.try_into().unwrap();
|
let dec: Value = enc.into();
|
||||||
assert_eq!(res, dec);
|
assert_eq!(res, dec);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -61,7 +61,6 @@ impl Value {
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::idiom::Idiom;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -10,7 +10,7 @@ use geo_types::{MultiLineString, MultiPoint, MultiPolygon};
|
||||||
use revision::revisioned;
|
use revision::revisioned;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
use std::iter::{once, FromIterator};
|
use std::iter::once;
|
||||||
use std::{fmt, hash};
|
use std::{fmt, hash};
|
||||||
|
|
||||||
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Geometry";
|
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Geometry";
|
||||||
|
|
|
@ -10,7 +10,6 @@ use std::hash;
|
||||||
use std::iter::Product;
|
use std::iter::Product;
|
||||||
use std::iter::Sum;
|
use std::iter::Sum;
|
||||||
use std::ops::{self, Add, Div, Mul, Neg, Rem, Sub};
|
use std::ops::{self, Add, Div, Mul, Neg, Rem, Sub};
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Number";
|
pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Number";
|
||||||
|
|
||||||
|
|
|
@ -2,9 +2,9 @@ use bincode::Options;
|
||||||
use bincode::Result;
|
use bincode::Result;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
pub fn serialize<T: ?Sized>(value: &T) -> Result<Vec<u8>>
|
pub fn serialize<T>(value: &T) -> Result<Vec<u8>>
|
||||||
where
|
where
|
||||||
T: Serialize,
|
T: ?Sized + Serialize,
|
||||||
{
|
{
|
||||||
bincode::options()
|
bincode::options()
|
||||||
.with_no_limit()
|
.with_no_limit()
|
||||||
|
|
|
@ -119,7 +119,7 @@ mod tests {
|
||||||
name: Ident::from("test"),
|
name: Ident::from("test"),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
});
|
});
|
||||||
let enc: Vec<u8> = stm.try_into().unwrap();
|
let enc: Vec<u8> = stm.into();
|
||||||
assert_eq!(12, enc.len());
|
assert_eq!(12, enc.len());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -51,7 +51,6 @@ impl fmt::Display for SleepStatement {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::dbs::test::mock;
|
use crate::dbs::test::mock;
|
||||||
use crate::sql::{Duration, Value};
|
|
||||||
use std::time::{self, SystemTime};
|
use std::time::{self, SystemTime};
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
|
|
|
@ -58,7 +58,6 @@ impl Value {
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::idiom::Idiom;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -62,7 +62,6 @@ impl Value {
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::idiom::Idiom;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -247,9 +247,7 @@ mod tests {
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::dbs::test::mock;
|
use crate::dbs::test::mock;
|
||||||
use crate::sql::id::Id;
|
|
||||||
use crate::sql::idiom::Idiom;
|
use crate::sql::idiom::Idiom;
|
||||||
use crate::sql::thing::Thing;
|
|
||||||
use crate::syn::Parse;
|
use crate::syn::Parse;
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
|
|
|
@ -24,9 +24,9 @@ impl ser::Serializer for Serializer {
|
||||||
const EXPECTED: &'static str = "a struct `DateTime<Utc>`";
|
const EXPECTED: &'static str = "a struct `DateTime<Utc>`";
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||||
where
|
where
|
||||||
T: Display,
|
T: Display + ?Sized,
|
||||||
{
|
{
|
||||||
value.to_string().parse().map_err(Error::custom)
|
value.to_string().parse().map_err(Error::custom)
|
||||||
}
|
}
|
||||||
|
@ -48,7 +48,6 @@ impl ser::Serializer for Serializer {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use ser::Serializer as _;
|
use ser::Serializer as _;
|
||||||
use serde::Serialize;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn now() {
|
fn now() {
|
||||||
|
|
|
@ -66,7 +66,6 @@ impl ser::Serializer for Serializer {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::sql::value::serde::ser::Serializer;
|
use crate::sql::value::serde::ser::Serializer;
|
||||||
use serde::Serialize;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn distance_euclidean() {
|
fn distance_euclidean() {
|
||||||
|
|
|
@ -262,9 +262,9 @@ trait Serializer: Sized {
|
||||||
Err(Self::unexpected("struct variant", Some(name)))
|
Err(Self::unexpected("struct variant", Some(name)))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||||
where
|
where
|
||||||
T: Display,
|
T: Display + ?Sized,
|
||||||
{
|
{
|
||||||
self.serialize_str(&value.to_string())
|
self.serialize_str(&value.to_string())
|
||||||
}
|
}
|
||||||
|
@ -487,9 +487,9 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||||
where
|
where
|
||||||
T: Display,
|
T: Display + ?Sized,
|
||||||
{
|
{
|
||||||
self.0.collect_str(value)
|
self.0.collect_str(value)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -53,7 +53,6 @@ impl ser::Serializer for Serializer {
 mod tests {
 	use super::*;
 	use ser::Serializer as _;
-	use serde::Serialize;
 	use std::borrow::Cow;
 
 	#[test]

@@ -72,7 +72,6 @@ impl serde::ser::SerializeTuple for SerializeCompactUuidTuple {
 #[cfg(test)]
 mod tests {
 	use super::*;
-	use serde::Serialize;
 
 	#[test]
 	fn nil() {

@@ -50,17 +50,17 @@ impl serde::ser::SerializeMap for SerializeValueMap {
 	type Ok = BTreeMap<String, Value>;
 	type Error = Error;
 
-	fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
+	fn serialize_key<T>(&mut self, key: &T) -> Result<(), Self::Error>
 	where
-		T: Serialize,
+		T: Serialize + ?Sized,
 	{
 		self.next_key = Some(key.serialize(ser::string::Serializer.wrap())?);
 		Ok(())
 	}
 
-	fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
+	fn serialize_value<T>(&mut self, value: &T) -> Result<(), Self::Error>
 	where
-		T: Serialize,
+		T: Serialize + ?Sized,
 	{
 		match self.next_key.take() {
 			Some(key) => {

@@ -612,9 +612,7 @@ impl serde::ser::SerializeStructVariant for SerializeStructVariant {
 #[cfg(test)]
 mod tests {
 	use super::*;
-	use crate::sql;
 	use crate::sql::block::Entry;
-	use crate::sql::constant::Constant;
 	use crate::sql::statements::CreateStatement;
 	use crate::sql::*;
 	use ::serde::Serialize;

@@ -27,7 +27,6 @@ use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::fmt::{self, Display, Formatter, Write};
 use std::ops::Deref;
-use std::str::FromStr;
 
 pub(crate) const TOKEN: &str = "$surrealdb::private::sql::Value";
 

@@ -2785,7 +2784,6 @@ impl TryNeg for Value {
 mod tests {
 
 	use super::*;
-	use crate::sql::uuid::Uuid;
 	use crate::syn::Parse;
 
 	#[test]

@@ -61,7 +61,6 @@ impl Value {
 mod tests {
 
 	use super::*;
-	use crate::sql::idiom::Idiom;
 	use crate::syn::Parse;
 
 	#[test]

@@ -84,7 +84,7 @@ pub fn future(i: &str) -> IResult<&str, Future> {
 mod tests {
 
 	use super::*;
-	use crate::sql::{Block, Kind, Number, Operator, Value};
+	use crate::sql::{Block, Kind, Number};
 
 	#[test]
 	fn cast_int() {

@@ -8,11 +8,9 @@ use super::super::super::{
 #[cfg(feature = "sql2")]
 use crate::sql::Ident;
 use crate::sql::{filter::Filter, statements::DefineAnalyzerStatement, Strand, Tokenizer};
-#[cfg(feature = "sql2")]
-use nom::bytes::complete::tag;
 use nom::{branch::alt, bytes::complete::tag_no_case, combinator::cut, multi::many0};
 #[cfg(feature = "sql2")]
-use nom::{combinator::opt, sequence::tuple};
+use nom::{bytes::complete::tag, combinator::opt, sequence::tuple};
 
 pub fn analyzer(i: &str) -> IResult<&str, DefineAnalyzerStatement> {
 	let (i, _) = tag_no_case("ANALYZER")(i)?;

@@ -81,8 +81,8 @@ mod tests {
 		let out = res.unwrap().1;
 		assert_eq!(format!("DEFINE {sql}"), format!("{}", out));
 
-		let serialized: Vec<u8> = (&out).try_into().unwrap();
-		let deserialized = DefineDatabaseStatement::try_from(&serialized).unwrap();
+		let serialized: Vec<u8> = (&out).into();
+		let deserialized = DefineDatabaseStatement::from(&serialized);
 		assert_eq!(out, deserialized);
 	}
 }
@@ -115,6 +115,6 @@ mod tests {
 		let sql = "EVENT test ON test";
 		let res = event(sql);
 
-		assert_eq!(res.is_err(), true)
+		assert!(res.is_err())
 	}
 }
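This hunk, and the identical ones for the index, param and token parsers below, replace `assert_eq!(x, true)` with `assert!(x)`; comparing a boolean against a literal is what clippy's `bool_assert_comparison` lint flags (my assumption about the exact warning fixed here). A tiny sketch with a hypothetical helper:

fn parse_fails(input: &str) -> bool {
	// Hypothetical stand-in for `event(sql).is_err()` and friends.
	input.is_empty()
}

fn main() {
	// Flagged form: `assert_eq!(parse_fails(""), true);`
	// Preferred form: assert the boolean directly.
	assert!(parse_fails(""));
}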
@@ -113,8 +113,6 @@ mod tests {
 	use crate::sql::index::{Distance, Distance1, MTreeParams, SearchParams, VectorType};
 	use crate::sql::Ident;
 	use crate::sql::Idiom;
-	use crate::sql::Idioms;
-	use crate::sql::Index;
 	use crate::sql::Part;
 	use crate::sql::Scoring;
 

@@ -264,6 +262,6 @@ mod tests {
 		let sql = "INDEX test ON test";
 		let res = index(sql);
 
-		assert_eq!(res.is_err(), true)
+		assert!(res.is_err())
 	}
 }

@@ -108,6 +108,6 @@ mod tests {
 		let sql = "PARAM test";
 		let res = param(sql);
 
-		assert_eq!(res.is_err(), true)
+		assert!(res.is_err())
 	}
 }

@@ -145,8 +145,8 @@ mod tests {
 		let out = res.unwrap().1;
 		assert_eq!(format!("DEFINE {sql}"), format!("{}", out));
 
-		let serialized: Vec<u8> = (&out).try_into().unwrap();
-		let deserialized = DefineTableStatement::try_from(&serialized).unwrap();
+		let serialized: Vec<u8> = (&out).into();
+		let deserialized = DefineTableStatement::from(&serialized);
 		assert_eq!(out, deserialized);
 	}
 }

@@ -121,6 +121,6 @@ mod tests {
 		let sql = "TOKEN test ON test";
 		let res = token(sql);
 
-		assert_eq!(res.is_err(), true)
+		assert!(res.is_err())
 	}
 }

@@ -329,7 +329,7 @@ mod tests {
 			#[cfg(feature = "sql2")]
 			if_exists: false,
 		});
-		let enc: Vec<u8> = stm.try_into().unwrap();
+		let enc: Vec<u8> = stm.into();
 		#[cfg(not(feature = "sql2"))]
 		assert_eq!(9, enc.len());
 		#[cfg(feature = "sql2")]

@@ -455,7 +455,7 @@ impl Parser<'_> {
 
 #[cfg(test)]
 mod tests {
-	use crate::sql::{Dir, Expression, Id, Number, Object, Param, Strand, Table, Thing};
+	use crate::sql::{Expression, Id, Number, Object, Param, Strand, Thing};
 	use crate::syn::Parse;
 
 	use super::*;

@@ -275,8 +275,7 @@ mod tests {
 	use super::*;
 	use crate::sql::array::Array;
 	use crate::sql::object::Object;
-	use crate::sql::value::Value;
-	use crate::syn::Parse;
+	use crate::syn::Parse as _;
 
 	fn thing(i: &str) -> ParseResult<Thing> {
 		let mut parser = Parser::new(i.as_bytes());
@@ -94,11 +94,11 @@ fn bench_hash<K: Hash + Eq + Clone, V: Clone>(
 	samples: &[(K, V)],
 ) {
 	group.bench_function("hash_insert", |b| {
-		b.iter(|| bench_hash_insert(&samples));
+		b.iter(|| bench_hash_insert(samples));
 	});
 	group.bench_function("hash_get", |b| {
-		let map = build_hash(&samples);
-		b.iter(|| bench_hash_get(&samples, &map));
+		let map = build_hash(samples);
+		b.iter(|| bench_hash_get(samples, &map));
 	});
 }
 

@@ -107,12 +107,12 @@ fn bench_trie<K: TrieKey + Clone, V: Clone>(
 	samples: &[(K, V)],
 ) {
 	group.bench_function("trie_insert", |b| {
-		b.iter(|| bench_trie_insert(&samples));
+		b.iter(|| bench_trie_insert(samples));
 	});
 
 	group.bench_function("trie_get", |b| {
-		let map = build_trie(&samples);
-		b.iter(|| bench_trie_get(&samples, &map));
+		let map = build_trie(samples);
+		b.iter(|| bench_trie_get(samples, &map));
 	});
 }
 

@@ -121,12 +121,12 @@ fn bench_btree<K: Eq + Ord + Clone, V: Clone>(
 	samples: &[(K, V)],
 ) {
 	group.bench_function("btree_insert", |b| {
-		b.iter(|| bench_btree_insert(&samples));
+		b.iter(|| bench_btree_insert(samples));
 	});
 
 	group.bench_function("btree_get", |b| {
-		let map = build_btree(&samples);
-		b.iter(|| bench_btree_get(&samples, &map));
+		let map = build_btree(samples);
+		b.iter(|| bench_btree_get(samples, &map));
 	});
 }
 
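In the three benchmark hunks above, `samples` is already a slice reference (`&[(K, V)]`), so wrapping it in another `&` creates a `&&[(K, V)]` that the compiler immediately dereferences again; dropping the extra borrow is the usual fix for clippy's `needless_borrow` warning (assumed to be the lint involved). A small sketch with hypothetical helper names:

// Hypothetical helper mirroring the benchmark shape: it only needs a slice.
fn sum_values(samples: &[(String, u64)]) -> u64 {
	samples.iter().map(|(_, v)| v).sum()
}

fn run(samples: &[(String, u64)]) -> u64 {
	// `samples` is already `&[(String, u64)]`; writing `&samples` would add a
	// needless extra borrow that gets auto-dereferenced anyway.
	sum_values(samples)
}

fn main() {
	let data = vec![("a".to_string(), 1), ("b".to_string(), 2)];
	assert_eq!(run(&data), 3);
}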
@@ -148,11 +148,11 @@ async fn knn_lookup_objects(
 	let t = mtree();
 	let mut tx = ds.transaction(Read, Optimistic).await.unwrap();
 	let c = TreeCache::new(0, TreeNodeProvider::Debug, cache_size);
-	let mut s = TreeStore::new(TreeNodeProvider::Debug, c, Read).await;
+	let s = TreeStore::new(TreeNodeProvider::Debug, c, Read).await;
 	for _ in 0..samples_size {
 		let object = random_object(&mut rng, vector_size).into();
 		// Insert the sample
-		t.knn_search(&mut tx, &mut s, &object, knn).await.unwrap();
+		t.knn_search(&mut tx, &s, &object, knn).await.unwrap();
 	}
 	tx.rollback_with_panic();
 }
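Once the search only takes a shared reference to the tree store, the `mut` binding and the `&mut s` argument become dead weight and rustc reports `unused_mut`. A generic sketch of the same situation, with illustrative names rather than the SurrealDB API:

// A read-only lookup only needs a shared reference.
fn lookup(store: &[u32], key: u32) -> bool {
	store.contains(&key)
}

fn main() {
	// `let mut store` would still compile here, but the binding is never
	// mutated, so the compiler warns; a plain binding is enough.
	let store = vec![1, 2, 3];
	assert!(lookup(&store, 2));
}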
@@ -61,7 +61,7 @@ mod tests {
 		for duration in durations {
 			let string = format!("{duration:?}");
 			let parsed = super::duration_from_str(&string)
-				.expect(&format!("Duration {string} failed to parse"));
+				.unwrap_or_else(|| panic!("Duration {string} failed to parse"));
 			assert_eq!(duration, parsed, "Duration {string} not parsed correctly");
 		}
 	}
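`.expect(&format!(...))` builds the panic message even on the success path; `unwrap_or_else(|| panic!(...))` formats it only when parsing actually fails, which is what clippy's `expect_fun_call` lint suggests (again an assumption about the exact trigger). A minimal sketch:

fn parse_port(input: &str) -> u16 {
	// The closure runs only on the error path, so the message is formatted lazily.
	input.parse::<u16>().unwrap_or_else(|_| panic!("'{input}' is not a valid port"))
}

fn main() {
	assert_eq!(parse_port("8080"), 8080);
}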
@@ -925,7 +925,7 @@ async fn changefeed() {
 	};
 	assert_eq!(array.len(), 5);
 	// DEFINE TABLE
-	let a = array.get(0).unwrap();
+	let a = array.first().unwrap();
 	let Value::Object(a) = a else {
 		unreachable!()
 	};
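`array.get(0)` and `array.first()` return the same `Option<&T>`; clippy's `get_first` lint (assumed to be the warning here and in the similar hunks below) prefers `first()` because it states the intent directly. For example:

fn main() {
	let values = vec![10, 20, 30];

	// Flagged: `values.get(0).unwrap()`.
	// Preferred: `first()` makes the "element zero" intent explicit.
	let first = values.first().unwrap();
	assert_eq!(*first, 10);
}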
@@ -126,7 +126,7 @@ async fn bootstrap_removes_unreachable_node_live_queries() -> Result<(), Error>
 	let res = tx.scan_ndlq(valid_data.node_id.as_ref().unwrap(), 1000).await.unwrap();
 	tx.commit().await.unwrap();
 	assert_eq!(res.len(), 1, "We expect the node to be available");
-	let tested_entry = res.get(0).unwrap();
+	let tested_entry = res.first().unwrap();
 	assert_eq!(tested_entry.lq, valid_data.live_query_id.unwrap());
 
 	Ok(())

@@ -175,7 +175,7 @@ async fn bootstrap_removes_unreachable_table_live_queries() -> Result<(), Error>
 	tx.commit().await.unwrap();
 
 	assert_eq!(res.len(), 1, "Expected 1 table live query: {:?}", res);
-	let tested_entry = res.get(0).unwrap();
+	let tested_entry = res.first().unwrap();
 	assert_eq!(tested_entry.lq, valid_data.live_query_id.unwrap());
 	Ok(())
 }

@@ -440,7 +440,7 @@ async fn changefeed_with_ts() -> Result<(), Error> {
 	};
 	assert_eq!(array.len(), 5);
 	// DEFINE TABLE
-	let a = array.get(0).unwrap();
+	let a = array.first().unwrap();
 	let Value::Object(a) = a else {
 		unreachable!()
 	};

@@ -616,7 +616,7 @@ async fn changefeed_with_ts() -> Result<(), Error> {
 	};
 	assert_eq!(array.len(), 4);
 	// UPDATE user:amos
-	let a = array.get(0).unwrap();
+	let a = array.first().unwrap();
 	let Value::Object(a) = a else {
 		unreachable!()
 	};
@@ -209,7 +209,7 @@ fn excessive_cast_chain_depth() -> Result<(), Error> {
 async fn run_queries(
 	sql: &str,
 ) -> Result<
-	impl Iterator<Item = Result<Value, Error>> + ExactSizeIterator + DoubleEndedIterator + 'static,
+	impl ExactSizeIterator<Item = Result<Value, Error>> + DoubleEndedIterator + 'static,
 	Error,
 > {
 	let dbs = new_ds().await?;
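`ExactSizeIterator` and `DoubleEndedIterator` both have `Iterator` as a supertrait, so spelling out `Iterator` separately in the `impl Trait` return type is redundant; the `Item` binding can sit on `ExactSizeIterator` directly, which is presumably the redundancy warning this hunk fixes. A sketch of the same return-type shape with a trivial body:

// The supertrait `Iterator` is implied; its `Item` type is fixed on
// `ExactSizeIterator` itself.
fn numbered() -> impl ExactSizeIterator<Item = (usize, u8)> + DoubleEndedIterator {
	vec![10u8, 20, 30].into_iter().enumerate()
}

fn main() {
	let iter = numbered();
	assert_eq!(iter.len(), 3);
	assert_eq!(iter.rev().next(), Some((2, 30)));
}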
@@ -18,7 +18,7 @@ pub async fn iam_run_case(
 	prepare: &str,
 	test: &str,
 	check: &str,
-	check_expected_result: &Vec<&str>,
+	check_expected_result: &[&str],
 	ds: &Datastore,
 	sess: &Session,
 	should_succeed: bool,
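Taking `&Vec<&str>` forces every caller to hold a `Vec`, while the function only reads the elements; `&[&str]` accepts vectors, arrays and sub-slices alike, which is what clippy's `ptr_arg` lint recommends (assumed to be the warning fixed here). A small sketch:

// Accepting a slice keeps the function flexible: a Vec, an array, or a
// sub-slice all coerce to `&[&str]`.
fn contains_expected(results: &[&str], needle: &str) -> bool {
	results.iter().any(|r| *r == needle)
}

fn main() {
	let from_vec = vec!["ok", "err"];
	assert!(contains_expected(&from_vec, "ok"));
	assert!(contains_expected(&["only"], "only"));
}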
@@ -111,7 +111,7 @@ pub async fn iam_check_cases(
 			println!("* Testing '{test}' for '{level}Actor({role})' on '({ns}, {db})'");
 			let sess = Session::for_level(level.to_owned(), role.to_owned()).with_ns(ns).with_db(db);
 			let expected_result = if *should_succeed {
-				check_results.get(0).unwrap()
+				check_results.first().unwrap()
 			} else {
 				check_results.get(1).unwrap()
 			};

@@ -147,7 +147,7 @@ pub async fn iam_check_cases(
 		let expected_result = if auth_enabled {
 			check_results.get(1).unwrap()
 		} else {
-			check_results.get(0).unwrap()
+			check_results.first().unwrap()
 		};
 		iam_run_case(
 			prepare,

@@ -1163,7 +1163,7 @@ async fn select_with_datetime_value() -> Result<(), Error> {
 		SELECT * FROM test_user WHERE created_at = d'2023-12-25T17:13:01.940183014Z' EXPLAIN;
 		SELECT * FROM test_user WHERE created_at = $now;
 		SELECT * FROM test_user WHERE created_at = d'2023-12-25T17:13:01.940183014Z';";
-	let mut res = dbs.execute(&sql, &ses, None).await?;
+	let mut res = dbs.execute(sql, &ses, None).await?;
 
 	assert_eq!(res.len(), 8);
 	skip_ok(&mut res, 4)?;

@@ -1220,7 +1220,7 @@ async fn select_with_uuid_value() -> Result<(), Error> {
 		SELECT * FROM sessions WHERE sessionUid = u'00ad70db-f435-442e-9012-1cd853102084';
 		SELECT * FROM sessions WHERE sessionUid = $sess.uuid;
 		";
-	let mut res = dbs.execute(&sql, &ses, None).await?;
+	let mut res = dbs.execute(sql, &ses, None).await?;
 
 	assert_eq!(res.len(), 7);
 	skip_ok(&mut res, 3)?;

@@ -278,7 +278,7 @@ mod tests {
 
 	use surrealdb::dbs::Session;
 	use surrealdb::iam::verify::verify_root_creds;
-	use surrealdb::kvs::{Datastore, LockType::*, TransactionType::*};
+	use surrealdb::kvs::{LockType::*, TransactionType::*};
 	use test_log::test;
 	use wiremock::{matchers::method, Mock, MockServer, ResponseTemplate};
 
@@ -1,4 +1,4 @@
-use std::string::ToString;
+use std::{fmt, string::ToString};
 
 #[derive(Debug, Copy, Clone)]
 pub enum Format {

@@ -7,12 +7,12 @@ pub enum Format {
 	Pack,
 }
 
-impl ToString for Format {
-	fn to_string(&self) -> String {
+impl fmt::Display for Format {
+	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 		match self {
-			Self::Json => "json".to_owned(),
-			Self::Cbor => "cbor".to_owned(),
-			Self::Pack => "msgpack".to_owned(),
+			Self::Json => "json".fmt(f),
+			Self::Cbor => "cbor".fmt(f),
+			Self::Pack => "msgpack".fmt(f),
 		}
 	}
 }
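Implementing `Display` instead of `ToString` is the idiomatic fix: the standard library's blanket `impl<T: Display + ?Sized> ToString for T` keeps `to_string()` working, and the type additionally becomes usable with `format!`/`println!`. A sketch of the same pattern on a stand-in enum:

use std::fmt;

#[derive(Debug, Copy, Clone)]
enum Encoding {
	Json,
	Cbor,
}

// Display gives `to_string()` for free through the standard blanket impl.
impl fmt::Display for Encoding {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		match self {
			Self::Json => "json".fmt(f),
			Self::Cbor => "cbor".fmt(f),
		}
	}
}

fn main() {
	assert_eq!(Encoding::Json.to_string(), "json");
	println!("negotiated encoding: {}", Encoding::Cbor);
}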