Fix clippy warnings in tests (#3101)

Authored by Mees Delzenne on 2023-12-09 21:25:50 +01:00; committed by GitHub
parent c2d03c49db
commit 01a07b7c91
13 changed files with 30 additions and 34 deletions


@@ -44,15 +44,15 @@ pub(super) async fn init(target: &str) {
 }
 pub(super) fn benchmark_group(c: &mut Criterion, target: String) {
-    let num_ops = super::NUM_OPS.clone();
+    let num_ops = *super::NUM_OPS;
     let runtime = super::rt();
     runtime.block_on(async { init(&target).await });
     let mut group = c.benchmark_group(target);
-    group.measurement_time(Duration::from_secs(super::DURATION_SECS.clone()));
-    group.sample_size(super::SAMPLE_SIZE.clone());
+    group.measurement_time(Duration::from_secs(*super::DURATION_SECS));
+    group.sample_size(*super::SAMPLE_SIZE);
     group.throughput(Throughput::Elements(1));
     group.bench_function("reads", |b| {
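For readers outside the diff, the lint behind this hunk is most likely clippy::clone_on_copy: the benchmark settings are plain usize values behind lazily initialised statics, so cloning them is redundant and a dereference is clearer. A minimal sketch of the pattern, assuming a std::sync::LazyLock static; the names mirror the statics above but are illustrative only, not SurrealDB's actual items:

use std::sync::LazyLock;

// Illustrative stand-ins for the benchmark settings touched above
// (assumed shape; not the crate's actual statics).
static NUM_OPS: LazyLock<usize> = LazyLock::new(|| 1_000);
static SAMPLE_SIZE: LazyLock<usize> = LazyLock::new(|| 10);

fn main() {
    // Before: `NUM_OPS.clone()` clones a usize through the deref,
    // which clippy::clone_on_copy flags as redundant.
    // After: usize is Copy, so dereferencing the static is enough.
    let num_ops = *NUM_OPS;
    let sample_size = *SAMPLE_SIZE;
    println!("{num_ops} ops, {sample_size} samples");
}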


@@ -13,7 +13,7 @@ impl Create {
     pub fn new(runtime: &'static Runtime) -> Self {
         Self {
             runtime,
-            table_name: format!("table_{}", Id::rand().to_string()),
+            table_name: format!("table_{}", Id::rand()),
         }
     }
 }
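The `format!("table_{}", Id::rand().to_string())` changes, repeated in the Create and Read helpers below, are the clippy::to_string_in_format_args pattern: `format!` already formats its argument via Display, so the intermediate `to_string()` only adds an extra String allocation. A self-contained sketch with a hypothetical Display-implementing Id type, not SurrealDB's:

use std::fmt;

// Hypothetical id type standing in for the crate's Id; it only needs Display.
struct Id(u64);

impl fmt::Display for Id {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "id{}", self.0)
    }
}

fn main() {
    let id = Id(42);
    // Before (flagged by clippy::to_string_in_format_args):
    //     let table_name = format!("table_{}", id.to_string());
    // After: format! calls Display directly, avoiding the intermediate String.
    let table_name = format!("table_{}", id);
    assert_eq!(table_name, "table_id42");
    println!("{table_name}");
}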


@@ -32,8 +32,6 @@ pub(super) fn bench_routine<R>(
 {
     // Run the runtime and return the duration, accounting for the number of operations on each run
     b.iter_custom(|iters| {
-        let num_ops = num_ops.clone();
         // Total time spent running the actual benchmark run for all iterations
         let mut total = std::time::Duration::from_secs(0);
         let session = Session::owner().with_ns("test").with_db("test");


@@ -13,7 +13,7 @@ impl Read {
     pub fn new(runtime: &'static Runtime) -> Self {
         Self {
             runtime,
-            table_name: format!("table_{}", Id::rand().to_string()),
+            table_name: format!("table_{}", Id::rand()),
         }
     }
 }


@@ -19,7 +19,7 @@ static RUNTIME: OnceLock<Runtime> = OnceLock::new();
 fn rt() -> &'static Runtime {
     RUNTIME.get_or_init(|| {
         tokio::runtime::Builder::new_multi_thread()
-            .worker_threads(WORKER_THREADS.clone())
+            .worker_threads(*WORKER_THREADS)
            .enable_all()
            .build()
            .unwrap()


@@ -48,15 +48,15 @@ pub(super) async fn init(target: &str) {
 }
 pub(super) fn benchmark_group(c: &mut Criterion, target: String) {
-    let num_ops = super::NUM_OPS.clone();
+    let num_ops = *super::NUM_OPS;
     let runtime = super::rt();
     runtime.block_on(async { init(&target).await });
     let mut group = c.benchmark_group(target);
-    group.measurement_time(Duration::from_secs(super::DURATION_SECS.clone()));
-    group.sample_size(super::SAMPLE_SIZE.clone());
+    group.measurement_time(Duration::from_secs(*super::DURATION_SECS));
+    group.sample_size(*super::SAMPLE_SIZE);
     group.throughput(Throughput::Elements(1));
     group.bench_function("reads", |b| {


@@ -12,7 +12,7 @@ impl Create {
     pub fn new(runtime: &'static Runtime) -> Self {
         Self {
             runtime,
-            table_name: format!("table_{}", Id::rand().to_string()),
+            table_name: format!("table_{}", Id::rand()),
         }
     }
 }


@@ -28,17 +28,15 @@ pub(super) fn bench_routine<R>(
 {
     // Run the runtime and return the duration, accounting for the number of operations on each run
     b.iter_custom(|iters| {
-        let num_ops = num_ops.clone();
         // Total time spent running the actual benchmark run for all iterations
         let mut total = std::time::Duration::from_secs(0);
         for _ in 0..iters {
             // Setup
-            routine.setup(db, num_ops.clone());
+            routine.setup(db, num_ops);
             // Run and time the routine
             let now = std::time::Instant::now();
-            routine.run(db, num_ops.clone());
+            routine.run(db, num_ops);
             total += now.elapsed();
             // Cleanup the database
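Both bench_routine hunks drop the clones of num_ops entirely: once the count is a plain usize (which is Copy), it can be passed by value to every setup and run call without cloning per iteration. A reduced sketch of that shape, with a hypothetical Routine trait rather than the benchmark's actual one:

use std::time::{Duration, Instant};

// Hypothetical routine trait mirroring the setup/run shape in the hunk above.
trait Routine {
    fn setup(&self, num_ops: usize);
    fn run(&self, num_ops: usize);
}

struct Noop;

impl Routine for Noop {
    fn setup(&self, _num_ops: usize) {}
    fn run(&self, _num_ops: usize) {}
}

// num_ops is Copy, so it is passed by value on each iteration; no clone() needed.
fn bench_routine<R: Routine>(routine: &R, num_ops: usize, iters: u64) -> Duration {
    let mut total = Duration::from_secs(0);
    for _ in 0..iters {
        routine.setup(num_ops);
        let now = Instant::now();
        routine.run(num_ops);
        total += now.elapsed();
    }
    total
}

fn main() {
    let elapsed = bench_routine(&Noop, 1_000, 10);
    println!("{elapsed:?}");
}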


@@ -12,7 +12,7 @@ impl Read {
     pub fn new(runtime: &'static Runtime) -> Self {
         Self {
             runtime,
-            table_name: format!("table_{}", Id::rand().to_string()),
+            table_name: format!("table_{}", Id::rand()),
         }
     }
 }


@@ -2041,7 +2041,7 @@ mod tests {
         let mut c = 0;
         for (doc_id, obj) in collection.as_ref() {
             {
-                let (s, mut tx) = new_operation(&ds, TreeStoreType::Write).await;
+                let (s, mut tx) = new_operation(ds, TreeStoreType::Write).await;
                 let mut s = s.lock().await;
                 t.insert(&mut tx, &mut s, obj.as_ref().clone(), *doc_id).await?;
                 finish_operation(tx, s, true).await?;
@@ -2049,9 +2049,9 @@ mod tests {
             }
             c += 1;
             {
-                let (s, mut tx) = new_operation(&ds, TreeStoreType::Traversal).await;
+                let (s, mut tx) = new_operation(ds, TreeStoreType::Traversal).await;
                 let mut s = s.lock().await;
-                let p = check_tree_properties(&mut tx, &mut s, &t).await?;
+                let p = check_tree_properties(&mut tx, &mut s, t).await?;
                 assert_eq!(p.doc_count, c);
             }
         }
@@ -2065,7 +2065,7 @@ mod tests {
     ) -> Result<HashMap<DocId, SharedVector>, Error> {
         let mut map = HashMap::with_capacity(collection.as_ref().len());
         {
-            let (s, mut tx) = new_operation(&ds, TreeStoreType::Write).await;
+            let (s, mut tx) = new_operation(ds, TreeStoreType::Write).await;
             let mut s = s.lock().await;
             for (doc_id, obj) in collection.as_ref() {
                 t.insert(&mut tx, &mut s, obj.as_ref().clone(), *doc_id).await?;
@@ -2074,9 +2074,9 @@ mod tests {
             finish_operation(tx, s, true).await?;
         }
         {
-            let (s, mut tx) = new_operation(&ds, TreeStoreType::Traversal).await;
+            let (s, mut tx) = new_operation(ds, TreeStoreType::Traversal).await;
             let mut s = s.lock().await;
-            check_tree_properties(&mut tx, &mut s, &t).await?;
+            check_tree_properties(&mut tx, &mut s, t).await?;
         }
         Ok(map)
     }
@@ -2108,13 +2108,13 @@ mod tests {
            {
                let (s, mut tx) = new_operation(ds, TreeStoreType::Traversal).await;
                let mut s = s.lock().await;
-                check_tree_properties(&mut tx, &mut s, &t).await?;
+                check_tree_properties(&mut tx, &mut s, t).await?;
            }
        }
-        let (s, mut tx) = new_operation(&ds, TreeStoreType::Traversal).await;
+        let (s, mut tx) = new_operation(ds, TreeStoreType::Traversal).await;
        let mut s = s.lock().await;
-        check_tree_properties(&mut tx, &mut s, &t).await?.check(0, 0, None, None, 0, 0);
+        check_tree_properties(&mut tx, &mut s, t).await?.check(0, 0, None, None, 0, 0);
        Ok(())
    }
@@ -2123,7 +2123,7 @@ mod tests {
        t: &mut MTree,
        collection: &TestCollection,
    ) -> Result<(), Error> {
-        let (s, mut tx) = new_operation(&ds, TreeStoreType::Read).await;
+        let (s, mut tx) = new_operation(ds, TreeStoreType::Read).await;
        let mut s = s.lock().await;
        let max_knn = 20.max(collection.as_ref().len());
        for (doc_id, obj) in collection.as_ref() {
@@ -2162,9 +2162,9 @@ mod tests {
        t: &mut MTree,
        map: &HashMap<DocId, SharedVector>,
    ) -> Result<(), Error> {
-        let (s, mut tx) = new_operation(&ds, TreeStoreType::Read).await;
+        let (s, mut tx) = new_operation(ds, TreeStoreType::Read).await;
        let mut s = s.lock().await;
-        for (_, obj) in map {
+        for obj in map.values() {
            let res = t.knn_search(&mut tx, &mut s, obj, map.len()).await?;
            assert_eq!(
                map.len(),
@@ -2234,7 +2234,7 @@ mod tests {
    impl AsRef<Vec<(DocId, SharedVector)>> for TestCollection {
        fn as_ref(&self) -> &Vec<(DocId, SharedVector)> {
            match self {
-                TestCollection::Unique(c) | TestCollection::NonUnique(c) => &c,
+                TestCollection::Unique(c) | TestCollection::NonUnique(c) => c,
            }
        }
    }
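The mtree test changes above combine two lints: clippy::needless_borrow, where `&ds` or `&t` is passed to a parameter that already receives a reference (the extra `&` just re-borrows), and clippy::for_kv_map, where a map is iterated as `(_, value)` pairs although only the values are used. A small standalone sketch of both, using a plain HashMap rather than the index types in the tests:

use std::collections::HashMap;

// Accepts a shared reference; callers that already hold a `&HashMap` should
// pass it as-is rather than re-borrowing it with an extra `&`.
fn doc_count(map: &HashMap<u64, Vec<f32>>) -> usize {
    map.len()
}

fn main() {
    let mut map: HashMap<u64, Vec<f32>> = HashMap::new();
    map.insert(1, vec![1.0, 2.0]);
    map.insert(2, vec![3.0]);

    let map_ref = &map;
    // Before: doc_count(&map_ref) -- clippy::needless_borrow, map_ref is already a reference.
    let n = doc_count(map_ref);

    // Before: for (_, obj) in &map { ... } -- clippy::for_kv_map when the key is ignored.
    // After: iterate the values directly.
    let mut dims = 0;
    for obj in map.values() {
        dims += obj.len();
    }
    println!("{n} docs, {dims} values");
}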


@@ -14,7 +14,7 @@ async fn write_scan_ndlq() {
     let tb = "table";
     let lq =
         sql::Uuid::from(uuid::Uuid::parse_str("4c3dca4b-ec08-4e3e-b23a-6b03b5cdc3fc").unwrap());
-    tx.putc_ndlq(nd, lq.clone().0, ns, db, tb, None).await.unwrap();
+    tx.putc_ndlq(nd, lq.0, ns, db, tb, None).await.unwrap();
     tx.commit().await.unwrap();
     // Verify scan


@@ -19,7 +19,7 @@ async fn archive_lv_for_node_archives() {
     tx.putc(key, table, None).await.unwrap();
     let mut stm = LiveStatement::from_source_parts(Fields::all(), Table(table.into()), None, None);
-    stm.id = lv_id.clone();
+    stm.id = lv_id;
     tx.putc_tblq(namespace, database, table, stm, None).await.unwrap();
     let this_node_id = crate::sql::uuid::Uuid::from(Uuid::from_bytes([
@@ -33,7 +33,7 @@ async fn archive_lv_for_node_archives() {
     let mut tx = test.db.transaction(Write, Optimistic).await.unwrap();
     let results = test
         .db
-        .archive_lv_for_node(&mut tx, &sql::uuid::Uuid(node_id), this_node_id.clone())
+        .archive_lv_for_node(&mut tx, &sql::uuid::Uuid(node_id), this_node_id)
         .await
         .unwrap();
     assert_eq!(results.len(), 1);
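The clone() removals in these live-query tests (here and in the final hunk below) share one shape: the cloned value is a small Uuid newtype that is either Copy or not used again afterwards, so the clone buys nothing and the value can be moved or copied directly, the pattern lints such as clippy::redundant_clone and clippy::clone_on_copy look for. A minimal sketch with a hypothetical Copy newtype, not the crate's sql::Uuid:

// Hypothetical newtype wrapper standing in for a small Copy id type.
#[derive(Clone, Copy, PartialEq, Debug)]
struct LiveId(u128);

#[derive(Debug)]
struct LiveStatement {
    id: LiveId,
}

fn main() {
    let live_id = LiveId(42);
    // Before: `id: live_id.clone()` -- the clone is redundant for a Copy type.
    // After: the value is copied (or moved) directly into the struct.
    let stm = LiveStatement { id: live_id };
    assert_eq!(stm.id, live_id);
    println!("{stm:?}");
}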


@@ -20,7 +20,7 @@ async fn write_scan_tblq() {
     let tb = "table";
     let live_id = sql::Uuid::from(live_id);
     let live_stm = LiveStatement {
-        id: live_id.clone(),
+        id: live_id,
         node: sql::Uuid::from(node_id),
         expr: Default::default(),
         what: Default::default(),