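//! Criterion benchmarks for OurDB.
//!
//! Covers single-record set/get/update/delete, history retrieval,
//! large-value round trips, and a simulated mixed workload. Each benchmark
//! opens a fresh database (`reset: Some(true)`) under a temporary directory.
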
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use ourdb::{OurDB, OurDBConfig, OurDBSetArgs};
use std::cell::RefCell;
use tempfile::tempdir;

fn criterion_benchmark(c: &mut Criterion) {
    // Create a temporary directory for benchmarks
    let temp_dir = tempdir().expect("Failed to create temp directory");
    let db_path = temp_dir.path().to_path_buf();

    // Benchmark set operation (insertion)
    c.bench_function("set", |b| {
        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true,
            file_size: Some(10 * 1024 * 1024), // 10MB
            keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
            reset: Some(true), // Reset the database for benchmarking
        };

        let mut db = OurDB::new(config).unwrap();
        let test_data = vec![b'X'; 100]; // 100 bytes of data

        b.iter(|| {
            let _ = db.set(OurDBSetArgs {
                id: None,
                data: &test_data,
            }).unwrap();
        });

        db.close().unwrap();
    });

    // Benchmark get operation (retrieval)
    c.bench_function("get", |b| {
        // Setup: Create a database and insert a record
        let setup_config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true,
            file_size: Some(10 * 1024 * 1024),
            keysize: Some(6),
            reset: Some(true), // Reset the database for benchmarking
        };

        let mut db = OurDB::new(setup_config).unwrap();
        let test_data = vec![b'X'; 100];
        let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();

        b.iter(|| {
            // black_box keeps the read from being optimized away
            let _ = black_box(db.get(id).unwrap());
        });

        db.close().unwrap();
    });

    // Benchmark update operation
    c.bench_function("update", |b| {
        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true,
            file_size: Some(10 * 1024 * 1024),
            keysize: Some(6),
            reset: Some(true), // Reset the database for benchmarking
        };

        let mut db = OurDB::new(config).unwrap();
        let test_data = vec![b'X'; 100];
        let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();

        b.iter(|| {
            let _ = db.set(OurDBSetArgs {
                id: Some(id),
                data: &test_data,
            }).unwrap();
        });

        db.close().unwrap();
    });

    // Benchmark delete operation
    c.bench_function("delete", |b| {
        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true,
            file_size: Some(10 * 1024 * 1024),
            keysize: Some(6),
            reset: Some(true), // Reset the database for benchmarking
        };

        // Wrap the database in a RefCell so the setup and measurement closures
        // below can each take a mutable borrow without overlapping.
        let db = RefCell::new(OurDB::new(config).unwrap());

        // Create a test data vector outside the closures
        let test_data = vec![b'X'; 100];

        b.iter_with_setup(
            // Setup: Insert a record before each iteration
            || {
                db.borrow_mut()
                    .set(OurDBSetArgs { id: None, data: &test_data })
                    .unwrap()
            },
            // Benchmark: Delete the record
            |id| {
                db.borrow_mut().delete(id).unwrap();
            },
        );

        db.into_inner().close().unwrap();
    });

    // Benchmark history tracking
    c.bench_function("get_history", |b| {
        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true,
            file_size: Some(10 * 1024 * 1024),
            keysize: Some(6),
            reset: Some(true), // Reset the database for benchmarking
        };

        let mut db = OurDB::new(config).unwrap();
        let test_data = vec![b'X'; 100];

        // Create a record with history
        let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();

        // Update it a few times to create history
        for _ in 0..5 {
            db.set(OurDBSetArgs { id: Some(id), data: &test_data }).unwrap();
        }

        b.iter(|| {
            let _ = black_box(db.get_history(id, 3).unwrap());
        });

        db.close().unwrap();
    });

    // Benchmark large data handling
    c.bench_function("large_data", |b| {
        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true,
            file_size: Some(10 * 1024 * 1024),
            keysize: Some(6),
            reset: Some(true), // Reset the database for benchmarking
        };

        let mut db = OurDB::new(config).unwrap();
        let large_data = vec![b'X'; 10 * 1024]; // 10KB

        b.iter(|| {
            // Full round trip: insert, read back, then delete the large value
            let id = db.set(OurDBSetArgs { id: None, data: &large_data }).unwrap();
            let _ = black_box(db.get(id).unwrap());
            db.delete(id).unwrap();
        });

        db.close().unwrap();
    });

    // Benchmark concurrent operations (simulated)
    c.bench_function("concurrent_ops", |b| {
        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true,
            file_size: Some(10 * 1024 * 1024),
            keysize: Some(6),
            reset: Some(true), // Reset the database for benchmarking
        };

        let mut db = OurDB::new(config).unwrap();
        let test_data = vec![b'X'; 100];

        // Pre-insert some data
        let mut ids = Vec::with_capacity(100);
        for _ in 0..100 {
            let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
            ids.push(id);
        }

        b.iter(|| {
            // Simulate a mixed workload of inserts, reads, and updates
            for i in 0..10 {
                if i % 3 == 0 {
                    // Insert
                    let _ = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
                } else if i % 3 == 1 {
                    // Read
                    let idx = i % ids.len();
                    let _ = black_box(db.get(ids[idx]).unwrap());
                } else {
                    // Update
                    let idx = i % ids.len();
                    db.set(OurDBSetArgs { id: Some(ids[idx]), data: &test_data }).unwrap();
                }
            }
        });

        db.close().unwrap();
    });
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);