...
This commit is contained in:
parent 0051754c65
commit e3ec26a6ef
140  herodb/Cargo.lock  generated
@ -152,12 +152,6 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.9.0"
|
||||
@ -200,12 +194,6 @@ version = "3.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.10.1"
|
||||
@ -320,21 +308,6 @@ dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-epoch"
|
||||
version = "0.9.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
|
||||
|
||||
[[package]]
|
||||
name = "crunchy"
|
||||
version = "0.2.3"
|
||||
@ -475,16 +448,6 @@ dependencies = [
|
||||
"percent-encoding",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fs2"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "futures-channel"
|
||||
version = "0.3.31"
|
||||
@ -538,15 +501,6 @@ dependencies = [
|
||||
"slab",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fxhash"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "generic-array"
|
||||
version = "0.14.7"
|
||||
@ -659,13 +613,13 @@ dependencies = [
|
||||
"brotli",
|
||||
"chrono",
|
||||
"lazy_static",
|
||||
"ourdb",
|
||||
"paste",
|
||||
"poem",
|
||||
"poem-openapi",
|
||||
"rhai",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sled",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
@ -923,7 +877,7 @@ version = "0.27.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"bitflags",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
]
|
||||
@ -968,14 +922,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.11.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
|
||||
name = "ourdb"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"instant",
|
||||
"lock_api",
|
||||
"parking_lot_core 0.8.6",
|
||||
"crc32fast",
|
||||
"log",
|
||||
"rand",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -985,21 +938,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
|
||||
dependencies = [
|
||||
"lock_api",
|
||||
"parking_lot_core 0.9.10",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"instant",
|
||||
"libc",
|
||||
"redox_syscall 0.2.16",
|
||||
"smallvec",
|
||||
"winapi",
|
||||
"parking_lot_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1010,7 +949,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall 0.5.10",
|
||||
"redox_syscall",
|
||||
"smallvec",
|
||||
"windows-targets",
|
||||
]
|
||||
@ -1056,7 +995,7 @@ dependencies = [
|
||||
"mime",
|
||||
"multer",
|
||||
"nix",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"poem-derive",
|
||||
@ -1259,22 +1198,13 @@ dependencies = [
|
||||
"getrandom 0.2.15",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1322,7 +1252,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce4d759a4729a655ddfdbb3ff6e77fb9eadd902dae12319455557796e435d2a6"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"bitflags 2.9.0",
|
||||
"bitflags",
|
||||
"instant",
|
||||
"num-traits",
|
||||
"once_cell",
|
||||
@ -1364,7 +1294,7 @@ version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"bitflags",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
@ -1498,22 +1428,6 @@ dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sled"
|
||||
version = "0.34.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
"fs2",
|
||||
"fxhash",
|
||||
"libc",
|
||||
"log",
|
||||
"parking_lot 0.11.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.14.0"
|
||||
@ -1676,7 +1590,7 @@ dependencies = [
|
||||
"bytes",
|
||||
"libc",
|
||||
"mio",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"socket2",
|
||||
@ -1931,28 +1845,6 @@ version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68ce1ab1f8c62655ebe1350f589c61e505cf94d385bc6a12899442d9081e71fd"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.61.0"
|
||||
@ -2109,7 +2001,7 @@ version = "0.39.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
herodb/Cargo.toml
@@ -2,12 +2,12 @@
 name = "herodb"
 version = "0.1.0"
 edition = "2024"
-description = "A database library built on top of sled with model support"
+description = "A database library built on top of ourdb with model support"
 license = "MIT"
 authors = ["HeroCode Team"]

 [dependencies]
-sled = "0.34.7"
+ourdb = { path = "../ourdb" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 thiserror = "1.0"
@@ -31,6 +31,10 @@ path = "examples/rhai_demo.rs"
 name = "business_models_demo"
 path = "examples/business_models_demo.rs"

+[[example]]
+name = "ourdb_example"
+path = "examples/ourdb_example.rs"
+
 [[bin]]
 name = "dbexample_prod"
 path = "src/cmd/dbexample_prod/main.rs"
80  herodb/examples/ourdb_example.rs  Normal file
@@ -0,0 +1,80 @@
use herodb::db::{DB, DBBuilder, Model};
use herodb::models::biz::{Product, ProductBuilder, ProductType, ProductStatus, Currency, CurrencyBuilder};
use chrono::Utc;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("OurDB Backend Example");
    println!("====================\n");

    // Create a temporary directory for the database
    let db_path = std::env::temp_dir().join("herodb_ourdb_example");
    std::fs::create_dir_all(&db_path)?;

    println!("Creating database at: {}", db_path.display());

    // Create a new database with the Product model registered
    let db = DBBuilder::new(db_path.clone())
        .register_model::<Product>()
        .build()?;

    println!("Database created successfully");

    // Create a currency for pricing
    let usd = CurrencyBuilder::new()
        .id(1) // id is required by the new CurrencyBuilder
        .amount(99.99)
        .currency_code("USD")
        .build()
        .expect("Failed to create currency");

    // Create a product
    let product = ProductBuilder::new()
        .id(1) // We're setting an ID manually for this example
        .name("Test Product")
        .description("A test product for our OurDB example")
        .price(usd)
        .type_(ProductType::Product)
        .category("Test")
        .status(ProductStatus::Available)
        .max_amount(100)
        .validity_days(365)
        .build()
        .expect("Failed to create product");

    println!("\nCreated product: {}", product.name);
    println!("Product ID: {}", product.get_id());

    // Insert the product into the database
    db.set(&product)?;
    println!("Product saved to database");

    // Retrieve the product from the database
    let retrieved_product = db.get::<Product>(product.get_id())?;
    println!("\nRetrieved product from database:");
    println!("  Name: {}", retrieved_product.name);
    println!("  Description: {}", retrieved_product.description);
    println!("  Price: ${} {}", retrieved_product.price.amount, retrieved_product.price.currency_code);

    // Create a product with an auto-incremented ID
    // For this to work, we would need to modify the Product model to support auto-incremented IDs;
    // this is just a conceptual note.
    println!("\nDemonstrating auto-incremented IDs:");
    println!("(Note: This would require additional implementation in the Product model)");

    // Delete the product
    db.delete::<Product>(product.get_id())?;
    println!("\nProduct deleted from database");

    // Try to retrieve the deleted product (should fail)
    match db.get::<Product>(product.get_id()) {
        Ok(_) => println!("Product still exists (unexpected)"),
        Err(e) => println!("Verified deletion: {}", e),
    }

    println!("\nExample completed successfully!");

    // Clean up
    std::fs::remove_dir_all(&db_path)?;
    println!("Cleaned up database directory");

    Ok(())
}
@ -6,7 +6,7 @@ use crate::models::biz::{
|
||||
Invoice, InvoiceBuilder, InvoiceItem, InvoiceItemBuilder, InvoiceStatus, Payment, PaymentStatus,
|
||||
Customer, CustomerBuilder,
|
||||
};
|
||||
use crate::db::base::SledModel;
|
||||
use crate::db::model::Model;
|
||||
|
||||
/// This example demonstrates the business models in action:
|
||||
/// 1. Defining products (2 types of server nodes)
|
||||
@ -41,13 +41,13 @@ fn main() {
|
||||
// Simulate a user buying a product
|
||||
println!("\nSimulating purchase of a Premium Node:");
|
||||
let sale = create_sale(&customer, &premium_node);
|
||||
println!(" - Sale created with ID: {}", sale.id);
|
||||
println!(" - Sale created with ID: {}", sale.get_id());
|
||||
println!(" - Total amount: ${} {}", sale.total_amount.amount, sale.total_amount.currency_code);
|
||||
|
||||
// Generate an invoice
|
||||
println!("\nGenerating invoice:");
|
||||
let invoice = create_invoice(&customer, &sale);
|
||||
println!(" - Invoice created with ID: {}", invoice.id);
|
||||
println!(" - Invoice created with ID: {}", invoice.get_id());
|
||||
println!(" - Total amount: ${} {}", invoice.total_amount.amount, invoice.total_amount.currency_code);
|
||||
println!(" - Due date: {}", invoice.due_date);
|
||||
println!(" - Status: {:?}", invoice.status);
|
||||
@ -198,7 +198,7 @@ fn create_sale(customer: &Customer, product: &Product) -> Sale {
|
||||
let sale_item = SaleItemBuilder::new()
|
||||
.id(1)
|
||||
.sale_id(1)
|
||||
.product_id(product.id as u32)
|
||||
.product_id(product.get_id())
|
||||
.name(product.name.clone())
|
||||
.description(product.description.clone())
|
||||
.comments("Customer requested expedited setup")
|
||||
@ -213,7 +213,7 @@ fn create_sale(customer: &Customer, product: &Product) -> Sale {
|
||||
let sale = SaleBuilder::new()
|
||||
.id(1)
|
||||
.company_id(101) // Assuming company ID 101
|
||||
.customer_id(customer.id)
|
||||
.customer_id(customer.get_id())
|
||||
.buyer_name(customer.name.clone())
|
||||
.buyer_email("contact@techcorp.com") // Example email
|
||||
.currency_code(product.price.currency_code.clone())
|
||||
@ -236,14 +236,14 @@ fn create_invoice(customer: &Customer, sale: &Sale) -> Invoice {
|
||||
.invoice_id(1)
|
||||
.description(format!("Purchase of {}", sale.items[0].name))
|
||||
.amount(sale.total_amount.clone())
|
||||
.sale_id(sale.id)
|
||||
.sale_id(sale.get_id())
|
||||
.build()
|
||||
.expect("Failed to create invoice item");
|
||||
|
||||
// Create the invoice
|
||||
let invoice = InvoiceBuilder::new()
|
||||
.id(1)
|
||||
.customer_id(customer.id)
|
||||
.customer_id(customer.get_id())
|
||||
.currency_code(sale.total_amount.currency_code.clone())
|
||||
.status(InvoiceStatus::Sent)
|
||||
.issue_date(now)
|
||||
|
@ -1,11 +1,12 @@
|
||||
use crate::db::base::*;
|
||||
use bincode;
|
||||
use rhai::{CustomType, EvalAltResult, TypeBuilder};
|
||||
use crate::db::error::{DbError, DbResult};
|
||||
use crate::db::model::Model;
|
||||
use crate::db::store::{DbOperations, OurDbStore};
|
||||
use std::any::TypeId;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt::Debug;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, Mutex, RwLock};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use rhai::{CustomType, EvalAltResult, TypeBuilder};
|
||||
|
||||
/// Represents a single database operation in a transaction
|
||||
#[derive(Debug, Clone)]
|
||||
@ -16,51 +17,10 @@ enum DbOperation {
|
||||
},
|
||||
Delete {
|
||||
model_type: TypeId,
|
||||
id: String,
|
||||
id: u32,
|
||||
},
|
||||
}
|
||||
|
||||
// Trait for type-erased database operations
|
||||
pub trait AnyDbOperations: Send + Sync {
|
||||
fn delete(&self, id: &str) -> SledDBResult<()>;
|
||||
fn get_any(&self, id: &str) -> SledDBResult<Box<dyn std::any::Any>>;
|
||||
fn list_any(&self) -> SledDBResult<Box<dyn std::any::Any>>;
|
||||
fn insert_any(&self, model: &dyn std::any::Any) -> SledDBResult<()>;
|
||||
fn insert_any_raw(&self, serialized: &[u8]) -> SledDBResult<()>;
|
||||
}
|
||||
|
||||
// Implementation of AnyDbOperations for any SledDB<T>
|
||||
impl<T: SledModel> AnyDbOperations for SledDB<T> {
|
||||
fn delete(&self, id: &str) -> SledDBResult<()> {
|
||||
self.delete(id)
|
||||
}
|
||||
|
||||
fn get_any(&self, id: &str) -> SledDBResult<Box<dyn std::any::Any>> {
|
||||
let result = self.get(id)?;
|
||||
Ok(Box::new(result))
|
||||
}
|
||||
|
||||
fn list_any(&self) -> SledDBResult<Box<dyn std::any::Any>> {
|
||||
let result = self.list()?;
|
||||
Ok(Box::new(result))
|
||||
}
|
||||
|
||||
fn insert_any(&self, model: &dyn std::any::Any) -> SledDBResult<()> {
|
||||
// Downcast to the specific type T
|
||||
match model.downcast_ref::<T>() {
|
||||
Some(t) => self.insert(t),
|
||||
None => Err(SledDBError::TypeError),
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_any_raw(&self, serialized: &[u8]) -> SledDBResult<()> {
|
||||
// Deserialize the bytes into model of type T
|
||||
let model: T = bincode::deserialize(serialized)?;
|
||||
// Use the regular insert method
|
||||
self.insert(&model)
|
||||
}
|
||||
}
|
||||
|
||||
/// Transaction state for DB operations
|
||||
pub struct TransactionState {
|
||||
operations: Vec<DbOperation>,
|
||||
@ -77,16 +37,13 @@ impl TransactionState {
|
||||
}
|
||||
}
|
||||
|
||||
/// Main DB manager that automatically handles all root models
|
||||
/// Main DB manager that automatically handles all models
|
||||
#[derive(Clone, CustomType)]
|
||||
pub struct DB {
|
||||
db_path: PathBuf,
|
||||
|
||||
// Type map for generic operations
|
||||
type_map: HashMap<TypeId, Arc<dyn AnyDbOperations>>,
|
||||
|
||||
// Locks to ensure thread safety for key areas
|
||||
_write_locks: Arc<Mutex<HashMap<String, bool>>>,
|
||||
type_map: HashMap<TypeId, Arc<dyn DbOperations>>,
|
||||
|
||||
// Transaction state
|
||||
transaction: Arc<RwLock<Option<TransactionState>>>,
|
||||
@ -101,15 +58,15 @@ pub struct DBBuilder {
|
||||
|
||||
/// Trait for model registration
|
||||
pub trait ModelRegistration: Send + Sync {
|
||||
fn register(&self, path: &Path) -> SledDBResult<(TypeId, Box<dyn AnyDbOperations>)>;
|
||||
fn register(&self, path: &Path) -> DbResult<(TypeId, Box<dyn DbOperations>)>;
|
||||
}
|
||||
|
||||
/// Implementation of ModelRegistration for any SledModel type
|
||||
pub struct SledModelRegistration<T: SledModel> {
|
||||
/// Implementation of ModelRegistration for any Model type
|
||||
pub struct ModelRegistrar<T: Model> {
|
||||
phantom: std::marker::PhantomData<T>,
|
||||
}
|
||||
|
||||
impl<T: SledModel> SledModelRegistration<T> {
|
||||
impl<T: Model> ModelRegistrar<T> {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
phantom: std::marker::PhantomData,
|
||||
@ -117,10 +74,10 @@ impl<T: SledModel> SledModelRegistration<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SledModel> ModelRegistration for SledModelRegistration<T> {
|
||||
fn register(&self, path: &Path) -> SledDBResult<(TypeId, Box<dyn AnyDbOperations>)> {
|
||||
let db: SledDB<T> = SledDB::open(path.join(T::db_prefix()))?;
|
||||
Ok((TypeId::of::<T>(), Box::new(db) as Box<dyn AnyDbOperations>))
|
||||
impl<T: Model> ModelRegistration for ModelRegistrar<T> {
|
||||
fn register(&self, path: &Path) -> DbResult<(TypeId, Box<dyn DbOperations>)> {
|
||||
let store = OurDbStore::<T>::open(path.join(T::db_prefix()))?;
|
||||
Ok((TypeId::of::<T>(), Box::new(store) as Box<dyn DbOperations>))
|
||||
}
|
||||
}
|
||||
|
||||
@ -141,9 +98,9 @@ impl DBBuilder {
|
||||
}
|
||||
|
||||
/// Register a model type with the DB
|
||||
pub fn register_model<T: SledModel>(mut self) -> Self {
|
||||
pub fn register_model<T: Model>(mut self) -> Self {
|
||||
self.model_registrations
|
||||
.push(Arc::new(SledModelRegistration::<T>::new()));
|
||||
.push(Arc::new(ModelRegistrar::<T>::new()));
|
||||
self
|
||||
}
|
||||
|
||||
@ -159,22 +116,20 @@ impl DBBuilder {
|
||||
}
|
||||
|
||||
// Register all models
|
||||
let mut type_map: HashMap<TypeId, Arc<dyn AnyDbOperations>> = HashMap::new();
|
||||
let mut type_map: HashMap<TypeId, Arc<dyn DbOperations>> = HashMap::new();
|
||||
|
||||
for registration in self.model_registrations {
|
||||
let (type_id, db) = registration.register(&base_path).map_err(|e| {
|
||||
let (type_id, store) = registration.register(&base_path).map_err(|e| {
|
||||
EvalAltResult::ErrorSystem("Could not register type".to_string(), Box::new(e))
|
||||
})?;
|
||||
type_map.insert(type_id, db.into());
|
||||
type_map.insert(type_id, store.into());
|
||||
}
|
||||
|
||||
let _write_locks = Arc::new(Mutex::new(HashMap::new()));
|
||||
let transaction = Arc::new(RwLock::new(None));
|
||||
|
||||
Ok(DB {
|
||||
db_path: base_path,
|
||||
type_map,
|
||||
_write_locks,
|
||||
transaction,
|
||||
})
|
||||
}
|
||||
@ -182,7 +137,7 @@ impl DBBuilder {
|
||||
|
||||
impl DB {
|
||||
/// Create a new empty DB instance without any models
|
||||
pub fn new<P: Into<PathBuf>>(base_path: P) -> SledDBResult<Self> {
|
||||
pub fn new<P: Into<PathBuf>>(base_path: P) -> DbResult<Self> {
|
||||
let base_path = base_path.into();
|
||||
|
||||
// Ensure base directory exists
|
||||
@ -190,13 +145,11 @@ impl DB {
|
||||
std::fs::create_dir_all(&base_path)?;
|
||||
}
|
||||
|
||||
let _write_locks = Arc::new(Mutex::new(HashMap::new()));
|
||||
let transaction = Arc::new(RwLock::new(None));
|
||||
|
||||
Ok(Self {
|
||||
db_path: base_path,
|
||||
type_map: HashMap::new(),
|
||||
_write_locks,
|
||||
transaction,
|
||||
})
|
||||
}
|
||||
@ -204,10 +157,10 @@ impl DB {
|
||||
// Transaction-related methods
|
||||
|
||||
/// Begin a new transaction
|
||||
pub fn begin_transaction(&self) -> SledDBResult<()> {
|
||||
pub fn begin_transaction(&self) -> DbResult<()> {
|
||||
let mut tx = self.transaction.write().unwrap();
|
||||
if tx.is_some() {
|
||||
return Err(SledDBError::GeneralError(
|
||||
return Err(DbError::TransactionError(
|
||||
"Transaction already in progress".into(),
|
||||
));
|
||||
}
|
||||
@ -222,26 +175,26 @@ impl DB {
|
||||
}
|
||||
|
||||
/// Apply a set operation with the serialized data - bypass transaction check
|
||||
fn apply_set_operation(&self, model_type: TypeId, serialized: &[u8]) -> SledDBResult<()> {
|
||||
fn apply_set_operation(&self, model_type: TypeId, serialized: &[u8]) -> DbResult<()> {
|
||||
// Get the database operations for this model type
|
||||
if let Some(db_ops) = self.type_map.get(&model_type) {
|
||||
// Just pass the raw serialized data to a special raw insert method
|
||||
return db_ops.insert_any_raw(serialized);
|
||||
return db_ops.insert_raw(serialized);
|
||||
}
|
||||
|
||||
Err(SledDBError::GeneralError(format!(
|
||||
Err(DbError::GeneralError(format!(
|
||||
"No DB registered for type ID {:?}",
|
||||
model_type
|
||||
)))
|
||||
}
|
||||
|
||||
/// Commit the current transaction, applying all operations
|
||||
pub fn commit_transaction(&self) -> SledDBResult<()> {
|
||||
pub fn commit_transaction(&self) -> DbResult<()> {
|
||||
let mut tx_guard = self.transaction.write().unwrap();
|
||||
|
||||
if let Some(tx_state) = tx_guard.take() {
|
||||
if !tx_state.active {
|
||||
return Err(SledDBError::GeneralError("Transaction not active".into()));
|
||||
return Err(DbError::TransactionError("Transaction not active".into()));
|
||||
}
|
||||
|
||||
// Execute all operations in the transaction
|
||||
@ -257,23 +210,23 @@ impl DB {
|
||||
let db_ops = self
|
||||
.type_map
|
||||
.get(&model_type)
|
||||
.ok_or_else(|| SledDBError::TypeError)?;
|
||||
db_ops.delete(&id)?;
|
||||
.ok_or_else(|| DbError::TypeError)?;
|
||||
db_ops.delete(id)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
} else {
|
||||
Err(SledDBError::GeneralError("No active transaction".into()))
|
||||
Err(DbError::TransactionError("No active transaction".into()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Rollback the current transaction, discarding all operations
|
||||
pub fn rollback_transaction(&self) -> SledDBResult<()> {
|
||||
pub fn rollback_transaction(&self) -> DbResult<()> {
|
||||
let mut tx = self.transaction.write().unwrap();
|
||||
if tx.is_none() {
|
||||
return Err(SledDBError::GeneralError("No active transaction".into()));
|
||||
return Err(DbError::TransactionError("No active transaction".into()));
|
||||
}
|
||||
*tx = None;
|
||||
Ok(())
|
||||
@ -287,7 +240,7 @@ impl DB {
|
||||
// Generic methods that work with any supported model type
|
||||
|
||||
/// Insert a model instance into its appropriate database based on type
|
||||
pub fn set<T: SledModel>(&self, model: &T) -> SledDBResult<()> {
|
||||
pub fn set<T: Model>(&self, model: &T) -> DbResult<()> {
|
||||
// Try to acquire a write lock on the transaction
|
||||
let mut tx_guard = self.transaction.write().unwrap();
|
||||
|
||||
@ -295,7 +248,7 @@ impl DB {
|
||||
if let Some(tx_state) = tx_guard.as_mut() {
|
||||
if tx_state.active {
|
||||
// Serialize the model for later use
|
||||
let serialized = bincode::serialize(model)?;
|
||||
let serialized = model.serialize()?;
|
||||
|
||||
// Record a Set operation in the transaction
|
||||
tx_state.operations.push(DbOperation::Set {
|
||||
@ -313,13 +266,13 @@ impl DB {
|
||||
|
||||
// Execute directly
|
||||
match self.type_map.get(&TypeId::of::<T>()) {
|
||||
Some(db_ops) => db_ops.insert_any(model),
|
||||
None => Err(SledDBError::TypeError),
|
||||
Some(db_ops) => db_ops.insert(model),
|
||||
None => Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
|
||||
/// Check the transaction state for the given type and id
|
||||
fn check_transaction<T: SledModel>(&self, id: &str) -> Option<Result<Option<T>, SledDBError>> {
|
||||
fn check_transaction<T: Model>(&self, id: u32) -> Option<Result<Option<T>, DbError>> {
|
||||
// Try to acquire a read lock on the transaction
|
||||
let tx_guard = self.transaction.read().unwrap();
|
||||
|
||||
@ -329,7 +282,6 @@ impl DB {
|
||||
}
|
||||
|
||||
let type_id = TypeId::of::<T>();
|
||||
let id_str = id.to_string();
|
||||
|
||||
// Process operations in reverse order (last operation wins)
|
||||
for op in tx_state.operations.iter().rev() {
|
||||
@ -339,9 +291,9 @@ impl DB {
|
||||
model_type,
|
||||
id: op_id,
|
||||
} => {
|
||||
if *model_type == type_id && op_id == id {
|
||||
if *model_type == type_id && *op_id == id {
|
||||
// Return NotFound error for deleted records
|
||||
return Some(Err(SledDBError::NotFound(id.to_string())));
|
||||
return Some(Err(DbError::NotFound(id)));
|
||||
}
|
||||
}
|
||||
// Then check if it has been set in the transaction
|
||||
@ -351,9 +303,9 @@ impl DB {
|
||||
} => {
|
||||
if *model_type == type_id {
|
||||
// Try to deserialize and check the ID
|
||||
match bincode::deserialize::<T>(serialized) {
|
||||
match T::deserialize(serialized) {
|
||||
Ok(model) => {
|
||||
if model.get_id() == id_str {
|
||||
if model.get_id() == id {
|
||||
return Some(Ok(Some(model)));
|
||||
}
|
||||
}
|
||||
@ -370,7 +322,7 @@ impl DB {
|
||||
}
|
||||
|
||||
/// Get a model instance by its ID and type
|
||||
pub fn get<T: SledModel>(&self, id: &str) -> SledDBResult<T> {
|
||||
pub fn get<T: Model>(&self, id: u32) -> DbResult<T> {
|
||||
// First check if there's a pending value in the current transaction
|
||||
if let Some(tx_result) = self.check_transaction::<T>(id) {
|
||||
match tx_result {
|
||||
@ -383,19 +335,19 @@ impl DB {
|
||||
// If no pending value, look up from the database
|
||||
match self.type_map.get(&TypeId::of::<T>()) {
|
||||
Some(db_ops) => {
|
||||
let result_any = db_ops.get_any(id)?;
|
||||
let result_any = db_ops.get(id)?;
|
||||
// We expect the result to be of type T since we looked it up by TypeId
|
||||
match result_any.downcast::<T>() {
|
||||
Ok(t) => Ok(*t),
|
||||
Err(_) => Err(SledDBError::TypeError),
|
||||
Err(_) => Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
None => Err(SledDBError::TypeError),
|
||||
None => Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
|
||||
/// Delete a model instance by its ID and type
|
||||
pub fn delete<T: SledModel>(&self, id: &str) -> SledDBResult<()> {
|
||||
pub fn delete<T: Model>(&self, id: u32) -> DbResult<()> {
|
||||
// Try to acquire a write lock on the transaction
|
||||
let mut tx_guard = self.transaction.write().unwrap();
|
||||
|
||||
@ -405,7 +357,7 @@ impl DB {
|
||||
// Record a Delete operation in the transaction
|
||||
tx_state.operations.push(DbOperation::Delete {
|
||||
model_type: TypeId::of::<T>(),
|
||||
id: id.to_string(),
|
||||
id,
|
||||
});
|
||||
|
||||
return Ok(());
|
||||
@ -419,42 +371,51 @@ impl DB {
|
||||
// Execute directly
|
||||
match self.type_map.get(&TypeId::of::<T>()) {
|
||||
Some(db_ops) => db_ops.delete(id),
|
||||
None => Err(SledDBError::TypeError),
|
||||
None => Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
|
||||
/// List all model instances of a specific type
|
||||
pub fn list<T: SledModel>(&self) -> SledDBResult<Vec<T>> {
|
||||
pub fn list<T: Model>(&self) -> DbResult<Vec<T>> {
|
||||
// Look up the correct DB operations for type T in our type map
|
||||
match self.type_map.get(&TypeId::of::<T>()) {
|
||||
Some(db_ops) => {
|
||||
let result_any = db_ops.list_any()?;
|
||||
let result_any = db_ops.list()?;
|
||||
// We expect the result to be of type Vec<T> since we looked it up by TypeId
|
||||
match result_any.downcast::<Vec<T>>() {
|
||||
Ok(vec_t) => Ok(*vec_t),
|
||||
Err(_) => Err(SledDBError::TypeError),
|
||||
Err(_) => Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
None => Err(SledDBError::TypeError),
|
||||
None => Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the history of a model by its ID
|
||||
pub fn get_history<T: Model>(&self, id: u32, depth: u8) -> DbResult<Vec<T>> {
|
||||
// Look up the correct DB operations for type T in our type map
|
||||
match self.type_map.get(&TypeId::of::<T>()) {
|
||||
Some(db_ops) => {
|
||||
let result_any = db_ops.get_history(id, depth)?;
|
||||
let mut result = Vec::with_capacity(result_any.len());
|
||||
|
||||
for item in result_any {
|
||||
match item.downcast::<T>() {
|
||||
Ok(t) => result.push(*t),
|
||||
Err(_) => return Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
None => Err(DbError::TypeError),
|
||||
}
|
||||
}
|
||||
|
||||
// Register a model type with this DB instance
|
||||
pub fn register<T: SledModel>(&mut self) -> SledDBResult<()> {
|
||||
let db_path = self.db_path.join(T::db_prefix());
|
||||
let db: SledDB<T> = SledDB::open(db_path)?;
|
||||
self.type_map.insert(TypeId::of::<T>(), Arc::new(db));
|
||||
pub fn register<T: Model>(&mut self) -> DbResult<()> {
|
||||
let store = OurDbStore::<T>::open(&self.db_path)?;
|
||||
self.type_map.insert(TypeId::of::<T>(), Arc::new(store));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Get a typed handle to a registered model DB
|
||||
pub fn db_for<T: SledModel>(&self) -> SledDBResult<&dyn AnyDbOperations> {
|
||||
match self.type_map.get(&TypeId::of::<T>()) {
|
||||
Some(db) => Ok(&**db),
|
||||
None => Err(SledDBError::GeneralError(format!(
|
||||
"No DB registered for type {}",
|
||||
std::any::type_name::<T>()
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
30  herodb/src/db/error.rs  Normal file
@@ -0,0 +1,30 @@
use thiserror::Error;
use std::fmt::Debug;

/// Errors that can occur during database operations
#[derive(Error, Debug)]
pub enum DbError {
    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("Serialization/Deserialization error: {0}")]
    SerializationError(#[from] bincode::Error),

    #[error("Record not found for ID: {0}")]
    NotFound(u32),

    #[error("Type mismatch during deserialization")]
    TypeError,

    #[error("Transaction error: {0}")]
    TransactionError(String),

    #[error("OurDB error: {0}")]
    OurDbError(#[from] ourdb::Error),

    #[error("General database error: {0}")]
    GeneralError(String),
}

/// Result type for DB operations
pub type DbResult<T> = Result<T, DbError>;
@ -1,4 +1,4 @@
|
||||
/// Macro to implement typed access methods on the DB struct for a given model
|
||||
/// Macro to implement typed access methods on the DB struct for a given model
|
||||
#[macro_export]
|
||||
macro_rules! impl_model_methods {
|
||||
($model:ty, $singular:ident, $plural:ident) => {
|
||||
@ -12,19 +12,24 @@ macro_rules! impl_model_methods {
|
||||
}
|
||||
|
||||
/// Get a model instance by its ID
|
||||
pub fn [<get_ $singular>](&mut self, id: i64) -> SledDBResult<$model> {
|
||||
self.get::<$model>(&id.to_string())
|
||||
pub fn [<get_ $singular>](&mut self, id: u32) -> DbResult<$model> {
|
||||
self.get::<$model>(id)
|
||||
}
|
||||
|
||||
/// Delete a model instance by its ID
|
||||
pub fn [<delete_ $singular>](&mut self, id: i64) -> SledDBResult<()> {
|
||||
self.delete::<$model>(&id.to_string())
|
||||
pub fn [<delete_ $singular>](&mut self, id: u32) -> DbResult<()> {
|
||||
self.delete::<$model>(id)
|
||||
}
|
||||
|
||||
/// List all model instances
|
||||
pub fn [<list_ $plural>](&mut self) -> SledDBResult<Vec<$model>> {
|
||||
pub fn [<list_ $plural>](&mut self) -> DbResult<Vec<$model>> {
|
||||
self.list::<$model>()
|
||||
}
|
||||
|
||||
/// Get history of a model instance
|
||||
pub fn [<get_ $singular _history>](&mut self, id: u32, depth: u8) -> DbResult<Vec<$model>> {
|
||||
self.get_history::<$model>(id, depth)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -1,7 +1,18 @@
|
||||
pub mod base;
|
||||
pub mod db;
|
||||
pub mod macros;
|
||||
pub mod model_methods;
|
||||
// Export the error module
|
||||
pub mod error;
|
||||
pub use error::{DbError, DbResult};
|
||||
|
||||
pub use base::{SledDB, SledDBError, SledDBResult, Storable, SledModel};
|
||||
pub use db::{DB, DBBuilder};
|
||||
// Export the model module
|
||||
pub mod model;
|
||||
pub use model::{Model, Storable};
|
||||
|
||||
// Export the store module
|
||||
pub mod store;
|
||||
pub use store::{DbOperations, OurDbStore};
|
||||
|
||||
// Export the db module
|
||||
pub mod db;
|
||||
pub use db::{DB, DBBuilder, ModelRegistration, ModelRegistrar};
|
||||
|
||||
// Export macros for model methods
|
||||
pub mod macros;
|
||||
|
30  herodb/src/db/model.rs  Normal file
@@ -0,0 +1,30 @@
use crate::db::error::{DbError, DbResult};
use serde::{Deserialize, Serialize};
use std::fmt::Debug;

/// Trait for models that can be serialized and deserialized
pub trait Storable: Serialize + for<'de> Deserialize<'de> + Sized {
    /// Serializes the instance using bincode
    fn serialize(&self) -> DbResult<Vec<u8>> {
        bincode::serialize(self).map_err(DbError::SerializationError)
    }

    /// Deserializes data from bytes into an instance
    fn deserialize(data: &[u8]) -> DbResult<Self> {
        bincode::deserialize(data).map_err(DbError::SerializationError)
    }
}

/// Trait identifying a model suitable for the database
/// The 'static lifetime bound is required for type identification via Any
pub trait Model: Storable + Debug + Clone + Send + Sync + 'static {
    /// Returns the unique ID for this model instance
    fn get_id(&self) -> u32;

    /// Returns a prefix used for this model type in the database
    /// Helps to logically separate different model types
    fn db_prefix() -> &'static str;
}

// Implement Storable for common types that might be used in models
impl<T: Serialize + for<'de> Deserialize<'de> + Sized> Storable for T {}
@ -1,5 +1,5 @@
|
||||
use crate::db::db::DB;
|
||||
use crate::db::base::{SledDBResult, SledModel};
|
||||
use crate::db::model::Model;
|
||||
use crate::impl_model_methods;
|
||||
use crate::models::biz::{Product, Sale, Currency, ExchangeRate, Service, Customer, Contract, Invoice};
|
||||
|
||||
|
152  herodb/src/db/store.rs  Normal file
@@ -0,0 +1,152 @@
use crate::db::error::{DbError, DbResult};
use crate::db::model::Model;
use ourdb::{OurDB, OurDBConfig, OurDBSetArgs};
use std::marker::PhantomData;
use std::path::{Path, PathBuf};
use std::any::Any;

/// Trait for type-erased database operations
pub trait DbOperations: Send + Sync {
    fn delete(&self, id: u32) -> DbResult<()>;
    fn get(&self, id: u32) -> DbResult<Box<dyn Any>>;
    fn list(&self) -> DbResult<Box<dyn Any>>;
    fn insert(&self, model: &dyn Any) -> DbResult<()>;
    fn insert_raw(&self, serialized: &[u8]) -> DbResult<()>;
    fn get_history(&self, id: u32, depth: u8) -> DbResult<Vec<Box<dyn Any>>>;
}

/// A store implementation using OurDB as the backend
pub struct OurDbStore<T: Model> {
    db: OurDB,
    path: PathBuf,
    _phantom: PhantomData<T>,
}

impl<T: Model> OurDbStore<T> {
    /// Opens or creates an OurDB database at the specified path
    pub fn open<P: AsRef<Path>>(path: P) -> DbResult<Self> {
        let path_buf = path.as_ref().to_path_buf();
        let db_path = path_buf.join(T::db_prefix());

        // Create directory if it doesn't exist
        std::fs::create_dir_all(&db_path).map_err(DbError::IoError)?;

        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true, // Always use incremental mode for auto IDs
            file_size: None,        // Use default (500MB)
            keysize: None,          // Use default (4 bytes)
            reset: None,            // Don't reset existing database
        };

        let db = OurDB::new(config).map_err(DbError::OurDbError)?;

        Ok(Self {
            db,
            path: db_path,
            _phantom: PhantomData,
        })
    }

    /// Inserts or updates a model instance in the database
    pub fn insert(&self, model: &T) -> DbResult<()> {
        let id = model.get_id();
        let data = model.serialize()?;

        self.db.set(OurDBSetArgs {
            id: Some(id),
            data: &data,
        }).map_err(DbError::OurDbError)?;

        Ok(())
    }

    /// Retrieves a model instance by its ID
    pub fn get(&self, id: u32) -> DbResult<T> {
        let data = self.db.get(id).map_err(|e| {
            match e {
                ourdb::Error::NotFound(_) => DbError::NotFound(id),
                _ => DbError::OurDbError(e),
            }
        })?;

        T::deserialize(&data)
    }

    /// Deletes a model instance by its ID
    pub fn delete(&self, id: u32) -> DbResult<()> {
        self.db.delete(id).map_err(|e| {
            match e {
                ourdb::Error::NotFound(_) => DbError::NotFound(id),
                _ => DbError::OurDbError(e),
            }
        })
    }

    /// Lists all models of this type
    pub fn list(&self) -> DbResult<Vec<T>> {
        // OurDB doesn't have a built-in list function, so we need to implement it
        // This is a placeholder - in a real implementation, we would need to
        // maintain a list of all IDs for each model type
        Err(DbError::GeneralError("List operation not implemented yet".to_string()))
    }

    /// Gets the history of a model by its ID
    pub fn get_history(&self, id: u32, depth: u8) -> DbResult<Vec<T>> {
        let history_data = self.db.get_history(id, depth).map_err(|e| {
            match e {
                ourdb::Error::NotFound(_) => DbError::NotFound(id),
                _ => DbError::OurDbError(e),
            }
        })?;

        let mut result = Vec::with_capacity(history_data.len());
        for data in history_data {
            result.push(T::deserialize(&data)?);
        }

        Ok(result)
    }
}

impl<T: Model> DbOperations for OurDbStore<T> {
    fn delete(&self, id: u32) -> DbResult<()> {
        self.delete(id)
    }

    fn get(&self, id: u32) -> DbResult<Box<dyn Any>> {
        let result = self.get(id)?;
        Ok(Box::new(result))
    }

    fn list(&self) -> DbResult<Box<dyn Any>> {
        let result = self.list()?;
        Ok(Box::new(result))
    }

    fn insert(&self, model: &dyn Any) -> DbResult<()> {
        // Downcast to the specific type T
        match model.downcast_ref::<T>() {
            Some(t) => self.insert(t),
            None => Err(DbError::TypeError),
        }
    }

    fn insert_raw(&self, serialized: &[u8]) -> DbResult<()> {
        // Deserialize the bytes into model of type T
        let model = T::deserialize(serialized)?;
        // Use the regular insert method
        self.insert(&model)
    }

    fn get_history(&self, id: u32, depth: u8) -> DbResult<Vec<Box<dyn Any>>> {
        let history = self.get_history(id, depth)?;
        let mut result = Vec::with_capacity(history.len());

        for item in history {
            result.push(Box::new(item) as Box<dyn Any>);
        }

        Ok(result)
    }
}
@ -1,35 +1,22 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Error types for HeroDB operations
|
||||
/// Error type for HeroDB operations
|
||||
#[derive(Error, Debug)]
|
||||
pub enum Error {
|
||||
/// Error from the underlying sled database
|
||||
#[error("Database error: {0}")]
|
||||
Database(#[from] sled::Error),
|
||||
DbError(#[from] crate::db::error::DbError),
|
||||
|
||||
#[error("I/O error: {0}")]
|
||||
IoError(#[from] std::io::Error),
|
||||
|
||||
/// Error during serialization or deserialization
|
||||
#[error("Serialization error: {0}")]
|
||||
Serialization(#[from] serde_json::Error),
|
||||
SerializationError(#[from] bincode::Error),
|
||||
|
||||
/// Error when a requested item is not found
|
||||
#[error("Item not found: {0}")]
|
||||
NotFound(String),
|
||||
#[error("OurDB error: {0}")]
|
||||
OurDbError(#[from] ourdb::Error),
|
||||
|
||||
/// Error when an item already exists
|
||||
#[error("Item already exists: {0}")]
|
||||
AlreadyExists(String),
|
||||
|
||||
/// Error when a model validation fails
|
||||
#[error("Validation error: {0}")]
|
||||
Validation(String),
|
||||
|
||||
/// Error when a transaction fails
|
||||
#[error("Transaction error: {0}")]
|
||||
Transaction(String),
|
||||
|
||||
/// Other errors
|
||||
#[error("Other error: {0}")]
|
||||
Other(String),
|
||||
#[error("General error: {0}")]
|
||||
GeneralError(String),
|
||||
}
|
||||
|
||||
/// Result type for HeroDB operations
|
||||
|
0  herodb/src/instructions.md  Normal file
@ -1,6 +1,6 @@
|
||||
//! HeroDB: A database library built on top of sled with model support
|
||||
//! HeroDB: A database library built on top of ourdb with model support
|
||||
//!
|
||||
//! This library provides a simple interface for working with a sled-based database
|
||||
//! This library provides a simple interface for working with an ourdb-based database
|
||||
//! and includes support for defining and working with data models.
|
||||
|
||||
// Core modules
|
||||
@ -13,6 +13,7 @@ pub mod cmd;
|
||||
|
||||
// Re-exports
|
||||
pub use error::Error;
|
||||
pub use db::{DB, DBBuilder, Model, Storable, DbError, DbResult};
|
||||
|
||||
/// Re-export sled for advanced usage
|
||||
pub use sled;
|
||||
/// Re-export ourdb for advanced usage
|
||||
pub use ourdb;
|
||||
|
@ -1,19 +1,21 @@
|
||||
use crate::db::base::{SledModel, Storable};
|
||||
use crate::db::model::{Model, Storable};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use rhai::{CustomType, EvalAltResult, TypeBuilder};
|
||||
use serde::{Deserialize, Serialize}; // Import Sled traits from db module
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Currency represents a monetary value with amount and currency code
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, CustomType)]
|
||||
pub struct Currency {
|
||||
pub id: u32,
|
||||
pub amount: f64,
|
||||
pub currency_code: String,
|
||||
}
|
||||
|
||||
impl Currency {
|
||||
/// Create a new currency with amount and code
|
||||
pub fn new(amount: f64, currency_code: String) -> Self {
|
||||
pub fn new(id: u32, amount: f64, currency_code: String) -> Self {
|
||||
Self {
|
||||
id,
|
||||
amount,
|
||||
currency_code,
|
||||
}
|
||||
@ -27,6 +29,7 @@ impl Currency {
|
||||
/// Builder for Currency
|
||||
#[derive(Clone, CustomType)]
|
||||
pub struct CurrencyBuilder {
|
||||
id: Option<u32>,
|
||||
amount: Option<f64>,
|
||||
currency_code: Option<String>,
|
||||
}
|
||||
@ -35,11 +38,18 @@ impl CurrencyBuilder {
|
||||
/// Create a new CurrencyBuilder with all fields set to None
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
id: None,
|
||||
amount: None,
|
||||
currency_code: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the id
|
||||
pub fn id(mut self, id: u32) -> Self {
|
||||
self.id = Some(id);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the amount
|
||||
pub fn amount(mut self, amount: f64) -> Self {
|
||||
self.amount = Some(amount);
|
||||
@ -55,20 +65,17 @@ impl CurrencyBuilder {
|
||||
/// Build the Currency object
|
||||
pub fn build(self) -> Result<Currency, Box<EvalAltResult>> {
|
||||
Ok(Currency {
|
||||
id: self.id.ok_or("id is required")?,
|
||||
amount: self.amount.ok_or("amount is required")?,
|
||||
currency_code: self.currency_code.ok_or("currency_code is required")?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Implement Storable trait (provides default dump/load)
|
||||
impl Storable for Currency {}
|
||||
|
||||
// Implement SledModel trait
|
||||
impl SledModel for Currency {
|
||||
fn get_id(&self) -> String {
|
||||
// Use the currency code as the ID
|
||||
self.currency_code.clone()
|
||||
// Implement Model trait
|
||||
impl Model for Currency {
|
||||
fn get_id(&self) -> u32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn db_prefix() -> &'static str {
|
||||
|
@ -2,11 +2,12 @@ use std::collections::HashMap;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use crate::db::base::{SledModel, Storable};
|
||||
use crate::db::model::{Model, Storable};
|
||||
|
||||
/// ExchangeRate represents an exchange rate between two currencies
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ExchangeRate {
|
||||
pub id: u32,
|
||||
pub base_currency: String,
|
||||
pub target_currency: String,
|
||||
pub rate: f64,
|
||||
@ -15,8 +16,9 @@ pub struct ExchangeRate {
|
||||
|
||||
impl ExchangeRate {
|
||||
/// Create a new exchange rate
|
||||
pub fn new(base_currency: String, target_currency: String, rate: f64) -> Self {
|
||||
pub fn new(id: u32, base_currency: String, target_currency: String, rate: f64) -> Self {
|
||||
Self {
|
||||
id,
|
||||
base_currency,
|
||||
target_currency,
|
||||
rate,
|
||||
@ -27,6 +29,7 @@ impl ExchangeRate {
|
||||
|
||||
/// Builder for ExchangeRate
|
||||
pub struct ExchangeRateBuilder {
|
||||
id: Option<u32>,
|
||||
base_currency: Option<String>,
|
||||
target_currency: Option<String>,
|
||||
rate: Option<f64>,
|
||||
@ -37,6 +40,7 @@ impl ExchangeRateBuilder {
|
||||
/// Create a new ExchangeRateBuilder with all fields set to None
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
id: None,
|
||||
base_currency: None,
|
||||
target_currency: None,
|
||||
rate: None,
|
||||
@ -44,6 +48,12 @@ impl ExchangeRateBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the id
|
||||
pub fn id(mut self, id: u32) -> Self {
|
||||
self.id = Some(id);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the base currency
|
||||
pub fn base_currency<S: Into<String>>(mut self, base_currency: S) -> Self {
|
||||
self.base_currency = Some(base_currency.into());
|
||||
@ -72,6 +82,7 @@ impl ExchangeRateBuilder {
|
||||
pub fn build(self) -> Result<ExchangeRate, &'static str> {
|
||||
let now = Utc::now();
|
||||
Ok(ExchangeRate {
|
||||
id: self.id.ok_or("id is required")?,
|
||||
base_currency: self.base_currency.ok_or("base_currency is required")?,
|
||||
target_currency: self.target_currency.ok_or("target_currency is required")?,
|
||||
rate: self.rate.ok_or("rate is required")?,
|
||||
@ -80,13 +91,10 @@ impl ExchangeRateBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
// Implement Storable trait (provides default dump/load)
|
||||
impl Storable for ExchangeRate {}
|
||||
|
||||
// Implement SledModel trait
|
||||
impl SledModel for ExchangeRate {
|
||||
fn get_id(&self) -> String {
|
||||
format!("{}_{}", self.base_currency, self.target_currency)
|
||||
// Implement Model trait
|
||||
impl Model for ExchangeRate {
|
||||
fn get_id(&self) -> u32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn db_prefix() -> &'static str {
|
||||
@ -156,11 +164,11 @@ lazy_static::lazy_static! {
|
||||
let service = ExchangeRateService::new();
|
||||
|
||||
// Set some default exchange rates
|
||||
service.set_rate(ExchangeRate::new("USD".to_string(), "EUR".to_string(), 0.85));
|
||||
service.set_rate(ExchangeRate::new("USD".to_string(), "GBP".to_string(), 0.75));
|
||||
service.set_rate(ExchangeRate::new("USD".to_string(), "JPY".to_string(), 110.0));
|
||||
service.set_rate(ExchangeRate::new("USD".to_string(), "CAD".to_string(), 1.25));
|
||||
service.set_rate(ExchangeRate::new("USD".to_string(), "AUD".to_string(), 1.35));
|
||||
service.set_rate(ExchangeRate::new(1, "USD".to_string(), "EUR".to_string(), 0.85));
|
||||
service.set_rate(ExchangeRate::new(2, "USD".to_string(), "GBP".to_string(), 0.75));
|
||||
service.set_rate(ExchangeRate::new(3, "USD".to_string(), "JPY".to_string(), 110.0));
|
||||
service.set_rate(ExchangeRate::new(4, "USD".to_string(), "CAD".to_string(), 1.25));
|
||||
service.set_rate(ExchangeRate::new(5, "USD".to_string(), "AUD".to_string(), 1.35));
|
||||
|
||||
service
|
||||
};
|
||||
|
@ -1,7 +1,7 @@
|
||||
use crate::db::base::{SledModel, Storable};
|
||||
use crate::db::model::{Model, Storable};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use rhai::{CustomType, EvalAltResult, TypeBuilder, export_module};
|
||||
use serde::{Deserialize, Serialize}; // Import Sled traits from db module
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// ProductType represents the type of a product
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
@ -20,7 +20,7 @@ pub enum ProductStatus {
|
||||
/// ProductComponent represents a component of a product
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProductComponent {
|
||||
pub id: i64,
|
||||
pub id: u32,
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub quantity: i64,
|
||||
@ -30,7 +30,7 @@ pub struct ProductComponent {
|
||||
|
||||
impl ProductComponent {
|
||||
/// Create a new product component with default timestamps
|
||||
pub fn new(id: i64, name: String, description: String, quantity: i64) -> Self {
|
||||
pub fn new(id: u32, name: String, description: String, quantity: i64) -> Self {
|
||||
let now = Utc::now();
|
||||
Self {
|
||||
id,
|
||||
@ -46,7 +46,7 @@ impl ProductComponent {
|
||||
/// Builder for ProductComponent
|
||||
#[derive(Clone, CustomType)]
|
||||
pub struct ProductComponentBuilder {
|
||||
id: Option<i64>,
|
||||
id: Option<u32>,
|
||||
name: Option<String>,
|
||||
description: Option<String>,
|
||||
quantity: Option<i64>,
|
||||
@ -68,7 +68,7 @@ impl ProductComponentBuilder {
|
||||
}
|
||||
|
||||
/// Set the id
|
||||
pub fn id(mut self, id: i64) -> Self {
|
||||
pub fn id(mut self, id: u32) -> Self {
|
||||
self.id = Some(id);
|
||||
self
|
||||
}
|
||||
@ -120,7 +120,7 @@ impl ProductComponentBuilder {
|
||||
/// Product represents a product or service offered in the system
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Product {
|
||||
pub id: i64,
|
||||
pub id: u32,
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub price: Currency,
|
||||
@ -135,12 +135,10 @@ pub struct Product {
|
||||
pub components: Vec<ProductComponent>,
|
||||
}
|
||||
|
||||
// Removed old Model trait implementation
|
||||
|
||||
impl Product {
|
||||
/// Create a new product with default timestamps
|
||||
pub fn new(
|
||||
id: i64,
|
||||
id: u32,
|
||||
name: String,
|
||||
description: String,
|
||||
price: Currency,
|
||||
@ -201,7 +199,7 @@ impl Product {
|
||||
/// Builder for Product
|
||||
#[derive(Clone, CustomType)]
|
||||
pub struct ProductBuilder {
|
||||
id: Option<i64>,
|
||||
id: Option<u32>,
|
||||
name: Option<String>,
|
||||
description: Option<String>,
|
||||
price: Option<Currency>,
|
||||
@ -239,7 +237,7 @@ impl ProductBuilder {
|
||||
}
|
||||
|
||||
/// Set the id
|
||||
pub fn id(mut self, id: i64) -> Self {
|
||||
pub fn id(mut self, id: u32) -> Self {
|
||||
self.id = Some(id);
|
||||
self
|
||||
}
|
||||
@ -344,13 +342,10 @@ impl ProductBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
// Implement Storable trait (provides default dump/load)
|
||||
impl Storable for Product {}
|
||||
|
||||
// Implement SledModel trait
|
||||
impl SledModel for Product {
|
||||
fn get_id(&self) -> String {
|
||||
self.id.to_string()
|
||||
// Implement Model trait
|
||||
impl Model for Product {
|
||||
fn get_id(&self) -> u32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn db_prefix() -> &'static str {
|
||||
|
266
herodb_ourdb_migration_plan.md
Normal file
266
herodb_ourdb_migration_plan.md
Normal file
@ -0,0 +1,266 @@
|
||||
# Migration Plan: Restructuring herodb to Use ourdb as Backend
|
||||
|
||||
This document outlines the plan to restructure herodb to use ourdb as the backend, completely removing all sled references and better aligning with ourdb's design patterns.
|
||||
|
||||
## Overview
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
A[Current herodb with sled] --> B[Define new core traits]
|
||||
B --> C[Implement ourdb backend]
|
||||
C --> D[Create new DB manager]
|
||||
D --> E[Implement transaction system]
|
||||
E --> F[Update model implementations]
|
||||
F --> G[Final restructured herodb with ourdb]
|
||||
```
|
||||
|
||||
## New Architecture
|
||||
|
||||
```mermaid
|
||||
classDiagram
|
||||
class Model {
|
||||
+get_id() u32
|
||||
+db_prefix() &'static str
|
||||
}
|
||||
class Storable {
|
||||
+serialize() Result<Vec<u8>>
|
||||
+deserialize() Result<Self>
|
||||
}
|
||||
class DB {
|
||||
-path: PathBuf
|
||||
-type_map: HashMap<TypeId, Arc<dyn DbOperations>>
|
||||
-transaction: Arc<RwLock<Option<TransactionState>>>
|
||||
+new(config: DbConfig) Result<Self>
|
||||
+begin_transaction() Result<()>
|
||||
+commit_transaction() Result<()>
|
||||
+rollback_transaction() Result<()>
|
||||
+set<T: Model>(model: &T) Result<()>
|
||||
+get<T: Model>(id: u32) Result<T>
|
||||
+delete<T: Model>(id: u32) Result<()>
|
||||
+list<T: Model>() Result<Vec<T>>
|
||||
+register<T: Model>() Result<()>
|
||||
+get_history<T: Model>(id: u32, depth: u8) Result<Vec<T>>
|
||||
}
|
||||
class DbOperations {
|
||||
<<interface>>
|
||||
+delete(id: u32) Result<()>
|
||||
+get(id: u32) Result<Box<dyn Any>>
|
||||
+list() Result<Box<dyn Any>>
|
||||
+insert(model: &dyn Any) Result<()>
|
||||
+get_history(id: u32, depth: u8) Result<Vec<Box<dyn Any>>>
|
||||
}
|
||||
class OurDbStore~T~ {
|
||||
-db: OurDB
|
||||
-model_type: PhantomData<T>
|
||||
+new(config: OurDBConfig) Result<Self>
|
||||
+insert(model: &T) Result<()>
|
||||
+get(id: u32) Result<T>
|
||||
+delete(id: u32) Result<()>
|
||||
+list() Result<Vec<T>>
|
||||
+get_history(id: u32, depth: u8) Result<Vec<T>>
|
||||
}
|
||||
|
||||
Model --|> Storable
|
||||
OurDbStore ..|> DbOperations
|
||||
DB o-- DbOperations
|
||||
```
## Detailed Restructuring Steps

### 1. Define New Core Traits and Types

1. Create a new `Model` trait to replace `SledModel`
2. Create a new `Storable` trait for serialization/deserialization
3. Define a new error type hierarchy based on ourdb's error types
4. Create a `DbOperations` trait for database operations

### 2. Implement ourdb Backend

1. Create an `OurDbStore<T>` type that wraps ourdb
2. Implement the `DbOperations` trait for `OurDbStore<T>`
3. Add support for history tracking

### 3. Create New DB Manager

1. Create a new `DB` struct that manages multiple model types
2. Implement a builder pattern for configuration
3. Add methods for CRUD operations

### 4. Implement Transaction System

1. Create a transaction system that works with ourdb
2. Implement transaction operations (begin, commit, rollback)
3. Handle transaction state tracking

### 5. Update Model Implementations

1. Update all models to use `u32` IDs
2. Implement the new `Model` trait for all models
3. Update model constructors and builders

## Implementation Details

### 1. Core Traits and Types

```rust
// Error types
pub enum DbError {
    IoError(std::io::Error),
    SerializationError(bincode::Error),
    NotFound(u32),
    TransactionError(String),
    // Map to ourdb error types
    OurDbError(ourdb::Error),
    // Other error types as needed
}

// Result type alias
pub type DbResult<T> = Result<T, DbError>;

// Storable trait
pub trait Storable: Serialize + for<'de> Deserialize<'de> + Sized {
    fn serialize(&self) -> DbResult<Vec<u8>> {
        // Default implementation using bincode
        Ok(bincode::serialize(self)?)
    }

    fn deserialize(data: &[u8]) -> DbResult<Self> {
        // Default implementation using bincode
        Ok(bincode::deserialize(data)?)
    }
}

// Model trait
pub trait Model: Storable + Debug + Clone + Send + Sync + 'static {
    fn get_id(&self) -> u32;
    fn db_prefix() -> &'static str;
}
```
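Two pieces are implied but not spelled out above: the error conversions that the `?` operator in the `Storable` defaults relies on, and the `DbOperations` trait from step 1.4. A minimal sketch, assuming the method set from the class diagram; the `Send + Sync` bounds and `&self` receivers are assumptions, and because ourdb's `set` takes `&mut self`, a store held behind `Arc<dyn DbOperations>` would also need interior mutability (e.g. a `Mutex`):

```rust
use std::any::Any;

// Assumed conversions so `?` in the Storable defaults and in the
// ourdb-backed store can produce DbError values.
impl From<bincode::Error> for DbError {
    fn from(e: bincode::Error) -> Self {
        DbError::SerializationError(e)
    }
}

impl From<ourdb::Error> for DbError {
    fn from(e: ourdb::Error) -> Self {
        DbError::OurDbError(e)
    }
}

// Object-safe view of a store, so DB can keep one Arc<dyn DbOperations>
// per registered model type (method set taken from the class diagram).
pub trait DbOperations: Send + Sync {
    fn insert(&self, model: &dyn Any) -> DbResult<()>;
    fn get(&self, id: u32) -> DbResult<Box<dyn Any>>;
    fn delete(&self, id: u32) -> DbResult<()>;
    fn list(&self) -> DbResult<Box<dyn Any>>;
    fn get_history(&self, id: u32, depth: u8) -> DbResult<Vec<Box<dyn Any>>>;
}
```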
### 2. ourdb Backend Implementation

```rust
pub struct OurDbStore<T: Model> {
    db: OurDB,
    _phantom: PhantomData<T>,
}

impl<T: Model> OurDbStore<T> {
    pub fn new(config: OurDBConfig) -> DbResult<Self> {
        let db = OurDB::new(config)?;
        Ok(Self {
            db,
            _phantom: PhantomData,
        })
    }

    // Implementation of CRUD operations
}

impl<T: Model> DbOperations for OurDbStore<T> {
    // Implementation of DbOperations trait
}
```
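A sketch of the "CRUD operations" placeholder above. It assumes ourdb's API as used in the benchmarks elsewhere in this commit (`set` returns the assigned `u32` id, `get`/`get_history` return raw bytes), the `From` conversions sketched earlier, and `&mut self` receivers since ourdb's own methods take `&mut self`:

```rust
use ourdb::OurDBSetArgs;

impl<T: Model> OurDbStore<T> {
    pub fn insert(&mut self, model: &T) -> DbResult<u32> {
        let bytes = Storable::serialize(model)?;
        // id: None lets ourdb assign the next incremental id.
        Ok(self.db.set(OurDBSetArgs { id: None, data: &bytes })?)
    }

    pub fn get(&mut self, id: u32) -> DbResult<T> {
        let bytes = self.db.get(id)?;
        <T as Storable>::deserialize(&bytes)
    }

    pub fn delete(&mut self, id: u32) -> DbResult<()> {
        Ok(self.db.delete(id)?)
    }

    pub fn get_history(&mut self, id: u32, depth: u8) -> DbResult<Vec<T>> {
        self.db
            .get_history(id, depth)?
            .iter()
            .map(|bytes| <T as Storable>::deserialize(bytes))
            .collect()
    }
}
```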
### 3. DB Manager Implementation

```rust
pub struct DB {
    path: PathBuf,
    type_map: HashMap<TypeId, Arc<dyn DbOperations>>,
    transaction: Arc<RwLock<Option<TransactionState>>>,
}

impl DB {
    pub fn new(config: DbConfig) -> DbResult<Self> {
        // Implementation
    }

    // CRUD operations and other methods
}
```
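The intended call pattern for the manager, following the class diagram above. `DbConfig`, the `Product` fields, and the exact receiver types are placeholders rather than a settled API:

```rust
fn example(db: &DB, product: Product) -> DbResult<()> {
    // Register each model type once so DB can route it to an OurDbStore<T>.
    db.register::<Product>()?;

    db.set(&product)?;                                               // insert or update
    let found: Product = db.get(product.get_id())?;                 // typed read
    let all: Vec<Product> = db.list()?;                             // all records of this type
    let history: Vec<Product> = db.get_history(found.get_id(), 2)?; // prior versions
    db.delete::<Product>(found.get_id())?;

    let _ = (all, history);
    Ok(())
}
```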
### 4. Transaction System

```rust
pub struct TransactionState {
    operations: Vec<DbOperation>,
    active: bool,
}

enum DbOperation {
    Set {
        model_type: TypeId,
        serialized: Vec<u8>,
    },
    Delete {
        model_type: TypeId,
        id: u32,
    },
}

impl DB {
    pub fn begin_transaction(&self) -> DbResult<()> {
        // Implementation
    }

    pub fn commit_transaction(&self) -> DbResult<()> {
        // Implementation
    }

    pub fn rollback_transaction(&self) -> DbResult<()> {
        // Implementation
    }
}
```
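How calling code is expected to drive this API; a sketch only, since ourdb itself has no transactions and the buffering happens entirely inside `DB`:

```rust
fn save_both(db: &DB, a: Product, b: Product) -> DbResult<()> {
    db.begin_transaction()?;

    // While a transaction is active, set/delete calls are recorded as
    // DbOperation entries in TransactionState instead of being applied at once.
    let result: DbResult<()> = (|| {
        db.set(&a)?;
        db.set(&b)?;
        Ok(())
    })();

    match result {
        Ok(()) => db.commit_transaction(), // apply the buffered operations to ourdb
        Err(e) => {
            db.rollback_transaction()?;    // discard the buffered operations
            Err(e)
        }
    }
}
```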
### 5. Model Implementation Updates

```rust
// Example for Product model
impl Model for Product {
    fn get_id(&self) -> u32 {
        self.id
    }

    fn db_prefix() -> &'static str {
        "product"
    }
}

impl Storable for Product {}
```
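For the blank `impl Storable for Product {}` to compile, `Product` has to satisfy Storable's serde bounds (plus `Debug`/`Clone` for `Model`). The non-id fields below are illustrative only; the plan itself only implies `id: u32`:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Product {
    pub id: u32,
    pub name: String,
    pub description: String,
}
```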
## Key Technical Considerations

1. **Clean Architecture**: The new design provides a cleaner separation of concerns.

2. **Incremental IDs**: All models will use `u32` IDs, and ourdb will be configured in incremental mode (see the config sketch after this list).

3. **History Tracking**: The new API will expose ourdb's history tracking capabilities.

4. **Transaction Support**: We'll implement a custom transaction system on top of ourdb.

5. **Error Handling**: New error types will map directly to ourdb's error types.

6. **Serialization**: We'll use bincode for serialization/deserialization by default.
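Config sketch for consideration 2: ourdb opened in incremental mode so it assigns `u32` IDs itself. Field names follow the benchmark code in this commit; the concrete sizes are placeholders:

```rust
use ourdb::{OurDB, OurDBConfig};

fn open_incremental(path: std::path::PathBuf) -> Result<OurDB, ourdb::Error> {
    let config = OurDBConfig {
        path,
        incremental_mode: true,            // ourdb hands out the next u32 id
        file_size: Some(10 * 1024 * 1024), // 10MB data files
        keysize: Some(4),                  // 4-byte keys
        reset: None,                       // keep existing data
    };
    OurDB::new(config)
}
```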
## Migration Risks and Mitigations

| Risk | Mitigation |
|------|------------|
| Breaking API changes | Create a compatibility layer if needed |
| Data migration complexity | Develop a data migration utility |
| Performance impact | Benchmark before and after |
| Implementation complexity | Implement in phases with thorough testing |
| Integration issues | Create comprehensive integration tests |

## Implementation Phases

1. **Phase 1**: Define core traits and types
2. **Phase 2**: Implement ourdb backend
3. **Phase 3**: Create DB manager
4. **Phase 4**: Implement transaction system
5. **Phase 5**: Update model implementations
6. **Phase 6**: Create tests and benchmarks
7. **Phase 7**: Develop data migration utility
@ -14,61 +14,42 @@ fn criterion_benchmark(c: &mut Criterion) {
incremental_mode: true,
file_size: Some(10 * 1024 * 1024), // 10MB
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config).unwrap();
let test_data = vec![b'X'; 100]; // 100 bytes of data
let mut i = 0;

b.iter(|| {
let args = OurDBSetArgs {
id: None, // Let the DB assign an ID
let _ = db.set(OurDBSetArgs {
id: None,
data: &test_data,
};
black_box(db.set(args).unwrap());
i += 1;
});
}).unwrap();
});

// Setup database with data for other benchmarks
db.close().unwrap();
});

// Benchmark get operation (retrieval)
c.bench_function("get", |b| {
// Setup: Create a database and insert a record
let setup_config = OurDBConfig {
path: db_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024), // 10MB
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
};

let mut setup_db = OurDB::new(setup_config).unwrap();
let test_data = vec![b'X'; 100]; // 100 bytes of data
let mut ids = Vec::with_capacity(1000);

// Insert 1000 records
for _ in 0..1000 {
let args = OurDBSetArgs {
id: None,
data: &test_data,
};
let id = setup_db.set(args).unwrap();
ids.push(id);
}

// Benchmark get operation
c.bench_function("get", |b| {
let config = OurDBConfig {
path: db_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
keysize: Some(6),
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config).unwrap();
let mut i = 0;
let mut db = OurDB::new(setup_config).unwrap();
let test_data = vec![b'X'; 100];
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();

b.iter(|| {
let id = ids[i % ids.len()];
black_box(db.get(id).unwrap());
i += 1;
let _ = db.get(id).unwrap();
});

db.close().unwrap();
});

// Benchmark update operation
@ -77,199 +58,143 @@ fn criterion_benchmark(c: &mut Criterion) {
path: db_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
keysize: Some(6),
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config).unwrap();
let updated_data = vec![b'Y'; 100]; // Different data for updates
let mut i = 0;
let test_data = vec![b'X'; 100];
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();

b.iter(|| {
let id = ids[i % ids.len()];
let args = OurDBSetArgs {
let _ = db.set(OurDBSetArgs {
id: Some(id),
data: &updated_data,
};
black_box(db.set(args).unwrap());
i += 1;
});
data: &test_data,
}).unwrap();
});

// Benchmark get_history operation
db.close().unwrap();
});

// Benchmark delete operation
c.bench_function("delete", |b| {
let config = OurDBConfig {
path: db_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6),
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config).unwrap();

// Create a test data vector outside the closure
let test_data = vec![b'X'; 100];

b.iter_with_setup(
// Setup: Insert a record before each iteration
|| {
db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap()
},
// Benchmark: Delete the record
|id| {
db.delete(id).unwrap();
}
);

db.close().unwrap();
});

// Benchmark history tracking
c.bench_function("get_history", |b| {
let config = OurDBConfig {
path: db_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
};

let mut db = OurDB::new(config).unwrap();
let mut i = 0;

b.iter(|| {
let id = ids[i % ids.len()];
black_box(db.get_history(id, 2).unwrap());
i += 1;
});
});

// Benchmark delete operation
c.bench_function("delete", |b| {
// Create a fresh database for deletion benchmarks
let delete_dir = tempdir().expect("Failed to create temp directory");
let delete_path = delete_dir.path().to_path_buf();

let config = OurDBConfig {
path: delete_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
keysize: Some(6),
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config).unwrap();
let test_data = vec![b'X'; 100];

// Setup keys to delete
let mut delete_ids = Vec::with_capacity(1000);
for _ in 0..1000 {
let args = OurDBSetArgs {
id: None,
data: &test_data,
};
let id = db.set(args).unwrap();
delete_ids.push(id);
// Create a record with history
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();

// Update it a few times to create history
for _ in 0..5 {
db.set(OurDBSetArgs { id: Some(id), data: &test_data }).unwrap();
}

let mut i = 0;
b.iter(|| {
let id = delete_ids[i % delete_ids.len()];
// Only try to delete if it exists (not already deleted)
if db.get(id).is_ok() {
black_box(db.delete(id).unwrap());
}
i += 1;
});
let _ = db.get_history(id, 3).unwrap();
});

// Benchmark key-value mode vs incremental mode
let mut group = c.benchmark_group("mode_comparison");

// Benchmark set in key-value mode
group.bench_function("set_keyvalue_mode", |b| {
let kv_dir = tempdir().expect("Failed to create temp directory");
let kv_path = kv_dir.path().to_path_buf();
db.close().unwrap();
});

// Benchmark large data handling
c.bench_function("large_data", |b| {
let config = OurDBConfig {
path: kv_path.clone(),
incremental_mode: false, // Key-value mode
path: db_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
keysize: Some(6),
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config).unwrap();
let test_data = vec![b'X'; 100];
let mut i = 0;
let large_data = vec![b'X'; 10 * 1024]; // 10KB

b.iter(|| {
let id = i + 1; // Explicit ID
let args = OurDBSetArgs {
id: Some(id as u32),
data: &test_data,
};
black_box(db.set(args).unwrap());
i += 1;
});
let id = db.set(OurDBSetArgs { id: None, data: &large_data }).unwrap();
let _ = db.get(id).unwrap();
db.delete(id).unwrap();
});

// Benchmark set in incremental mode
group.bench_function("set_incremental_mode", |b| {
let inc_dir = tempdir().expect("Failed to create temp directory");
let inc_path = inc_dir.path().to_path_buf();
db.close().unwrap();
});

// Benchmark concurrent operations (simulated)
c.bench_function("concurrent_ops", |b| {
let config = OurDBConfig {
path: inc_path.clone(),
incremental_mode: true, // Incremental mode
path: db_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
keysize: Some(6),
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config).unwrap();
let test_data = vec![b'X'; 100];

b.iter(|| {
let args = OurDBSetArgs {
id: None, // Auto-generated ID
data: &test_data,
};
black_box(db.set(args).unwrap());
});
});

group.finish();

// Benchmark with different record sizes
let mut size_group = c.benchmark_group("record_size");

for &size in &[10, 100, 1000, 10000] {
size_group.bench_function(format!("set_size_{}", size), |b| {
let size_dir = tempdir().expect("Failed to create temp directory");
let size_path = size_dir.path().to_path_buf();

let config = OurDBConfig {
path: size_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
};

let mut db = OurDB::new(config).unwrap();
let test_data = vec![b'X'; size];

b.iter(|| {
let args = OurDBSetArgs {
id: None,
data: &test_data,
};
black_box(db.set(args).unwrap());
});
});

size_group.bench_function(format!("get_size_{}", size), |b| {
let size_dir = tempdir().expect("Failed to create temp directory");
let size_path = size_dir.path().to_path_buf();

let config = OurDBConfig {
path: size_path.clone(),
incremental_mode: true,
file_size: Some(10 * 1024 * 1024),
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
};

let mut db = OurDB::new(config).unwrap();
let test_data = vec![b'X'; size];

// Insert some records first
let mut size_ids = Vec::with_capacity(100);
// Pre-insert some data
let mut ids = Vec::with_capacity(100);
for _ in 0..100 {
let args = OurDBSetArgs {
id: None,
data: &test_data,
};
let id = db.set(args).unwrap();
size_ids.push(id);
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
ids.push(id);
}

let mut i = 0;
b.iter(|| {
let id = size_ids[i % size_ids.len()];
black_box(db.get(id).unwrap());
i += 1;
});
});
// Simulate mixed workload
for i in 0..10 {
if i % 3 == 0 {
// Insert
let _ = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
} else if i % 3 == 1 {
// Read
let idx = i % ids.len();
let _ = db.get(ids[idx]).unwrap();
} else {
// Update
let idx = i % ids.len();
db.set(OurDBSetArgs { id: Some(ids[idx]), data: &test_data }).unwrap();
}
}
});

size_group.finish();
db.close().unwrap();
});
}

criterion_group!(benches, criterion_benchmark);
@ -41,6 +41,7 @@ fn key_value_mode_example(base_path: &PathBuf) -> Result<(), ourdb::Error> {
incremental_mode: false,
file_size: Some(1024 * 1024), // 1MB for testing
keysize: Some(2), // Small key size for demonstration
reset: None, // Don't reset existing database
};

let mut db = OurDB::new(config)?;
@ -94,6 +95,7 @@ fn incremental_mode_example(base_path: &PathBuf) -> Result<(), ourdb::Error> {
incremental_mode: true,
file_size: Some(1024 * 1024), // 1MB for testing
keysize: Some(3), // 3-byte keys
reset: None, // Don't reset existing database
};

let mut db = OurDB::new(config)?;
@ -137,6 +139,7 @@ fn performance_benchmark(base_path: &PathBuf) -> Result<(), ourdb::Error> {
incremental_mode: true,
file_size: Some(1024 * 1024), // 1MB
keysize: Some(4), // 4-byte keys
reset: None, // Don't reset existing database
};

let mut db = OurDB::new(config)?;
@ -13,6 +13,7 @@ fn main() -> Result<(), ourdb::Error> {
incremental_mode: true,
file_size: None, // Use default (500MB)
keysize: None, // Use default (4 bytes)
reset: None, // Don't reset existing database
};

let mut db = OurDB::new(config)?;
@ -1,22 +1,28 @@
use ourdb::{OurDB, OurDBConfig, OurDBSetArgs};
use std::time::{Duration, Instant};
use std::time::Instant;

fn main() -> Result<(), ourdb::Error> {
// Parse command line arguments
// Parse command-line arguments
let args: Vec<String> = std::env::args().collect();

let (num_operations, record_size, incremental_mode, keysize) = parse_args(&args);
// Default values
let mut incremental_mode = true;
let mut keysize: u8 = 4;
let mut num_operations = 10000;

println!("OurDB Benchmark");
println!("===============");
println!("Operations: {}", num_operations);
println!("Record size: {} bytes", record_size);
println!("Mode: {}", if incremental_mode { "Incremental" } else { "Key-Value" });
println!("Key size: {} bytes", keysize);
println!();
// Parse arguments
for i in 1..args.len() {
if args[i] == "--no-incremental" {
incremental_mode = false;
} else if args[i] == "--keysize" && i + 1 < args.len() {
keysize = args[i + 1].parse().unwrap_or(4);
} else if args[i] == "--ops" && i + 1 < args.len() {
num_operations = args[i + 1].parse().unwrap_or(10000);
}
}

// Create a temporary directory for the database
let db_path = std::env::temp_dir().join(format!("ourdb_benchmark_{}", std::process::id()));
let db_path = std::env::temp_dir().join("ourdb_benchmark");
std::fs::create_dir_all(&db_path)?;

println!("Database path: {}", db_path.display());
@ -27,24 +33,27 @@ fn main() -> Result<(), ourdb::Error> {
incremental_mode,
file_size: Some(1024 * 1024),
keysize: Some(keysize),
reset: Some(true), // Reset the database for benchmarking
};

let mut db = OurDB::new(config)?;

// Prepare test data
let test_data = vec![b'X'; record_size];
// Prepare test data (100 bytes per record)
let test_data = vec![b'A'; 100];

// Benchmark write operations
println!("\nBenchmarking writes...");
println!("Benchmarking {} write operations (incremental: {}, keysize: {})...",
num_operations, incremental_mode, keysize);

let start = Instant::now();

let mut ids = Vec::with_capacity(num_operations);
for i in 0..num_operations {
for _ in 0..num_operations {
let id = if incremental_mode {
db.set(OurDBSetArgs { id: None, data: &test_data })?
} else {
// In key-value mode, we provide explicit IDs
let id = i as u32 + 1;
// In non-incremental mode, we need to provide IDs
let id = ids.len() as u32 + 1;
db.set(OurDBSetArgs { id: Some(id), data: &test_data })?;
id
};
@ -52,10 +61,15 @@ fn main() -> Result<(), ourdb::Error> {
}

let write_duration = start.elapsed();
print_performance_stats("Write", num_operations, write_duration);
let writes_per_second = num_operations as f64 / write_duration.as_secs_f64();

println!("Write performance: {:.2} ops/sec ({:.2} ms/op)",
writes_per_second,
write_duration.as_secs_f64() * 1000.0 / num_operations as f64);

// Benchmark read operations
println!("Benchmarking {} read operations...", num_operations);

// Benchmark read operations (sequential)
println!("\nBenchmarking sequential reads...");
let start = Instant::now();

for &id in &ids {
@ -63,123 +77,31 @@ fn main() -> Result<(), ourdb::Error> {
}

let read_duration = start.elapsed();
print_performance_stats("Sequential read", num_operations, read_duration);
let reads_per_second = num_operations as f64 / read_duration.as_secs_f64();

// Benchmark random reads
println!("\nBenchmarking random reads...");
let start = Instant::now();

use std::collections::HashSet;
let mut rng = rand::thread_rng();
let mut random_indices = HashSet::new();

// Select 20% of the IDs randomly for testing
let sample_size = num_operations / 5;
while random_indices.len() < sample_size {
let idx = rand::Rng::gen_range(&mut rng, 0..ids.len());
random_indices.insert(idx);
}

for idx in random_indices {
let _ = db.get(ids[idx])?;
}

let random_read_duration = start.elapsed();
print_performance_stats("Random read", sample_size, random_read_duration);
println!("Read performance: {:.2} ops/sec ({:.2} ms/op)",
reads_per_second,
read_duration.as_secs_f64() * 1000.0 / num_operations as f64);

// Benchmark update operations
println!("\nBenchmarking updates...");
println!("Benchmarking {} update operations...", num_operations);

let start = Instant::now();

for &id in &ids[0..num_operations/2] {
for &id in &ids {
db.set(OurDBSetArgs { id: Some(id), data: &test_data })?;
}

let update_duration = start.elapsed();
print_performance_stats("Update", num_operations/2, update_duration);
let updates_per_second = num_operations as f64 / update_duration.as_secs_f64();

// Benchmark history retrieval
println!("\nBenchmarking history retrieval...");
let start = Instant::now();
println!("Update performance: {:.2} ops/sec ({:.2} ms/op)",
updates_per_second,
update_duration.as_secs_f64() * 1000.0 / num_operations as f64);

for &id in &ids[0..num_operations/10] {
let _ = db.get_history(id, 2)?;
}

let history_duration = start.elapsed();
print_performance_stats("History retrieval", num_operations/10, history_duration);

// Benchmark delete operations
println!("\nBenchmarking deletes...");
let start = Instant::now();

for &id in &ids[0..num_operations/4] {
db.delete(id)?;
}

let delete_duration = start.elapsed();
print_performance_stats("Delete", num_operations/4, delete_duration);

// Close and clean up
// Clean up
db.close()?;
std::fs::remove_dir_all(&db_path)?;

println!("\nBenchmark completed successfully");

Ok(())
}

fn parse_args(args: &[String]) -> (usize, usize, bool, u8) {
let mut num_operations = 100000;
let mut record_size = 100;
let mut incremental_mode = true;
let mut keysize = 4;

for i in 1..args.len() {
if args[i] == "--ops" && i + 1 < args.len() {
if let Ok(n) = args[i + 1].parse() {
num_operations = n;
}
} else if args[i] == "--size" && i + 1 < args.len() {
if let Ok(n) = args[i + 1].parse() {
record_size = n;
}
} else if args[i] == "--keyvalue" {
incremental_mode = false;
} else if args[i] == "--keysize" && i + 1 < args.len() {
if let Ok(n) = args[i + 1].parse() {
if [2, 3, 4, 6].contains(&n) {
keysize = n;
}
}
} else if args[i] == "--help" {
print_usage();
std::process::exit(0);
}
}

(num_operations, record_size, incremental_mode, keysize)
}

fn print_usage() {
println!("OurDB Benchmark Tool");
println!("Usage: cargo run --example benchmark [OPTIONS]");
println!();
println!("Options:");
println!("  --ops N        Number of operations to perform (default: 100000)");
println!("  --size N       Size of each record in bytes (default: 100)");
println!("  --keyvalue     Use key-value mode instead of incremental mode");
println!("  --keysize N    Key size in bytes (2, 3, 4, or 6) (default: 4)");
println!("  --help         Print this help message");
}

fn print_performance_stats(operation: &str, count: usize, duration: Duration) {
let ops_per_second = count as f64 / duration.as_secs_f64();
let ms_per_op = duration.as_secs_f64() * 1000.0 / count as f64;

println!("{} performance:", operation);
println!("  Total time: {:.2} seconds", duration.as_secs_f64());
println!("  Operations: {}", count);
println!("  Speed: {:.2} ops/sec", ops_per_second);
println!("  Average: {:.3} ms/op", ms_per_op);
}