This commit is contained in:
parent 0051754c65
commit e3ec26a6ef

herodb/Cargo.lock (generated), 140 changed lines
@@ -152,12 +152,6 @@ dependencies = [
  "serde",
 ]
 
-[[package]]
-name = "bitflags"
-version = "1.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
-
 [[package]]
 name = "bitflags"
 version = "2.9.0"
@@ -200,12 +194,6 @@ version = "3.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
 
-[[package]]
-name = "byteorder"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
-
 [[package]]
 name = "bytes"
 version = "1.10.1"
@@ -320,21 +308,6 @@ dependencies = [
  "cfg-if",
 ]
 
-[[package]]
-name = "crossbeam-epoch"
-version = "0.9.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
-dependencies = [
- "crossbeam-utils",
-]
-
-[[package]]
-name = "crossbeam-utils"
-version = "0.8.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
-
 [[package]]
 name = "crunchy"
 version = "0.2.3"
@@ -475,16 +448,6 @@ dependencies = [
  "percent-encoding",
 ]
 
-[[package]]
-name = "fs2"
-version = "0.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
-dependencies = [
- "libc",
- "winapi",
-]
-
 [[package]]
 name = "futures-channel"
 version = "0.3.31"
@@ -538,15 +501,6 @@ dependencies = [
  "slab",
 ]
 
-[[package]]
-name = "fxhash"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
-dependencies = [
- "byteorder",
-]
-
 [[package]]
 name = "generic-array"
 version = "0.14.7"
@@ -659,13 +613,13 @@ dependencies = [
  "brotli",
  "chrono",
  "lazy_static",
+ "ourdb",
  "paste",
  "poem",
  "poem-openapi",
  "rhai",
  "serde",
  "serde_json",
- "sled",
  "tempfile",
  "thiserror",
  "tokio",
@@ -923,7 +877,7 @@ version = "0.27.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
 dependencies = [
- "bitflags 2.9.0",
+ "bitflags",
  "cfg-if",
  "libc",
 ]
@@ -968,14 +922,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
 
 [[package]]
-name = "parking_lot"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
+name = "ourdb"
+version = "0.1.0"
 dependencies = [
- "instant",
- "lock_api",
- "parking_lot_core 0.8.6",
+ "crc32fast",
+ "log",
+ "rand",
+ "thiserror",
 ]
 
 [[package]]
@@ -985,21 +938,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
 dependencies = [
  "lock_api",
- "parking_lot_core 0.9.10",
-]
-
-[[package]]
-name = "parking_lot_core"
-version = "0.8.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
-dependencies = [
- "cfg-if",
- "instant",
- "libc",
- "redox_syscall 0.2.16",
- "smallvec",
- "winapi",
+ "parking_lot_core",
 ]
 
 [[package]]
@@ -1010,7 +949,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall 0.5.10",
+ "redox_syscall",
  "smallvec",
  "windows-targets",
 ]
@@ -1056,7 +995,7 @@ dependencies = [
  "mime",
  "multer",
  "nix",
- "parking_lot 0.12.3",
+ "parking_lot",
  "percent-encoding",
  "pin-project-lite",
  "poem-derive",
@@ -1259,22 +1198,13 @@ dependencies = [
  "getrandom 0.2.15",
 ]
 
-[[package]]
-name = "redox_syscall"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
-dependencies = [
- "bitflags 1.3.2",
-]
-
 [[package]]
 name = "redox_syscall"
 version = "0.5.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1"
 dependencies = [
- "bitflags 2.9.0",
+ "bitflags",
 ]
 
 [[package]]
@@ -1322,7 +1252,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ce4d759a4729a655ddfdbb3ff6e77fb9eadd902dae12319455557796e435d2a6"
 dependencies = [
  "ahash",
- "bitflags 2.9.0",
+ "bitflags",
  "instant",
  "num-traits",
  "once_cell",
@@ -1364,7 +1294,7 @@ version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf"
 dependencies = [
- "bitflags 2.9.0",
+ "bitflags",
  "errno",
  "libc",
  "linux-raw-sys",
@@ -1498,22 +1428,6 @@ dependencies = [
  "autocfg",
 ]
 
-[[package]]
-name = "sled"
-version = "0.34.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935"
-dependencies = [
- "crc32fast",
- "crossbeam-epoch",
- "crossbeam-utils",
- "fs2",
- "fxhash",
- "libc",
- "log",
- "parking_lot 0.11.2",
-]
-
 [[package]]
 name = "smallvec"
 version = "1.14.0"
@@ -1676,7 +1590,7 @@ dependencies = [
  "bytes",
  "libc",
  "mio",
- "parking_lot 0.12.3",
+ "parking_lot",
  "pin-project-lite",
  "signal-hook-registry",
  "socket2",
@@ -1931,28 +1845,6 @@ version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "68ce1ab1f8c62655ebe1350f589c61e505cf94d385bc6a12899442d9081e71fd"
 
-[[package]]
-name = "winapi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
-dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
-
 [[package]]
 name = "windows-core"
 version = "0.61.0"
@@ -2109,7 +2001,7 @@ version = "0.39.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
 dependencies = [
- "bitflags 2.9.0",
+ "bitflags",
 ]
 
 [[package]]
herodb/Cargo.toml
@@ -2,12 +2,12 @@
 name = "herodb"
 version = "0.1.0"
 edition = "2024"
-description = "A database library built on top of sled with model support"
+description = "A database library built on top of ourdb with model support"
 license = "MIT"
 authors = ["HeroCode Team"]
 
 [dependencies]
-sled = "0.34.7"
+ourdb = { path = "../ourdb" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 thiserror = "1.0"
@@ -31,6 +31,10 @@ path = "examples/rhai_demo.rs"
 name = "business_models_demo"
 path = "examples/business_models_demo.rs"
 
+[[example]]
+name = "ourdb_example"
+path = "examples/ourdb_example.rs"
+
 [[bin]]
 name = "dbexample_prod"
 path = "src/cmd/dbexample_prod/main.rs"
herodb/examples/ourdb_example.rs (new file, 80 lines)

use herodb::db::{DB, DBBuilder, Model};
use herodb::models::biz::{Product, ProductBuilder, ProductType, ProductStatus, Currency, CurrencyBuilder};
use chrono::Utc;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("OurDB Backend Example");
    println!("====================\n");

    // Create a temporary directory for the database
    let db_path = std::env::temp_dir().join("herodb_ourdb_example");
    std::fs::create_dir_all(&db_path)?;

    println!("Creating database at: {}", db_path.display());

    // Create a new database with Product model registered
    let db = DBBuilder::new(db_path.clone())
        .register_model::<Product>()
        .build()?;

    println!("Database created successfully");

    // Create a currency for pricing
    let usd = CurrencyBuilder::new()
        .amount(99.99)
        .currency_code("USD")
        .build()
        .expect("Failed to create currency");

    // Create a product
    let product = ProductBuilder::new()
        .id(1) // We're setting an ID manually for this example
        .name("Test Product")
        .description("A test product for our OurDB example")
        .price(usd)
        .type_(ProductType::Product)
        .category("Test")
        .status(ProductStatus::Available)
        .max_amount(100)
        .validity_days(365)
        .build()
        .expect("Failed to create product");

    println!("\nCreated product: {}", product.name);
    println!("Product ID: {}", product.get_id());

    // Insert the product into the database
    db.set(&product)?;
    println!("Product saved to database");

    // Retrieve the product from the database
    let retrieved_product = db.get::<Product>(product.get_id())?;
    println!("\nRetrieved product from database:");
    println!(" Name: {}", retrieved_product.name);
    println!(" Description: {}", retrieved_product.description);
    println!(" Price: ${} {}", retrieved_product.price.amount, retrieved_product.price.currency_code);

    // Create a product with auto-incremented ID
    // For this to work, we would need to modify the Product model to support auto-incremented IDs
    // This is just a conceptual example
    println!("\nDemonstrating auto-incremented IDs:");
    println!("(Note: This would require additional implementation in the Product model)");

    // Delete the product
    db.delete::<Product>(product.get_id())?;
    println!("\nProduct deleted from database");

    // Try to retrieve the deleted product (should fail)
    match db.get::<Product>(product.get_id()) {
        Ok(_) => println!("Product still exists (unexpected)"),
        Err(e) => println!("Verified deletion: {}", e),
    }

    println!("\nExample completed successfully!");

    // Clean up
    std::fs::remove_dir_all(&db_path)?;
    println!("Cleaned up database directory");

    Ok(())
}
@@ -6,7 +6,7 @@ use crate::models::biz::{
     Invoice, InvoiceBuilder, InvoiceItem, InvoiceItemBuilder, InvoiceStatus, Payment, PaymentStatus,
     Customer, CustomerBuilder,
 };
-use crate::db::base::SledModel;
+use crate::db::model::Model;
 
 /// This example demonstrates the business models in action:
 /// 1. Defining products (2 types of server nodes)
@@ -41,13 +41,13 @@ fn main() {
     // Simulate a user buying a product
     println!("\nSimulating purchase of a Premium Node:");
     let sale = create_sale(&customer, &premium_node);
-    println!(" - Sale created with ID: {}", sale.id);
+    println!(" - Sale created with ID: {}", sale.get_id());
     println!(" - Total amount: ${} {}", sale.total_amount.amount, sale.total_amount.currency_code);
 
     // Generate an invoice
     println!("\nGenerating invoice:");
     let invoice = create_invoice(&customer, &sale);
-    println!(" - Invoice created with ID: {}", invoice.id);
+    println!(" - Invoice created with ID: {}", invoice.get_id());
     println!(" - Total amount: ${} {}", invoice.total_amount.amount, invoice.total_amount.currency_code);
     println!(" - Due date: {}", invoice.due_date);
     println!(" - Status: {:?}", invoice.status);
@@ -198,7 +198,7 @@ fn create_sale(customer: &Customer, product: &Product) -> Sale {
     let sale_item = SaleItemBuilder::new()
         .id(1)
         .sale_id(1)
-        .product_id(product.id as u32)
+        .product_id(product.get_id())
         .name(product.name.clone())
         .description(product.description.clone())
         .comments("Customer requested expedited setup")
@@ -213,7 +213,7 @@ fn create_sale(customer: &Customer, product: &Product) -> Sale {
     let sale = SaleBuilder::new()
         .id(1)
         .company_id(101) // Assuming company ID 101
-        .customer_id(customer.id)
+        .customer_id(customer.get_id())
         .buyer_name(customer.name.clone())
         .buyer_email("contact@techcorp.com") // Example email
         .currency_code(product.price.currency_code.clone())
@@ -236,14 +236,14 @@ fn create_invoice(customer: &Customer, sale: &Sale) -> Invoice {
         .invoice_id(1)
         .description(format!("Purchase of {}", sale.items[0].name))
         .amount(sale.total_amount.clone())
-        .sale_id(sale.id)
+        .sale_id(sale.get_id())
         .build()
         .expect("Failed to create invoice item");
 
     // Create the invoice
     let invoice = InvoiceBuilder::new()
         .id(1)
-        .customer_id(customer.id)
+        .customer_id(customer.get_id())
         .currency_code(sale.total_amount.currency_code.clone())
         .status(InvoiceStatus::Sent)
         .issue_date(now)
herodb/src/db/db.rs
@@ -1,11 +1,12 @@
-use crate::db::base::*;
-use bincode;
-use rhai::{CustomType, EvalAltResult, TypeBuilder};
+use crate::db::error::{DbError, DbResult};
+use crate::db::model::Model;
+use crate::db::store::{DbOperations, OurDbStore};
 use std::any::TypeId;
 use std::collections::HashMap;
 use std::fmt::Debug;
 use std::path::{Path, PathBuf};
-use std::sync::{Arc, Mutex, RwLock};
+use std::sync::{Arc, RwLock};
+use rhai::{CustomType, EvalAltResult, TypeBuilder};
 
 /// Represents a single database operation in a transaction
 #[derive(Debug, Clone)]
@@ -16,51 +17,10 @@ enum DbOperation {
     },
     Delete {
         model_type: TypeId,
-        id: String,
+        id: u32,
     },
 }
 
-// Trait for type-erased database operations
-pub trait AnyDbOperations: Send + Sync {
-    fn delete(&self, id: &str) -> SledDBResult<()>;
-    fn get_any(&self, id: &str) -> SledDBResult<Box<dyn std::any::Any>>;
-    fn list_any(&self) -> SledDBResult<Box<dyn std::any::Any>>;
-    fn insert_any(&self, model: &dyn std::any::Any) -> SledDBResult<()>;
-    fn insert_any_raw(&self, serialized: &[u8]) -> SledDBResult<()>;
-}
-
-// Implementation of AnyDbOperations for any SledDB<T>
-impl<T: SledModel> AnyDbOperations for SledDB<T> {
-    fn delete(&self, id: &str) -> SledDBResult<()> {
-        self.delete(id)
-    }
-
-    fn get_any(&self, id: &str) -> SledDBResult<Box<dyn std::any::Any>> {
-        let result = self.get(id)?;
-        Ok(Box::new(result))
-    }
-
-    fn list_any(&self) -> SledDBResult<Box<dyn std::any::Any>> {
-        let result = self.list()?;
-        Ok(Box::new(result))
-    }
-
-    fn insert_any(&self, model: &dyn std::any::Any) -> SledDBResult<()> {
-        // Downcast to the specific type T
-        match model.downcast_ref::<T>() {
-            Some(t) => self.insert(t),
-            None => Err(SledDBError::TypeError),
-        }
-    }
-
-    fn insert_any_raw(&self, serialized: &[u8]) -> SledDBResult<()> {
-        // Deserialize the bytes into model of type T
-        let model: T = bincode::deserialize(serialized)?;
-        // Use the regular insert method
-        self.insert(&model)
-    }
-}
-
 /// Transaction state for DB operations
 pub struct TransactionState {
     operations: Vec<DbOperation>,
@@ -77,17 +37,14 @@ impl TransactionState {
     }
 }
 
-/// Main DB manager that automatically handles all root models
+/// Main DB manager that automatically handles all models
 #[derive(Clone, CustomType)]
 pub struct DB {
     db_path: PathBuf,
 
     // Type map for generic operations
-    type_map: HashMap<TypeId, Arc<dyn AnyDbOperations>>,
-
-    // Locks to ensure thread safety for key areas
-    _write_locks: Arc<Mutex<HashMap<String, bool>>>,
+    type_map: HashMap<TypeId, Arc<dyn DbOperations>>,
 
     // Transaction state
     transaction: Arc<RwLock<Option<TransactionState>>>,
 }
@@ -101,15 +58,15 @@ pub struct DBBuilder {
 
 /// Trait for model registration
 pub trait ModelRegistration: Send + Sync {
-    fn register(&self, path: &Path) -> SledDBResult<(TypeId, Box<dyn AnyDbOperations>)>;
+    fn register(&self, path: &Path) -> DbResult<(TypeId, Box<dyn DbOperations>)>;
 }
 
-/// Implementation of ModelRegistration for any SledModel type
-pub struct SledModelRegistration<T: SledModel> {
+/// Implementation of ModelRegistration for any Model type
+pub struct ModelRegistrar<T: Model> {
     phantom: std::marker::PhantomData<T>,
 }
 
-impl<T: SledModel> SledModelRegistration<T> {
+impl<T: Model> ModelRegistrar<T> {
     pub fn new() -> Self {
         Self {
             phantom: std::marker::PhantomData,
@@ -117,10 +74,10 @@ impl<T: SledModel> SledModelRegistration<T> {
         }
     }
 }
 
-impl<T: SledModel> ModelRegistration for SledModelRegistration<T> {
-    fn register(&self, path: &Path) -> SledDBResult<(TypeId, Box<dyn AnyDbOperations>)> {
-        let db: SledDB<T> = SledDB::open(path.join(T::db_prefix()))?;
-        Ok((TypeId::of::<T>(), Box::new(db) as Box<dyn AnyDbOperations>))
+impl<T: Model> ModelRegistration for ModelRegistrar<T> {
+    fn register(&self, path: &Path) -> DbResult<(TypeId, Box<dyn DbOperations>)> {
+        let store = OurDbStore::<T>::open(path.join(T::db_prefix()))?;
+        Ok((TypeId::of::<T>(), Box::new(store) as Box<dyn DbOperations>))
     }
 }
 
@@ -132,49 +89,47 @@ impl DBBuilder {
             model_registrations: Vec::new(),
         }
     }
 
     pub fn with_path<P: Into<PathBuf>>(base_path: P) -> Self {
         Self {
             base_path: base_path.into(),
             model_registrations: Vec::new(),
         }
     }
 
     /// Register a model type with the DB
-    pub fn register_model<T: SledModel>(mut self) -> Self {
+    pub fn register_model<T: Model>(mut self) -> Self {
         self.model_registrations
-            .push(Arc::new(SledModelRegistration::<T>::new()));
+            .push(Arc::new(ModelRegistrar::<T>::new()));
         self
     }
 
     /// Build the DB with the registered models
     pub fn build(self) -> Result<DB, Box<EvalAltResult>> {
         let base_path = self.base_path;
 
         // Ensure base directory exists
         if !base_path.exists() {
             std::fs::create_dir_all(&base_path).map_err(|e| {
                 EvalAltResult::ErrorSystem("Could not create base dir".to_string(), Box::new(e))
             })?;
         }
 
         // Register all models
-        let mut type_map: HashMap<TypeId, Arc<dyn AnyDbOperations>> = HashMap::new();
+        let mut type_map: HashMap<TypeId, Arc<dyn DbOperations>> = HashMap::new();
 
         for registration in self.model_registrations {
-            let (type_id, db) = registration.register(&base_path).map_err(|e| {
+            let (type_id, store) = registration.register(&base_path).map_err(|e| {
                 EvalAltResult::ErrorSystem("Could not register type".to_string(), Box::new(e))
             })?;
-            type_map.insert(type_id, db.into());
+            type_map.insert(type_id, store.into());
         }
 
-        let _write_locks = Arc::new(Mutex::new(HashMap::new()));
         let transaction = Arc::new(RwLock::new(None));
 
         Ok(DB {
             db_path: base_path,
             type_map,
-            _write_locks,
             transaction,
         })
     }
@@ -182,68 +137,66 @@ impl DBBuilder {
 
 impl DB {
     /// Create a new empty DB instance without any models
-    pub fn new<P: Into<PathBuf>>(base_path: P) -> SledDBResult<Self> {
+    pub fn new<P: Into<PathBuf>>(base_path: P) -> DbResult<Self> {
         let base_path = base_path.into();
 
         // Ensure base directory exists
         if !base_path.exists() {
             std::fs::create_dir_all(&base_path)?;
         }
 
-        let _write_locks = Arc::new(Mutex::new(HashMap::new()));
         let transaction = Arc::new(RwLock::new(None));
 
         Ok(Self {
             db_path: base_path,
             type_map: HashMap::new(),
-            _write_locks,
             transaction,
         })
     }
 
     // Transaction-related methods
 
     /// Begin a new transaction
-    pub fn begin_transaction(&self) -> SledDBResult<()> {
+    pub fn begin_transaction(&self) -> DbResult<()> {
         let mut tx = self.transaction.write().unwrap();
         if tx.is_some() {
-            return Err(SledDBError::GeneralError(
+            return Err(DbError::TransactionError(
                 "Transaction already in progress".into(),
             ));
         }
         *tx = Some(TransactionState::new());
         Ok(())
     }
 
     /// Check if a transaction is active
     pub fn has_active_transaction(&self) -> bool {
         let tx = self.transaction.read().unwrap();
         tx.is_some() && tx.as_ref().unwrap().active
     }
 
     /// Apply a set operation with the serialized data - bypass transaction check
-    fn apply_set_operation(&self, model_type: TypeId, serialized: &[u8]) -> SledDBResult<()> {
+    fn apply_set_operation(&self, model_type: TypeId, serialized: &[u8]) -> DbResult<()> {
         // Get the database operations for this model type
         if let Some(db_ops) = self.type_map.get(&model_type) {
             // Just pass the raw serialized data to a special raw insert method
-            return db_ops.insert_any_raw(serialized);
+            return db_ops.insert_raw(serialized);
         }
 
-        Err(SledDBError::GeneralError(format!(
+        Err(DbError::GeneralError(format!(
             "No DB registered for type ID {:?}",
             model_type
         )))
     }
 
     /// Commit the current transaction, applying all operations
-    pub fn commit_transaction(&self) -> SledDBResult<()> {
+    pub fn commit_transaction(&self) -> DbResult<()> {
         let mut tx_guard = self.transaction.write().unwrap();
 
         if let Some(tx_state) = tx_guard.take() {
             if !tx_state.active {
-                return Err(SledDBError::GeneralError("Transaction not active".into()));
+                return Err(DbError::TransactionError("Transaction not active".into()));
             }
 
             // Execute all operations in the transaction
             for op in tx_state.operations {
                 match op {
@@ -257,80 +210,79 @@ impl DB {
                         let db_ops = self
                             .type_map
                             .get(&model_type)
-                            .ok_or_else(|| SledDBError::TypeError)?;
-                        db_ops.delete(&id)?;
+                            .ok_or_else(|| DbError::TypeError)?;
+                        db_ops.delete(id)?;
                     }
                 }
             }
 
             Ok(())
         } else {
-            Err(SledDBError::GeneralError("No active transaction".into()))
+            Err(DbError::TransactionError("No active transaction".into()))
         }
     }
 
     /// Rollback the current transaction, discarding all operations
-    pub fn rollback_transaction(&self) -> SledDBResult<()> {
+    pub fn rollback_transaction(&self) -> DbResult<()> {
         let mut tx = self.transaction.write().unwrap();
         if tx.is_none() {
-            return Err(SledDBError::GeneralError("No active transaction".into()));
+            return Err(DbError::TransactionError("No active transaction".into()));
        }
         *tx = None;
         Ok(())
     }
 
     /// Get the path to the database
     pub fn path(&self) -> &PathBuf {
         &self.db_path
     }
 
     // Generic methods that work with any supported model type
 
     /// Insert a model instance into its appropriate database based on type
-    pub fn set<T: SledModel>(&self, model: &T) -> SledDBResult<()> {
+    pub fn set<T: Model>(&self, model: &T) -> DbResult<()> {
         // Try to acquire a write lock on the transaction
         let mut tx_guard = self.transaction.write().unwrap();
 
         // Check if there's an active transaction
         if let Some(tx_state) = tx_guard.as_mut() {
             if tx_state.active {
                 // Serialize the model for later use
-                let serialized = bincode::serialize(model)?;
+                let serialized = model.serialize()?;
 
                 // Record a Set operation in the transaction
                 tx_state.operations.push(DbOperation::Set {
                     model_type: TypeId::of::<T>(),
                     serialized,
                 });
 
                 return Ok(());
             }
         }
 
         // If we got here, either there's no transaction or it's not active
         // Drop the write lock before doing a direct database operation
         drop(tx_guard);
 
         // Execute directly
         match self.type_map.get(&TypeId::of::<T>()) {
-            Some(db_ops) => db_ops.insert_any(model),
-            None => Err(SledDBError::TypeError),
+            Some(db_ops) => db_ops.insert(model),
+            None => Err(DbError::TypeError),
         }
     }
 
     /// Check the transaction state for the given type and id
-    fn check_transaction<T: SledModel>(&self, id: &str) -> Option<Result<Option<T>, SledDBError>> {
+    fn check_transaction<T: Model>(&self, id: u32) -> Option<Result<Option<T>, DbError>> {
         // Try to acquire a read lock on the transaction
         let tx_guard = self.transaction.read().unwrap();
 
         if let Some(tx_state) = tx_guard.as_ref() {
             if !tx_state.active {
                 return None;
             }
 
             let type_id = TypeId::of::<T>();
-            let id_str = id.to_string();
 
             // Process operations in reverse order (last operation wins)
             for op in tx_state.operations.iter().rev() {
                 match op {
@@ -339,9 +291,9 @@ impl DB {
                         model_type,
                         id: op_id,
                     } => {
-                        if *model_type == type_id && op_id == id {
+                        if *model_type == type_id && *op_id == id {
                             // Return NotFound error for deleted records
-                            return Some(Err(SledDBError::NotFound(id.to_string())));
+                            return Some(Err(DbError::NotFound(id)));
                         }
                     }
                     // Then check if it has been set in the transaction
@@ -351,9 +303,9 @@ impl DB {
                     } => {
                         if *model_type == type_id {
                             // Try to deserialize and check the ID
-                            match bincode::deserialize::<T>(serialized) {
+                            match T::deserialize(serialized) {
                                 Ok(model) => {
-                                    if model.get_id() == id_str {
+                                    if model.get_id() == id {
                                         return Some(Ok(Some(model)));
                                     }
                                 }
@@ -364,13 +316,13 @@ impl DB {
                 }
             }
         }
 
         // Not found in transaction (continue to database)
         None
     }
 
     /// Get a model instance by its ID and type
-    pub fn get<T: SledModel>(&self, id: &str) -> SledDBResult<T> {
+    pub fn get<T: Model>(&self, id: u32) -> DbResult<T> {
         // First check if there's a pending value in the current transaction
         if let Some(tx_result) = self.check_transaction::<T>(id) {
             match tx_result {
@@ -379,82 +331,91 @@ impl DB {
                 Ok(None) => {} // Should never happen
             }
         }
 
         // If no pending value, look up from the database
         match self.type_map.get(&TypeId::of::<T>()) {
             Some(db_ops) => {
-                let result_any = db_ops.get_any(id)?;
+                let result_any = db_ops.get(id)?;
                 // We expect the result to be of type T since we looked it up by TypeId
                 match result_any.downcast::<T>() {
                     Ok(t) => Ok(*t),
-                    Err(_) => Err(SledDBError::TypeError),
+                    Err(_) => Err(DbError::TypeError),
                 }
             }
-            None => Err(SledDBError::TypeError),
+            None => Err(DbError::TypeError),
         }
     }
 
     /// Delete a model instance by its ID and type
-    pub fn delete<T: SledModel>(&self, id: &str) -> SledDBResult<()> {
+    pub fn delete<T: Model>(&self, id: u32) -> DbResult<()> {
         // Try to acquire a write lock on the transaction
         let mut tx_guard = self.transaction.write().unwrap();
 
         // Check if there's an active transaction
         if let Some(tx_state) = tx_guard.as_mut() {
             if tx_state.active {
                 // Record a Delete operation in the transaction
                 tx_state.operations.push(DbOperation::Delete {
                     model_type: TypeId::of::<T>(),
-                    id: id.to_string(),
+                    id,
                 });
 
                 return Ok(());
             }
         }
 
         // If we got here, either there's no transaction or it's not active
         // Drop the write lock before doing a direct database operation
         drop(tx_guard);
 
         // Execute directly
         match self.type_map.get(&TypeId::of::<T>()) {
             Some(db_ops) => db_ops.delete(id),
-            None => Err(SledDBError::TypeError),
+            None => Err(DbError::TypeError),
         }
     }
 
     /// List all model instances of a specific type
-    pub fn list<T: SledModel>(&self) -> SledDBResult<Vec<T>> {
+    pub fn list<T: Model>(&self) -> DbResult<Vec<T>> {
         // Look up the correct DB operations for type T in our type map
         match self.type_map.get(&TypeId::of::<T>()) {
             Some(db_ops) => {
-                let result_any = db_ops.list_any()?;
+                let result_any = db_ops.list()?;
                 // We expect the result to be of type Vec<T> since we looked it up by TypeId
                 match result_any.downcast::<Vec<T>>() {
                     Ok(vec_t) => Ok(*vec_t),
-                    Err(_) => Err(SledDBError::TypeError),
+                    Err(_) => Err(DbError::TypeError),
                 }
             }
-            None => Err(SledDBError::TypeError),
+            None => Err(DbError::TypeError),
        }
    }
 
+    /// Get the history of a model by its ID
+    pub fn get_history<T: Model>(&self, id: u32, depth: u8) -> DbResult<Vec<T>> {
+        // Look up the correct DB operations for type T in our type map
+        match self.type_map.get(&TypeId::of::<T>()) {
+            Some(db_ops) => {
+                let result_any = db_ops.get_history(id, depth)?;
+                let mut result = Vec::with_capacity(result_any.len());
+
+                for item in result_any {
+                    match item.downcast::<T>() {
+                        Ok(t) => result.push(*t),
+                        Err(_) => return Err(DbError::TypeError),
+                    }
+                }
+
+                Ok(result)
+            }
+            None => Err(DbError::TypeError),
+        }
+    }
+
     // Register a model type with this DB instance
-    pub fn register<T: SledModel>(&mut self) -> SledDBResult<()> {
-        let db_path = self.db_path.join(T::db_prefix());
-        let db: SledDB<T> = SledDB::open(db_path)?;
-        self.type_map.insert(TypeId::of::<T>(), Arc::new(db));
+    pub fn register<T: Model>(&mut self) -> DbResult<()> {
+        let store = OurDbStore::<T>::open(&self.db_path)?;
+        self.type_map.insert(TypeId::of::<T>(), Arc::new(store));
         Ok(())
     }
-
-    // Get a typed handle to a registered model DB
-    pub fn db_for<T: SledModel>(&self) -> SledDBResult<&dyn AnyDbOperations> {
-        match self.type_map.get(&TypeId::of::<T>()) {
-            Some(db) => Ok(&**db),
-            None => Err(SledDBError::GeneralError(format!(
-                "No DB registered for type {}",
-                std::any::type_name::<T>()
-            ))),
-        }
-    }
 }
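For orientation, here is a minimal usage sketch of the reworked DB API above (transactions record Set/Delete operations, and ids are now u32). The Product model and builder come from this repository's examples; the path, function name, and error handling are placeholders, so read this as an illustration rather than code from the commit.

use herodb::db::{DBBuilder, DbResult, Model};
use herodb::models::biz::Product;

fn sketch(product: Product) -> DbResult<()> {
    // Build a DB with the Product model registered (mirrors examples/ourdb_example.rs).
    let db = DBBuilder::new("/tmp/herodb_tx_sketch")
        .register_model::<Product>()
        .build()
        .expect("failed to build DB");

    db.begin_transaction()?;          // creates a TransactionState
    db.set(&product)?;                // recorded as DbOperation::Set, not written yet
    assert!(db.has_active_transaction());
    db.commit_transaction()?;         // replays the recorded operations against the stores

    let loaded = db.get::<Product>(product.get_id())?; // ids are u32 now
    db.delete::<Product>(loaded.get_id())?;            // direct delete, no open transaction
    Ok(())
}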
herodb/src/db/error.rs (new file, 30 lines)

use thiserror::Error;
use std::fmt::Debug;

/// Errors that can occur during database operations
#[derive(Error, Debug)]
pub enum DbError {
    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("Serialization/Deserialization error: {0}")]
    SerializationError(#[from] bincode::Error),

    #[error("Record not found for ID: {0}")]
    NotFound(u32),

    #[error("Type mismatch during deserialization")]
    TypeError,

    #[error("Transaction error: {0}")]
    TransactionError(String),

    #[error("OurDB error: {0}")]
    OurDbError(#[from] ourdb::Error),

    #[error("General database error: {0}")]
    GeneralError(String),
}

/// Result type for DB operations
pub type DbResult<T> = Result<T, DbError>;
|
|||||||
/// Macro to implement typed access methods on the DB struct for a given model
|
ere/// Macro to implement typed access methods on the DB struct for a given model
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! impl_model_methods {
|
macro_rules! impl_model_methods {
|
||||||
($model:ty, $singular:ident, $plural:ident) => {
|
($model:ty, $singular:ident, $plural:ident) => {
|
||||||
@ -12,19 +12,24 @@ macro_rules! impl_model_methods {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Get a model instance by its ID
|
/// Get a model instance by its ID
|
||||||
pub fn [<get_ $singular>](&mut self, id: i64) -> SledDBResult<$model> {
|
pub fn [<get_ $singular>](&mut self, id: u32) -> DbResult<$model> {
|
||||||
self.get::<$model>(&id.to_string())
|
self.get::<$model>(id)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Delete a model instance by its ID
|
/// Delete a model instance by its ID
|
||||||
pub fn [<delete_ $singular>](&mut self, id: i64) -> SledDBResult<()> {
|
pub fn [<delete_ $singular>](&mut self, id: u32) -> DbResult<()> {
|
||||||
self.delete::<$model>(&id.to_string())
|
self.delete::<$model>(id)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// List all model instances
|
/// List all model instances
|
||||||
pub fn [<list_ $plural>](&mut self) -> SledDBResult<Vec<$model>> {
|
pub fn [<list_ $plural>](&mut self) -> DbResult<Vec<$model>> {
|
||||||
self.list::<$model>()
|
self.list::<$model>()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get history of a model instance
|
||||||
|
pub fn [<get_ $singular _history>](&mut self, id: u32, depth: u8) -> DbResult<Vec<$model>> {
|
||||||
|
self.get_history::<$model>(id, depth)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
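For context, a hypothetical invocation of the macro above; the Product/product/products arguments are placeholders chosen for illustration, and the generated method names assume the usual paste-style identifier concatenation implied by the [<...>] syntax.

// Hypothetical use of impl_model_methods! (not taken from the repository):
impl_model_methods!(Product, product, products);

// Per the macro body above, this should generate typed wrappers on DB such as:
//   get_product(&mut self, id: u32) -> DbResult<Product>
//   delete_product(&mut self, id: u32) -> DbResult<()>
//   list_products(&mut self) -> DbResult<Vec<Product>>
//   get_product_history(&mut self, id: u32, depth: u8) -> DbResult<Vec<Product>>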
herodb/src/db/mod.rs
@@ -1,7 +1,18 @@
-pub mod base;
-pub mod db;
-pub mod macros;
-pub mod model_methods;
-
-pub use base::{SledDB, SledDBError, SledDBResult, Storable, SledModel};
-pub use db::{DB, DBBuilder};
+// Export the error module
+pub mod error;
+pub use error::{DbError, DbResult};
+
+// Export the model module
+pub mod model;
+pub use model::{Model, Storable};
+
+// Export the store module
+pub mod store;
+pub use store::{DbOperations, OurDbStore};
+
+// Export the db module
+pub mod db;
+pub use db::{DB, DBBuilder, ModelRegistration, ModelRegistrar};
+
+// Export macros for model methods
+pub mod macros;
herodb/src/db/model.rs (new file, 30 lines)

use crate::db::error::{DbError, DbResult};
use serde::{Deserialize, Serialize};
use std::fmt::Debug;

/// Trait for models that can be serialized and deserialized
pub trait Storable: Serialize + for<'de> Deserialize<'de> + Sized {
    /// Serializes the instance using bincode
    fn serialize(&self) -> DbResult<Vec<u8>> {
        bincode::serialize(self).map_err(DbError::SerializationError)
    }

    /// Deserializes data from bytes into an instance
    fn deserialize(data: &[u8]) -> DbResult<Self> {
        bincode::deserialize(data).map_err(DbError::SerializationError)
    }
}

/// Trait identifying a model suitable for the database
/// The 'static lifetime bound is required for type identification via Any
pub trait Model: Storable + Debug + Clone + Send + Sync + 'static {
    /// Returns the unique ID for this model instance
    fn get_id(&self) -> u32;

    /// Returns a prefix used for this model type in the database
    /// Helps to logically separate different model types
    fn db_prefix() -> &'static str;
}

// Implement Storable for common types that might be used in models
impl<T: Serialize + for<'de> Deserialize<'de> + Sized> Storable for T {}
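To show what a type needs in order to participate, here is a minimal sketch; the Note struct is invented for illustration and is not part of this commit. Storable is covered by the blanket impl above, so only Model has to be written out by hand.

use herodb::db::Model;
use serde::{Deserialize, Serialize};

// Hypothetical model type; Serialize + Deserialize gives it Storable for free
// via the blanket impl in model.rs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Note {
    pub id: u32,
    pub body: String,
}

impl Model for Note {
    fn get_id(&self) -> u32 {
        self.id
    }

    fn db_prefix() -> &'static str {
        "note" // each model type gets its own subdirectory under the DB base path
    }
}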
@@ -1,5 +1,5 @@
 use crate::db::db::DB;
-use crate::db::base::{SledDBResult, SledModel};
+use crate::db::model::Model;
 use crate::impl_model_methods;
 use crate::models::biz::{Product, Sale, Currency, ExchangeRate, Service, Customer, Contract, Invoice};
 
152
herodb/src/db/store.rs
Normal file
152
herodb/src/db/store.rs
Normal file
@ -0,0 +1,152 @@
|
|||||||
|
use crate::db::error::{DbError, DbResult};
|
||||||
|
use crate::db::model::Model;
|
||||||
|
use ourdb::{OurDB, OurDBConfig, OurDBSetArgs};
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::any::Any;
|
||||||
|
|
||||||
|
/// Trait for type-erased database operations
|
||||||
|
pub trait DbOperations: Send + Sync {
|
||||||
|
fn delete(&self, id: u32) -> DbResult<()>;
|
||||||
|
fn get(&self, id: u32) -> DbResult<Box<dyn Any>>;
|
||||||
|
fn list(&self) -> DbResult<Box<dyn Any>>;
|
||||||
|
fn insert(&self, model: &dyn Any) -> DbResult<()>;
|
||||||
|
fn insert_raw(&self, serialized: &[u8]) -> DbResult<()>;
|
||||||
|
fn get_history(&self, id: u32, depth: u8) -> DbResult<Vec<Box<dyn Any>>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A store implementation using OurDB as the backend
|
||||||
|
pub struct OurDbStore<T: Model> {
|
||||||
|
db: OurDB,
|
||||||
|
path: PathBuf,
|
||||||
|
_phantom: PhantomData<T>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: Model> OurDbStore<T> {
    /// Opens or creates an OurDB database at the specified path
    pub fn open<P: AsRef<Path>>(path: P) -> DbResult<Self> {
        let path_buf = path.as_ref().to_path_buf();
        let db_path = path_buf.join(T::db_prefix());

        // Create directory if it doesn't exist
        std::fs::create_dir_all(&db_path).map_err(DbError::IoError)?;

        let config = OurDBConfig {
            path: db_path.clone(),
            incremental_mode: true, // Always use incremental mode for auto IDs
            file_size: None,        // Use default (500MB)
            keysize: None,          // Use default (4 bytes)
            reset: None,            // Don't reset existing database
        };

        let db = OurDB::new(config).map_err(DbError::OurDbError)?;

        Ok(Self {
            db,
            path: db_path,
            _phantom: PhantomData,
        })
    }

    /// Inserts or updates a model instance in the database
    pub fn insert(&self, model: &T) -> DbResult<()> {
        let id = model.get_id();
        let data = model.serialize()?;

        self.db
            .set(OurDBSetArgs {
                id: Some(id),
                data: &data,
            })
            .map_err(DbError::OurDbError)?;

        Ok(())
    }

    /// Retrieves a model instance by its ID
    pub fn get(&self, id: u32) -> DbResult<T> {
        let data = self.db.get(id).map_err(|e| match e {
            ourdb::Error::NotFound(_) => DbError::NotFound(id),
            _ => DbError::OurDbError(e),
        })?;

        T::deserialize(&data)
    }

    /// Deletes a model instance by its ID
    pub fn delete(&self, id: u32) -> DbResult<()> {
        self.db.delete(id).map_err(|e| match e {
            ourdb::Error::NotFound(_) => DbError::NotFound(id),
            _ => DbError::OurDbError(e),
        })
    }

    /// Lists all models of this type
    pub fn list(&self) -> DbResult<Vec<T>> {
        // OurDB doesn't have a built-in list function, so we need to implement it
        // This is a placeholder - in a real implementation, we would need to
        // maintain a list of all IDs for each model type
        Err(DbError::GeneralError(
            "List operation not implemented yet".to_string(),
        ))
    }

    /// Gets the history of a model by its ID
    pub fn get_history(&self, id: u32, depth: u8) -> DbResult<Vec<T>> {
        let history_data = self.db.get_history(id, depth).map_err(|e| match e {
            ourdb::Error::NotFound(_) => DbError::NotFound(id),
            _ => DbError::OurDbError(e),
        })?;

        let mut result = Vec::with_capacity(history_data.len());
        for data in history_data {
            result.push(T::deserialize(&data)?);
        }

        Ok(result)
    }
}

impl<T: Model> DbOperations for OurDbStore<T> {
    fn delete(&self, id: u32) -> DbResult<()> {
        self.delete(id)
    }

    fn get(&self, id: u32) -> DbResult<Box<dyn Any>> {
        let result = self.get(id)?;
        Ok(Box::new(result))
    }

    fn list(&self) -> DbResult<Box<dyn Any>> {
        let result = self.list()?;
        Ok(Box::new(result))
    }

    fn insert(&self, model: &dyn Any) -> DbResult<()> {
        // Downcast to the specific type T
        match model.downcast_ref::<T>() {
            Some(t) => self.insert(t),
            None => Err(DbError::TypeError),
        }
    }

    fn insert_raw(&self, serialized: &[u8]) -> DbResult<()> {
        // Deserialize the bytes into model of type T
        let model = T::deserialize(serialized)?;
        // Use the regular insert method
        self.insert(&model)
    }

    fn get_history(&self, id: u32, depth: u8) -> DbResult<Vec<Box<dyn Any>>> {
        let history = self.get_history(id, depth)?;
        let mut result = Vec::with_capacity(history.len());

        for item in history {
            result.push(Box::new(item) as Box<dyn Any>);
        }

        Ok(result)
    }
}
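For orientation, a minimal usage sketch of the store above. This is illustrative only: `User` is a hypothetical model type, and `OurDbStore`, `Model`, `Storable` and `DbResult` are assumed to be the items defined in this crate.

```rust
// Illustrative sketch only: `User` is a hypothetical model; OurDbStore, Model,
// Storable and DbResult are the crate items shown above.
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
struct User {
    id: u32,
    name: String,
}

impl Storable for User {}

impl Model for User {
    fn get_id(&self) -> u32 {
        self.id
    }

    fn db_prefix() -> &'static str {
        "user"
    }
}

fn store_round_trip() -> DbResult<()> {
    // open() appends `User::db_prefix()` to the base path itself.
    let store: OurDbStore<User> = OurDbStore::open("/tmp/herodb")?;

    let user = User { id: 1, name: "alice".to_string() };
    store.insert(&user)?;

    let loaded = store.get(1)?;
    assert_eq!(loaded.name, "alice");

    // The depth argument is forwarded to ourdb's history tracking.
    let _versions = store.get_history(1, 2)?;
    Ok(())
}
```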
@ -1,35 +1,22 @@
use thiserror::Error;

/// Error types for HeroDB operations
#[derive(Error, Debug)]
pub enum Error {
    /// Error from the underlying sled database
    #[error("Database error: {0}")]
    Database(#[from] sled::Error),

    /// Error during serialization or deserialization
    #[error("Serialization error: {0}")]
    Serialization(#[from] serde_json::Error),

    /// Error when a requested item is not found
    #[error("Item not found: {0}")]
    NotFound(String),

    /// Error when an item already exists
    #[error("Item already exists: {0}")]
    AlreadyExists(String),

    /// Error when a model validation fails
    #[error("Validation error: {0}")]
    Validation(String),

    /// Error when a transaction fails
    #[error("Transaction error: {0}")]
    Transaction(String),

    /// Other errors
    #[error("Other error: {0}")]
    Other(String),
}

/// Error type for HeroDB operations
#[derive(Error, Debug)]
pub enum Error {
    #[error("Database error: {0}")]
    DbError(#[from] crate::db::error::DbError),

    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("Serialization error: {0}")]
    SerializationError(#[from] bincode::Error),

    #[error("OurDB error: {0}")]
    OurDbError(#[from] ourdb::Error),

    #[error("General error: {0}")]
    GeneralError(String),
}

/// Result type for HeroDB operations
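The `#[from]` attributes on the new variants are what let callers use `?` directly on the underlying errors. A small hedged sketch (the function names are illustrative, not part of the crate):

```rust
// Sketch: thiserror's #[from] generates the From impls, so `?` converts automatically.
fn read_raw(path: &std::path::Path) -> Result<Vec<u8>, Error> {
    // std::io::Error -> Error::IoError
    Ok(std::fs::read(path)?)
}

fn decode_len(bytes: &[u8]) -> Result<u64, Error> {
    // bincode::Error -> Error::SerializationError
    let value: u64 = bincode::deserialize(bytes)?;
    Ok(value)
}
```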
0
herodb/src/instructions.md
Normal file
@ -1,6 +1,6 @@
//! HeroDB: A database library built on top of sled with model support
//! HeroDB: A database library built on top of ourdb with model support
//!
//! This library provides a simple interface for working with a sled-based database
//! This library provides a simple interface for working with an ourdb-based database
//! and includes support for defining and working with data models.

// Core modules
@ -13,6 +13,7 @@ pub mod cmd;

// Re-exports
pub use error::Error;
pub use db::{DB, DBBuilder, Model, Storable, DbError, DbResult};

/// Re-export sled for advanced usage
/// Re-export ourdb for advanced usage
pub use sled;
pub use ourdb;
@ -1,19 +1,21 @@
use crate::db::base::{SledModel, Storable};
use crate::db::model::{Model, Storable};
use chrono::{DateTime, Duration, Utc};
use rhai::{CustomType, EvalAltResult, TypeBuilder};
use serde::{Deserialize, Serialize}; // Import Sled traits from db module
use serde::{Deserialize, Serialize};

/// Currency represents a monetary value with amount and currency code
#[derive(Debug, Clone, Serialize, Deserialize, CustomType)]
pub struct Currency {
    pub id: u32,
    pub amount: f64,
    pub currency_code: String,
}

impl Currency {
    /// Create a new currency with amount and code
    pub fn new(amount: f64, currency_code: String) -> Self {
    pub fn new(id: u32, amount: f64, currency_code: String) -> Self {
        Self {
            id,
            amount,
            currency_code,
        }
@ -27,6 +29,7 @@ impl Currency {
/// Builder for Currency
#[derive(Clone, CustomType)]
pub struct CurrencyBuilder {
    id: Option<u32>,
    amount: Option<f64>,
    currency_code: Option<String>,
}
@ -35,11 +38,18 @@ impl CurrencyBuilder {
    /// Create a new CurrencyBuilder with all fields set to None
    pub fn new() -> Self {
        Self {
            id: None,
            amount: None,
            currency_code: None,
        }
    }

    /// Set the id
    pub fn id(mut self, id: u32) -> Self {
        self.id = Some(id);
        self
    }

    /// Set the amount
    pub fn amount(mut self, amount: f64) -> Self {
        self.amount = Some(amount);
@ -55,20 +65,17 @@ impl CurrencyBuilder {
    /// Build the Currency object
    pub fn build(self) -> Result<Currency, Box<EvalAltResult>> {
        Ok(Currency {
            id: self.id.ok_or("id is required")?,
            amount: self.amount.ok_or("amount is required")?,
            currency_code: self.currency_code.ok_or("currency_code is required")?,
        })
    }
}

// Implement Storable trait (provides default dump/load)
impl Storable for Currency {}

// Implement SledModel trait
impl SledModel for Currency {
    fn get_id(&self) -> String {
        // Use the currency code as the ID
        self.currency_code.clone()
    }

// Implement Model trait
impl Model for Currency {
    fn get_id(&self) -> u32 {
        self.id
    }

    fn db_prefix() -> &'static str {
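A quick sketch of what the new shape means for callers: the constructor now takes an explicit `u32` id, and the `Storable` defaults give a bincode round trip. Illustrative only; it assumes the crate's `Model`, `Storable` and `DbResult` items are in scope.

```rust
// Sketch: explicit id plus the bincode-backed Storable defaults.
fn currency_round_trip() -> DbResult<()> {
    let price = Currency::new(1, 100.0, "USD".to_string());
    assert_eq!(price.get_id(), 1);

    // Storable's default serialize/deserialize round-trips through bincode.
    let bytes = price.serialize()?;
    let restored = Currency::deserialize(&bytes)?;
    assert_eq!(restored.amount, 100.0);
    assert_eq!(restored.currency_code, "USD");
    Ok(())
}
```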
@ -2,11 +2,12 @@ use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use crate::db::base::{SledModel, Storable};
use crate::db::model::{Model, Storable};

/// ExchangeRate represents an exchange rate between two currencies
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExchangeRate {
    pub id: u32,
    pub base_currency: String,
    pub target_currency: String,
    pub rate: f64,
@ -15,8 +16,9 @@ pub struct ExchangeRate {

impl ExchangeRate {
    /// Create a new exchange rate
    pub fn new(base_currency: String, target_currency: String, rate: f64) -> Self {
    pub fn new(id: u32, base_currency: String, target_currency: String, rate: f64) -> Self {
        Self {
            id,
            base_currency,
            target_currency,
            rate,
@ -27,6 +29,7 @@ impl ExchangeRate {

/// Builder for ExchangeRate
pub struct ExchangeRateBuilder {
    id: Option<u32>,
    base_currency: Option<String>,
    target_currency: Option<String>,
    rate: Option<f64>,
@ -37,6 +40,7 @@ impl ExchangeRateBuilder {
    /// Create a new ExchangeRateBuilder with all fields set to None
    pub fn new() -> Self {
        Self {
            id: None,
            base_currency: None,
            target_currency: None,
            rate: None,
@ -44,6 +48,12 @@ impl ExchangeRateBuilder {
        }
    }

    /// Set the id
    pub fn id(mut self, id: u32) -> Self {
        self.id = Some(id);
        self
    }

    /// Set the base currency
    pub fn base_currency<S: Into<String>>(mut self, base_currency: S) -> Self {
        self.base_currency = Some(base_currency.into());
@ -72,6 +82,7 @@ impl ExchangeRateBuilder {
    pub fn build(self) -> Result<ExchangeRate, &'static str> {
        let now = Utc::now();
        Ok(ExchangeRate {
            id: self.id.ok_or("id is required")?,
            base_currency: self.base_currency.ok_or("base_currency is required")?,
            target_currency: self.target_currency.ok_or("target_currency is required")?,
            rate: self.rate.ok_or("rate is required")?,
@ -80,13 +91,10 @@ impl ExchangeRateBuilder {
    }
}

// Implement Storable trait (provides default dump/load)
impl Storable for ExchangeRate {}

// Implement SledModel trait
impl SledModel for ExchangeRate {
    fn get_id(&self) -> String {
        format!("{}_{}", self.base_currency, self.target_currency)
    }

// Implement Model trait
impl Model for ExchangeRate {
    fn get_id(&self) -> u32 {
        self.id
    }

    fn db_prefix() -> &'static str {
@ -156,11 +164,11 @@ lazy_static::lazy_static! {
        let service = ExchangeRateService::new();

        // Set some default exchange rates
        service.set_rate(ExchangeRate::new("USD".to_string(), "EUR".to_string(), 0.85));
        service.set_rate(ExchangeRate::new(1, "USD".to_string(), "EUR".to_string(), 0.85));
        service.set_rate(ExchangeRate::new("USD".to_string(), "GBP".to_string(), 0.75));
        service.set_rate(ExchangeRate::new(2, "USD".to_string(), "GBP".to_string(), 0.75));
        service.set_rate(ExchangeRate::new("USD".to_string(), "JPY".to_string(), 110.0));
        service.set_rate(ExchangeRate::new(3, "USD".to_string(), "JPY".to_string(), 110.0));
        service.set_rate(ExchangeRate::new("USD".to_string(), "CAD".to_string(), 1.25));
        service.set_rate(ExchangeRate::new(4, "USD".to_string(), "CAD".to_string(), 1.25));
        service.set_rate(ExchangeRate::new("USD".to_string(), "AUD".to_string(), 1.35));
        service.set_rate(ExchangeRate::new(5, "USD".to_string(), "AUD".to_string(), 1.35));

        service
    };
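Same pattern for exchange rates: the first constructor argument is now the explicit `u32` id. A small hedged sketch using only the calls visible above (the function name is illustrative):

```rust
// Sketch: registering an extra rate after the id change.
fn register_custom_rate() {
    let service = ExchangeRateService::new();
    service.set_rate(ExchangeRate::new(6, "EUR".to_string(), "USD".to_string(), 1.18));
}
```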
@ -1,7 +1,7 @@
use crate::db::base::{SledModel, Storable};
use crate::db::model::{Model, Storable};
use chrono::{DateTime, Duration, Utc};
use rhai::{CustomType, EvalAltResult, TypeBuilder, export_module};
use serde::{Deserialize, Serialize}; // Import Sled traits from db module
use serde::{Deserialize, Serialize};

/// ProductType represents the type of a product
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
@ -20,7 +20,7 @@ pub enum ProductStatus {
/// ProductComponent represents a component of a product
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProductComponent {
    pub id: i64,
    pub id: u32,
    pub name: String,
    pub description: String,
    pub quantity: i64,
@ -30,7 +30,7 @@ pub struct ProductComponent {

impl ProductComponent {
    /// Create a new product component with default timestamps
    pub fn new(id: i64, name: String, description: String, quantity: i64) -> Self {
    pub fn new(id: u32, name: String, description: String, quantity: i64) -> Self {
        let now = Utc::now();
        Self {
            id,
@ -46,7 +46,7 @@ impl ProductComponent {
/// Builder for ProductComponent
#[derive(Clone, CustomType)]
pub struct ProductComponentBuilder {
    id: Option<i64>,
    id: Option<u32>,
    name: Option<String>,
    description: Option<String>,
    quantity: Option<i64>,
@ -68,7 +68,7 @@ impl ProductComponentBuilder {
    }

    /// Set the id
    pub fn id(mut self, id: i64) -> Self {
    pub fn id(mut self, id: u32) -> Self {
        self.id = Some(id);
        self
    }
@ -120,7 +120,7 @@ impl ProductComponentBuilder {
/// Product represents a product or service offered in the system
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Product {
    pub id: i64,
    pub id: u32,
    pub name: String,
    pub description: String,
    pub price: Currency,
@ -135,12 +135,10 @@ pub struct Product {
    pub components: Vec<ProductComponent>,
}

// Removed old Model trait implementation

impl Product {
    /// Create a new product with default timestamps
    pub fn new(
        id: i64,
        id: u32,
        name: String,
        description: String,
        price: Currency,
@ -201,7 +199,7 @@ impl Product {
/// Builder for Product
#[derive(Clone, CustomType)]
pub struct ProductBuilder {
    id: Option<i64>,
    id: Option<u32>,
    name: Option<String>,
    description: Option<String>,
    price: Option<Currency>,
@ -239,7 +237,7 @@ impl ProductBuilder {
    }

    /// Set the id
    pub fn id(mut self, id: i64) -> Self {
    pub fn id(mut self, id: u32) -> Self {
        self.id = Some(id);
        self
    }
@ -344,13 +342,10 @@ impl ProductBuilder {
    }
}

// Implement Storable trait (provides default dump/load)
impl Storable for Product {}

// Implement SledModel trait
impl SledModel for Product {
    fn get_id(&self) -> String {
        self.id.to_string()
    }

// Implement Model trait
impl Model for Product {
    fn get_id(&self) -> u32 {
        self.id
    }

    fn db_prefix() -> &'static str {
266
herodb_ourdb_migration_plan.md
Normal file
@ -0,0 +1,266 @@
# Migration Plan: Restructuring herodb to Use ourdb as Backend

This document outlines the plan to restructure herodb to use ourdb as the backend, completely removing all sled references and better aligning with ourdb's design patterns.

## Overview

```mermaid
graph TD
    A[Current herodb with sled] --> B[Define new core traits]
    B --> C[Implement ourdb backend]
    C --> D[Create new DB manager]
    D --> E[Implement transaction system]
    E --> F[Update model implementations]
    F --> G[Final restructured herodb with ourdb]
```

## New Architecture

```mermaid
classDiagram
    class Model {
        +get_id() u32
        +db_prefix() &'static str
    }
    class Storable {
        +serialize() Result<Vec<u8>>
        +deserialize() Result<Self>
    }
    class DB {
        -path: PathBuf
        -type_map: HashMap<TypeId, Arc<dyn DbOperations>>
        -transaction: Arc<RwLock<Option<TransactionState>>>
        +new(config: DbConfig) Result<Self>
        +begin_transaction() Result<()>
        +commit_transaction() Result<()>
        +rollback_transaction() Result<()>
        +set<T: Model>(model: &T) Result<()>
        +get<T: Model>(id: u32) Result<T>
        +delete<T: Model>(id: u32) Result<()>
        +list<T: Model>() Result<Vec<T>>
        +register<T: Model>() Result<()>
        +get_history<T: Model>(id: u32, depth: u8) Result<Vec<T>>
    }
    class DbOperations {
        <<interface>>
        +delete(id: u32) Result<()>
        +get(id: u32) Result<Box<dyn Any>>
        +list() Result<Box<dyn Any>>
        +insert(model: &dyn Any) Result<()>
        +get_history(id: u32, depth: u8) Result<Vec<Box<dyn Any>>>
    }
    class OurDbStore~T~ {
        -db: OurDB
        -model_type: PhantomData<T>
        +new(config: OurDBConfig) Result<Self>
        +insert(model: &T) Result<()>
        +get(id: u32) Result<T>
        +delete(id: u32) Result<()>
        +list() Result<Vec<T>>
        +get_history(id: u32, depth: u8) Result<Vec<T>>
    }

    Model --|> Storable
    OurDbStore ..|> DbOperations
    DB o-- DbOperations
```

## Detailed Restructuring Steps

### 1. Define New Core Traits and Types

1. Create a new `Model` trait to replace `SledModel`
2. Create a new `Storable` trait for serialization/deserialization
3. Define a new error type hierarchy based on ourdb's error types
4. Create a `DbOperations` trait for database operations

### 2. Implement ourdb Backend

1. Create an `OurDbStore<T>` type that wraps ourdb
2. Implement the `DbOperations` trait for `OurDbStore<T>`
3. Add support for history tracking

### 3. Create New DB Manager

1. Create a new `DB` struct that manages multiple model types
2. Implement a builder pattern for configuration
3. Add methods for CRUD operations

### 4. Implement Transaction System

1. Create a transaction system that works with ourdb
2. Implement transaction operations (begin, commit, rollback)
3. Handle transaction state tracking

### 5. Update Model Implementations

1. Update all models to use `u32` IDs
2. Implement the new `Model` trait for all models
3. Update model constructors and builders

## Implementation Details

### 1. Core Traits and Types

```rust
// Error types
pub enum DbError {
    IoError(std::io::Error),
    SerializationError(bincode::Error),
    NotFound(u32),
    TransactionError(String),
    // Map to ourdb error types
    OurDbError(ourdb::Error),
    // Other error types as needed
}

// Result type alias
pub type DbResult<T> = Result<T, DbError>;

// Storable trait
pub trait Storable: Serialize + for<'de> Deserialize<'de> + Sized {
    fn serialize(&self) -> DbResult<Vec<u8>> {
        // Default implementation using bincode
        Ok(bincode::serialize(self)?)
    }

    fn deserialize(data: &[u8]) -> DbResult<Self> {
        // Default implementation using bincode
        Ok(bincode::deserialize(data)?)
    }
}

// Model trait
pub trait Model: Storable + Debug + Clone + Send + Sync + 'static {
    fn get_id(&self) -> u32;
    fn db_prefix() -> &'static str;
}
```
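The `?` operators in the default `serialize`/`deserialize` bodies rely on `From` conversions into `DbError`. A minimal sketch matching the variants above (the exact shape can be settled during implementation):

```rust
// Sketch of the From conversions the Storable defaults above depend on.
impl From<bincode::Error> for DbError {
    fn from(e: bincode::Error) -> Self {
        DbError::SerializationError(e)
    }
}

impl From<std::io::Error> for DbError {
    fn from(e: std::io::Error) -> Self {
        DbError::IoError(e)
    }
}

impl From<ourdb::Error> for DbError {
    fn from(e: ourdb::Error) -> Self {
        DbError::OurDbError(e)
    }
}
```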

### 2. ourdb Backend Implementation

```rust
pub struct OurDbStore<T: Model> {
    db: OurDB,
    _phantom: PhantomData<T>,
}

impl<T: Model> OurDbStore<T> {
    pub fn new(config: OurDBConfig) -> DbResult<Self> {
        let db = OurDB::new(config)?;
        Ok(Self {
            db,
            _phantom: PhantomData,
        })
    }

    // Implementation of CRUD operations
}

impl<T: Model> DbOperations for OurDbStore<T> {
    // Implementation of DbOperations trait
}
```

### 3. DB Manager Implementation

```rust
pub struct DB {
    path: PathBuf,
    type_map: HashMap<TypeId, Arc<dyn DbOperations>>,
    transaction: Arc<RwLock<Option<TransactionState>>>,
}

impl DB {
    pub fn new(config: DbConfig) -> DbResult<Self> {
        // Implementation
    }

    // CRUD operations and other methods
}
```
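A sketch of how the placeholder methods could dispatch through `type_map` (imports omitted; the `OurDBConfig` fields follow the ones used elsewhere in this commit, and `register`/`get` mirror the class diagram — details are illustrative, not final):

```rust
// Illustrative only: per-type stores are looked up by TypeId and used through
// the DbOperations trait object.
impl DB {
    pub fn register<T: Model>(&mut self) -> DbResult<()> {
        let config = OurDBConfig {
            path: self.path.join(T::db_prefix()),
            incremental_mode: true,
            file_size: None,
            keysize: None,
            reset: None,
        };
        let store = OurDbStore::<T>::new(config)?;
        self.type_map.insert(TypeId::of::<T>(), Arc::new(store));
        Ok(())
    }

    pub fn get<T: Model>(&self, id: u32) -> DbResult<T> {
        // Look up the store registered for T; a missing registration is treated as NotFound.
        let ops = self
            .type_map
            .get(&TypeId::of::<T>())
            .ok_or(DbError::NotFound(id))?;
        // The trait object hands back a Box<dyn Any>; downcast it back to T.
        ops.get(id)?
            .downcast::<T>()
            .map(|boxed| *boxed)
            .map_err(|_| DbError::NotFound(id))
    }
}
```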

### 4. Transaction System

```rust
pub struct TransactionState {
    operations: Vec<DbOperation>,
    active: bool,
}

enum DbOperation {
    Set {
        model_type: TypeId,
        serialized: Vec<u8>,
    },
    Delete {
        model_type: TypeId,
        id: u32,
    },
}

impl DB {
    pub fn begin_transaction(&self) -> DbResult<()> {
        // Implementation
    }

    pub fn commit_transaction(&self) -> DbResult<()> {
        // Implementation
    }

    pub fn rollback_transaction(&self) -> DbResult<()> {
        // Implementation
    }
}
```
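A sketch of how these placeholders might be filled in. Assumptions: `std::sync::RwLock`, and a raw-bytes insert on `DbOperations` (as in the current store implementation) for replaying buffered writes.

```rust
// Illustrative sketch of the buffering approach: while a transaction is active,
// writes are queued as DbOperation values and replayed on commit.
impl DB {
    pub fn begin_transaction(&self) -> DbResult<()> {
        let mut guard = self.transaction.write().unwrap();
        if guard.is_some() {
            return Err(DbError::TransactionError("transaction already active".to_string()));
        }
        *guard = Some(TransactionState { operations: Vec::new(), active: true });
        Ok(())
    }

    pub fn rollback_transaction(&self) -> DbResult<()> {
        // Dropping the buffered operations is all a rollback has to do.
        *self.transaction.write().unwrap() = None;
        Ok(())
    }

    pub fn commit_transaction(&self) -> DbResult<()> {
        let state = self
            .transaction
            .write()
            .unwrap()
            .take()
            .ok_or_else(|| DbError::TransactionError("no active transaction".to_string()))?;

        for op in state.operations {
            match op {
                DbOperation::Set { model_type, serialized } => {
                    let ops = self.type_map.get(&model_type).ok_or_else(|| {
                        DbError::TransactionError("model type not registered".to_string())
                    })?;
                    // Assumes a raw-bytes insert on DbOperations, as in the store impl.
                    ops.insert_raw(&serialized)?;
                }
                DbOperation::Delete { model_type, id } => {
                    let ops = self.type_map.get(&model_type).ok_or_else(|| {
                        DbError::TransactionError("model type not registered".to_string())
                    })?;
                    ops.delete(id)?;
                }
            }
        }
        Ok(())
    }
}
```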

### 5. Model Implementation Updates

```rust
// Example for Product model
impl Model for Product {
    fn get_id(&self) -> u32 {
        self.id
    }

    fn db_prefix() -> &'static str {
        "product"
    }
}

impl Storable for Product {}
```

## Key Technical Considerations

1. **Clean Architecture**: The new design provides a cleaner separation of concerns.

2. **Incremental IDs**: All models will use `u32` IDs, and ourdb will be configured in incremental mode.

3. **History Tracking**: The new API will expose ourdb's history tracking capabilities.

4. **Transaction Support**: We'll implement a custom transaction system on top of ourdb.

5. **Error Handling**: New error types will map directly to ourdb's error types.

6. **Serialization**: We'll use bincode for serialization/deserialization by default.

## Migration Risks and Mitigations

| Risk | Mitigation |
|------|------------|
| Breaking API changes | Create a compatibility layer if needed |
| Data migration complexity | Develop a data migration utility |
| Performance impact | Benchmark before and after |
| Implementation complexity | Implement in phases with thorough testing |
| Integration issues | Create comprehensive integration tests |

## Implementation Phases

1. **Phase 1**: Define core traits and types
2. **Phase 2**: Implement ourdb backend
3. **Phase 3**: Create DB manager
4. **Phase 4**: Implement transaction system
5. **Phase 5**: Update model implementations
6. **Phase 6**: Create tests and benchmarks
7. **Phase 7**: Develop data migration utility
@ -14,61 +14,42 @@ fn criterion_benchmark(c: &mut Criterion) {
|
|||||||
incremental_mode: true,
|
incremental_mode: true,
|
||||||
file_size: Some(10 * 1024 * 1024), // 10MB
|
file_size: Some(10 * 1024 * 1024), // 10MB
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
||||||
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
let mut db = OurDB::new(config).unwrap();
|
||||||
let test_data = vec![b'X'; 100]; // 100 bytes of data
|
let test_data = vec![b'X'; 100]; // 100 bytes of data
|
||||||
let mut i = 0;
|
|
||||||
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let args = OurDBSetArgs {
|
let _ = db.set(OurDBSetArgs {
|
||||||
id: None, // Let the DB assign an ID
|
id: None,
|
||||||
data: &test_data,
|
data: &test_data,
|
||||||
};
|
}).unwrap();
|
||||||
black_box(db.set(args).unwrap());
|
|
||||||
i += 1;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
db.close().unwrap();
|
||||||
});
|
});
|
||||||
|
|
||||||
// Setup database with data for other benchmarks
|
// Benchmark get operation (retrieval)
|
||||||
let setup_config = OurDBConfig {
|
|
||||||
path: db_path.clone(),
|
|
||||||
incremental_mode: true,
|
|
||||||
file_size: Some(10 * 1024 * 1024), // 10MB
|
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut setup_db = OurDB::new(setup_config).unwrap();
|
|
||||||
let test_data = vec![b'X'; 100]; // 100 bytes of data
|
|
||||||
let mut ids = Vec::with_capacity(1000);
|
|
||||||
|
|
||||||
// Insert 1000 records
|
|
||||||
for _ in 0..1000 {
|
|
||||||
let args = OurDBSetArgs {
|
|
||||||
id: None,
|
|
||||||
data: &test_data,
|
|
||||||
};
|
|
||||||
let id = setup_db.set(args).unwrap();
|
|
||||||
ids.push(id);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Benchmark get operation
|
|
||||||
c.bench_function("get", |b| {
|
c.bench_function("get", |b| {
|
||||||
let config = OurDBConfig {
|
// Setup: Create a database and insert a record
|
||||||
|
let setup_config = OurDBConfig {
|
||||||
path: db_path.clone(),
|
path: db_path.clone(),
|
||||||
incremental_mode: true,
|
incremental_mode: true,
|
||||||
file_size: Some(10 * 1024 * 1024),
|
file_size: Some(10 * 1024 * 1024),
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
keysize: Some(6),
|
||||||
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
let mut db = OurDB::new(setup_config).unwrap();
|
||||||
let mut i = 0;
|
let test_data = vec![b'X'; 100];
|
||||||
|
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
|
||||||
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let id = ids[i % ids.len()];
|
let _ = db.get(id).unwrap();
|
||||||
black_box(db.get(id).unwrap());
|
|
||||||
i += 1;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
db.close().unwrap();
|
||||||
});
|
});
|
||||||
|
|
||||||
// Benchmark update operation
|
// Benchmark update operation
|
||||||
@ -77,199 +58,143 @@ fn criterion_benchmark(c: &mut Criterion) {
|
|||||||
path: db_path.clone(),
|
path: db_path.clone(),
|
||||||
incremental_mode: true,
|
incremental_mode: true,
|
||||||
file_size: Some(10 * 1024 * 1024),
|
file_size: Some(10 * 1024 * 1024),
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
keysize: Some(6),
|
||||||
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
let mut db = OurDB::new(config).unwrap();
|
||||||
let updated_data = vec![b'Y'; 100]; // Different data for updates
|
let test_data = vec![b'X'; 100];
|
||||||
let mut i = 0;
|
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
|
||||||
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let id = ids[i % ids.len()];
|
let _ = db.set(OurDBSetArgs {
|
||||||
let args = OurDBSetArgs {
|
|
||||||
id: Some(id),
|
id: Some(id),
|
||||||
data: &updated_data,
|
data: &test_data,
|
||||||
};
|
}).unwrap();
|
||||||
black_box(db.set(args).unwrap());
|
|
||||||
i += 1;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
db.close().unwrap();
|
||||||
});
|
});
|
||||||
|
|
||||||
// Benchmark get_history operation
|
// Benchmark delete operation
|
||||||
|
c.bench_function("delete", |b| {
|
||||||
|
let config = OurDBConfig {
|
||||||
|
path: db_path.clone(),
|
||||||
|
incremental_mode: true,
|
||||||
|
file_size: Some(10 * 1024 * 1024),
|
||||||
|
keysize: Some(6),
|
||||||
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut db = OurDB::new(config).unwrap();
|
||||||
|
|
||||||
|
// Create a test data vector outside the closure
|
||||||
|
let test_data = vec![b'X'; 100];
|
||||||
|
|
||||||
|
b.iter_with_setup(
|
||||||
|
// Setup: Insert a record before each iteration
|
||||||
|
|| {
|
||||||
|
db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap()
|
||||||
|
},
|
||||||
|
// Benchmark: Delete the record
|
||||||
|
|id| {
|
||||||
|
db.delete(id).unwrap();
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
db.close().unwrap();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Benchmark history tracking
|
||||||
c.bench_function("get_history", |b| {
|
c.bench_function("get_history", |b| {
|
||||||
let config = OurDBConfig {
|
let config = OurDBConfig {
|
||||||
path: db_path.clone(),
|
path: db_path.clone(),
|
||||||
incremental_mode: true,
|
incremental_mode: true,
|
||||||
file_size: Some(10 * 1024 * 1024),
|
file_size: Some(10 * 1024 * 1024),
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
keysize: Some(6),
|
||||||
};
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
|
||||||
let mut i = 0;
|
|
||||||
|
|
||||||
b.iter(|| {
|
|
||||||
let id = ids[i % ids.len()];
|
|
||||||
black_box(db.get_history(id, 2).unwrap());
|
|
||||||
i += 1;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// Benchmark delete operation
|
|
||||||
c.bench_function("delete", |b| {
|
|
||||||
// Create a fresh database for deletion benchmarks
|
|
||||||
let delete_dir = tempdir().expect("Failed to create temp directory");
|
|
||||||
let delete_path = delete_dir.path().to_path_buf();
|
|
||||||
|
|
||||||
let config = OurDBConfig {
|
|
||||||
path: delete_path.clone(),
|
|
||||||
incremental_mode: true,
|
|
||||||
file_size: Some(10 * 1024 * 1024),
|
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
let mut db = OurDB::new(config).unwrap();
|
||||||
let test_data = vec![b'X'; 100];
|
let test_data = vec![b'X'; 100];
|
||||||
|
|
||||||
// Setup keys to delete
|
// Create a record with history
|
||||||
let mut delete_ids = Vec::with_capacity(1000);
|
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
|
||||||
for _ in 0..1000 {
|
|
||||||
let args = OurDBSetArgs {
|
// Update it a few times to create history
|
||||||
id: None,
|
for _ in 0..5 {
|
||||||
data: &test_data,
|
db.set(OurDBSetArgs { id: Some(id), data: &test_data }).unwrap();
|
||||||
};
|
|
||||||
let id = db.set(args).unwrap();
|
|
||||||
delete_ids.push(id);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut i = 0;
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let id = delete_ids[i % delete_ids.len()];
|
let _ = db.get_history(id, 3).unwrap();
|
||||||
// Only try to delete if it exists (not already deleted)
|
|
||||||
if db.get(id).is_ok() {
|
|
||||||
black_box(db.delete(id).unwrap());
|
|
||||||
}
|
|
||||||
i += 1;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
db.close().unwrap();
|
||||||
});
|
});
|
||||||
|
|
||||||
// Benchmark key-value mode vs incremental mode
|
// Benchmark large data handling
|
||||||
let mut group = c.benchmark_group("mode_comparison");
|
c.bench_function("large_data", |b| {
|
||||||
|
|
||||||
// Benchmark set in key-value mode
|
|
||||||
group.bench_function("set_keyvalue_mode", |b| {
|
|
||||||
let kv_dir = tempdir().expect("Failed to create temp directory");
|
|
||||||
let kv_path = kv_dir.path().to_path_buf();
|
|
||||||
|
|
||||||
let config = OurDBConfig {
|
let config = OurDBConfig {
|
||||||
path: kv_path.clone(),
|
path: db_path.clone(),
|
||||||
incremental_mode: false, // Key-value mode
|
incremental_mode: true,
|
||||||
file_size: Some(10 * 1024 * 1024),
|
file_size: Some(10 * 1024 * 1024),
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
keysize: Some(6),
|
||||||
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
let mut db = OurDB::new(config).unwrap();
|
||||||
let test_data = vec![b'X'; 100];
|
let large_data = vec![b'X'; 10 * 1024]; // 10KB
|
||||||
let mut i = 0;
|
|
||||||
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let id = i + 1; // Explicit ID
|
let id = db.set(OurDBSetArgs { id: None, data: &large_data }).unwrap();
|
||||||
let args = OurDBSetArgs {
|
let _ = db.get(id).unwrap();
|
||||||
id: Some(id as u32),
|
db.delete(id).unwrap();
|
||||||
data: &test_data,
|
|
||||||
};
|
|
||||||
black_box(db.set(args).unwrap());
|
|
||||||
i += 1;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
db.close().unwrap();
|
||||||
});
|
});
|
||||||
|
|
||||||
// Benchmark set in incremental mode
|
// Benchmark concurrent operations (simulated)
|
||||||
group.bench_function("set_incremental_mode", |b| {
|
c.bench_function("concurrent_ops", |b| {
|
||||||
let inc_dir = tempdir().expect("Failed to create temp directory");
|
|
||||||
let inc_path = inc_dir.path().to_path_buf();
|
|
||||||
|
|
||||||
let config = OurDBConfig {
|
let config = OurDBConfig {
|
||||||
path: inc_path.clone(),
|
path: db_path.clone(),
|
||||||
incremental_mode: true, // Incremental mode
|
incremental_mode: true,
|
||||||
file_size: Some(10 * 1024 * 1024),
|
file_size: Some(10 * 1024 * 1024),
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
keysize: Some(6),
|
||||||
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
let mut db = OurDB::new(config).unwrap();
|
||||||
let test_data = vec![b'X'; 100];
|
let test_data = vec![b'X'; 100];
|
||||||
|
|
||||||
|
// Pre-insert some data
|
||||||
|
let mut ids = Vec::with_capacity(100);
|
||||||
|
for _ in 0..100 {
|
||||||
|
let id = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
|
||||||
|
ids.push(id);
|
||||||
|
}
|
||||||
|
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let args = OurDBSetArgs {
|
// Simulate mixed workload
|
||||||
id: None, // Auto-generated ID
|
for i in 0..10 {
|
||||||
data: &test_data,
|
if i % 3 == 0 {
|
||||||
};
|
// Insert
|
||||||
black_box(db.set(args).unwrap());
|
let _ = db.set(OurDBSetArgs { id: None, data: &test_data }).unwrap();
|
||||||
});
|
} else if i % 3 == 1 {
|
||||||
});
|
// Read
|
||||||
|
let idx = i % ids.len();
|
||||||
group.finish();
|
let _ = db.get(ids[idx]).unwrap();
|
||||||
|
} else {
|
||||||
// Benchmark with different record sizes
|
// Update
|
||||||
let mut size_group = c.benchmark_group("record_size");
|
let idx = i % ids.len();
|
||||||
|
db.set(OurDBSetArgs { id: Some(ids[idx]), data: &test_data }).unwrap();
|
||||||
for &size in &[10, 100, 1000, 10000] {
|
}
|
||||||
size_group.bench_function(format!("set_size_{}", size), |b| {
|
}
|
||||||
let size_dir = tempdir().expect("Failed to create temp directory");
|
|
||||||
let size_path = size_dir.path().to_path_buf();
|
|
||||||
|
|
||||||
let config = OurDBConfig {
|
|
||||||
path: size_path.clone(),
|
|
||||||
incremental_mode: true,
|
|
||||||
file_size: Some(10 * 1024 * 1024),
|
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
|
||||||
let test_data = vec![b'X'; size];
|
|
||||||
|
|
||||||
b.iter(|| {
|
|
||||||
let args = OurDBSetArgs {
|
|
||||||
id: None,
|
|
||||||
data: &test_data,
|
|
||||||
};
|
|
||||||
black_box(db.set(args).unwrap());
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
size_group.bench_function(format!("get_size_{}", size), |b| {
|
db.close().unwrap();
|
||||||
let size_dir = tempdir().expect("Failed to create temp directory");
|
});
|
||||||
let size_path = size_dir.path().to_path_buf();
|
|
||||||
|
|
||||||
let config = OurDBConfig {
|
|
||||||
path: size_path.clone(),
|
|
||||||
incremental_mode: true,
|
|
||||||
file_size: Some(10 * 1024 * 1024),
|
|
||||||
keysize: Some(6), // Use keysize=6 to allow non-zero file_nr
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut db = OurDB::new(config).unwrap();
|
|
||||||
let test_data = vec![b'X'; size];
|
|
||||||
|
|
||||||
// Insert some records first
|
|
||||||
let mut size_ids = Vec::with_capacity(100);
|
|
||||||
for _ in 0..100 {
|
|
||||||
let args = OurDBSetArgs {
|
|
||||||
id: None,
|
|
||||||
data: &test_data,
|
|
||||||
};
|
|
||||||
let id = db.set(args).unwrap();
|
|
||||||
size_ids.push(id);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut i = 0;
|
|
||||||
b.iter(|| {
|
|
||||||
let id = size_ids[i % size_ids.len()];
|
|
||||||
black_box(db.get(id).unwrap());
|
|
||||||
i += 1;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
size_group.finish();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
criterion_group!(benches, criterion_benchmark);
|
criterion_group!(benches, criterion_benchmark);
|
||||||
|
@ -41,6 +41,7 @@ fn key_value_mode_example(base_path: &PathBuf) -> Result<(), ourdb::Error> {
        incremental_mode: false,
        file_size: Some(1024 * 1024), // 1MB for testing
        keysize: Some(2), // Small key size for demonstration
        reset: None, // Don't reset existing database
    };

    let mut db = OurDB::new(config)?;
@ -94,6 +95,7 @@ fn incremental_mode_example(base_path: &PathBuf) -> Result<(), ourdb::Error> {
        incremental_mode: true,
        file_size: Some(1024 * 1024), // 1MB for testing
        keysize: Some(3), // 3-byte keys
        reset: None, // Don't reset existing database
    };

    let mut db = OurDB::new(config)?;
@ -136,7 +138,8 @@ fn performance_benchmark(base_path: &PathBuf) -> Result<(), ourdb::Error> {
        path: db_path,
        incremental_mode: true,
        file_size: Some(1024 * 1024), // 10MB
        keysize: Some(4), // 4-byte keys
        reset: None, // Don't reset existing database
    };

    let mut db = OurDB::new(config)?;
@ -13,6 +13,7 @@ fn main() -> Result<(), ourdb::Error> {
        incremental_mode: true,
        file_size: None, // Use default (500MB)
        keysize: None, // Use default (4 bytes)
        reset: None, // Don't reset existing database
    };

    let mut db = OurDB::new(config)?;
@ -1,22 +1,28 @@
|
|||||||
use ourdb::{OurDB, OurDBConfig, OurDBSetArgs};
|
use ourdb::{OurDB, OurDBConfig, OurDBSetArgs};
|
||||||
use std::time::{Duration, Instant};
|
use std::time::Instant;
|
||||||
|
|
||||||
fn main() -> Result<(), ourdb::Error> {
|
fn main() -> Result<(), ourdb::Error> {
|
||||||
// Parse command line arguments
|
// Parse command-line arguments
|
||||||
let args: Vec<String> = std::env::args().collect();
|
let args: Vec<String> = std::env::args().collect();
|
||||||
|
|
||||||
let (num_operations, record_size, incremental_mode, keysize) = parse_args(&args);
|
// Default values
|
||||||
|
let mut incremental_mode = true;
|
||||||
|
let mut keysize: u8 = 4;
|
||||||
|
let mut num_operations = 10000;
|
||||||
|
|
||||||
println!("OurDB Benchmark");
|
// Parse arguments
|
||||||
println!("===============");
|
for i in 1..args.len() {
|
||||||
println!("Operations: {}", num_operations);
|
if args[i] == "--no-incremental" {
|
||||||
println!("Record size: {} bytes", record_size);
|
incremental_mode = false;
|
||||||
println!("Mode: {}", if incremental_mode { "Incremental" } else { "Key-Value" });
|
} else if args[i] == "--keysize" && i + 1 < args.len() {
|
||||||
println!("Key size: {} bytes", keysize);
|
keysize = args[i + 1].parse().unwrap_or(4);
|
||||||
println!();
|
} else if args[i] == "--ops" && i + 1 < args.len() {
|
||||||
|
num_operations = args[i + 1].parse().unwrap_or(10000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Create a temporary directory for the database
|
// Create a temporary directory for the database
|
||||||
let db_path = std::env::temp_dir().join(format!("ourdb_benchmark_{}", std::process::id()));
|
let db_path = std::env::temp_dir().join("ourdb_benchmark");
|
||||||
std::fs::create_dir_all(&db_path)?;
|
std::fs::create_dir_all(&db_path)?;
|
||||||
|
|
||||||
println!("Database path: {}", db_path.display());
|
println!("Database path: {}", db_path.display());
|
||||||
@ -27,24 +33,27 @@ fn main() -> Result<(), ourdb::Error> {
|
|||||||
incremental_mode,
|
incremental_mode,
|
||||||
file_size: Some(1024 * 1024),
|
file_size: Some(1024 * 1024),
|
||||||
keysize: Some(keysize),
|
keysize: Some(keysize),
|
||||||
|
reset: Some(true), // Reset the database for benchmarking
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut db = OurDB::new(config)?;
|
let mut db = OurDB::new(config)?;
|
||||||
|
|
||||||
// Prepare test data
|
// Prepare test data (100 bytes per record)
|
||||||
let test_data = vec![b'X'; record_size];
|
let test_data = vec![b'A'; 100];
|
||||||
|
|
||||||
// Benchmark write operations
|
// Benchmark write operations
|
||||||
println!("\nBenchmarking writes...");
|
println!("Benchmarking {} write operations (incremental: {}, keysize: {})...",
|
||||||
|
num_operations, incremental_mode, keysize);
|
||||||
|
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
|
|
||||||
let mut ids = Vec::with_capacity(num_operations);
|
let mut ids = Vec::with_capacity(num_operations);
|
||||||
for i in 0..num_operations {
|
for _ in 0..num_operations {
|
||||||
let id = if incremental_mode {
|
let id = if incremental_mode {
|
||||||
db.set(OurDBSetArgs { id: None, data: &test_data })?
|
db.set(OurDBSetArgs { id: None, data: &test_data })?
|
||||||
} else {
|
} else {
|
||||||
// In key-value mode, we provide explicit IDs
|
// In non-incremental mode, we need to provide IDs
|
||||||
let id = i as u32 + 1;
|
let id = ids.len() as u32 + 1;
|
||||||
db.set(OurDBSetArgs { id: Some(id), data: &test_data })?;
|
db.set(OurDBSetArgs { id: Some(id), data: &test_data })?;
|
||||||
id
|
id
|
||||||
};
|
};
|
||||||
@ -52,10 +61,15 @@ fn main() -> Result<(), ourdb::Error> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let write_duration = start.elapsed();
|
let write_duration = start.elapsed();
|
||||||
print_performance_stats("Write", num_operations, write_duration);
|
let writes_per_second = num_operations as f64 / write_duration.as_secs_f64();
|
||||||
|
|
||||||
|
println!("Write performance: {:.2} ops/sec ({:.2} ms/op)",
|
||||||
|
writes_per_second,
|
||||||
|
write_duration.as_secs_f64() * 1000.0 / num_operations as f64);
|
||||||
|
|
||||||
|
// Benchmark read operations
|
||||||
|
println!("Benchmarking {} read operations...", num_operations);
|
||||||
|
|
||||||
// Benchmark read operations (sequential)
|
|
||||||
println!("\nBenchmarking sequential reads...");
|
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
|
|
||||||
for &id in &ids {
|
for &id in &ids {
|
||||||
@ -63,123 +77,31 @@ fn main() -> Result<(), ourdb::Error> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let read_duration = start.elapsed();
|
let read_duration = start.elapsed();
|
||||||
print_performance_stats("Sequential read", num_operations, read_duration);
|
let reads_per_second = num_operations as f64 / read_duration.as_secs_f64();
|
||||||
|
|
||||||
// Benchmark random reads
|
println!("Read performance: {:.2} ops/sec ({:.2} ms/op)",
|
||||||
println!("\nBenchmarking random reads...");
|
reads_per_second,
|
||||||
let start = Instant::now();
|
read_duration.as_secs_f64() * 1000.0 / num_operations as f64);
|
||||||
|
|
||||||
use std::collections::HashSet;
|
|
||||||
let mut rng = rand::thread_rng();
|
|
||||||
let mut random_indices = HashSet::new();
|
|
||||||
|
|
||||||
// Select 20% of the IDs randomly for testing
|
|
||||||
let sample_size = num_operations / 5;
|
|
||||||
while random_indices.len() < sample_size {
|
|
||||||
let idx = rand::Rng::gen_range(&mut rng, 0..ids.len());
|
|
||||||
random_indices.insert(idx);
|
|
||||||
}
|
|
||||||
|
|
||||||
for idx in random_indices {
|
|
||||||
let _ = db.get(ids[idx])?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let random_read_duration = start.elapsed();
|
|
||||||
print_performance_stats("Random read", sample_size, random_read_duration);
|
|
||||||
|
|
||||||
// Benchmark update operations
|
// Benchmark update operations
|
||||||
println!("\nBenchmarking updates...");
|
println!("Benchmarking {} update operations...", num_operations);
|
||||||
|
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
|
|
||||||
for &id in &ids[0..num_operations/2] {
|
for &id in &ids {
|
||||||
db.set(OurDBSetArgs { id: Some(id), data: &test_data })?;
|
db.set(OurDBSetArgs { id: Some(id), data: &test_data })?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let update_duration = start.elapsed();
|
let update_duration = start.elapsed();
|
||||||
print_performance_stats("Update", num_operations/2, update_duration);
|
let updates_per_second = num_operations as f64 / update_duration.as_secs_f64();
|
||||||
|
|
||||||
// Benchmark history retrieval
|
println!("Update performance: {:.2} ops/sec ({:.2} ms/op)",
|
||||||
println!("\nBenchmarking history retrieval...");
|
updates_per_second,
|
||||||
let start = Instant::now();
|
update_duration.as_secs_f64() * 1000.0 / num_operations as f64);
|
||||||
|
|
||||||
for &id in &ids[0..num_operations/10] {
|
// Clean up
|
||||||
let _ = db.get_history(id, 2)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let history_duration = start.elapsed();
|
|
||||||
print_performance_stats("History retrieval", num_operations/10, history_duration);
|
|
||||||
|
|
||||||
// Benchmark delete operations
|
|
||||||
println!("\nBenchmarking deletes...");
|
|
||||||
let start = Instant::now();
|
|
||||||
|
|
||||||
for &id in &ids[0..num_operations/4] {
|
|
||||||
db.delete(id)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let delete_duration = start.elapsed();
|
|
||||||
print_performance_stats("Delete", num_operations/4, delete_duration);
|
|
||||||
|
|
||||||
// Close and clean up
|
|
||||||
db.close()?;
|
db.close()?;
|
||||||
std::fs::remove_dir_all(&db_path)?;
|
std::fs::remove_dir_all(&db_path)?;
|
||||||
|
|
||||||
println!("\nBenchmark completed successfully");
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_args(args: &[String]) -> (usize, usize, bool, u8) {
|
|
||||||
let mut num_operations = 100000;
|
|
||||||
let mut record_size = 100;
|
|
||||||
let mut incremental_mode = true;
|
|
||||||
let mut keysize = 4;
|
|
||||||
|
|
||||||
for i in 1..args.len() {
|
|
||||||
if args[i] == "--ops" && i + 1 < args.len() {
|
|
||||||
if let Ok(n) = args[i + 1].parse() {
|
|
||||||
num_operations = n;
|
|
||||||
}
|
|
||||||
} else if args[i] == "--size" && i + 1 < args.len() {
|
|
||||||
if let Ok(n) = args[i + 1].parse() {
|
|
||||||
record_size = n;
|
|
||||||
}
|
|
||||||
} else if args[i] == "--keyvalue" {
|
|
||||||
incremental_mode = false;
|
|
||||||
} else if args[i] == "--keysize" && i + 1 < args.len() {
|
|
||||||
if let Ok(n) = args[i + 1].parse() {
|
|
||||||
if [2, 3, 4, 6].contains(&n) {
|
|
||||||
keysize = n;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if args[i] == "--help" {
|
|
||||||
print_usage();
|
|
||||||
std::process::exit(0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(num_operations, record_size, incremental_mode, keysize)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn print_usage() {
|
|
||||||
println!("OurDB Benchmark Tool");
|
|
||||||
println!("Usage: cargo run --example benchmark [OPTIONS]");
|
|
||||||
println!();
|
|
||||||
println!("Options:");
|
|
||||||
println!(" --ops N Number of operations to perform (default: 100000)");
|
|
||||||
println!(" --size N Size of each record in bytes (default: 100)");
|
|
||||||
println!(" --keyvalue Use key-value mode instead of incremental mode");
|
|
||||||
println!(" --keysize N Key size in bytes (2, 3, 4, or 6) (default: 4)");
|
|
||||||
println!(" --help Print this help message");
|
|
||||||
}
|
|
||||||
|
|
||||||
fn print_performance_stats(operation: &str, count: usize, duration: Duration) {
|
|
||||||
let ops_per_second = count as f64 / duration.as_secs_f64();
|
|
||||||
let ms_per_op = duration.as_secs_f64() * 1000.0 / count as f64;
|
|
||||||
|
|
||||||
println!("{} performance:", operation);
|
|
||||||
println!(" Total time: {:.2} seconds", duration.as_secs_f64());
|
|
||||||
println!(" Operations: {}", count);
|
|
||||||
println!(" Speed: {:.2} ops/sec", ops_per_second);
|
|
||||||
println!(" Average: {:.3} ms/op", ms_per_op);
|
|
||||||
}
|
|
||||||
|