first commit
src/bin/runner/engine.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
/// OSIRIS Engine Factory
///
/// Creates a Rhai engine configured with OSIRIS objects and methods.

use osiris::rhai_support::{register_note_api, register_event_api, OsirisInstance};
use rhai::Engine;
use std::collections::HashMap;

#[allow(dead_code)]

/// Configuration for multiple OSIRIS instances
pub struct OsirisConfig {
    pub instances: HashMap<String, (String, u16)>, // name -> (url, db_id)
}

impl OsirisConfig {
    pub fn new() -> Self {
        Self {
            instances: HashMap::new(),
        }
    }

    pub fn add_instance(&mut self, name: impl ToString, url: impl ToString, db_id: u16) {
        self.instances.insert(name.to_string(), (url.to_string(), db_id));
    }

    pub fn single(url: impl ToString, db_id: u16) -> Self {
        let mut config = Self::new();
        config.add_instance("default", url, db_id);
        config
    }
}

/// Create a new Rhai engine with OSIRIS support
pub fn create_osiris_engine(
    herodb_url: &str,
    db_id: u16,
) -> Result<(Engine, rhai::Scope<'static>), Box<dyn std::error::Error>> {
    let config = OsirisConfig::single(herodb_url, db_id);
    create_osiris_engine_with_config(config)
}

/// Create a new Rhai engine with multiple OSIRIS instances
/// Returns (Engine, Scope) where Scope contains predefined instances
pub fn create_osiris_engine_with_config(
    config: OsirisConfig,
) -> Result<(Engine, rhai::Scope<'static>), Box<dyn std::error::Error>> {
    let mut engine = Engine::new();

    // Register Note API
    register_note_api(&mut engine);

    // Register Event API
    register_event_api(&mut engine);

    // Register OsirisInstance type
    engine.build_type::<OsirisInstance>();

    // Register osiris() constructor function for dynamic creation
    engine.register_fn("osiris", |name: &str, url: &str, db_id: rhai::INT| -> Result<OsirisInstance, Box<rhai::EvalAltResult>> {
        OsirisInstance::new(name, url, db_id as u16)
            .map_err(|e| format!("Failed to create OSIRIS instance: {}", e).into())
    });

    // Create predefined instances and inject them as global constants in scope
    let mut scope = rhai::Scope::new();
    for (name, (url, db_id)) in config.instances {
        let instance = OsirisInstance::new(&name, &url, db_id)?;
        scope.push_constant(&name, instance);
    }

    Ok((engine, scope))
}
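For context, a minimal sketch of how this factory could be driven from Rust, assuming it sits next to engine.rs and the crate is built with the rhai-support feature; the "freezone" instance name, the URL, and the Rhai snippet are illustrative, not part of this commit:

```rust
// Hypothetical usage sketch for create_osiris_engine_with_config (not part of this commit).
fn run_example() -> Result<(), Box<dyn std::error::Error>> {
    let mut config = OsirisConfig::new();
    // Mirrors the --instance example in the runner docs; values are placeholders.
    config.add_instance("freezone", "redis://localhost:6379", 1);

    let (engine, mut scope) = create_osiris_engine_with_config(config)?;

    // `freezone` is available as a scope constant; `osiris(...)` creates instances dynamically.
    let script = r#"
        let n = note("notes").title("Hi").content("hello from rhai");
        freezone.put_note(n)
    "#;
    let id = engine.eval_with_scope::<String>(&mut scope, script)?;
    println!("stored note {}", id);
    Ok(())
}
```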
src/bin/runner/main.rs (new file, 135 lines)
@@ -0,0 +1,135 @@
/// OSIRIS Runner
///
/// A standalone runner for executing OSIRIS Rhai scripts.
/// Can run in script mode (single execution) or daemon mode (continuous).
///
/// Usage:
/// ```bash
/// # Script mode
/// cargo run --bin runner --features rhai-support -- runner1 --script "let note = note('test').title('Hi'); put_note(note);"
///
/// # Daemon mode (requires runner_rust infrastructure)
/// cargo run --bin runner --features rhai-support -- runner1 --redis-url redis://localhost:6379
/// ```

use clap::Parser;

#[cfg(feature = "rhai-support")]
mod engine;

#[cfg(feature = "rhai-support")]
use engine::create_osiris_engine;

#[derive(Parser, Debug)]
#[command(author, version, about = "OSIRIS Rhai Script Runner", long_about = None)]
struct Args {
    /// Runner ID
    runner_id: String,

    /// HeroDB URL
    #[arg(short = 'r', long, default_value = "redis://localhost:6379")]
    redis_url: String,

    /// HeroDB database ID
    #[arg(short = 'd', long, default_value_t = 1)]
    db_id: u16,

    /// Script to execute in single-job mode (optional)
    #[arg(short, long)]
    script: Option<String>,

    /// Script file to execute
    #[arg(short = 'f', long)]
    script_file: Option<String>,

    /// Predefined instances in format: name:url:db_id (can be repeated)
    /// Example: --instance freezone:redis://localhost:6379:1
    #[arg(short = 'i', long = "instance")]
    instances: Vec<String>,
}

#[cfg(not(feature = "rhai-support"))]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    eprintln!("❌ Error: OSIRIS runner requires the 'rhai-support' feature");
    eprintln!("Run with: cargo run --bin runner --features rhai-support");
    std::process::exit(1);
}

#[cfg(feature = "rhai-support")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Initialize logging
    env_logger::init();

    let args = Args::parse();

    println!("🚀 OSIRIS Runner");
    println!("Runner ID: {}", args.runner_id);
    println!("HeroDB: {} (DB {})", args.redis_url, args.db_id);

    // Parse predefined instances
    let mut config = engine::OsirisConfig::new();

    if args.instances.is_empty() {
        // No predefined instances, use default
        config.add_instance("default", &args.redis_url, args.db_id);
    } else {
        // Parse instance definitions (format: name:url:db_id)
        // We need to split carefully since URL contains colons
        for instance_def in &args.instances {
            // Find the first colon (name separator)
            let first_colon = instance_def.find(':')
                .ok_or_else(|| format!("Invalid instance format: '{}'. Expected: name:url:db_id", instance_def))?;

            let name = &instance_def[..first_colon];
            let rest = &instance_def[first_colon + 1..];

            // Find the last colon (db_id separator)
            let last_colon = rest.rfind(':')
                .ok_or_else(|| format!("Invalid instance format: '{}'. Expected: name:url:db_id", instance_def))?;

            let url = &rest[..last_colon];
            let db_id_str = &rest[last_colon + 1..];

            let db_id: u16 = db_id_str.parse()
                .map_err(|_| format!("Invalid db_id in instance '{}': {}", instance_def, db_id_str))?;

            config.add_instance(name, url, db_id);
            println!(" Instance: {} → {} (DB {})", name, url, db_id);
        }
    }

    println!();

    // Determine script source
    let script_content = if let Some(script) = args.script {
        script
    } else if let Some(file_path) = args.script_file {
        std::fs::read_to_string(&file_path)
            .map_err(|e| format!("Failed to read script file '{}': {}", file_path, e))?
    } else {
        return Err("No script provided. Use --script or --script-file".into());
    };

    println!("📝 Executing script...\n");
    println!("─────────────────────────────────────");

    // Create engine with predefined instances
    let (engine, mut scope) = engine::create_osiris_engine_with_config(config)?;

    match engine.eval_with_scope::<rhai::Dynamic>(&mut scope, &script_content) {
        Ok(result) => {
            println!("─────────────────────────────────────");
            println!("\n✅ Script completed successfully!");
            if !result.is_unit() {
                println!("Result: {}", result);
            }
            Ok(())
        }
        Err(e) => {
            println!("─────────────────────────────────────");
            println!("\n❌ Script execution failed!");
            println!("Error: {}", e);
            Err(Box::new(e))
        }
    }
}
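The instance definition is split on the first colon (name) and the last colon (db_id) precisely so that the URL in the middle may itself contain colons. A small hedged check of that rule on the documented example, written as a hypothetical test that is not part of this commit:

```rust
// Illustrative check of the name:url:db_id splitting rule used above (hypothetical test).
#[test]
fn splits_instance_definition() {
    let def = "freezone:redis://localhost:6379:1";
    let first = def.find(':').unwrap();
    let (name, rest) = (&def[..first], &def[first + 1..]);
    let last = rest.rfind(':').unwrap();
    assert_eq!(name, "freezone");
    assert_eq!(&rest[..last], "redis://localhost:6379");
    assert_eq!(rest[last + 1..].parse::<u16>().unwrap(), 1);
}
```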
src/config/mod.rs (new file, 60 lines)
@@ -0,0 +1,60 @@
pub mod model;

pub use model::{Config, HeroDbConfig, NamespaceConfig};

use crate::error::{Error, Result};
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;

/// Load configuration from file
pub fn load_config(path: Option<PathBuf>) -> Result<Config> {
    let config_path = path.unwrap_or_else(default_config_path);

    if !config_path.exists() {
        return Err(Error::Config(format!(
            "Configuration file not found: {}",
            config_path.display()
        )));
    }

    let content = fs::read_to_string(&config_path)?;
    let config: Config = toml::from_str(&content)
        .map_err(|e| Error::Config(format!("Failed to parse config: {}", e)))?;

    Ok(config)
}

/// Save configuration to file
pub fn save_config(config: &Config, path: Option<PathBuf>) -> Result<()> {
    let config_path = path.unwrap_or_else(default_config_path);

    // Create parent directory if it doesn't exist
    if let Some(parent) = config_path.parent() {
        fs::create_dir_all(parent)?;
    }

    let content = toml::to_string_pretty(config)
        .map_err(|e| Error::Config(format!("Failed to serialize config: {}", e)))?;

    fs::write(&config_path, content)?;

    Ok(())
}

/// Get the default configuration file path
pub fn default_config_path() -> PathBuf {
    let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
    PathBuf::from(home)
        .join(".config")
        .join("osiris")
        .join("config.toml")
}

/// Create a default configuration
pub fn create_default_config(herodb_url: String) -> Config {
    Config {
        herodb: HeroDbConfig { url: herodb_url },
        namespaces: HashMap::new(),
    }
}
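A short sketch of how these helpers would be combined, assuming the default config path and using the `Config` methods introduced in the next file; the "notes" namespace name is only an example:

```rust
// Hypothetical end-to-end use of the config helpers (names and values are examples only).
fn bootstrap() -> osiris::error::Result<()> {
    let mut config = create_default_config("redis://localhost:6379".to_string());
    let db_id = config.next_db_id();
    config.set_namespace("notes".to_string(), NamespaceConfig { db_id });

    save_config(&config, None)?;       // writes ~/.config/osiris/config.toml
    let reloaded = load_config(None)?; // parses the same TOML back
    assert!(reloaded.get_namespace("notes").is_some());
    Ok(())
}
```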
src/config/model.rs (new file, 55 lines)
@@ -0,0 +1,55 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// OSIRIS configuration
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Config {
    /// HeroDB connection configuration
    pub herodb: HeroDbConfig,

    /// Namespace configurations
    #[serde(default)]
    pub namespaces: HashMap<String, NamespaceConfig>,
}

/// HeroDB connection configuration
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct HeroDbConfig {
    /// HeroDB URL (e.g., "redis://localhost:6379")
    pub url: String,
}

/// Namespace configuration
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NamespaceConfig {
    /// HeroDB database ID for this namespace
    pub db_id: u16,
}

impl Config {
    /// Get namespace configuration by name
    pub fn get_namespace(&self, name: &str) -> Option<&NamespaceConfig> {
        self.namespaces.get(name)
    }

    /// Add or update a namespace
    pub fn set_namespace(&mut self, name: String, config: NamespaceConfig) {
        self.namespaces.insert(name, config);
    }

    /// Remove a namespace
    pub fn remove_namespace(&mut self, name: &str) -> Option<NamespaceConfig> {
        self.namespaces.remove(name)
    }

    /// Get the next available database ID
    pub fn next_db_id(&self) -> u16 {
        let max_id = self
            .namespaces
            .values()
            .map(|ns| ns.db_id)
            .max()
            .unwrap_or(0);
        max_id + 1
    }
}
src/error.rs (new file, 46 lines)
@@ -0,0 +1,46 @@
use std::fmt;

#[derive(Debug)]
pub enum Error {
    Redis(redis::RedisError),
    Serialization(serde_json::Error),
    NotFound(String),
    InvalidInput(String),
    Config(String),
    Io(std::io::Error),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Error::Redis(e) => write!(f, "Redis error: {}", e),
            Error::Serialization(e) => write!(f, "Serialization error: {}", e),
            Error::NotFound(msg) => write!(f, "Not found: {}", msg),
            Error::InvalidInput(msg) => write!(f, "Invalid input: {}", msg),
            Error::Config(msg) => write!(f, "Configuration error: {}", msg),
            Error::Io(e) => write!(f, "IO error: {}", e),
        }
    }
}

impl std::error::Error for Error {}

impl From<redis::RedisError> for Error {
    fn from(e: redis::RedisError) -> Self {
        Error::Redis(e)
    }
}

impl From<serde_json::Error> for Error {
    fn from(e: serde_json::Error) -> Self {
        Error::Serialization(e)
    }
}

impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Error::Io(e)
    }
}

pub type Result<T> = std::result::Result<T, Error>;
src/index/field_index.rs (new file, 139 lines)
@@ -0,0 +1,139 @@
use crate::error::Result;
use crate::store::{HeroDbClient, OsirisObject};

/// Field indexing for fast filtering by tags and metadata
pub struct FieldIndex {
    client: HeroDbClient,
}

impl FieldIndex {
    /// Create a new field index
    pub fn new(client: HeroDbClient) -> Self {
        Self { client }
    }

    /// Index an object (add to field indexes)
    pub async fn index_object(&self, obj: &OsirisObject) -> Result<()> {
        // Index tags
        for (key, value) in &obj.meta.tags {
            let field_key = format!("field:tag:{}={}", key, value);
            self.client.sadd(&field_key, &obj.id).await?;
        }

        // Index MIME type if present
        if let Some(mime) = &obj.meta.mime {
            let field_key = format!("field:mime:{}", mime);
            self.client.sadd(&field_key, &obj.id).await?;
        }

        // Index title if present (for exact match)
        if let Some(title) = &obj.meta.title {
            let field_key = format!("field:title:{}", title);
            self.client.sadd(&field_key, &obj.id).await?;
        }

        // Add to scan index for text search
        self.client.sadd("scan:index", &obj.id).await?;

        Ok(())
    }

    /// Remove an object from indexes
    pub async fn deindex_object(&self, obj: &OsirisObject) -> Result<()> {
        // Remove from tag indexes
        for (key, value) in &obj.meta.tags {
            let field_key = format!("field:tag:{}={}", key, value);
            self.client.srem(&field_key, &obj.id).await?;
        }

        // Remove from MIME index
        if let Some(mime) = &obj.meta.mime {
            let field_key = format!("field:mime:{}", mime);
            self.client.srem(&field_key, &obj.id).await?;
        }

        // Remove from title index
        if let Some(title) = &obj.meta.title {
            let field_key = format!("field:title:{}", title);
            self.client.srem(&field_key, &obj.id).await?;
        }

        // Remove from scan index
        self.client.srem("scan:index", &obj.id).await?;

        Ok(())
    }

    /// Update object indexes (remove old, add new)
    pub async fn reindex_object(&self, old_obj: &OsirisObject, new_obj: &OsirisObject) -> Result<()> {
        self.deindex_object(old_obj).await?;
        self.index_object(new_obj).await?;
        Ok(())
    }

    /// Get all IDs matching a tag filter
    pub async fn get_ids_by_tag(&self, key: &str, value: &str) -> Result<Vec<String>> {
        let field_key = format!("field:tag:{}={}", key, value);
        self.client.smembers(&field_key).await
    }

    /// Get all IDs matching a MIME type
    pub async fn get_ids_by_mime(&self, mime: &str) -> Result<Vec<String>> {
        let field_key = format!("field:mime:{}", mime);
        self.client.smembers(&field_key).await
    }

    /// Get all IDs matching a title
    pub async fn get_ids_by_title(&self, title: &str) -> Result<Vec<String>> {
        let field_key = format!("field:title:{}", title);
        self.client.smembers(&field_key).await
    }

    /// Get all IDs in the scan index
    pub async fn get_all_ids(&self) -> Result<Vec<String>> {
        self.client.smembers("scan:index").await
    }

    /// Get intersection of multiple field filters
    pub async fn get_ids_by_filters(&self, filters: &[(String, String)]) -> Result<Vec<String>> {
        if filters.is_empty() {
            return self.get_all_ids().await;
        }

        let keys: Vec<String> = filters
            .iter()
            .map(|(k, v)| {
                if k == "mime" {
                    format!("field:mime:{}", v)
                } else if k == "title" {
                    format!("field:title:{}", v)
                } else {
                    format!("field:tag:{}={}", k, v)
                }
            })
            .collect();

        self.client.sinter(&keys).await
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    #[ignore]
    async fn test_index_object() {
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let index = FieldIndex::new(client);

        let mut obj = OsirisObject::new("test".to_string(), Some("Hello".to_string()));
        obj.set_tag("topic".to_string(), "rust".to_string());
        obj.set_mime(Some("text/plain".to_string()));

        index.index_object(&obj).await.unwrap();

        let ids = index.get_ids_by_tag("topic", "rust").await.unwrap();
        assert!(ids.contains(&obj.id));
    }
}
src/index/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod field_index;

pub use field_index::FieldIndex;
src/interfaces/cli.rs (new file, 408 lines)
@@ -0,0 +1,408 @@
use crate::config::{self, NamespaceConfig};
use crate::error::{Error, Result};
use crate::index::FieldIndex;
use crate::retrieve::{RetrievalQuery, SearchEngine};
use crate::store::{HeroDbClient, OsirisObject};
use clap::{Parser, Subcommand};
use std::collections::BTreeMap;
use std::fs;
use std::io::{self, Read};
use std::path::PathBuf;

#[derive(Parser, Debug)]
#[command(name = "osiris")]
#[command(about = "OSIRIS - Object Storage, Indexing & Retrieval Intelligent System", long_about = None)]
pub struct Cli {
    #[command(subcommand)]
    pub command: Commands,
}

#[derive(Subcommand, Debug)]
pub enum Commands {
    /// Initialize OSIRIS configuration
    Init {
        /// HeroDB URL
        #[arg(long, default_value = "redis://localhost:6379")]
        herodb: String,
    },

    /// Namespace management
    Ns {
        #[command(subcommand)]
        command: NsCommands,
    },

    /// Put an object
    Put {
        /// Object path (namespace/name)
        path: String,

        /// File to upload (use '-' for stdin)
        file: String,

        /// Tags (key=value pairs, comma-separated)
        #[arg(long)]
        tags: Option<String>,

        /// MIME type
        #[arg(long)]
        mime: Option<String>,

        /// Title
        #[arg(long)]
        title: Option<String>,
    },

    /// Get an object
    Get {
        /// Object path (namespace/name or namespace/id)
        path: String,

        /// Output file (default: stdout)
        #[arg(long)]
        output: Option<PathBuf>,

        /// Output raw content only (no metadata)
        #[arg(long)]
        raw: bool,
    },

    /// Delete an object
    Del {
        /// Object path (namespace/name or namespace/id)
        path: String,
    },

    /// Search/find objects
    Find {
        /// Text query (optional)
        query: Option<String>,

        /// Namespace to search
        #[arg(long)]
        ns: String,

        /// Filters (key=value pairs, comma-separated)
        #[arg(long)]
        filter: Option<String>,

        /// Maximum number of results
        #[arg(long, default_value = "10")]
        topk: usize,

        /// Output as JSON
        #[arg(long)]
        json: bool,
    },

    /// Show statistics
    Stats {
        /// Namespace (optional, shows all if not specified)
        #[arg(long)]
        ns: Option<String>,
    },
}

#[derive(Subcommand, Debug, Clone)]
pub enum NsCommands {
    /// Create a new namespace
    Create {
        /// Namespace name
        name: String,
    },

    /// List all namespaces
    List,

    /// Delete a namespace
    Delete {
        /// Namespace name
        name: String,
    },
}

impl Cli {
    pub async fn run(self) -> Result<()> {
        match self.command {
            Commands::Init { herodb } => {
                let config = config::create_default_config(herodb);
                config::save_config(&config, None)?;
                println!("✓ OSIRIS initialized");
                println!(" Config: {}", config::default_config_path().display());
                Ok(())
            }

            Commands::Ns { ref command } => self.handle_ns_command(command.clone()).await,
            Commands::Put { ref path, ref file, ref tags, ref mime, ref title } => {
                self.handle_put(path.clone(), file.clone(), tags.clone(), mime.clone(), title.clone()).await
            }
            Commands::Get { ref path, ref output, raw } => {
                self.handle_get(path.clone(), output.clone(), raw).await
            }
            Commands::Del { ref path } => self.handle_del(path.clone()).await,
            Commands::Find { ref query, ref ns, ref filter, topk, json } => {
                self.handle_find(query.clone(), ns.clone(), filter.clone(), topk, json).await
            }
            Commands::Stats { ref ns } => self.handle_stats(ns.clone()).await,
        }
    }

    async fn handle_ns_command(&self, command: NsCommands) -> Result<()> {
        let mut config = config::load_config(None)?;

        match command {
            NsCommands::Create { name } => {
                if config.get_namespace(&name).is_some() {
                    return Err(Error::InvalidInput(format!(
                        "Namespace '{}' already exists",
                        name
                    )));
                }

                let db_id = config.next_db_id();
                let ns_config = NamespaceConfig { db_id };

                config.set_namespace(name.clone(), ns_config);
                config::save_config(&config, None)?;

                println!("✓ Created namespace '{}' (DB {})", name, db_id);
                Ok(())
            }

            NsCommands::List => {
                if config.namespaces.is_empty() {
                    println!("No namespaces configured");
                } else {
                    println!("Namespaces:");
                    for (name, ns_config) in &config.namespaces {
                        println!(" {} → DB {}", name, ns_config.db_id);
                    }
                }
                Ok(())
            }

            NsCommands::Delete { name } => {
                if config.remove_namespace(&name).is_none() {
                    return Err(Error::NotFound(format!("Namespace '{}'", name)));
                }

                config::save_config(&config, None)?;
                println!("✓ Deleted namespace '{}'", name);
                Ok(())
            }
        }
    }

    async fn handle_put(
        &self,
        path: String,
        file: String,
        tags: Option<String>,
        mime: Option<String>,
        title: Option<String>,
    ) -> Result<()> {
        let (ns, name) = parse_path(&path)?;
        let config = config::load_config(None)?;
        let ns_config = config.get_namespace(&ns)
            .ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;

        // Read content
        let content = if file == "-" {
            let mut buffer = String::new();
            io::stdin().read_to_string(&mut buffer)?;
            buffer
        } else {
            fs::read_to_string(&file)?
        };

        // Create object
        let mut obj = OsirisObject::with_id(name.clone(), ns.clone(), Some(content));

        if let Some(title) = title {
            obj.set_title(Some(title));
        }

        if let Some(mime) = mime {
            obj.set_mime(Some(mime));
        }

        // Parse tags
        if let Some(tags_str) = tags {
            let tag_map = parse_tags(&tags_str)?;
            for (key, value) in tag_map {
                obj.set_tag(key, value);
            }
        }

        // Store object
        let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
        let index = FieldIndex::new(client.clone());

        client.put_object(&obj).await?;
        index.index_object(&obj).await?;

        println!("✓ Stored {}/{}", ns, obj.id);
        Ok(())
    }

    async fn handle_get(&self, path: String, output: Option<PathBuf>, raw: bool) -> Result<()> {
        let (ns, id) = parse_path(&path)?;
        let config = config::load_config(None)?;
        let ns_config = config.get_namespace(&ns)
            .ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;

        let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
        let obj = client.get_object(&id).await?;

        if raw {
            // Output raw content only
            let content = obj.text.unwrap_or_default();
            if let Some(output_path) = output {
                fs::write(output_path, content)?;
            } else {
                print!("{}", content);
            }
        } else {
            // Output full object as JSON
            let json = serde_json::to_string_pretty(&obj)?;
            if let Some(output_path) = output {
                fs::write(output_path, json)?;
            } else {
                println!("{}", json);
            }
        }

        Ok(())
    }

    async fn handle_del(&self, path: String) -> Result<()> {
        let (ns, id) = parse_path(&path)?;
        let config = config::load_config(None)?;
        let ns_config = config.get_namespace(&ns)
            .ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;

        let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
        let index = FieldIndex::new(client.clone());

        // Get object first to deindex it
        let obj = client.get_object(&id).await?;
        index.deindex_object(&obj).await?;

        let deleted = client.delete_object(&id).await?;

        if deleted {
            println!("✓ Deleted {}/{}", ns, id);
            Ok(())
        } else {
            Err(Error::NotFound(format!("{}/{}", ns, id)))
        }
    }

    async fn handle_find(
        &self,
        query: Option<String>,
        ns: String,
        filter: Option<String>,
        topk: usize,
        json: bool,
    ) -> Result<()> {
        let config = config::load_config(None)?;
        let ns_config = config.get_namespace(&ns)
            .ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;

        let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
        let engine = SearchEngine::new(client.clone());

        // Build query
        let mut retrieval_query = RetrievalQuery::new(ns.clone()).with_top_k(topk);

        if let Some(text) = query {
            retrieval_query = retrieval_query.with_text(text);
        }

        if let Some(filter_str) = filter {
            let filters = parse_tags(&filter_str)?;
            for (key, value) in filters {
                retrieval_query = retrieval_query.with_filter(key, value);
            }
        }

        // Execute search
        let results = engine.search(&retrieval_query).await?;

        if json {
            println!("{}", serde_json::to_string_pretty(&results)?);
        } else {
            if results.is_empty() {
                println!("No results found");
            } else {
                println!("Found {} result(s):\n", results.len());
                for (i, result) in results.iter().enumerate() {
                    println!("{}. {} (score: {:.2})", i + 1, result.id, result.score);
                    if let Some(snippet) = &result.snippet {
                        println!(" {}", snippet);
                    }
                    println!();
                }
            }
        }

        Ok(())
    }

    async fn handle_stats(&self, ns: Option<String>) -> Result<()> {
        let config = config::load_config(None)?;

        if let Some(ns_name) = ns {
            let ns_config = config.get_namespace(&ns_name)
                .ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns_name)))?;

            let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
            let size = client.dbsize().await?;

            println!("Namespace: {}", ns_name);
            println!(" DB ID: {}", ns_config.db_id);
            println!(" Keys: {}", size);
        } else {
            println!("OSIRIS Statistics\n");
            println!("Namespaces: {}", config.namespaces.len());
            for (name, ns_config) in &config.namespaces {
                let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
                let size = client.dbsize().await?;
                println!(" {} (DB {}) → {} keys", name, ns_config.db_id, size);
            }
        }

        Ok(())
    }
}

/// Parse a path into namespace and name/id
fn parse_path(path: &str) -> Result<(String, String)> {
    let parts: Vec<&str> = path.splitn(2, '/').collect();
    if parts.len() != 2 {
        return Err(Error::InvalidInput(format!(
            "Invalid path format. Expected 'namespace/name', got '{}'",
            path
        )));
    }
    Ok((parts[0].to_string(), parts[1].to_string()))
}

/// Parse tags from comma-separated key=value pairs
fn parse_tags(tags_str: &str) -> Result<BTreeMap<String, String>> {
    let mut tags = BTreeMap::new();

    for pair in tags_str.split(',') {
        let parts: Vec<&str> = pair.trim().splitn(2, '=').collect();
        if parts.len() != 2 {
            return Err(Error::InvalidInput(format!(
                "Invalid tag format. Expected 'key=value', got '{}'",
                pair
            )));
        }
        tags.insert(parts[0].to_string(), parts[1].to_string());
    }

    Ok(tags)
}
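The two helpers above carry the CLI's path and tag grammar, so a couple of concrete cases make their behaviour easier to see; a hypothetical unit test sketch, not part of this commit:

```rust
// Hypothetical unit tests for parse_path and parse_tags (not in this commit).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_paths_and_tags() {
        assert_eq!(
            parse_path("notes/my-note").unwrap(),
            ("notes".to_string(), "my-note".to_string())
        );
        assert!(parse_path("no-slash").is_err());

        let tags = parse_tags("topic=rust,project=osiris").unwrap();
        assert_eq!(tags.get("topic").map(String::as_str), Some("rust"));
        assert_eq!(tags.len(), 2);
    }
}
```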
src/interfaces/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod cli;

pub use cli::Cli;
src/lib.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
// Allow the crate to reference itself as ::osiris for the derive macro
extern crate self as osiris;

pub mod config;
pub mod error;
pub mod index;
pub mod interfaces;
pub mod objects;
pub mod retrieve;
pub mod store;

#[cfg(feature = "rhai-support")]
pub mod rhai_support;

pub use error::{Error, Result};
pub use store::{BaseData, IndexKey, Object, Storable};

// Re-export the derive macro
pub use osiris_derive::Object as DeriveObject;

// OsirisInstance is the main type for Rhai integration
#[cfg(feature = "rhai-support")]
pub use rhai_support::OsirisInstance;
src/main.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
use clap::Parser;
use osiris::interfaces::Cli;
use tracing_subscriber::{fmt, EnvFilter};

#[tokio::main]
async fn main() {
    // Initialize tracing
    fmt()
        .with_env_filter(
            EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")),
        )
        .init();

    // Parse CLI arguments
    let cli = Cli::parse();

    // Run the command
    if let Err(e) = cli.run().await {
        eprintln!("Error: {}", e);
        std::process::exit(1);
    }
}
src/objects/event/mod.rs (new file, 139 lines)
@@ -0,0 +1,139 @@
use crate::store::BaseData;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

#[cfg(feature = "rhai-support")]
pub mod rhai;

/// Event status
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
pub enum EventStatus {
    #[default]
    Draft,
    Published,
    Cancelled,
}

/// A calendar event object
#[derive(Debug, Clone, Serialize, Deserialize, crate::DeriveObject)]
pub struct Event {
    /// Base data
    pub base_data: BaseData,

    /// Title of the event
    #[index]
    pub title: String,

    /// Optional description
    pub description: Option<String>,

    /// Start time
    #[index]
    #[serde(with = "time::serde::timestamp")]
    pub start_time: OffsetDateTime,

    /// End time
    #[serde(with = "time::serde::timestamp")]
    pub end_time: OffsetDateTime,

    /// Optional location
    #[index]
    pub location: Option<String>,

    /// Event status
    #[index]
    pub status: EventStatus,

    /// Whether this is an all-day event
    pub all_day: bool,

    /// Optional category
    #[index]
    pub category: Option<String>,
}

impl Event {
    /// Create a new event
    pub fn new(ns: String, title: impl ToString) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            base_data: BaseData::new(ns),
            title: title.to_string(),
            description: None,
            start_time: now,
            end_time: now,
            location: None,
            status: EventStatus::default(),
            all_day: false,
            category: None,
        }
    }

    /// Create an event with specific ID
    pub fn with_id(id: String, ns: String, title: impl ToString) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            base_data: BaseData::with_id(id, ns),
            title: title.to_string(),
            description: None,
            start_time: now,
            end_time: now,
            location: None,
            status: EventStatus::default(),
            all_day: false,
            category: None,
        }
    }

    /// Set the description
    pub fn set_description(mut self, description: impl ToString) -> Self {
        self.description = Some(description.to_string());
        self.base_data.update_modified();
        self
    }

    /// Set the start time
    pub fn set_start_time(mut self, start_time: OffsetDateTime) -> Self {
        self.start_time = start_time;
        self.base_data.update_modified();
        self
    }

    /// Set the end time
    pub fn set_end_time(mut self, end_time: OffsetDateTime) -> Self {
        self.end_time = end_time;
        self.base_data.update_modified();
        self
    }

    /// Set the location
    pub fn set_location(mut self, location: impl ToString) -> Self {
        self.location = Some(location.to_string());
        self.base_data.update_modified();
        self
    }

    /// Set the status
    pub fn set_status(mut self, status: EventStatus) -> Self {
        self.status = status;
        self.base_data.update_modified();
        self
    }

    /// Set as all-day event
    pub fn set_all_day(mut self, all_day: bool) -> Self {
        self.all_day = all_day;
        self.base_data.update_modified();
        self
    }

    /// Set the category
    pub fn set_category(mut self, category: impl ToString) -> Self {
        self.category = Some(category.to_string());
        self.base_data.update_modified();
        self
    }
}

// Object trait implementation is auto-generated by #[derive(DeriveObject)]
// The derive macro generates: object_type(), base_data(), base_data_mut(), index_keys(), indexed_fields()
src/objects/event/rhai.rs (new file, 64 lines)
@@ -0,0 +1,64 @@
use crate::objects::Event;
use rhai::{CustomType, Engine, TypeBuilder};

impl CustomType for Event {
    fn build(mut builder: TypeBuilder<Self>) {
        builder
            .with_name("Event")
            .with_fn("new", |ns: String, title: String| Event::new(ns, title))
            .with_fn("set_description", |event: &mut Event, desc: String| {
                event.description = Some(desc);
                event.base_data.update_modified();
            })
            .with_fn("set_location", |event: &mut Event, location: String| {
                event.location = Some(location);
                event.base_data.update_modified();
            })
            .with_fn("set_category", |event: &mut Event, category: String| {
                event.category = Some(category);
                event.base_data.update_modified();
            })
            .with_fn("set_all_day", |event: &mut Event, all_day: bool| {
                event.all_day = all_day;
                event.base_data.update_modified();
            })
            .with_fn("get_id", |event: &mut Event| event.base_data.id.clone())
            .with_fn("get_title", |event: &mut Event| event.title.clone())
            .with_fn("to_json", |event: &mut Event| {
                serde_json::to_string_pretty(event).unwrap_or_default()
            });
    }
}

/// Register Event API in Rhai engine
pub fn register_event_api(engine: &mut Engine) {
    engine.build_type::<Event>();

    // Register builder-style constructor
    engine.register_fn("event", |ns: String, title: String| Event::new(ns, title));

    // Register chainable methods that return Self
    engine.register_fn("description", |mut event: Event, desc: String| {
        event.description = Some(desc);
        event.base_data.update_modified();
        event
    });

    engine.register_fn("location", |mut event: Event, location: String| {
        event.location = Some(location);
        event.base_data.update_modified();
        event
    });

    engine.register_fn("category", |mut event: Event, category: String| {
        event.category = Some(category);
        event.base_data.update_modified();
        event
    });

    engine.register_fn("all_day", |mut event: Event, all_day: bool| {
        event.all_day = all_day;
        event.base_data.update_modified();
        event
    });
}
src/objects/mod.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
pub mod note;
pub mod event;

pub use note::Note;
pub use event::Event;
src/objects/note/mod.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
use crate::store::BaseData;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;

#[cfg(feature = "rhai-support")]
pub mod rhai;

/// A simple note object
#[derive(Debug, Clone, Serialize, Deserialize, crate::DeriveObject)]
pub struct Note {
    /// Base data
    pub base_data: BaseData,

    /// Title of the note
    #[index]
    pub title: Option<String>,

    /// Content of the note (searchable but not indexed)
    pub content: Option<String>,

    /// Tags for categorization
    #[index]
    pub tags: BTreeMap<String, String>,
}

impl Note {
    /// Create a new note
    pub fn new(ns: String) -> Self {
        Self {
            base_data: BaseData::new(ns),
            title: None,
            content: None,
            tags: BTreeMap::new(),
        }
    }

    /// Create a note with specific ID
    pub fn with_id(id: String, ns: String) -> Self {
        Self {
            base_data: BaseData::with_id(id, ns),
            title: None,
            content: None,
            tags: BTreeMap::new(),
        }
    }

    /// Set the title
    pub fn set_title(mut self, title: impl ToString) -> Self {
        self.title = Some(title.to_string());
        self.base_data.update_modified();
        self
    }

    /// Set the content
    pub fn set_content(mut self, content: impl ToString) -> Self {
        let content_str = content.to_string();
        self.base_data.set_size(Some(content_str.len() as u64));
        self.content = Some(content_str);
        self.base_data.update_modified();
        self
    }

    /// Add a tag
    pub fn add_tag(mut self, key: impl ToString, value: impl ToString) -> Self {
        self.tags.insert(key.to_string(), value.to_string());
        self.base_data.update_modified();
        self
    }

    /// Set MIME type
    pub fn set_mime(mut self, mime: impl ToString) -> Self {
        self.base_data.set_mime(Some(mime.to_string()));
        self
    }
}

// Object trait implementation is auto-generated by #[derive(DeriveObject)]
// The derive macro generates: object_type(), base_data(), base_data_mut(), index_keys(), indexed_fields()
src/objects/note/rhai.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
use crate::objects::Note;
use rhai::{CustomType, Engine, TypeBuilder};

impl CustomType for Note {
    fn build(mut builder: TypeBuilder<Self>) {
        builder
            .with_name("Note")
            .with_fn("new", |ns: String| Note::new(ns))
            .with_fn("set_title", |note: &mut Note, title: String| {
                note.title = Some(title);
                note.base_data.update_modified();
            })
            .with_fn("set_content", |note: &mut Note, content: String| {
                let size = content.len() as u64;
                note.content = Some(content);
                note.base_data.set_size(Some(size));
                note.base_data.update_modified();
            })
            .with_fn("add_tag", |note: &mut Note, key: String, value: String| {
                note.tags.insert(key, value);
                note.base_data.update_modified();
            })
            .with_fn("set_mime", |note: &mut Note, mime: String| {
                note.base_data.set_mime(Some(mime));
            })
            .with_fn("get_id", |note: &mut Note| note.base_data.id.clone())
            .with_fn("get_title", |note: &mut Note| note.title.clone().unwrap_or_default())
            .with_fn("get_content", |note: &mut Note| note.content.clone().unwrap_or_default())
            .with_fn("to_json", |note: &mut Note| {
                serde_json::to_string_pretty(note).unwrap_or_default()
            });
    }
}

/// Register Note API in Rhai engine
pub fn register_note_api(engine: &mut Engine) {
    engine.build_type::<Note>();

    // Register builder-style constructor
    engine.register_fn("note", |ns: String| Note::new(ns));

    // Register chainable methods that return Self
    engine.register_fn("title", |mut note: Note, title: String| {
        note.title = Some(title);
        note.base_data.update_modified();
        note
    });

    engine.register_fn("content", |mut note: Note, content: String| {
        let size = content.len() as u64;
        note.content = Some(content);
        note.base_data.set_size(Some(size));
        note.base_data.update_modified();
        note
    });

    engine.register_fn("tag", |mut note: Note, key: String, value: String| {
        note.tags.insert(key, value);
        note.base_data.update_modified();
        note
    });

    engine.register_fn("mime", |mut note: Note, mime: String| {
        note.base_data.set_mime(Some(mime));
        note
    });
}
src/retrieve/mod.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
pub mod query;
pub mod search;

pub use query::RetrievalQuery;
pub use search::SearchEngine;
src/retrieve/query.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
/// Retrieval query structure
#[derive(Clone, Debug)]
pub struct RetrievalQuery {
    /// Optional text query for keyword substring matching
    pub text: Option<String>,

    /// Namespace to search in
    pub ns: String,

    /// Field filters (key=value pairs)
    pub filters: Vec<(String, String)>,

    /// Maximum number of results to return
    pub top_k: usize,
}

impl RetrievalQuery {
    /// Create a new retrieval query
    pub fn new(ns: String) -> Self {
        Self {
            text: None,
            ns,
            filters: Vec::new(),
            top_k: 10,
        }
    }

    /// Set the text query
    pub fn with_text(mut self, text: String) -> Self {
        self.text = Some(text);
        self
    }

    /// Add a filter
    pub fn with_filter(mut self, key: String, value: String) -> Self {
        self.filters.push((key, value));
        self
    }

    /// Set the maximum number of results
    pub fn with_top_k(mut self, top_k: usize) -> Self {
        self.top_k = top_k;
        self
    }
}

/// Search result
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct SearchResult {
    /// Object ID
    pub id: String,

    /// Match score (0.0 to 1.0)
    pub score: f32,

    /// Matched text snippet (if applicable)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub snippet: Option<String>,
}

impl SearchResult {
    pub fn new(id: String, score: f32) -> Self {
        Self {
            id,
            score,
            snippet: None,
        }
    }

    pub fn with_snippet(mut self, snippet: String) -> Self {
        self.snippet = Some(snippet);
        self
    }
}
src/retrieve/search.rs (new file, 150 lines)
@@ -0,0 +1,150 @@
use crate::error::Result;
use crate::index::FieldIndex;
use crate::retrieve::query::{RetrievalQuery, SearchResult};
use crate::store::{HeroDbClient, OsirisObject};

/// Search engine for OSIRIS
pub struct SearchEngine {
    client: HeroDbClient,
    index: FieldIndex,
}

impl SearchEngine {
    /// Create a new search engine
    pub fn new(client: HeroDbClient) -> Self {
        let index = FieldIndex::new(client.clone());
        Self { client, index }
    }

    /// Execute a search query
    pub async fn search(&self, query: &RetrievalQuery) -> Result<Vec<SearchResult>> {
        // Step 1: Get candidate IDs from field filters
        let candidate_ids = if query.filters.is_empty() {
            self.index.get_all_ids().await?
        } else {
            self.index.get_ids_by_filters(&query.filters).await?
        };

        // Step 2: If text query is provided, filter by substring match
        let mut results = Vec::new();

        if let Some(text_query) = &query.text {
            let text_query_lower = text_query.to_lowercase();

            for id in candidate_ids {
                // Fetch the object
                if let Ok(obj) = self.client.get_object(&id).await {
                    // Check if text matches
                    let score = self.compute_text_score(&obj, &text_query_lower);

                    if score > 0.0 {
                        let snippet = self.extract_snippet(&obj, &text_query_lower);
                        results.push(SearchResult::new(id, score).with_snippet(snippet));
                    }
                }
            }
        } else {
            // No text query, return all candidates with score 1.0
            for id in candidate_ids {
                results.push(SearchResult::new(id, 1.0));
            }
        }

        // Step 3: Sort by score (descending) and limit
        results.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap());
        results.truncate(query.top_k);

        Ok(results)
    }

    /// Compute text match score (simple substring matching)
    fn compute_text_score(&self, obj: &OsirisObject, query: &str) -> f32 {
        let mut score = 0.0;

        // Check title
        if let Some(title) = &obj.meta.title {
            if title.to_lowercase().contains(query) {
                score += 0.5;
            }
        }

        // Check text content
        if let Some(text) = &obj.text {
            if text.to_lowercase().contains(query) {
                score += 0.5;

                // Bonus for multiple occurrences
                let count = text.to_lowercase().matches(query).count();
                score += (count as f32 - 1.0) * 0.1;
            }
        }

        // Check tags
        for (key, value) in &obj.meta.tags {
            if key.to_lowercase().contains(query) || value.to_lowercase().contains(query) {
                score += 0.2;
            }
        }

        score.min(1.0)
    }

    /// Extract a snippet around the matched text
    fn extract_snippet(&self, obj: &OsirisObject, query: &str) -> String {
        const SNIPPET_LENGTH: usize = 100;

        // Try to find snippet in text
        if let Some(text) = &obj.text {
            let text_lower = text.to_lowercase();
            if let Some(pos) = text_lower.find(query) {
                let start = pos.saturating_sub(SNIPPET_LENGTH / 2);
                let end = (pos + query.len() + SNIPPET_LENGTH / 2).min(text.len());

                let mut snippet = text[start..end].to_string();
                if start > 0 {
                    snippet = format!("...{}", snippet);
                }
                if end < text.len() {
                    snippet = format!("{}...", snippet);
                }

                return snippet;
            }
        }

        // Fallback to title or first N chars
        if let Some(title) = &obj.meta.title {
            return title.clone();
        }

        if let Some(text) = &obj.text {
            let end = SNIPPET_LENGTH.min(text.len());
            let mut snippet = text[..end].to_string();
            if end < text.len() {
                snippet = format!("{}...", snippet);
            }
            return snippet;
        }

        String::from("[No content]")
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    #[ignore]
    async fn test_search() {
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let engine = SearchEngine::new(client);

        let query = RetrievalQuery::new("test".to_string())
            .with_text("rust".to_string())
            .with_top_k(10);

        let results = engine.search(&query).await.unwrap();
        assert!(results.len() <= 10);
    }
}
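The scoring is additive: a title hit contributes 0.5, a content hit contributes 0.5 plus 0.1 per extra occurrence, each matching tag contributes 0.2, and the total is capped at 1.0. A worked example of that arithmetic, sketched as a hypothetical test that would live inside the tests module above (it uses the private compute_text_score and the OsirisObject setters seen elsewhere in this commit, so treat it as illustrative only):

```rust
// Hypothetical addition to the tests module above (not part of this commit).
#[tokio::test]
#[ignore]
async fn scores_title_and_content_matches() {
    let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
    let engine = SearchEngine::new(client);

    let mut obj = OsirisObject::new("test".to_string(), Some("rust rust".to_string()));
    obj.set_title(Some("Learning Rust".to_string()));

    // title hit (0.5) + content hit (0.5) + one extra occurrence (0.1), capped at 1.0
    assert_eq!(engine.compute_text_score(&obj, "rust"), 1.0);
}
```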
src/rhai_support/instance.rs (new file, 121 lines)
@@ -0,0 +1,121 @@
/// OSIRIS Instance for Rhai
///
/// Represents a named OSIRIS instance that can be used in Rhai scripts.
/// Multiple instances can coexist, each with their own HeroDB connection.

use crate::objects::{Event, Note};
use crate::store::{GenericStore, HeroDbClient};
use rhai::{CustomType, EvalAltResult, TypeBuilder};
use std::sync::Arc;
use tokio::runtime::Runtime;

/// A named OSIRIS instance for use in Rhai scripts
#[derive(Clone)]
pub struct OsirisInstance {
    name: String,
    store: Arc<GenericStore>,
    runtime: Arc<Runtime>,
}

impl OsirisInstance {
    /// Create a new OSIRIS instance
    pub fn new(name: impl ToString, herodb_url: &str, db_id: u16) -> Result<Self, Box<dyn std::error::Error>> {
        let client = HeroDbClient::new(herodb_url, db_id)?;
        let store = GenericStore::new(client);
        let runtime = Runtime::new()?;

        Ok(Self {
            name: name.to_string(),
            store: Arc::new(store),
            runtime: Arc::new(runtime),
        })
    }

    /// Get the instance name
    pub fn name(&self) -> String {
        self.name.clone()
    }

    /// Put a Note object
    pub fn put_note(&self, note: Note) -> Result<String, Box<EvalAltResult>> {
        let store = self.store.clone();
        let id = note.base_data.id.clone();

        self.runtime
            .block_on(async move { store.put(&note).await })
            .map_err(|e| format!("[{}] Failed to put note: {}", self.name, e).into())
            .map(|_| id)
    }

    /// Get a Note object by ID
    pub fn get_note(&self, ns: String, id: String) -> Result<Note, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.get::<Note>(&ns, &id).await })
            .map_err(|e| format!("[{}] Failed to get note: {}", self.name, e).into())
    }

    /// Put an Event object
    pub fn put_event(&self, event: Event) -> Result<String, Box<EvalAltResult>> {
        let store = self.store.clone();
        let id = event.base_data.id.clone();

        self.runtime
            .block_on(async move { store.put(&event).await })
            .map_err(|e| format!("[{}] Failed to put event: {}", self.name, e).into())
            .map(|_| id)
    }

    /// Get an Event object by ID
    pub fn get_event(&self, ns: String, id: String) -> Result<Event, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.get::<Event>(&ns, &id).await })
            .map_err(|e| format!("[{}] Failed to get event: {}", self.name, e).into())
    }

    /// Query by index
    pub fn query(&self, ns: String, field: String, value: String) -> Result<rhai::Array, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.get_ids_by_index(&ns, &field, &value).await })
            .map(|ids| ids.into_iter().map(rhai::Dynamic::from).collect())
            .map_err(|e| format!("[{}] Failed to query: {}", self.name, e).into())
    }

    /// Delete a Note
    pub fn delete_note(&self, note: Note) -> Result<bool, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.delete(&note).await })
            .map_err(|e| format!("[{}] Failed to delete note: {}", self.name, e).into())
    }

    /// Delete an Event
    pub fn delete_event(&self, event: Event) -> Result<bool, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.delete(&event).await })
            .map_err(|e| format!("[{}] Failed to delete event: {}", self.name, e).into())
    }
}

impl CustomType for OsirisInstance {
    fn build(mut builder: TypeBuilder<Self>) {
        builder
            .with_name("OsirisInstance")
            .with_fn("name", |instance: &mut OsirisInstance| instance.name())
            .with_fn("put_note", |instance: &mut OsirisInstance, note: Note| instance.put_note(note))
            .with_fn("get_note", |instance: &mut OsirisInstance, ns: String, id: String| instance.get_note(ns, id))
            .with_fn("put_event", |instance: &mut OsirisInstance, event: Event| instance.put_event(event))
            .with_fn("get_event", |instance: &mut OsirisInstance, ns: String, id: String| instance.get_event(ns, id))
            .with_fn("query", |instance: &mut OsirisInstance, ns: String, field: String, value: String| instance.query(ns, field, value))
            .with_fn("delete_note", |instance: &mut OsirisInstance, note: Note| instance.delete_note(note))
            .with_fn("delete_event", |instance: &mut OsirisInstance, event: Event| instance.delete_event(event));
    }
}
src/rhai_support/mod.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
/// Rhai support for OSIRIS
///
/// This module provides Rhai integration infrastructure for OSIRIS.
/// Object-specific Rhai support is located in each object's module (e.g., objects/note/rhai.rs).

pub mod instance;

pub use instance::OsirisInstance;

// Re-export registration functions from object modules
pub use crate::objects::note::rhai::register_note_api;
pub use crate::objects::event::rhai::register_event_api;
78
src/store/base_data.rs
Normal file
78
src/store/base_data.rs
Normal file
@@ -0,0 +1,78 @@
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

/// Base data that all OSIRIS objects must include
/// Similar to heromodels BaseModelData but adapted for OSIRIS
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct BaseData {
    /// Unique ID (auto-generated or user-assigned)
    pub id: String,

    /// Namespace this object belongs to
    pub ns: String,

    /// Unix timestamp for creation time
    #[serde(with = "time::serde::timestamp")]
    pub created_at: OffsetDateTime,

    /// Unix timestamp for last modification time
    #[serde(with = "time::serde::timestamp")]
    pub modified_at: OffsetDateTime,

    /// Optional MIME type
    pub mime: Option<String>,

    /// Content size in bytes
    pub size: Option<u64>,
}

impl BaseData {
    /// Create new base data with generated UUID
    pub fn new(ns: String) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id: uuid::Uuid::new_v4().to_string(),
            ns,
            created_at: now,
            modified_at: now,
            mime: None,
            size: None,
        }
    }

    /// Create new base data with specific ID
    pub fn with_id(id: String, ns: String) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id,
            ns,
            created_at: now,
            modified_at: now,
            mime: None,
            size: None,
        }
    }

    /// Update the modified timestamp
    pub fn update_modified(&mut self) {
        self.modified_at = OffsetDateTime::now_utc();
    }

    /// Set the MIME type
    pub fn set_mime(&mut self, mime: Option<String>) {
        self.mime = mime;
        self.update_modified();
    }

    /// Set the size
    pub fn set_size(&mut self, size: Option<u64>) {
        self.size = size;
        self.update_modified();
    }
}

impl Default for BaseData {
    fn default() -> Self {
        Self::new(String::from("default"))
    }
}
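
As a quick illustration (not part of the commit), here is how BaseData might be used directly, assuming it is re-exported as `osiris::store::BaseData` as the store module below suggests; the namespace and values are arbitrary:

```rust
use osiris::store::BaseData;

fn main() {
    // Fresh object in the "notes" namespace with a generated UUID.
    let mut base = BaseData::new("notes".to_string());
    assert_eq!(base.ns, "notes");
    assert!(base.mime.is_none() && base.size.is_none());

    // set_mime / set_size also refresh modified_at via update_modified().
    base.set_mime(Some("text/markdown".to_string()));
    base.set_size(Some(1024));
    assert!(base.modified_at >= base.created_at);

    // Alternatively, pin a caller-chosen ID.
    let pinned = BaseData::with_id("note-1".to_string(), "notes".to_string());
    assert_eq!(pinned.id, "note-1");
}
```
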
124
src/store/generic_store.rs
Normal file
124
src/store/generic_store.rs
Normal file
@@ -0,0 +1,124 @@
use crate::error::Result;
use crate::index::FieldIndex;
use crate::store::{HeroDbClient, Object};

/// Generic storage layer for OSIRIS objects
pub struct GenericStore {
    client: HeroDbClient,
    index: FieldIndex,
}

impl GenericStore {
    /// Create a new generic store
    pub fn new(client: HeroDbClient) -> Self {
        let index = FieldIndex::new(client.clone());
        Self { client, index }
    }

    /// Store an object
    pub async fn put<T: Object>(&self, obj: &T) -> Result<()> {
        // Serialize object to JSON
        let json = obj.to_json()?;
        let key = format!("obj:{}:{}", obj.namespace(), obj.id());

        // Store in HeroDB
        self.client.set(&key, &json).await?;

        // Index the object
        self.index_object(obj).await?;

        Ok(())
    }

    /// Get an object by ID
    pub async fn get<T: Object>(&self, ns: &str, id: &str) -> Result<T> {
        let key = format!("obj:{}:{}", ns, id);
        let json = self.client.get(&key).await?
            .ok_or_else(|| crate::error::Error::NotFound(format!("Object {}:{}", ns, id)))?;

        T::from_json(&json)
    }

    /// Delete an object
    pub async fn delete<T: Object>(&self, obj: &T) -> Result<bool> {
        let key = format!("obj:{}:{}", obj.namespace(), obj.id());

        // Deindex first
        self.deindex_object(obj).await?;

        // Delete from HeroDB
        self.client.del(&key).await
    }

    /// Check if an object exists
    pub async fn exists(&self, ns: &str, id: &str) -> Result<bool> {
        let key = format!("obj:{}:{}", ns, id);
        self.client.exists(&key).await
    }

    /// Index an object
    async fn index_object<T: Object>(&self, obj: &T) -> Result<()> {
        let index_keys = obj.index_keys();

        for key in index_keys {
            let field_key = format!("idx:{}:{}:{}", obj.namespace(), key.name, key.value);
            self.client.sadd(&field_key, obj.id()).await?;
        }

        // Add to scan index for full-text search
        let scan_key = format!("scan:{}", obj.namespace());
        self.client.sadd(&scan_key, obj.id()).await?;

        Ok(())
    }

    /// Deindex an object
    async fn deindex_object<T: Object>(&self, obj: &T) -> Result<()> {
        let index_keys = obj.index_keys();

        for key in index_keys {
            let field_key = format!("idx:{}:{}:{}", obj.namespace(), key.name, key.value);
            self.client.srem(&field_key, obj.id()).await?;
        }

        // Remove from scan index
        let scan_key = format!("scan:{}", obj.namespace());
        self.client.srem(&scan_key, obj.id()).await?;

        Ok(())
    }

    /// Get all IDs matching an index key
    pub async fn get_ids_by_index(&self, ns: &str, field: &str, value: &str) -> Result<Vec<String>> {
        let field_key = format!("idx:{}:{}:{}", ns, field, value);
        self.client.smembers(&field_key).await
    }

    /// Get all IDs in a namespace
    pub async fn get_all_ids(&self, ns: &str) -> Result<Vec<String>> {
        let scan_key = format!("scan:{}", ns);
        self.client.smembers(&scan_key).await
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::objects::Note;

    #[tokio::test]
    #[ignore]
    async fn test_generic_store() {
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let store = GenericStore::new(client);

        let note = Note::new("test".to_string())
            .set_title("Test Note")
            .set_content("This is a test");

        store.put(&note).await.unwrap();

        let retrieved: Note = store.get("test", note.id()).await.unwrap();
        assert_eq!(retrieved.title, note.title);
    }
}
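
A hedged usage sketch for the store above, mirroring the ignored test. It assumes a reachable HeroDB at the placeholder URL, and that `title` is one of the fields Note contributes to `index_keys()` (illustrative only):

```rust
use osiris::objects::Note;
use osiris::store::{GenericStore, HeroDbClient};

#[tokio::main]
async fn main() {
    // Placeholder connection details for a local HeroDB.
    let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
    let store = GenericStore::new(client);

    // put() writes obj:<ns>:<id> and populates idx:<ns>:<field>:<value> plus scan:<ns>.
    let note = Note::new("notes".to_string())
        .set_title("Hello")
        .set_content("OSIRIS generic store");
    store.put(&note).await.unwrap();

    // Point lookup by namespace and id.
    let fetched: Note = store.get("notes", note.id()).await.unwrap();
    assert_eq!(fetched.id(), note.id());

    // Secondary-index lookup over the idx:* sets (field/value are illustrative).
    let ids = store.get_ids_by_index("notes", "title", "Hello").await.unwrap();
    println!("ids indexed under title=Hello: {:?}", ids);

    // delete() removes the object and de-indexes it.
    store.delete(&note).await.unwrap();
}
```
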
161
src/store/herodb_client.rs
Normal file
161
src/store/herodb_client.rs
Normal file
@@ -0,0 +1,161 @@
use crate::error::{Error, Result};
use crate::store::OsirisObject;
use redis::aio::MultiplexedConnection;
use redis::{AsyncCommands, Client};

/// HeroDB client wrapper for OSIRIS operations
#[derive(Clone)]
pub struct HeroDbClient {
    client: Client,
    pub db_id: u16,
}

impl HeroDbClient {
    /// Create a new HeroDB client
    pub fn new(url: &str, db_id: u16) -> Result<Self> {
        let client = Client::open(url)?;
        Ok(Self { client, db_id })
    }

    /// Get a connection to the database
    pub async fn get_connection(&self) -> Result<MultiplexedConnection> {
        let mut conn = self.client.get_multiplexed_async_connection().await?;

        // Select the appropriate database
        if self.db_id > 0 {
            // Annotate the reply type so the generic return value can be inferred.
            let _: () = redis::cmd("SELECT")
                .arg(self.db_id)
                .query_async(&mut conn)
                .await?;
        }

        Ok(conn)
    }

    /// Store an object in HeroDB
    pub async fn put_object(&self, obj: &OsirisObject) -> Result<()> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", obj.id);
        let value = serde_json::to_string(obj)?;

        let _: () = conn.set(&key, value).await?;
        Ok(())
    }

    /// Retrieve an object from HeroDB
    pub async fn get_object(&self, id: &str) -> Result<OsirisObject> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", id);

        let value: Option<String> = conn.get(&key).await?;
        match value {
            Some(v) => {
                let obj: OsirisObject = serde_json::from_str(&v)?;
                Ok(obj)
            }
            None => Err(Error::NotFound(format!("Object not found: {}", id))),
        }
    }

    /// Delete an object from HeroDB
    pub async fn delete_object(&self, id: &str) -> Result<bool> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", id);

        let deleted: i32 = conn.del(&key).await?;
        Ok(deleted > 0)
    }

    /// Check if an object exists
    pub async fn exists(&self, id: &str) -> Result<bool> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", id);

        let exists: bool = conn.exists(&key).await?;
        Ok(exists)
    }

    /// Add an ID to a set (for field indexing)
    pub async fn sadd(&self, set_key: &str, member: &str) -> Result<()> {
        let mut conn = self.get_connection().await?;
        let _: () = conn.sadd(set_key, member).await?;
        Ok(())
    }

    /// Remove an ID from a set
    pub async fn srem(&self, set_key: &str, member: &str) -> Result<()> {
        let mut conn = self.get_connection().await?;
        let _: () = conn.srem(set_key, member).await?;
        Ok(())
    }

    /// Get all members of a set
    pub async fn smembers(&self, set_key: &str) -> Result<Vec<String>> {
        let mut conn = self.get_connection().await?;
        let members: Vec<String> = conn.smembers(set_key).await?;
        Ok(members)
    }

    /// Get the intersection of multiple sets
    pub async fn sinter(&self, keys: &[String]) -> Result<Vec<String>> {
        let mut conn = self.get_connection().await?;
        let members: Vec<String> = conn.sinter(keys).await?;
        Ok(members)
    }

    /// Get all keys matching a pattern
    pub async fn keys(&self, pattern: &str) -> Result<Vec<String>> {
        let mut conn = self.get_connection().await?;
        let keys: Vec<String> = conn.keys(pattern).await?;
        Ok(keys)
    }

    /// Set a key-value pair
    pub async fn set(&self, key: &str, value: &str) -> Result<()> {
        let mut conn = self.get_connection().await?;
        let _: () = conn.set(key, value).await?;
        Ok(())
    }

    /// Get a value by key
    pub async fn get(&self, key: &str) -> Result<Option<String>> {
        let mut conn = self.get_connection().await?;
        let value: Option<String> = conn.get(key).await?;
        Ok(value)
    }

    /// Delete a key
    pub async fn del(&self, key: &str) -> Result<bool> {
        let mut conn = self.get_connection().await?;
        let deleted: i32 = conn.del(key).await?;
        Ok(deleted > 0)
    }

    /// Get database size (number of keys)
    pub async fn dbsize(&self) -> Result<usize> {
        let mut conn = self.get_connection().await?;
        let size: usize = redis::cmd("DBSIZE").query_async(&mut conn).await?;
        Ok(size)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Note: These tests require a running HeroDB instance.
    // They are ignored by default.

    #[tokio::test]
    #[ignore]
    async fn test_put_get_object() {
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let obj = OsirisObject::new("test".to_string(), Some("Hello".to_string()));

        client.put_object(&obj).await.unwrap();
        let retrieved = client.get_object(&obj.id).await.unwrap();

        assert_eq!(obj.id, retrieved.id);
        assert_eq!(obj.text, retrieved.text);
    }
}
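
For completeness, a small sketch of driving the client directly (outside the generic store); the URL and db_id are placeholders and the `idx:*` key below simply mirrors the layout used in generic_store.rs:

```rust
use osiris::store::{HeroDbClient, OsirisObject};

#[tokio::main]
async fn main() {
    // Placeholder connection details for a local HeroDB.
    let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();

    // Objects land under meta:<id> as JSON.
    let obj = OsirisObject::new("notes".to_string(), Some("Hello".to_string()));
    client.put_object(&obj).await.unwrap();
    assert!(client.exists(&obj.id).await.unwrap());

    // Plain set operations, as used for field indexing.
    client.sadd("idx:notes:topic:rust", &obj.id).await.unwrap();
    let members = client.smembers("idx:notes:topic:rust").await.unwrap();
    assert!(members.contains(&obj.id));

    // Clean up.
    client.srem("idx:notes:topic:rust", &obj.id).await.unwrap();
    client.delete_object(&obj.id).await.unwrap();
}
```
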
11
src/store/mod.rs
Normal file
11
src/store/mod.rs
Normal file
@@ -0,0 +1,11 @@
pub mod base_data;
pub mod object_trait;
pub mod herodb_client;
pub mod generic_store;
pub mod object; // Keep old implementation for backwards compat temporarily

pub use base_data::BaseData;
pub use object_trait::{IndexKey, Object, Storable};
pub use herodb_client::HeroDbClient;
pub use generic_store::GenericStore;
pub use object::{Metadata, OsirisObject}; // Old implementation
160
src/store/object.rs
Normal file
160
src/store/object.rs
Normal file
@@ -0,0 +1,160 @@
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use time::OffsetDateTime;

/// Core OSIRIS object structure
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OsirisObject {
    /// Unique identifier (UUID or user-assigned)
    pub id: String,

    /// Namespace (e.g., "notes", "calendar")
    pub ns: String,

    /// Metadata
    pub meta: Metadata,

    /// Optional plain text content
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text: Option<String>,
}

/// Object metadata
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Metadata {
    /// Optional human-readable title
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,

    /// MIME type
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mime: Option<String>,

    /// Key-value tags for categorization
    #[serde(default)]
    pub tags: BTreeMap<String, String>,

    /// Creation timestamp
    #[serde(with = "time::serde::rfc3339")]
    pub created: OffsetDateTime,

    /// Last update timestamp
    #[serde(with = "time::serde::rfc3339")]
    pub updated: OffsetDateTime,

    /// Content size in bytes
    #[serde(skip_serializing_if = "Option::is_none")]
    pub size: Option<u64>,
}

impl OsirisObject {
    /// Create a new object with generated UUID
    pub fn new(ns: String, text: Option<String>) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id: uuid::Uuid::new_v4().to_string(),
            ns,
            meta: Metadata {
                title: None,
                mime: None,
                tags: BTreeMap::new(),
                created: now,
                updated: now,
                size: text.as_ref().map(|t| t.len() as u64),
            },
            text,
        }
    }

    /// Create a new object with specific ID
    pub fn with_id(id: String, ns: String, text: Option<String>) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id,
            ns,
            meta: Metadata {
                title: None,
                mime: None,
                tags: BTreeMap::new(),
                created: now,
                updated: now,
                size: text.as_ref().map(|t| t.len() as u64),
            },
            text,
        }
    }

    /// Update the object's text content
    pub fn update_text(&mut self, text: Option<String>) {
        self.meta.updated = OffsetDateTime::now_utc();
        self.meta.size = text.as_ref().map(|t| t.len() as u64);
        self.text = text;
    }

    /// Add or update a tag
    pub fn set_tag(&mut self, key: String, value: String) {
        self.meta.tags.insert(key, value);
        self.meta.updated = OffsetDateTime::now_utc();
    }

    /// Remove a tag
    pub fn remove_tag(&mut self, key: &str) -> Option<String> {
        let result = self.meta.tags.remove(key);
        if result.is_some() {
            self.meta.updated = OffsetDateTime::now_utc();
        }
        result
    }

    /// Set the title
    pub fn set_title(&mut self, title: Option<String>) {
        self.meta.title = title;
        self.meta.updated = OffsetDateTime::now_utc();
    }

    /// Set the MIME type
    pub fn set_mime(&mut self, mime: Option<String>) {
        self.meta.mime = mime;
        self.meta.updated = OffsetDateTime::now_utc();
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_object() {
        let obj = OsirisObject::new("notes".to_string(), Some("Hello, world!".to_string()));
        assert_eq!(obj.ns, "notes");
        assert_eq!(obj.text, Some("Hello, world!".to_string()));
        assert_eq!(obj.meta.size, Some(13));
    }

    #[test]
    fn test_update_text() {
        let mut obj = OsirisObject::new("notes".to_string(), Some("Initial".to_string()));
        let initial_updated = obj.meta.updated;

        std::thread::sleep(std::time::Duration::from_millis(10));
        obj.update_text(Some("Updated".to_string()));

        assert_eq!(obj.text, Some("Updated".to_string()));
        assert_eq!(obj.meta.size, Some(7));
        assert!(obj.meta.updated > initial_updated);
    }

    #[test]
    fn test_tags() {
        let mut obj = OsirisObject::new("notes".to_string(), None);
        obj.set_tag("topic".to_string(), "rust".to_string());
        obj.set_tag("project".to_string(), "osiris".to_string());

        assert_eq!(obj.meta.tags.get("topic"), Some(&"rust".to_string()));
        assert_eq!(obj.meta.tags.get("project"), Some(&"osiris".to_string()));

        let removed = obj.remove_tag("topic");
        assert_eq!(removed, Some("rust".to_string()));
        assert_eq!(obj.meta.tags.get("topic"), None);
    }
}
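
A short sketch of how the serde attributes above shape the serialized form, assuming serde_json as the serializer (the same crate the HeroDB client uses); the field values are arbitrary:

```rust
use osiris::store::OsirisObject;

fn main() {
    let mut obj = OsirisObject::new("notes".to_string(), None);
    obj.set_title(Some("Roadmap".to_string()));
    obj.set_tag("topic".to_string(), "rust".to_string());

    // text, mime and size are None here, so skip_serializing_if drops them;
    // created/updated serialize as RFC 3339 strings per the serde(with) attributes.
    let json = serde_json::to_string_pretty(&obj).unwrap();
    println!("{json}");
    assert!(!json.contains("\"mime\""));
    assert!(json.contains("\"title\": \"Roadmap\""));
}
```
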
108
src/store/object_trait.rs
Normal file
108
src/store/object_trait.rs
Normal file
@@ -0,0 +1,108 @@
use crate::error::Result;
use crate::store::BaseData;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;

/// Represents an index key for an object field
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct IndexKey {
    /// The name of the index key (field name)
    pub name: &'static str,

    /// The value of the index key for this object instance
    pub value: String,
}

impl IndexKey {
    pub fn new(name: &'static str, value: impl ToString) -> Self {
        Self {
            name,
            value: value.to_string(),
        }
    }
}

/// Core trait that all OSIRIS objects must implement
/// Similar to heromodels Model trait but adapted for OSIRIS
pub trait Object: Debug + Clone + Serialize + for<'de> Deserialize<'de> + Send + Sync {
    /// Get the object type name (used for routing/identification)
    fn object_type() -> &'static str
    where
        Self: Sized;

    /// Get a reference to the base data
    fn base_data(&self) -> &BaseData;

    /// Get a mutable reference to the base data
    fn base_data_mut(&mut self) -> &mut BaseData;

    /// Get the unique ID for this object
    fn id(&self) -> &str {
        &self.base_data().id
    }

    /// Get the namespace for this object
    fn namespace(&self) -> &str {
        &self.base_data().ns
    }

    /// Returns a list of index keys for this object instance
    /// These are generated from fields marked with #[index]
    /// The default implementation returns base_data indexes only
    fn index_keys(&self) -> Vec<IndexKey> {
        let base = self.base_data();
        let mut keys = Vec::new();

        // Index MIME type if present
        if let Some(mime) = &base.mime {
            keys.push(IndexKey::new("mime", mime));
        }

        keys
    }

    /// Return a list of field names which have an index applied
    /// This should be implemented by the derive macro
    fn indexed_fields() -> Vec<&'static str>
    where
        Self: Sized,
    {
        Vec::new()
    }

    /// Get the full-text searchable content for this object
    /// Override this to provide custom searchable text
    fn searchable_text(&self) -> Option<String> {
        None
    }

    /// Serialize the object to JSON
    fn to_json(&self) -> Result<String> {
        serde_json::to_string(self).map_err(Into::into)
    }

    /// Deserialize the object from JSON
    fn from_json(json: &str) -> Result<Self>
    where
        Self: Sized,
    {
        serde_json::from_str(json).map_err(Into::into)
    }

    /// Update the modified timestamp
    fn touch(&mut self) {
        self.base_data_mut().update_modified();
    }
}

/// Trait for objects that can be stored in OSIRIS
/// This is automatically implemented for all types that implement Object
pub trait Storable: Object {
    /// Prepare the object for storage (update timestamps, etc.)
    fn prepare_for_storage(&mut self) {
        self.touch();
    }
}

// Blanket implementation
impl<T: Object> Storable for T {}
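
To make the trait contract concrete, a hedged sketch of a hand-written implementation for a hypothetical Ticket type; in practice the `indexed_fields` comment above suggests a derive macro would generate most of this:

```rust
use osiris::store::{BaseData, IndexKey, Object};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Ticket {
    pub base_data: BaseData,
    pub status: String,
    pub body: String,
}

impl Object for Ticket {
    fn object_type() -> &'static str {
        "ticket"
    }

    fn base_data(&self) -> &BaseData {
        &self.base_data
    }

    fn base_data_mut(&mut self) -> &mut BaseData {
        &mut self.base_data
    }

    // Index the status field so lookups via idx:<ns>:status:<value> work.
    fn index_keys(&self) -> Vec<IndexKey> {
        vec![IndexKey::new("status", &self.status)]
    }

    fn indexed_fields() -> Vec<&'static str> {
        vec!["status"]
    }

    // Full-text search falls back to the ticket body.
    fn searchable_text(&self) -> Option<String> {
        Some(self.body.clone())
    }
}
```
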