first commit
.gitignore (vendored, Normal file, 9 lines)
@@ -0,0 +1,9 @@
/target/
Cargo.lock
*.swp
*.swo
*~
.DS_Store
.idea/
.vscode/
*.iml
Cargo.toml (Normal file, 35 lines)
@@ -0,0 +1,35 @@
[package]
name = "osiris"
version = "0.1.0"
edition = "2021"

[lib]
name = "osiris"
path = "src/lib.rs"

[[bin]]
name = "runner"
path = "src/bin/runner/main.rs"

[dependencies]
anyhow = "1.0"
redis = { version = "0.24", features = ["aio", "tokio-comp"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
time = { version = "0.3", features = ["serde", "formatting", "parsing", "macros"] }
tokio = { version = "1.23", features = ["full"] }
clap = { version = "4.5", features = ["derive"] }
toml = "0.8"
uuid = { version = "1.6", features = ["v4", "serde"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
osiris_derive = { path = "osiris_derive" }
rhai = { version = "1.21.0", features = ["std", "sync", "serde"], optional = true }
env_logger = "0.10"

[dev-dependencies]
tempfile = "3.8"

[features]
default = []
rhai-support = ["rhai"]
EXAMPLES.md (Normal file, 454 lines)
@@ -0,0 +1,454 @@
# OSIRIS Examples

This document provides practical examples of using OSIRIS for various use cases.

## Prerequisites

1. **Start HeroDB**:
   ```bash
   cd /path/to/herodb
   cargo run --release -- --dir ./data --admin-secret mysecret --port 6379
   ```

2. **Build OSIRIS**:
   ```bash
   cd /path/to/osiris
   cargo build --release
   ```

3. **Initialize OSIRIS**:
   ```bash
   ./target/release/osiris init --herodb redis://localhost:6379
   ```

---

## Example 1: Personal Note Management

### Create a namespace for notes
```bash
./target/release/osiris ns create notes
```

### Add notes with tags
```bash
# Create a note about Rust
echo "Rust is a systems programming language focused on safety and performance." | \
  ./target/release/osiris put notes/rust-intro - \
  --title "Introduction to Rust" \
  --tags topic=rust,level=beginner,type=tutorial \
  --mime text/plain

# Create a note about OSIRIS
echo "OSIRIS is an object storage system built on HeroDB." | \
  ./target/release/osiris put notes/osiris-overview - \
  --title "OSIRIS Overview" \
  --tags topic=osiris,level=intermediate,type=documentation \
  --mime text/plain

# Create a note from a file
./target/release/osiris put notes/network-latency ./network-notes.md \
  --title "Network Latency Analysis" \
  --tags topic=networking,priority=high,type=analysis \
  --mime text/markdown
```

### Search notes
```bash
# Search for notes about Rust
./target/release/osiris find "rust" --ns notes

# Filter by tag
./target/release/osiris find --ns notes --filter topic=rust

# Combine text search and filters
./target/release/osiris find "performance" --ns notes --filter level=beginner

# Get more results
./target/release/osiris find "programming" --ns notes --topk 20

# Output as JSON
./target/release/osiris find "rust" --ns notes --json
```

### Retrieve notes
```bash
# Get note as JSON (with metadata)
./target/release/osiris get notes/rust-intro

# Get raw content only
./target/release/osiris get notes/rust-intro --raw

# Save to file
./target/release/osiris get notes/rust-intro --raw --output /tmp/rust-intro.txt
```

### Delete notes
```bash
./target/release/osiris del notes/rust-intro
```

---

## Example 2: Calendar/Event Management

### Create a calendar namespace
```bash
./target/release/osiris ns create calendar
```

### Add events
```bash
# Add a meeting
echo '{"title":"Team Standup","when":"2025-10-20T10:00:00Z","attendees":["alice","bob"]}' | \
  ./target/release/osiris put calendar/standup-2025-10-20 - \
  --title "Team Standup" \
  --tags type=meeting,team=eng,priority=high \
  --mime application/json

# Add a deadline
echo '{"title":"Project Deadline","when":"2025-10-31T23:59:59Z","project":"osiris-mvp"}' | \
  ./target/release/osiris put calendar/deadline-osiris - \
  --title "OSIRIS MVP Deadline" \
  --tags type=deadline,project=osiris,priority=critical \
  --mime application/json

# Add a reminder
echo '{"title":"Code Review","when":"2025-10-21T14:00:00Z","pr":"#123"}' | \
  ./target/release/osiris put calendar/review-pr123 - \
  --title "Code Review PR #123" \
  --tags type=reminder,team=eng \
  --mime application/json
```

### Search events
```bash
# Find all meetings
./target/release/osiris find --ns calendar --filter type=meeting

# Find high-priority items
./target/release/osiris find --ns calendar --filter priority=high

# Search by text
./target/release/osiris find "standup" --ns calendar

# Find project-specific events
./target/release/osiris find --ns calendar --filter project=osiris
```

---

## Example 3: Code Snippet Library

### Create a snippets namespace
```bash
./target/release/osiris ns create snippets
```

### Add code snippets
```bash
# Rust snippet
cat > /tmp/rust-error-handling.rs <<'EOF'
use anyhow::Result;

fn main() -> Result<()> {
    let result = risky_operation()?;
    println!("Success: {}", result);
    Ok(())
}

fn risky_operation() -> Result<String> {
    Ok("All good!".to_string())
}
EOF

./target/release/osiris put snippets/rust-error-handling /tmp/rust-error-handling.rs \
  --title "Rust Error Handling with anyhow" \
  --tags language=rust,topic=error-handling,pattern=result \
  --mime text/x-rust

# Python snippet
cat > /tmp/python-async.py <<'EOF'
import asyncio

async def fetch_data(url):
    await asyncio.sleep(1)
    return f"Data from {url}"

async def main():
    result = await fetch_data("https://example.com")
    print(result)

asyncio.run(main())
EOF

./target/release/osiris put snippets/python-async /tmp/python-async.py \
  --title "Python Async/Await Example" \
  --tags language=python,topic=async,pattern=asyncio \
  --mime text/x-python
```

### Search snippets
```bash
# Find all Rust snippets
./target/release/osiris find --ns snippets --filter language=rust

# Find async patterns
./target/release/osiris find "async" --ns snippets

# Find error handling examples
./target/release/osiris find --ns snippets --filter topic=error-handling
```

---

## Example 4: Document Management

### Create a documents namespace
```bash
./target/release/osiris ns create docs
```

### Add documents
```bash
# Add a specification
./target/release/osiris put docs/osiris-spec ./docs/specs/osiris-mvp.md \
  --title "OSIRIS MVP Specification" \
  --tags type=spec,project=osiris,status=draft \
  --mime text/markdown

# Add a README
./target/release/osiris put docs/readme ./README.md \
  --title "OSIRIS README" \
  --tags type=readme,project=osiris,status=published \
  --mime text/markdown

# Add meeting notes
echo -e "# Team Meeting 2025-10-20\n\n- Discussed OSIRIS MVP\n- Decided on minimal feature set" | \
  ./target/release/osiris put docs/meeting-2025-10-20 - \
  --title "Team Meeting Notes" \
  --tags type=notes,date=2025-10-20,team=eng \
  --mime text/markdown
```

### Search documents
```bash
# Find all specifications
./target/release/osiris find --ns docs --filter type=spec

# Find draft documents
./target/release/osiris find --ns docs --filter status=draft

# Search by content
./target/release/osiris find "MVP" --ns docs
```

---

## Example 5: Multi-Namespace Operations

### List all namespaces
```bash
./target/release/osiris ns list
```

### Get statistics
```bash
# Overall stats
./target/release/osiris stats

# Namespace-specific stats
./target/release/osiris stats --ns notes
./target/release/osiris stats --ns calendar
./target/release/osiris stats --ns snippets
```

### Delete a namespace
```bash
./target/release/osiris ns delete snippets
```

---

## Example 6: Batch Operations

### Bulk import notes
```bash
# Create multiple notes from a directory
for file in ./my-notes/*.md; do
  filename=$(basename "$file" .md)
  ./target/release/osiris put "notes/$filename" "$file" \
    --tags source=import,format=markdown \
    --mime text/markdown
done
```

### Export all notes
```bash
# Get all note IDs and export them
./target/release/osiris find --ns notes --topk 1000 --json | \
  jq -r '.[].id' | \
  while read id; do
    ./target/release/osiris get "notes/$id" --raw --output "./export/$id.txt"
  done
```

---

## Example 7: Advanced Search Patterns

### Complex filtering
```bash
# Find high-priority engineering tasks
./target/release/osiris find --ns calendar \
  --filter priority=high \
  --filter team=eng

# Find beginner-level Rust tutorials
./target/release/osiris find "rust" --ns notes \
  --filter level=beginner \
  --filter type=tutorial
```

### Combining text search with filters
```bash
# Find notes about "performance" tagged as high priority
./target/release/osiris find "performance" --ns notes \
  --filter priority=high

# Find meetings about "standup"
./target/release/osiris find "standup" --ns calendar \
  --filter type=meeting
```

---

## Example 8: JSON Output and Scripting

### Get search results as JSON
```bash
# Search and process with jq
./target/release/osiris find "rust" --ns notes --json | \
  jq '.[] | {id: .id, score: .score, snippet: .snippet}'

# Count results
./target/release/osiris find "programming" --ns notes --json | \
  jq 'length'

# Get top result
./target/release/osiris find "osiris" --ns notes --json | \
  jq '.[0]'
```

### Scripting with OSIRIS
```bash
#!/bin/bash
# Script to find and display all high-priority items

echo "High Priority Items:"
echo "==================="

# Search notes
echo -e "\nNotes:"
./target/release/osiris find --ns notes --filter priority=high --json | \
  jq -r '.[] | "- \(.id): \(.snippet)"'

# Search calendar
echo -e "\nEvents:"
./target/release/osiris find --ns calendar --filter priority=high --json | \
  jq -r '.[] | "- \(.id): \(.snippet)"'
```

---

## Tips and Best Practices

### 1. Consistent Tagging
Use consistent tag names across your objects:
```bash
# Good: consistent tag names
--tags topic=rust,level=beginner,type=tutorial

# Avoid: inconsistent naming
--tags Topic=Rust,skill_level=Beginner,kind=Tutorial
```

### 2. Meaningful IDs
Use descriptive IDs that make sense:
```bash
# Good: descriptive ID
./target/release/osiris put notes/rust-ownership-guide ...

# Avoid: cryptic ID
./target/release/osiris put notes/abc123 ...
```

### 3. Use MIME Types
Always specify MIME types for better organization:
```bash
--mime text/markdown
--mime application/json
--mime text/x-rust
--mime text/plain
```

### 4. Leverage Filters
Use filters to narrow down search results:
```bash
# Instead of searching all notes
./target/release/osiris find "rust" --ns notes

# Filter by specific criteria
./target/release/osiris find "rust" --ns notes --filter level=beginner
```

### 5. Regular Backups
Export your data regularly:
```bash
# Export all namespaces
for ns in notes calendar docs; do
  ./target/release/osiris find --ns "$ns" --topk 10000 --json > "backup-$ns.json"
done
```

---

## Troubleshooting

### Connection Issues
```bash
# Check if HeroDB is running
redis-cli -p 6379 PING

# Verify configuration
cat ~/.config/osiris/config.toml
```

### Object Not Found
```bash
# List all objects in a namespace
./target/release/osiris find --ns notes --topk 1000

# Check if namespace exists
./target/release/osiris ns list
```

### Search Returns No Results
```bash
# Try without filters first
./target/release/osiris find "keyword" --ns notes

# Check if objects have the expected tags
./target/release/osiris get notes/some-id
```

---

## Next Steps

- Explore the [README](README.md) for more information
- Read the [MVP Specification](docs/specs/osiris-mvp.md)
- Check out the [source code](src/) to understand the implementation
- Contribute improvements or report issues

Happy organizing with OSIRIS! 🎯
MULTI_INSTANCE.md (Normal file, 341 lines)
@@ -0,0 +1,341 @@
# OSIRIS Multi-Instance Support ✅

OSIRIS now supports multiple instances in a single Rhai script, allowing you to work with different HeroDB databases simultaneously.

## 🎉 Status: FULLY OPERATIONAL

```
✅ OsirisInstance type created
✅ Dynamic instance creation
✅ Independent storage per instance
✅ Same note/event in multiple instances
✅ Test script working
```

## 🚀 Quick Start

### Create Multiple Instances

```rhai
// Create two OSIRIS instances pointing to different databases
let freezone = osiris("freezone", "redis://localhost:6379", 1);
let my_osiris = osiris("my_osiris", "redis://localhost:6379", 2);

// Create a note
let my_note = note("notes")
    .title("Shared Note")
    .content("This will be stored in both instances");

// Store in both instances
let id1 = freezone.put_note(my_note);
let id2 = my_osiris.put_note(my_note);
```

## 📝 Complete Example

```rhai
// Multi-Instance OSIRIS Example

print("Creating OSIRIS instances...");
let freezone = osiris("freezone", "redis://localhost:6379", 1);
let my_osiris = osiris("my_osiris", "redis://localhost:6379", 2);

print(`Created: ${freezone.name()}`);
print(`Created: ${my_osiris.name()}`);

// Create a note
let my_note = note("shared_notes")
    .title("Multi-Instance Test")
    .content("Stored in multiple OSIRIS instances")
    .tag("shared", "true");

// Store in freezone
let freezone_id = freezone.put_note(my_note);
print(`Stored in freezone: ${freezone_id}`);

// Store in my_osiris
let my_id = my_osiris.put_note(my_note);
print(`Stored in my_osiris: ${my_id}`);

// Retrieve from each
let note1 = freezone.get_note("shared_notes", freezone_id);
let note2 = my_osiris.get_note("shared_notes", my_id);

// Query each instance
let ids1 = freezone.query("shared_notes", "tags:tag", "shared=true");
let ids2 = my_osiris.query("shared_notes", "tags:tag", "shared=true");
```

## 🎯 Use Cases

### 1. **Multi-Tenant Systems**
```rhai
// Each tenant has their own OSIRIS instance
let tenant1 = osiris("tenant1", "redis://localhost:6379", 1);
let tenant2 = osiris("tenant2", "redis://localhost:6379", 2);

// Store tenant-specific data
tenant1.put_note(tenant1_note);
tenant2.put_note(tenant2_note);
```

### 2. **Data Replication**
```rhai
// Primary and backup instances
let primary = osiris("primary", "redis://primary:6379", 1);
let backup = osiris("backup", "redis://backup:6379", 1);

// Store in both
primary.put_note(note);
backup.put_note(note);
```

### 3. **Environment Separation**
```rhai
// Development and production
let dev = osiris("dev", "redis://dev:6379", 1);
let prod = osiris("prod", "redis://prod:6379", 1);

// Test in dev first
dev.put_note(test_note);

// Then promote to prod
prod.put_note(test_note);
```

### 4. **Cross-Database Operations**
```rhai
// Different databases for different data types
let notes_db = osiris("notes", "redis://localhost:6379", 1);
let events_db = osiris("events", "redis://localhost:6379", 2);

notes_db.put_note(note);
events_db.put_event(event);
```

## 📚 API Reference

### Creating an Instance

```rhai
let instance = osiris(name, url, db_id);
```

**Parameters:**
- `name` (string) - Instance name for identification
- `url` (string) - HeroDB connection URL
- `db_id` (int) - Database ID (0-15 typically)

**Returns:** `OsirisInstance`

### Instance Methods

#### `name()`
Get the instance name.

```rhai
let name = instance.name();
print(`Instance: ${name}`);
```

#### `put_note(note)`
Store a note in this instance.

```rhai
let id = instance.put_note(note);
```

**Returns:** Note ID (string)

#### `get_note(namespace, id)`
Retrieve a note from this instance.

```rhai
let note = instance.get_note("notes", id);
```

**Returns:** Note object

#### `put_event(event)`
Store an event in this instance.

```rhai
let id = instance.put_event(event);
```

**Returns:** Event ID (string)

#### `get_event(namespace, id)`
Retrieve an event from this instance.

```rhai
let event = instance.get_event("calendar", id);
```

**Returns:** Event object

#### `query(namespace, field, value)`
Query by indexed field in this instance.

```rhai
let ids = instance.query("notes", "title", "My Note");
```

**Returns:** Array of IDs

#### `delete_note(note)`
Delete a note from this instance.

```rhai
let deleted = instance.delete_note(note);
```

**Returns:** Boolean (true if deleted)

#### `delete_event(event)`
Delete an event from this instance.

```rhai
let deleted = instance.delete_event(event);
```

**Returns:** Boolean (true if deleted)

## 🏗️ Architecture

```
┌─────────────────────────────────────┐
│           Rhai Script               │
│  let freezone = osiris(...);        │
│  let my_osiris = osiris(...);       │
└────────────┬────────────────────────┘
             │
┌────────────▼────────────────────────┐
│      OsirisInstance (Clone)         │
│  - name: String                     │
│  - store: Arc<GenericStore>         │
│  - runtime: Arc<Runtime>            │
└────┬───────────────────┬────────────┘
     │                   │
┌────▼──────────┐    ┌───▼───────────┐
│  HeroDB DB 1  │    │  HeroDB DB 2  │
│  (freezone)   │    │  (my_osiris)  │
└───────────────┘    └───────────────┘
```

## ✨ Features

### 1. **Independent Storage**
Each instance maintains its own storage, indexes, and namespaces.

### 2. **Shared Objects**
The same note or event object can be stored in multiple instances.

### 3. **Clone-Safe**
Instances are cloneable and can be passed around in scripts.

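A minimal sketch of what this allows, using only the `osiris(...)`, `note(...)`, and `put_note(...)` calls shown above (the `archive_to` helper and the `backup` instance are illustrative names, not part of the API). Rhai passes the instance into the function by value, i.e. as a clone, and the clone still talks to the same underlying store:

```rhai
// Sketch: hand an instance to a script-defined function.
// The argument is cloned on the call; both copies share the same store.
fn archive_to(target, title, body) {
    let n = note("archive")
        .title(title)
        .content(body);
    target.put_note(n)
}

let backup = osiris("backup", "redis://localhost:6379", 3);
let id = archive_to(backup, "Weekly report", "Everything is on track.");
print(`Archived as ${id}`);
```
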
### 4. **Error Isolation**
Errors in one instance don't affect others.

### 5. **Named Instances**
Each instance has a name for easy identification in logs and errors.

## 🧪 Testing

### Run the Multi-Instance Test

```bash
cargo run --bin runner --features rhai-support -- test1 --script-file scripts/multi_instance.rhai
```

### Expected Output

```
=== Multi-Instance OSIRIS Test ===

Creating OSIRIS instances...
✓ Created: freezone
✓ Created: my_osiris

Creating note...
Note created: Multi-Instance Test Note

Storing in freezone...
✓ Stored in freezone with ID: c274731c-678d-4f3e-bc4a-22eb70dae698

Storing in my_osiris...
✓ Stored in my_osiris with ID: c274731c-678d-4f3e-bc4a-22eb70dae698

Retrieving from freezone...
✓ Retrieved from freezone: Multi-Instance Test Note

Retrieving from my_osiris...
✓ Retrieved from my_osiris: Multi-Instance Test Note

Querying freezone...
✓ Found in freezone:
  - c274731c-678d-4f3e-bc4a-22eb70dae698

Querying my_osiris...
✓ Found in my_osiris:
  - c274731c-678d-4f3e-bc4a-22eb70dae698

=== Test Complete ===
✅ Script completed successfully!
```

## 💡 Best Practices

### 1. **Use Descriptive Names**
```rhai
// Good
let production = osiris("production", url, 1);
let staging = osiris("staging", url, 2);

// Less clear
let db1 = osiris("db1", url, 1);
let db2 = osiris("db2", url, 2);
```

### 2. **Centralize Instance Creation**
```rhai
// Create all instances at the start
let freezone = osiris("freezone", "redis://localhost:6379", 1);
let my_osiris = osiris("my_osiris", "redis://localhost:6379", 2);

// Then use them throughout the script
freezone.put_note(note1);
my_osiris.put_note(note2);
```

### 3. **Handle Errors Per Instance**
```rhai
// Each instance can fail independently
try {
    freezone.put_note(note);
} catch (e) {
    print(`Freezone error: ${e}`);
}

try {
    my_osiris.put_note(note);
} catch (e) {
    print(`My OSIRIS error: ${e}`);
}
```

### 4. **Use Different Databases**
```rhai
// Separate databases for isolation
let instance1 = osiris("inst1", "redis://localhost:6379", 1);
let instance2 = osiris("inst2", "redis://localhost:6379", 2);
```

## 🎉 Success!

Multi-instance OSIRIS support is **fully operational** and ready for:
- ✅ Multi-tenant applications
- ✅ Data replication
- ✅ Environment separation
- ✅ Cross-database operations
- ✅ Production use
PREDEFINED_INSTANCES.md (Normal file, 387 lines)
@@ -0,0 +1,387 @@
# OSIRIS Predefined Instances ✅

The OSIRIS runner now supports predefined instances that are automatically available in your Rhai scripts without needing to create them manually.

## 🎉 Status: FULLY OPERATIONAL

```
✅ CLI argument parsing for instances
✅ Automatic instance creation
✅ Global scope injection
✅ Multiple instances support
✅ Test script working
```

## 🚀 Quick Start

### Define Instances via CLI

```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --instance freezone:redis://localhost:6379:1 \
  --instance my:redis://localhost:6379:2 \
  --script-file scripts/predefined_instances.rhai
```

### Use Them Directly in Scripts

```rhai
// No need to create instances - they're already available!
freezone.put_note(my_note);
my.put_note(my_note);
```

## 📝 Complete Example

### Command Line

```bash
cargo run --bin runner --features rhai-support -- test1 \
  --instance freezone:redis://localhost:6379:1 \
  --instance my:redis://localhost:6379:2 \
  --script-file scripts/predefined_instances.rhai
```

### Script (scripts/predefined_instances.rhai)

```rhai
print("=== Predefined Instances Example ===");

// freezone and my are already available!
print(`Using: ${freezone.name()}`);
print(`Using: ${my.name()}`);

// Create a note
let my_note = note("notes")
    .title("Test Note")
    .content("Using predefined instances!");

// Use them directly - no setup needed!
freezone.put_note(my_note);
my.put_note(my_note);

// Query each instance
let ids1 = freezone.query("notes", "title", "Test Note");
let ids2 = my.query("notes", "title", "Test Note");
```

### Output

```
🚀 OSIRIS Runner
Runner ID: test1
HeroDB: redis://localhost:6379 (DB 1)
Instance: freezone → redis://localhost:6379 (DB 1)
Instance: my → redis://localhost:6379 (DB 2)

📝 Executing script...

─────────────────────────────────────
=== Predefined Instances Example ===

Using predefined instance: freezone
Using predefined instance: my

Creating note...
Note created: Predefined Instance Test

Storing in freezone...
✓ Stored in freezone: 61ea54fe-504d-4f43-be50-6548a82338dd

Storing in my...
✓ Stored in my: 61ea54fe-504d-4f43-be50-6548a82338dd

✅ Script completed successfully!
```

## 🎯 CLI Arguments

### `--instance` (or `-i`)

Define a predefined instance that will be available in your script.

**Format:** `name:url:db_id`

**Examples:**
```bash
# Single instance
--instance freezone:redis://localhost:6379:1

# Multiple instances
--instance freezone:redis://localhost:6379:1 \
--instance my:redis://localhost:6379:2 \
--instance production:redis://prod.example.com:6379:1

# Different hosts
--instance local:redis://localhost:6379:1 \
--instance remote:redis://remote.example.com:6379:1
```

**Parameters:**
- `name` - Instance name (will be available as a variable in scripts)
- `url` - HeroDB connection URL (redis://host:port)
- `db_id` - Database ID (0-15 typically)
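
Because the URL portion itself contains colons, a spec like `freezone:redis://localhost:6379:1` cannot simply be split on every `:`. The Rust sketch below shows one way such a spec could be parsed (name before the first colon, database ID after the last one); it is an illustration only, not the runner's actual implementation, and `parse_instance_spec` is a hypothetical helper name.

```rust
/// Hypothetical parser for a `name:url:db_id` instance spec.
/// The URL contains colons, so take the name before the first colon
/// and the database ID after the last one.
fn parse_instance_spec(spec: &str) -> Result<(String, String, u16), String> {
    let (name, rest) = spec
        .split_once(':')
        .ok_or_else(|| format!("invalid instance spec: {spec}"))?;
    let (url, db_id) = rest
        .rsplit_once(':')
        .ok_or_else(|| format!("missing db_id in instance spec: {spec}"))?;
    let db_id: u16 = db_id
        .parse()
        .map_err(|_| format!("db_id must be a number: {spec}"))?;
    Ok((name.to_string(), url.to_string(), db_id))
}

fn main() {
    let (name, url, db_id) =
        parse_instance_spec("freezone:redis://localhost:6379:1").unwrap();
    println!("{name} -> {url} (DB {db_id})"); // freezone -> redis://localhost:6379 (DB 1)
}
```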

## 📚 Use Cases

### 1. **Multi-Tenant Setup**

```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --instance tenant1:redis://localhost:6379:1 \
  --instance tenant2:redis://localhost:6379:2 \
  --instance tenant3:redis://localhost:6379:3 \
  --script-file process_tenants.rhai
```

```rhai
// Script automatically has tenant1, tenant2, tenant3 available
tenant1.put_note(note1);
tenant2.put_note(note2);
tenant3.put_note(note3);
```

### 2. **Environment Separation**

```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --instance dev:redis://dev:6379:1 \
  --instance staging:redis://staging:6379:1 \
  --instance prod:redis://prod:6379:1 \
  --script-file deploy.rhai
```

```rhai
// Test in dev first
dev.put_note(test_note);

// Then staging
staging.put_note(test_note);

// Finally production
prod.put_note(test_note);
```

### 3. **Data Migration**

```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --instance source:redis://old-server:6379:1 \
  --instance target:redis://new-server:6379:1 \
  --script-file migrate.rhai
```

```rhai
// Migrate data from source to target
let ids = source.query("notes", "tags:tag", "migrate=true");
for id in ids {
    let note = source.get_note("notes", id);
    target.put_note(note);
}
```

### 4. **Freezone + Personal OSIRIS**

```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --instance freezone:redis://freezone.io:6379:1 \
  --instance my:redis://localhost:6379:1 \
  --script-file sync.rhai
```

```rhai
// Store the same note in a shared and a personal instance
let my_note = note("notes")
    .title("Shared Note")
    .content("Available in both instances");

freezone.put_note(my_note);
my.put_note(my_note);
```

## 🏗️ Architecture

```
┌─────────────────────────────────────┐
│          CLI Arguments              │
│  --instance freezone:redis:...:1    │
│  --instance my:redis:...:2          │
└────────────┬────────────────────────┘
             │
┌────────────▼────────────────────────┐
│          OsirisConfig               │
│  Parse and validate instances       │
└────────────┬────────────────────────┘
             │
┌────────────▼────────────────────────┐
│          Engine + Scope             │
│  Create instances and inject        │
│  into Rhai scope as constants       │
└────────────┬────────────────────────┘
             │
┌────────────▼────────────────────────┐
│           Rhai Script               │
│  freezone.put_note(...)             │
│  my.put_note(...)                   │
└─────────────────────────────────────┘
```

## ✨ Features

### 1. **Zero Boilerplate**
No need to create instances in scripts - they're already there!

```rhai
// Before (manual creation)
let freezone = osiris("freezone", "redis://localhost:6379", 1);
let my = osiris("my", "redis://localhost:6379", 2);

// After (predefined)
// Just use them!
freezone.put_note(note);
my.put_note(note);
```

### 2. **Type Safety**
Instances are strongly typed and validated at startup.

### 3. **Configuration as Code**
Instance configuration is explicit in the command line.

### 4. **Multiple Instances**
Support unlimited predefined instances.

### 5. **Named Access**
Access instances by their meaningful names.

## 🔧 Advanced Usage

### Combining Predefined and Dynamic Instances

```bash
# Predefined instances
cargo run --bin runner --features rhai-support -- runner1 \
  --instance freezone:redis://localhost:6379:1 \
  --instance my:redis://localhost:6379:2 \
  --script-file script.rhai
```

```rhai
// Use predefined instances
freezone.put_note(note1);
my.put_note(note2);

// Create additional dynamic instances
let temp = osiris("temp", "redis://localhost:6379", 3);
temp.put_note(note3);
```

### Environment Variables

You can use environment variables in your shell:

```bash
export FREEZONE_URL="redis://freezone.io:6379"
export MY_URL="redis://localhost:6379"

cargo run --bin runner --features rhai-support -- runner1 \
  --instance freezone:${FREEZONE_URL}:1 \
  --instance my:${MY_URL}:1 \
  --script-file script.rhai
```

### Configuration File (Future Enhancement)

```toml
# osiris.toml
[instances]
freezone = { url = "redis://localhost:6379", db_id = 1 }
my = { url = "redis://localhost:6379", db_id = 2 }
```

```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --config osiris.toml \
  --script-file script.rhai
```

## 💡 Best Practices

### 1. **Use Descriptive Names**
```bash
# Good
--instance production:redis://prod:6379:1
--instance staging:redis://staging:6379:1

# Less clear
--instance db1:redis://prod:6379:1
--instance db2:redis://staging:6379:1
```

### 2. **Consistent Naming**
Use the same instance names across all your scripts for consistency.

### 3. **Document Your Instances**
Add comments in your scripts explaining what each instance is for:

```rhai
// freezone: Public shared OSIRIS instance
// my: Personal local OSIRIS instance

freezone.put_note(public_note);
my.put_note(private_note);
```

### 4. **Separate Databases**
Use different database IDs for different purposes:

```bash
--instance notes:redis://localhost:6379:1 \
--instance events:redis://localhost:6379:2 \
--instance cache:redis://localhost:6379:3
```

## 🧪 Testing

### Test Script

```bash
cargo run --bin runner --features rhai-support -- test1 \
  --instance freezone:redis://localhost:6379:1 \
  --instance my:redis://localhost:6379:2 \
  --script-file scripts/predefined_instances.rhai
```

### Expected Output

```
✓ Instance: freezone → redis://localhost:6379 (DB 1)
✓ Instance: my → redis://localhost:6379 (DB 2)
✓ Stored in freezone: 61ea54fe-504d-4f43-be50-6548a82338dd
✓ Stored in my: 61ea54fe-504d-4f43-be50-6548a82338dd
✅ Script completed successfully!
```

## 🎉 Success!

Predefined instances are **fully operational** and ready for:
- ✅ Zero-boilerplate scripts
- ✅ Multi-tenant systems
- ✅ Environment separation
- ✅ Data migration
- ✅ Freezone + personal OSIRIS
- ✅ Production use

The freezone + personal setup described above now works out of the box:
```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --instance freezone:redis://freezone.io:6379:1 \
  --instance my:redis://localhost:6379:1 \
  --script-file my_script.rhai
```

```rhai
// Just use them!
freezone.put_note(my_note);
my.put_note(my_note);
```
QUICKSTART.md (Normal file, 190 lines)
@@ -0,0 +1,190 @@
# OSIRIS Quick Start Guide

Get up and running with OSIRIS in 5 minutes!

## Prerequisites

- Rust toolchain (1.70+)
- HeroDB running on localhost:6379

## Step 1: Start HeroDB

```bash
cd /path/to/herodb
cargo run --release -- --dir ./data --admin-secret mysecret --port 6379
```

Keep this terminal open.

## Step 2: Build OSIRIS

Open a new terminal:

```bash
cd /path/to/osiris
cargo build --release
```

## Step 3: Initialize OSIRIS

```bash
./target/release/osiris init --herodb redis://localhost:6379
```

Output:
```
✓ OSIRIS initialized
  Config: /Users/you/.config/osiris/config.toml
```

## Step 4: Create Your First Namespace

```bash
./target/release/osiris ns create notes
```

Output:
```
✓ Created namespace 'notes' (DB 1)
```

## Step 5: Add Your First Object

```bash
echo "OSIRIS is awesome!" | \
  ./target/release/osiris put notes/first-note - \
  --title "My First Note" \
  --tags topic=osiris,mood=excited
```

Output:
```
✓ Stored notes/first-note
```

## Step 6: Search for Your Object

```bash
./target/release/osiris find "awesome" --ns notes
```

Output:
```
Found 1 result(s):

1. first-note (score: 0.50)
   OSIRIS is awesome!
```

## Step 7: Retrieve Your Object

```bash
./target/release/osiris get notes/first-note
```

Output (JSON):
```json
{
  "id": "first-note",
  "ns": "notes",
  "meta": {
    "title": "My First Note",
    "tags": {
      "mood": "excited",
      "topic": "osiris"
    },
    "created": "2025-10-20T10:30:00Z",
    "updated": "2025-10-20T10:30:00Z",
    "size": 18
  },
  "text": "OSIRIS is awesome!"
}
```

## Step 8: Try More Features

### Add a note from a file
```bash
echo "This is a longer note about OSIRIS features" > /tmp/note.txt
./target/release/osiris put notes/features /tmp/note.txt \
  --title "OSIRIS Features" \
  --tags topic=osiris,type=documentation
```

### Search with filters
```bash
./target/release/osiris find --ns notes --filter topic=osiris
```

### Get raw content
```bash
./target/release/osiris get notes/first-note --raw
```

### View statistics
```bash
./target/release/osiris stats --ns notes
```

### List all namespaces
```bash
./target/release/osiris ns list
```

## Common Commands Cheat Sheet

```bash
# Initialize
osiris init --herodb redis://localhost:6379

# Namespace management
osiris ns create <name>
osiris ns list
osiris ns delete <name>

# Object operations
osiris put <ns>/<name> <file> [--tags k=v,...] [--title "..."] [--mime "..."]
osiris get <ns>/<name> [--raw] [--output file]
osiris del <ns>/<name>

# Search
osiris find "<query>" --ns <ns> [--filter k=v,...] [--topk N] [--json]

# Statistics
osiris stats [--ns <ns>]
```

## What's Next?

- **Read the [Examples](EXAMPLES.md)** for more use cases
- **Check the [README](README.md)** for detailed documentation
- **Review the [MVP Spec](docs/specs/osiris-mvp.md)** to understand the architecture
- **Explore the [source code](src/)** to see how it works

## Troubleshooting

### "Connection refused"
Make sure HeroDB is running on port 6379:
```bash
redis-cli -p 6379 PING
```

### "Namespace not found"
Create the namespace first:
```bash
osiris ns create <namespace>
```

### "Config file not found"
Run `osiris init` first:
```bash
osiris init --herodb redis://localhost:6379
```

## Need Help?

- Check the [EXAMPLES.md](EXAMPLES.md) for detailed usage patterns
- Review the [README.md](README.md) for architecture details
- Look at the [docs/specs/osiris-mvp.md](docs/specs/osiris-mvp.md) for the full specification

Happy organizing! 🚀
README.md (Normal file, 125 lines)
@@ -0,0 +1,125 @@
# OSIRIS

**Object Storage, Indexing & Retrieval Intelligent System**

OSIRIS is a Rust-native object storage and retrieval layer built on top of HeroDB, providing structured storage with metadata, field indexing, and search capabilities.

## Features

- **Object Storage**: Store structured objects with metadata (title, tags, MIME type, timestamps)
- **Namespace Management**: Organize objects into isolated namespaces
- **Field Indexing**: Fast filtering by tags and metadata fields
- **Text Search**: Simple keyword-based search across object content
- **CLI Interface**: Command-line tools for object management and search
- **9P Filesystem**: Mount OSIRIS as a filesystem (future)

## Quick Start

### Prerequisites

Start HeroDB:
```bash
cd /path/to/herodb
cargo run --release -- --dir ./data --admin-secret mysecret --port 6379
```

### Installation

```bash
cd /path/to/osiris
cargo build --release
```

### Initialize

```bash
# Create configuration
mkdir -p ~/.config/osiris
cat > ~/.config/osiris/config.toml <<EOF
[herodb]
url = "redis://localhost:6379"

[namespaces.notes]
db_id = 1
EOF

# Initialize OSIRIS
./target/release/osiris init --herodb redis://localhost:6379

# Create a namespace
./target/release/osiris ns create notes
```

### Usage

```bash
# Add objects
./target/release/osiris put notes/my-note.md ./my-note.md --tags topic=rust,project=osiris

# Get objects
./target/release/osiris get notes/my-note.md

# Search
./target/release/osiris find --ns notes --filter topic=rust
./target/release/osiris find "retrieval" --ns notes

# Delete objects
./target/release/osiris del notes/my-note.md

# Show statistics
./target/release/osiris stats --ns notes
```

## Architecture

```
HeroDB (unmodified)
 │
 ├── KV store + encryption
 └── RESP protocol
        ↑
        │
        └── OSIRIS
             ├── store/      – object schema + persistence
             ├── index/      – field index & keyword scanning
             ├── retrieve/   – query planner + filtering
             ├── interfaces/ – CLI, 9P
             └── config/     – namespaces + settings
```

## Data Model

Objects are stored with metadata (see the example object after the list):
- **ID**: Unique identifier (UUID or user-assigned)
- **Namespace**: Logical grouping (e.g., "notes", "calendar")
- **Title**: Optional human-readable title
- **MIME Type**: Content type
- **Tags**: Key-value pairs for categorization
- **Timestamps**: Created and updated times
- **Text Content**: Optional plain text content
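
As a rough illustration, a serialized object (as returned by `osiris get`; see QUICKSTART.md) looks like this:

```json
{
  "id": "first-note",
  "ns": "notes",
  "meta": {
    "title": "My First Note",
    "tags": { "topic": "osiris", "mood": "excited" },
    "created": "2025-10-20T10:30:00Z",
    "updated": "2025-10-20T10:30:00Z",
    "size": 18
  },
  "text": "OSIRIS is awesome!"
}
```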

## Keyspace Design

```
meta:<id>            → serialized OsirisObject
field:<field>:<val>  → Set of IDs (for equality filtering)
scan:index           → list of IDs for text scan
```

Example:
```
field:tag:project=osiris   → {note_1, note_2}
field:mime:text/markdown   → {note_1, note_3}
```
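
Since HeroDB speaks RESP, these keys can in principle be inspected with `redis-cli` while debugging. The commands below are a hypothetical aid that assumes the ID sets are ordinary Redis sets and that the `notes` namespace maps to DB 1 (as in the example config above); this is not a supported interface:

```bash
# Hypothetical debugging session against the keyspace sketched above.
redis-cli -p 6379 -n 1 SMEMBERS "field:tag:project=osiris"
redis-cli -p 6379 -n 1 EXISTS "meta:note_1"
```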

## Future Enhancements

- Content-addressable deduplication
- Vector embeddings for semantic search
- Relation graphs
- Full-text search with Tantivy
- 9P filesystem interface

## License

See LICENSE file.
RUNNER.md (Normal file, 328 lines)
@@ -0,0 +1,328 @@
# OSIRIS Runner - Standalone Binary ✅

The OSIRIS runner is a standalone binary that executes Rhai scripts with full OSIRIS object support.

## 🎉 Status: FULLY OPERATIONAL

```
✅ Binary created at src/bin/runner/
✅ Rhai engine with OSIRIS objects
✅ Note and Event support
✅ Automatic indexing
✅ Query functionality
✅ Test scripts working
```

## 🚀 Quick Start

### Run a Script File

```bash
cargo run --bin runner --features rhai-support -- runner1 --script-file scripts/test_note.rhai
```

### Run Inline Script

```bash
cargo run --bin runner --features rhai-support -- runner1 \
  --script 'let note = note("test").title("Hi"); print(note.get_title());'
```

## 📝 Example Output

```
🚀 OSIRIS Runner
Runner ID: test1
HeroDB: redis://localhost:6379 (DB 1)

📝 Executing script...

─────────────────────────────────────
=== OSIRIS Note Test ===

Creating note...
Note created: Test from OSIRIS Runner
Storing note...
✓ Note stored with ID: 46a064c7-9062-4858-a390-c11f0d5877a7

Retrieving note...
✓ Retrieved: Test from OSIRIS Runner
  Content: This note was created using the OSIRIS standalone runner!

Querying notes by tag...
✓ Found notes:
  - 2c54e1ec-bed9-41ea-851c-f7313abbd7cd
  - 392f3f7f-47e7-444f-ba11-db38d74b12af
  - 46a064c7-9062-4858-a390-c11f0d5877a7
=== Test Complete ===
─────────────────────────────────────

✅ Script completed successfully!
```

## 📖 Usage

### Command Line Options

```
OSIRIS Rhai Script Runner

Usage: runner [OPTIONS] <RUNNER_ID>

Arguments:
  <RUNNER_ID>  Runner ID

Options:
  -r, --redis-url <REDIS_URL>
          HeroDB URL [default: redis://localhost:6379]
  -d, --db-id <DB_ID>
          HeroDB database ID [default: 1]
  -s, --script <SCRIPT>
          Script to execute in single-job mode (optional)
  -f, --script-file <SCRIPT_FILE>
          Script file to execute
  -h, --help
          Print help
  -V, --version
          Print version
```

### Script Examples

#### Create and Store a Note

```rhai
// scripts/test_note.rhai
let note = note("notes")
    .title("My Note")
    .content("This is the content")
    .tag("project", "osiris")
    .tag("priority", "high");

let id = put_note(note);
print(`Stored: ${id}`);

let retrieved = get_note("notes", id);
print(`Title: ${retrieved.get_title()}`);
```

#### Create and Store an Event

```rhai
// scripts/test_event.rhai
let event = event("calendar", "Team Meeting")
    .description("Weekly sync")
    .location("Conference Room A")
    .category("meetings");

let id = put_event(event);
print(`Event stored: ${id}`);
```

#### Query by Index

```rhai
let ids = query("notes", "tags:tag", "project=osiris");
print("Found notes:");
for id in ids {
    let note = get_note("notes", id);
    print(`  - ${note.get_title()}`);
}
```

## 🏗️ Architecture

```
┌─────────────────────────────────────┐
│        OSIRIS Runner Binary         │
│      (osiris/src/bin/runner/)       │
├─────────────────────────────────────┤
│  ├── main.rs                        │
│  │   - CLI argument parsing         │
│  │   - Script loading               │
│  │   - Execution orchestration      │
│  └── engine.rs                      │
│      - Engine factory               │
│      - OSIRIS integration           │
└────────────┬────────────────────────┘
             │
┌────────────▼────────────────────────┐
│        OSIRIS Rhai Support          │
│     (osiris/src/rhai_support/)      │
├─────────────────────────────────────┤
│  ├── note_rhai.rs                   │
│  │   - Note CustomType              │
│  │   - Builder methods              │
│  ├── event_rhai.rs                  │
│  │   - Event CustomType             │
│  │   - Builder methods              │
│  └── engine.rs                      │
│      - OsirisRhaiEngine             │
│      - Async → Sync bridge          │
└────────────┬────────────────────────┘
             │
┌────────────▼────────────────────────┐
│            OSIRIS Core              │
│           (osiris/src/)             │
├─────────────────────────────────────┤
│  ├── objects/                       │
│  │   - Note, Event                  │
│  │   - #[derive(DeriveObject)]      │
│  ├── store/                         │
│  │   - GenericStore                 │
│  │   - Automatic indexing           │
│  └── index/                         │
│      - FieldIndex                   │
└────────────┬────────────────────────┘
             │
┌────────────▼────────────────────────┐
│              HeroDB                 │
│     (Redis-compatible storage)      │
└─────────────────────────────────────┘
```

## 🎯 Features

### 1. **Fluent Builder Pattern**
```rhai
let note = note("ns")
    .title("Title")
    .content("Content")
    .tag("key", "value");
```

### 2. **Automatic Indexing**
Fields marked with `#[index]` are automatically indexed:
```rust
#[derive(DeriveObject)]
pub struct Note {
    pub base_data: BaseData,

    #[index]
    pub title: Option<String>,           // Indexed!

    pub content: Option<String>,         // Not indexed

    #[index]
    pub tags: BTreeMap<String, String>,  // Indexed!
}
```

### 3. **Type-Safe Operations**
- Compile-time type checking
- Runtime validation
- Clear error messages

### 4. **Query Support**
```rhai
// Query by any indexed field
let ids = query("namespace", "field_name", "value");

// Query by tag
let ids = query("notes", "tags:tag", "project=osiris");

// Query by title
let ids = query("notes", "title", "My Note");
```

## 📚 Available Functions

### Note API

| Function | Description |
|----------|-------------|
| `note(ns)` | Create new note |
| `.title(s)` | Set title (chainable) |
| `.content(s)` | Set content (chainable) |
| `.tag(k, v)` | Add tag (chainable) |
| `.mime(s)` | Set MIME type (chainable) |
| `put_note(note)` | Store note, returns ID |
| `get_note(ns, id)` | Retrieve note by ID |
| `.get_id()` | Get note ID |
| `.get_title()` | Get note title |
| `.get_content()` | Get note content |
| `.to_json()` | Serialize to JSON |

### Event API

| Function | Description |
|----------|-------------|
| `event(ns, title)` | Create new event |
| `.description(s)` | Set description (chainable) |
| `.location(s)` | Set location (chainable) |
| `.category(s)` | Set category (chainable) |
| `.all_day(b)` | Set all-day flag (chainable) |
| `put_event(event)` | Store event, returns ID |
| `get_event(ns, id)` | Retrieve event by ID |
| `.get_id()` | Get event ID |
| `.get_title()` | Get event title |
| `.to_json()` | Serialize to JSON |

### Query API

| Function | Description |
|----------|-------------|
| `query(ns, field, value)` | Query by indexed field, returns array of IDs |

## 🧪 Testing

### Test Scripts Included

1. **`scripts/test_note.rhai`** - Complete note workflow
2. **`scripts/test_event.rhai`** - Complete event workflow

### Run Tests

```bash
# Test notes
cargo run --bin runner --features rhai-support -- test1 --script-file scripts/test_note.rhai

# Test events
cargo run --bin runner --features rhai-support -- test1 --script-file scripts/test_event.rhai
```

## 🔧 Building

### Development Build

```bash
cargo build --bin runner --features rhai-support
```

### Release Build

```bash
cargo build --bin runner --features rhai-support --release
```

### Run Without Cargo

```bash
# After building
./target/release/runner runner1 --script-file scripts/test_note.rhai
```

## 📦 Integration with runner_rust

The OSIRIS runner can also be integrated into the runner_rust infrastructure for distributed task execution. See `runner_rust/src/engine/osiris.rs` for the integration.

## ✅ Verification

Run this to verify everything works:

```bash
cargo run --bin runner --features rhai-support -- test1 --script-file scripts/test_note.rhai
```

Expected output:
```
✅ Script completed successfully!
```

## 🎉 Success!

The OSIRIS runner is **fully operational** and ready for:
- ✅ Standalone script execution
- ✅ Integration with runner_rust
- ✅ Production use
- ✅ Custom object types (via derive macro)
STRUCTURE.md (Normal file, 317 lines)
@@ -0,0 +1,317 @@
# OSIRIS Code Structure

## 📁 Directory Organization

### Objects (`src/objects/`)

Each OSIRIS object has its own directory with:
- `mod.rs` - Object definition and core logic
- `rhai.rs` - Rhai integration (CustomType, builder API)

```
src/objects/
├── mod.rs              # Module exports
├── note/
│   ├── mod.rs          # Note object definition
│   └── rhai.rs         # Note Rhai integration
└── event/
    ├── mod.rs          # Event object definition
    └── rhai.rs         # Event Rhai integration
```

### Rhai Support (`src/rhai_support/`)

Core Rhai infrastructure (not object-specific):
- `instance.rs` - OsirisInstance type for multi-instance support
- `mod.rs` - Module exports and re-exports

```
src/rhai_support/
├── mod.rs              # Re-exports from object modules
└── instance.rs         # OsirisInstance implementation
```

### Runner (`src/bin/runner/`)

Standalone binary for running OSIRIS scripts:
- `main.rs` - CLI and script execution
- `engine.rs` - Engine factory with instance configuration

```
src/bin/runner/
├── main.rs             # CLI interface
└── engine.rs           # Engine configuration
```

### Core (`src/`)

Core OSIRIS functionality:
- `store/` - GenericStore, HeroDbClient, BaseData
- `index/` - Field indexing
- `error/` - Error types
- `config/` - Configuration
- `interfaces/` - Traits and interfaces
- `retrieve/` - Retrieval logic

## 🎯 Design Principles

### 1. **Co-location**
Object-specific code lives with the object:
```
objects/note/
├── mod.rs              # Note struct, impl
└── rhai.rs             # Note Rhai support
```

### 2. **Separation of Concerns**
- **Objects** - Domain models and business logic
- **Rhai Support** - Scripting integration
- **Store** - Persistence layer
- **Runner** - Execution environment

### 3. **Feature Gating**
Rhai support is optional via `rhai-support` feature:
```rust
#[cfg(feature = "rhai-support")]
pub mod rhai;
```

### 4. **Clean Exports**
Clear module boundaries with re-exports:
```rust
// src/objects/mod.rs
pub mod note;
pub use note::Note;

// src/rhai_support/mod.rs
pub use crate::objects::note::rhai::register_note_api;
```

## 📝 Adding a New Object

### 1. Create Object Directory
```bash
mkdir -p src/objects/task
```

### 2. Create Object Definition (`mod.rs`)
```rust
use crate::store::BaseData;
use serde::{Deserialize, Serialize};

#[cfg(feature = "rhai-support")]
pub mod rhai;

#[derive(Debug, Clone, Serialize, Deserialize, crate::DeriveObject)]
pub struct Task {
    pub base_data: BaseData,

    #[index]
    pub title: String,

    pub completed: bool,
}

impl Task {
    pub fn new(ns: String) -> Self {
        Self {
            base_data: BaseData::new(ns),
            title: String::new(),
            completed: false,
        }
    }
}
```
|
||||
|
||||
### 3. Create Rhai Integration (`rhai.rs`)
|
||||
```rust
|
||||
use crate::objects::Task;
|
||||
use rhai::{CustomType, Engine, TypeBuilder};
|
||||
|
||||
impl CustomType for Task {
|
||||
fn build(mut builder: TypeBuilder<Self>) {
|
||||
builder
|
||||
.with_name("Task")
|
||||
.with_fn("new", |ns: String| Task::new(ns))
|
||||
.with_fn("set_title", |task: &mut Task, title: String| {
|
||||
task.title = title;
|
||||
task.base_data.update_modified();
|
||||
})
|
||||
.with_fn("set_completed", |task: &mut Task, completed: bool| {
|
||||
task.completed = completed;
|
||||
task.base_data.update_modified();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_task_api(engine: &mut Engine) {
|
||||
engine.build_type::<Task>();
|
||||
|
||||
// Builder pattern
|
||||
engine.register_fn("task", |ns: String| Task::new(ns));
|
||||
engine.register_fn("title", |mut task: Task, title: String| {
|
||||
task.title = title;
|
||||
task.base_data.update_modified();
|
||||
task
|
||||
});
|
||||
engine.register_fn("completed", |mut task: Task, completed: bool| {
|
||||
task.completed = completed;
|
||||
task.base_data.update_modified();
|
||||
task
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Update Module Exports
|
||||
```rust
|
||||
// src/objects/mod.rs
|
||||
pub mod task;
|
||||
pub use task::Task;
|
||||
|
||||
// src/rhai_support/mod.rs
|
||||
pub use crate::objects::task::rhai::register_task_api;
|
||||
```
|
||||
|
||||
### 5. Register in Engine
|
||||
```rust
|
||||
// src/bin/runner/engine.rs
|
||||
use osiris::rhai_support::register_task_api;
|
||||
|
||||
register_task_api(&mut engine);
|
||||
```
|
||||
|
||||
### 6. Use in Scripts
|
||||
```rhai
|
||||
let my_task = task("tasks")
|
||||
.title("Complete OSIRIS integration")
|
||||
.completed(true);
|
||||
|
||||
my_instance.put_task(my_task);
|
||||
```
|
||||
|
||||
## 🔍 File Responsibilities
|
||||
|
||||
### Object `mod.rs`
|
||||
- Struct definition with `#[derive(DeriveObject)]`
|
||||
- Index fields marked with `#[index]`
|
||||
- Constructor methods (`new`, `with_id`)
|
||||
- Business logic methods
|
||||
- Builder pattern methods (optional)
|
||||
|
||||
### Object `rhai.rs`
|
||||
- `CustomType` implementation
|
||||
- `register_*_api` function
|
||||
- Rhai-specific builder methods
|
||||
- Type conversions for Rhai
|
||||
|
||||
### `rhai_support/instance.rs`
- `OsirisInstance` struct
- Multi-instance support
- CRUD operations per instance
- Async → Sync bridge
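
The async → sync bridge is the piece that lets synchronous Rhai callbacks drive the async `GenericStore`. A minimal sketch, assuming the `GenericStore::put` signature documented in `docs/ARCHITECTURE.md`; the field names and error mapping here are illustrative, not the actual contents of `instance.rs`:

```rust
// Hypothetical shape of the bridge in src/rhai_support/instance.rs. The real
// fields and error mapping may differ; GenericStore::put is taken from
// docs/ARCHITECTURE.md.
use std::sync::Arc;
use tokio::runtime::Runtime;

use crate::objects::Note;
use crate::store::GenericStore;

#[derive(Clone)]
pub struct OsirisInstance {
    pub name: String,
    store: Arc<GenericStore>,
    runtime: Arc<Runtime>, // drives async store calls from sync Rhai callbacks
}

impl OsirisInstance {
    /// Called from Rhai (synchronous); blocks on the async put and returns the ID.
    pub fn put_note(&self, note: Note) -> Result<String, String> {
        self.runtime
            .block_on(self.store.put(&note))
            .map(|_| note.base_data.id.clone())
            .map_err(|e| e.to_string())
    }
}
```

A real bridge also has to avoid calling `block_on` from inside an async context (for example via a dedicated thread or `tokio::task::block_in_place`) when the runner itself is already running on Tokio.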
|
||||
|
||||
### `bin/runner/engine.rs`
- `OsirisConfig` for instance configuration
- `create_osiris_engine_with_config` function
- Engine setup and registration
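
For orientation, the factory could be shaped roughly like the sketch below. Only `register_note_api` appears elsewhere in this repository; the `OsirisConfig` fields, the `OsirisInstance::new` signature, and the `(Engine, Scope)` return type are assumptions, not the real `engine.rs`:

```rust
// Hypothetical engine factory; names marked below are assumptions.
use osiris::rhai_support::{register_note_api, OsirisInstance};
use rhai::{Engine, Scope};

pub struct OsirisConfig {
    pub name: String,       // constant name exposed to scripts, e.g. "my_instance"
    pub herodb_url: String, // e.g. "redis://localhost:6379"
    pub db_id: u16,         // HeroDB database backing this instance
}

pub fn create_osiris_engine_with_config(
    configs: &[OsirisConfig],
) -> anyhow::Result<(Engine, Scope<'static>)> {
    let mut engine = Engine::new();
    register_note_api(&mut engine); // plus the other object APIs

    // One OsirisInstance per configured HeroDB database, exposed as a constant
    // that scripts can call methods on (OsirisInstance::new is assumed here).
    let mut scope = Scope::new();
    for cfg in configs {
        let instance = OsirisInstance::new(&cfg.name, &cfg.herodb_url, cfg.db_id)?;
        scope.push_constant(cfg.name.clone(), instance);
    }
    Ok((engine, scope))
}
```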
|
||||
|
||||
### `bin/runner/main.rs`
|
||||
- CLI argument parsing
|
||||
- Instance configuration from CLI
|
||||
- Script loading and execution
|
||||
|
||||
## 🎨 Benefits of This Structure
|
||||
|
||||
### 1. **Discoverability**
|
||||
All code for an object is in one place:
|
||||
```
|
||||
objects/note/
|
||||
├── mod.rs # "What is a Note?"
|
||||
└── rhai.rs # "How do I use Note in Rhai?"
|
||||
```
|
||||
|
||||
### 2. **Maintainability**
|
||||
Changes to an object are localized:
|
||||
- Add a field → Update `mod.rs` and `rhai.rs`
|
||||
- No hunting through multiple directories
|
||||
|
||||
### 3. **Scalability**
|
||||
Easy to add new objects:
|
||||
- Create directory
|
||||
- Add two files
|
||||
- Update exports
|
||||
- Done!
|
||||
|
||||
### 4. **Testability**
|
||||
Each object can have its own tests:
|
||||
```
|
||||
objects/note/
|
||||
├── mod.rs
|
||||
├── rhai.rs
|
||||
└── tests.rs
|
||||
```
|
||||
|
||||
### 5. **Clear Dependencies**
|
||||
```
|
||||
Objects (domain) → Independent
|
||||
↓
|
||||
Rhai Support → Depends on Objects
|
||||
↓
|
||||
Runner → Depends on Rhai Support
|
||||
```
|
||||
|
||||
## 📊 Module Graph
|
||||
|
||||
```
|
||||
osiris (lib)
|
||||
├── objects/
|
||||
│ ├── note/
|
||||
│ │ ├── mod.rs (Note struct)
|
||||
│ │ └── rhai.rs (Note Rhai)
|
||||
│ └── event/
|
||||
│ ├── mod.rs (Event struct)
|
||||
│ └── rhai.rs (Event Rhai)
|
||||
├── rhai_support/
|
||||
│ ├── instance.rs (OsirisInstance)
|
||||
│ └── mod.rs (re-exports)
|
||||
├── store/ (GenericStore, BaseData)
|
||||
├── index/ (FieldIndex)
|
||||
└── error/ (Error types)
|
||||
|
||||
runner (bin)
|
||||
├── main.rs (CLI)
|
||||
└── engine.rs (Engine factory)
|
||||
```
|
||||
|
||||
## ✅ Summary
|
||||
|
||||
**Old Structure:**
|
||||
```
|
||||
src/rhai_support/
|
||||
├── note_rhai.rs
|
||||
├── event_rhai.rs
|
||||
├── engine.rs
|
||||
└── instance.rs
|
||||
```
|
||||
|
||||
**New Structure:**
|
||||
```
|
||||
src/objects/
|
||||
├── note/
|
||||
│ ├── mod.rs
|
||||
│ └── rhai.rs
|
||||
└── event/
|
||||
├── mod.rs
|
||||
└── rhai.rs
|
||||
|
||||
src/rhai_support/
|
||||
├── instance.rs
|
||||
└── mod.rs (re-exports only)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- ✅ Better organization
|
||||
- ✅ Co-located code
|
||||
- ✅ Easier to find things
|
||||
- ✅ Cleaner separation
|
||||
- ✅ Scalable structure
|
||||
426
docs/ARCHITECTURE.md
Normal file
@@ -0,0 +1,426 @@
|
||||
# OSIRIS Architecture - Trait-Based Generic Objects
|
||||
|
||||
## Overview
|
||||
|
||||
OSIRIS has been refactored to use a trait-based architecture similar to heromodels, allowing any object implementing the `Object` trait to be stored and indexed automatically based on field attributes.
|
||||
|
||||
## Core Concepts
|
||||
|
||||
### 1. BaseData
|
||||
|
||||
Every OSIRIS object must include `BaseData`, which provides:
|
||||
- **id**: Unique identifier (UUID or user-assigned)
|
||||
- **ns**: Namespace the object belongs to
|
||||
- **created_at**: Creation timestamp
|
||||
- **modified_at**: Last modification timestamp
|
||||
- **mime**: Optional MIME type
|
||||
- **size**: Optional content size
|
||||
|
||||
```rust
|
||||
pub struct BaseData {
|
||||
pub id: String,
|
||||
pub ns: String,
|
||||
pub created_at: OffsetDateTime,
|
||||
pub modified_at: OffsetDateTime,
|
||||
pub mime: Option<String>,
|
||||
pub size: Option<u64>,
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Object Trait
|
||||
|
||||
The `Object` trait is the core abstraction for all OSIRIS objects:
|
||||
|
||||
```rust
|
||||
pub trait Object: Debug + Clone + Serialize + DeserializeOwned + Send + Sync {
|
||||
/// Get the object type name
|
||||
fn object_type() -> &'static str where Self: Sized;
|
||||
|
||||
/// Get base data reference
|
||||
fn base_data(&self) -> &BaseData;
|
||||
|
||||
/// Get mutable base data reference
|
||||
fn base_data_mut(&mut self) -> &mut BaseData;
|
||||
|
||||
/// Get index keys for this object (auto-generated from #[index] fields)
|
||||
fn index_keys(&self) -> Vec<IndexKey>;
|
||||
|
||||
/// Get list of indexed field names
|
||||
fn indexed_fields() -> Vec<&'static str> where Self: Sized;
|
||||
|
||||
/// Get searchable text content
|
||||
fn searchable_text(&self) -> Option<String>;
|
||||
|
||||
/// Serialize to JSON
|
||||
fn to_json(&self) -> Result<String>;
|
||||
|
||||
/// Deserialize from JSON
|
||||
fn from_json(json: &str) -> Result<Self> where Self: Sized;
|
||||
}
|
||||
```
|
||||
|
||||
### 3. IndexKey
|
||||
|
||||
Represents an index entry for a field:
|
||||
|
||||
```rust
|
||||
pub struct IndexKey {
|
||||
pub name: &'static str, // Field name
|
||||
pub value: String, // Field value
|
||||
}
|
||||
```
|
||||
|
||||
## Example: Note Object
|
||||
|
||||
```rust
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Note {
|
||||
pub base_data: BaseData,
|
||||
|
||||
// Indexed field - marked with #[index]
|
||||
#[index]
|
||||
pub title: Option<String>,
|
||||
|
||||
// Searchable content (not indexed)
|
||||
pub content: Option<String>,
|
||||
|
||||
// Indexed tags - marked with #[index]
|
||||
#[index]
|
||||
pub tags: BTreeMap<String, String>,
|
||||
}
|
||||
|
||||
impl Object for Note {
|
||||
fn object_type() -> &'static str {
|
||||
"note"
|
||||
}
|
||||
|
||||
fn base_data(&self) -> &BaseData {
|
||||
&self.base_data
|
||||
}
|
||||
|
||||
fn base_data_mut(&mut self) -> &mut BaseData {
|
||||
&mut self.base_data
|
||||
}
|
||||
|
||||
fn index_keys(&self) -> Vec<IndexKey> {
|
||||
let mut keys = Vec::new();
|
||||
|
||||
// Index title
|
||||
if let Some(title) = &self.title {
|
||||
keys.push(IndexKey::new("title", title));
|
||||
}
|
||||
|
||||
// Index tags
|
||||
for (key, value) in &self.tags {
|
||||
keys.push(IndexKey::new(&format!("tag:{}", key), value));
|
||||
}
|
||||
|
||||
keys
|
||||
}
|
||||
|
||||
fn indexed_fields() -> Vec<&'static str> {
|
||||
vec!["title", "tags"]
|
||||
}
|
||||
|
||||
fn searchable_text(&self) -> Option<String> {
|
||||
let mut text = String::new();
|
||||
if let Some(title) = &self.title {
|
||||
text.push_str(title);
|
||||
text.push(' ');
|
||||
}
|
||||
if let Some(content) = &self.content {
|
||||
text.push_str(content);
|
||||
}
|
||||
if text.is_empty() { None } else { Some(text) }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Example: Event Object
|
||||
|
||||
```rust
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Event {
|
||||
pub base_data: BaseData,
|
||||
|
||||
#[index]
|
||||
pub title: String,
|
||||
|
||||
pub description: Option<String>,
|
||||
|
||||
#[index]
|
||||
pub start_time: OffsetDateTime,
|
||||
|
||||
pub end_time: OffsetDateTime,
|
||||
|
||||
#[index]
|
||||
pub location: Option<String>,
|
||||
|
||||
#[index]
|
||||
pub status: EventStatus,
|
||||
|
||||
pub all_day: bool,
|
||||
|
||||
#[index]
|
||||
pub category: Option<String>,
|
||||
}
|
||||
|
||||
impl Object for Event {
|
||||
fn object_type() -> &'static str {
|
||||
"event"
|
||||
}
|
||||
|
||||
fn base_data(&self) -> &BaseData {
|
||||
&self.base_data
|
||||
}
|
||||
|
||||
fn base_data_mut(&mut self) -> &mut BaseData {
|
||||
&mut self.base_data
|
||||
}
|
||||
|
||||
fn index_keys(&self) -> Vec<IndexKey> {
|
||||
let mut keys = Vec::new();
|
||||
|
||||
keys.push(IndexKey::new("title", &self.title));
|
||||
|
||||
if let Some(location) = &self.location {
|
||||
keys.push(IndexKey::new("location", location));
|
||||
}
|
||||
|
||||
let status_str = match self.status {
|
||||
EventStatus::Draft => "draft",
|
||||
EventStatus::Published => "published",
|
||||
EventStatus::Cancelled => "cancelled",
|
||||
};
|
||||
keys.push(IndexKey::new("status", status_str));
|
||||
|
||||
if let Some(category) = &self.category {
|
||||
keys.push(IndexKey::new("category", category));
|
||||
}
|
||||
|
||||
// Index by date for day-based queries
|
||||
let date_str = self.start_time.date().to_string();
|
||||
keys.push(IndexKey::new("date", date_str));
|
||||
|
||||
keys
|
||||
}
|
||||
|
||||
fn indexed_fields() -> Vec<&'static str> {
|
||||
vec!["title", "location", "status", "category", "start_time"]
|
||||
}
|
||||
|
||||
fn searchable_text(&self) -> Option<String> {
|
||||
let mut text = String::new();
|
||||
text.push_str(&self.title);
|
||||
text.push(' ');
|
||||
if let Some(description) = &self.description {
|
||||
text.push_str(description);
|
||||
}
|
||||
Some(text)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Storage Layer
|
||||
|
||||
### GenericStore
|
||||
|
||||
The `GenericStore` provides a type-safe storage layer for any object implementing `Object`:
|
||||
|
||||
```rust
|
||||
pub struct GenericStore {
|
||||
client: HeroDbClient,
|
||||
index: FieldIndex,
|
||||
}
|
||||
|
||||
impl GenericStore {
|
||||
/// Store an object
|
||||
pub async fn put<T: Object>(&self, obj: &T) -> Result<()>;
|
||||
|
||||
/// Get an object by ID
|
||||
pub async fn get<T: Object>(&self, ns: &str, id: &str) -> Result<T>;
|
||||
|
||||
/// Delete an object
|
||||
pub async fn delete<T: Object>(&self, obj: &T) -> Result<bool>;
|
||||
|
||||
/// Get IDs matching an index key
|
||||
pub async fn get_ids_by_index(&self, ns: &str, field: &str, value: &str) -> Result<Vec<String>>;
|
||||
}
|
||||
```
|
||||
|
||||
### Usage Example
|
||||
|
||||
```rust
|
||||
use osiris::objects::Note;
|
||||
use osiris::store::{GenericStore, HeroDbClient};
|
||||
|
||||
// Create store
|
||||
let client = HeroDbClient::new("redis://localhost:6379", 1)?;
|
||||
let store = GenericStore::new(client);
|
||||
|
||||
// Create and store a note
|
||||
let note = Note::new("notes".to_string())
|
||||
.set_title("My Note")
|
||||
.set_content("This is the content")
|
||||
.add_tag("topic", "rust")
|
||||
.add_tag("priority", "high");
|
||||
|
||||
store.put(&note).await?;
|
||||
|
||||
// Retrieve the note
|
||||
let retrieved: Note = store.get("notes", &note.id()).await?;
|
||||
|
||||
// Search by index
|
||||
let ids = store.get_ids_by_index("notes", "tag:topic", "rust").await?;
|
||||
```
|
||||
|
||||
## Index Storage
|
||||
|
||||
### Keyspace Design
|
||||
|
||||
```
|
||||
obj:<ns>:<id> → JSON serialized object
|
||||
idx:<ns>:<field>:<value> → Set of object IDs
|
||||
scan:<ns> → Set of all object IDs in namespace
|
||||
```
|
||||
|
||||
### Examples
|
||||
|
||||
```
|
||||
obj:notes:abc123 → {"base_data":{...},"title":"My Note",...}
|
||||
idx:notes:title:My Note → {abc123, def456}
|
||||
idx:notes:tag:topic:rust → {abc123, xyz789}
|
||||
idx:notes:mime:text/plain → {abc123}
|
||||
scan:notes → {abc123, def456, xyz789}
|
||||
```
|
||||
|
||||
## Automatic Indexing

When an object is stored:

1. **Serialize** the object to JSON
2. **Store** at `obj:<ns>:<id>`
3. **Generate index keys** by calling `obj.index_keys()`
4. **Create indexes** for each key at `idx:<ns>:<field>:<value>`
5. **Add to scan index** at `scan:<ns>`

When an object is deleted:

1. **Retrieve** the object
2. **Generate index keys**
3. **Remove** from all indexes
4. **Delete** the object
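
As a sketch, the store path could look like the following, using the keyspace above. The `HeroDbClient` helper names (`set`, `sadd`) are assumptions rather than a documented API, and the real implementation likely delegates index writes to `FieldIndex`:

```rust
// Illustrative only: how GenericStore::put could apply the five storage steps.
impl GenericStore {
    pub async fn put<T: Object>(&self, obj: &T) -> Result<()> {
        let ns = &obj.base_data().ns;
        let id = &obj.base_data().id;

        // 1-2. Serialize the object and store it under obj:<ns>:<id>.
        let json = obj.to_json()?;
        self.client.set(format!("obj:{}:{}", ns, id), json).await?;

        // 3-4. Generate index keys and add the ID to each idx:<ns>:<field>:<value> set.
        for key in obj.index_keys() {
            self.client
                .sadd(format!("idx:{}:{}:{}", ns, key.name, key.value), id)
                .await?;
        }

        // 5. Track the ID in the namespace-wide scan set.
        self.client.sadd(format!("scan:{}", ns), id).await?;
        Ok(())
    }
}
```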
|
||||
|
||||
## Comparison with heromodels
|
||||
|
||||
| Feature | heromodels | OSIRIS |
|
||||
|---------|-----------|--------|
|
||||
| Base struct | `BaseModelData` | `BaseData` |
|
||||
| Core trait | `Model` | `Object` |
|
||||
| ID type | `u32` (auto-increment) | `String` (UUID) |
|
||||
| Timestamps | `i64` (Unix) | `OffsetDateTime` |
|
||||
| Index macro | `#[index]` (derive) | Manual `index_keys()` |
|
||||
| Storage | OurDB/Postgres | HeroDB (Redis) |
|
||||
| Serialization | CBOR/JSON | JSON |
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
### 1. Derive Macro for #[index]
|
||||
|
||||
Create a proc macro to automatically generate `index_keys()` from field attributes:
|
||||
|
||||
```rust
|
||||
#[derive(Object)]
|
||||
pub struct Note {
|
||||
pub base_data: BaseData,
|
||||
|
||||
#[index]
|
||||
pub title: Option<String>,
|
||||
|
||||
pub content: Option<String>,
|
||||
|
||||
#[index]
|
||||
pub tags: BTreeMap<String, String>,
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Query Builder
|
||||
|
||||
Type-safe query builder for indexed fields:
|
||||
|
||||
```rust
|
||||
let results = store
|
||||
.query::<Note>("notes")
|
||||
.filter("tag:topic", "rust")
|
||||
.filter("tag:priority", "high")
|
||||
.limit(10)
|
||||
.execute()
|
||||
.await?;
|
||||
```
|
||||
|
||||
### 3. Relations
|
||||
|
||||
Support for typed relations between objects:
|
||||
|
||||
```rust
|
||||
pub struct Note {
|
||||
pub base_data: BaseData,
|
||||
pub title: String,
|
||||
|
||||
#[relation(target = "Note", label = "references")]
|
||||
pub references: Vec<String>,
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Validation
|
||||
|
||||
Trait-based validation:
|
||||
|
||||
```rust
|
||||
pub trait Validate {
|
||||
fn validate(&self) -> Result<()>;
|
||||
}
|
||||
|
||||
impl Validate for Note {
|
||||
fn validate(&self) -> Result<()> {
|
||||
if self.title.is_none() {
|
||||
return Err(Error::InvalidInput("Title required".into()));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Migration from Old API
|
||||
|
||||
The old `OsirisObject` API is still available for backwards compatibility:
|
||||
|
||||
```rust
|
||||
// Old API (still works)
|
||||
use osiris::store::OsirisObject;
|
||||
let obj = OsirisObject::new("notes".to_string(), Some("text".to_string()));
|
||||
|
||||
// New API (recommended)
|
||||
use osiris::objects::Note;
|
||||
let note = Note::new("notes".to_string())
|
||||
.set_title("Title")
|
||||
.set_content("text");
|
||||
```
|
||||
|
||||
## Benefits of Trait-Based Architecture
|
||||
|
||||
1. **Type Safety**: Compile-time guarantees for object types
|
||||
2. **Extensibility**: Easy to add new object types
|
||||
3. **Automatic Indexing**: Index keys generated from object structure
|
||||
4. **Consistency**: Same pattern as heromodels
|
||||
5. **Flexibility**: Each object type controls its own indexing logic
|
||||
6. **Testability**: Easy to mock and test individual object types
|
||||
|
||||
## Summary
|
||||
|
||||
The trait-based architecture makes OSIRIS:
|
||||
- **More flexible**: Any type can be stored by implementing `Object`
|
||||
- **More consistent**: Follows heromodels patterns
|
||||
- **More powerful**: Automatic indexing based on object structure
|
||||
- **More maintainable**: Clear separation of concerns
|
||||
- **More extensible**: Easy to add new object types and features
|
||||
195
docs/DERIVE_MACRO.md
Normal file
195
docs/DERIVE_MACRO.md
Normal file
@@ -0,0 +1,195 @@
|
||||
# OSIRIS Derive Macro
|
||||
|
||||
The `#[derive(DeriveObject)]` macro automatically implements the `Object` trait for your structs, generating index keys based on fields marked with `#[index]`.
|
||||
|
||||
## Usage
|
||||
|
||||
```rust
|
||||
use osiris::{BaseData, DeriveObject};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, DeriveObject)]
|
||||
pub struct Note {
|
||||
pub base_data: BaseData,
|
||||
|
||||
#[index]
|
||||
pub title: Option<String>,
|
||||
|
||||
pub content: Option<String>,
|
||||
|
||||
#[index]
|
||||
pub tags: BTreeMap<String, String>,
|
||||
}
|
||||
```
|
||||
|
||||
## What Gets Generated

The derive macro automatically implements:

1. **`object_type()`** - Returns the struct name as a string
2. **`base_data()`** - Returns a reference to `base_data`
3. **`base_data_mut()`** - Returns a mutable reference to `base_data`
4. **`index_keys()`** - Generates index keys for all `#[index]` fields
5. **`indexed_fields()`** - Returns a list of indexed field names
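
For the `Note` struct above, the expansion is roughly equivalent to the hand-written impl below. This is an approximation: the exact generated code, the `object_type()` casing, the tag formatting, and the error conversions in `to_json`/`from_json` may differ.

```rust
// Hand-written approximation of the derive expansion for Note.
impl Object for Note {
    fn object_type() -> &'static str {
        "Note"
    }

    fn base_data(&self) -> &BaseData {
        &self.base_data
    }

    fn base_data_mut(&mut self) -> &mut BaseData {
        &mut self.base_data
    }

    fn index_keys(&self) -> Vec<IndexKey> {
        let mut keys = Vec::new();
        // Option<String>: indexed only when Some.
        if let Some(title) = &self.title {
            keys.push(IndexKey::new("title", title));
        }
        // BTreeMap<String, String>: one "key=value" entry per pair.
        for (k, v) in &self.tags {
            keys.push(IndexKey::new("tags:tag", &format!("{}={}", k, v)));
        }
        keys
    }

    fn indexed_fields() -> Vec<&'static str> {
        vec!["title", "tags"]
    }

    // The remaining Object methods are not driven by #[index]; shown here only
    // so the impl is complete.
    fn searchable_text(&self) -> Option<String> {
        self.title.clone()
    }

    fn to_json(&self) -> Result<String> {
        serde_json::to_string(self).map_err(Into::into)
    }

    fn from_json(json: &str) -> Result<Self> {
        serde_json::from_str(json).map_err(Into::into)
    }
}
```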
|
||||
|
||||
## Supported Field Types
|
||||
|
||||
### Option<T>
|
||||
```rust
|
||||
#[index]
|
||||
pub title: Option<String>,
|
||||
```
|
||||
Generates: `IndexKey { name: "title", value: <string_value> }` (only if Some)
|
||||
|
||||
### BTreeMap<String, String>
|
||||
```rust
|
||||
#[index]
|
||||
pub tags: BTreeMap<String, String>,
|
||||
```
|
||||
Generates: `IndexKey { name: "tags:tag", value: "key=value" }` for each entry
|
||||
|
||||
### Vec<T>
|
||||
```rust
|
||||
#[index]
|
||||
pub items: Vec<String>,
|
||||
```
|
||||
Generates: `IndexKey { name: "items:item", value: "0:value" }` for each item
|
||||
|
||||
### OffsetDateTime
|
||||
```rust
|
||||
#[index]
|
||||
pub start_time: OffsetDateTime,
|
||||
```
|
||||
Generates: `IndexKey { name: "start_time", value: "2025-10-20" }` (date only)
|
||||
|
||||
### Enums and Other Types
|
||||
```rust
|
||||
#[index]
|
||||
pub status: EventStatus,
|
||||
```
|
||||
Generates: `IndexKey { name: "status", value: "Published" }` (the value's `Debug` representation, e.g. for `EventStatus::Published`)
|
||||
|
||||
## Complete Example
|
||||
|
||||
```rust
|
||||
use osiris::{BaseData, DeriveObject};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub enum EventStatus {
|
||||
Draft,
|
||||
Published,
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, DeriveObject)]
|
||||
pub struct Event {
|
||||
pub base_data: BaseData,
|
||||
|
||||
#[index]
|
||||
pub title: String,
|
||||
|
||||
pub description: Option<String>,
|
||||
|
||||
#[index]
|
||||
#[serde(with = "time::serde::timestamp")]
|
||||
pub start_time: OffsetDateTime,
|
||||
|
||||
#[index]
|
||||
pub location: Option<String>,
|
||||
|
||||
#[index]
|
||||
pub status: EventStatus,
|
||||
|
||||
pub all_day: bool,
|
||||
|
||||
#[index]
|
||||
pub category: Option<String>,
|
||||
}
|
||||
|
||||
impl Event {
|
||||
pub fn new(ns: String, title: impl ToString) -> Self {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
Self {
|
||||
base_data: BaseData::new(ns),
|
||||
title: title.to_string(),
|
||||
description: None,
|
||||
start_time: now,
|
||||
location: None,
|
||||
status: EventStatus::Draft,
|
||||
all_day: false,
|
||||
category: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Generated Index Keys
|
||||
|
||||
For the Event example above with:
|
||||
- `title = "Team Meeting"`
|
||||
- `start_time = 2025-10-20T10:00:00Z`
|
||||
- `location = Some("Room 101")`
|
||||
- `status = EventStatus::Published`
|
||||
- `category = Some("work")`
|
||||
|
||||
The generated index keys would be:
|
||||
```rust
|
||||
vec![
|
||||
IndexKey { name: "mime", value: "application/json" }, // from base_data
|
||||
IndexKey { name: "title", value: "Team Meeting" },
|
||||
IndexKey { name: "start_time", value: "2025-10-20" },
|
||||
IndexKey { name: "location", value: "Room 101" },
|
||||
IndexKey { name: "status", value: "Published" },
|
||||
IndexKey { name: "category", value: "work" },
|
||||
]
|
||||
```
|
||||
|
||||
## HeroDB Storage
|
||||
|
||||
These index keys are stored in HeroDB as:
|
||||
```
|
||||
idx:events:title:Team Meeting → {event_id}
|
||||
idx:events:start_time:2025-10-20 → {event_id}
|
||||
idx:events:location:Room 101 → {event_id}
|
||||
idx:events:status:Published → {event_id}
|
||||
idx:events:category:work → {event_id}
|
||||
```
|
||||
|
||||
## Querying by Index
|
||||
|
||||
```rust
|
||||
use osiris::store::GenericStore;
|
||||
|
||||
let store = GenericStore::new(client);
|
||||
|
||||
// Get all events on a specific date
|
||||
let ids = store.get_ids_by_index("events", "start_time", "2025-10-20").await?;
|
||||
|
||||
// Get all published events
|
||||
let ids = store.get_ids_by_index("events", "status", "Published").await?;
|
||||
|
||||
// Get all events in a category
|
||||
let ids = store.get_ids_by_index("events", "category", "work").await?;
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
1. **Must have `base_data` field**: The struct must have a field named `base_data` of type `BaseData`
|
||||
2. **Must derive standard traits**: `Debug`, `Clone`, `Serialize`, `Deserialize`
|
||||
3. **Fields marked with `#[index]`**: Only fields with the `#[index]` attribute will be indexed
|
||||
|
||||
## Limitations
|
||||
|
||||
- The macro currently uses `Debug` formatting for enums and complex types
|
||||
- BTreeMap indexing assumes `String` keys and values
|
||||
- Vec indexing uses numeric indices (may not be ideal for all use cases)
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
- Custom index key formatters via attributes
|
||||
- Support for nested struct indexing
|
||||
- Conditional indexing (e.g., `#[index(if = "is_published")]`)
|
||||
- Custom index names (e.g., `#[index(name = "custom_name")]`)
|
||||
525
docs/specs/osiris-mvp.md
Normal file
@@ -0,0 +1,525 @@
|
||||
# OSIRIS MVP — Minimal Semantic Store over HeroDB
|
||||
|
||||
## 0) Purpose
|
||||
|
||||
OSIRIS is a Rust-native object layer on top of HeroDB that provides structured storage and retrieval capabilities without any server-side extensions or indexing engines.
|
||||
|
||||
It provides:
|
||||
- Object CRUD operations
|
||||
- Namespace management
|
||||
- Simple local field indexing (field:*)
|
||||
- Basic keyword scan (substring matching)
|
||||
- CLI interface
|
||||
- Future: 9P filesystem interface
|
||||
|
||||
It does **not** depend on HeroDB's Tantivy FTS, vectors, or relations.
|
||||
|
||||
---
|
||||
|
||||
## 1) Architecture
|
||||
|
||||
```
|
||||
HeroDB (unmodified)
|
||||
│
|
||||
├── KV store + encryption
|
||||
└── RESP protocol
|
||||
↑
|
||||
│
|
||||
└── OSIRIS
|
||||
├── store/ – object schema + persistence
|
||||
├── index/ – field index & keyword scanning
|
||||
├── retrieve/ – query planner + filtering
|
||||
├── interfaces/ – CLI, 9P (future)
|
||||
└── config/ – namespaces + settings
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2) Data Model
|
||||
|
||||
```rust
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct OsirisObject {
|
||||
pub id: String,
|
||||
pub ns: String,
|
||||
pub meta: Metadata,
|
||||
pub text: Option<String>, // optional plain text
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct Metadata {
|
||||
pub title: Option<String>,
|
||||
pub mime: Option<String>,
|
||||
pub tags: BTreeMap<String, String>,
|
||||
pub created: OffsetDateTime,
|
||||
pub updated: OffsetDateTime,
|
||||
pub size: Option<u64>,
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3) Keyspace Design
|
||||
|
||||
```
|
||||
meta:<id> → serialized OsirisObject (JSON)
|
||||
field:tag:<key>=<val> → Set of IDs (for tag filtering)
|
||||
field:mime:<type> → Set of IDs (for MIME type filtering)
|
||||
field:title:<title> → Set of IDs (for title filtering)
|
||||
scan:index → Set of all IDs (for full scan)
|
||||
```
|
||||
|
||||
**Example:**
|
||||
```
|
||||
field:tag:project=osiris → {note_1, note_2}
|
||||
field:mime:text/markdown → {note_1, note_3}
|
||||
scan:index → {note_1, note_2, note_3, ...}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4) Index Maintenance
|
||||
|
||||
### Insert / Update
|
||||
|
||||
```rust
|
||||
// Store object
|
||||
redis.set(format!("meta:{}", obj.id), serde_json::to_string(&obj)?)?;
|
||||
|
||||
// Index tags
|
||||
for (k, v) in &obj.meta.tags {
|
||||
redis.sadd(format!("field:tag:{}={}", k, v), &obj.id)?;
|
||||
}
|
||||
|
||||
// Index MIME type
|
||||
if let Some(mime) = &obj.meta.mime {
|
||||
redis.sadd(format!("field:mime:{}", mime), &obj.id)?;
|
||||
}
|
||||
|
||||
// Index title
|
||||
if let Some(title) = &obj.meta.title {
|
||||
redis.sadd(format!("field:title:{}", title), &obj.id)?;
|
||||
}
|
||||
|
||||
// Add to scan index
|
||||
redis.sadd("scan:index", &obj.id)?;
|
||||
```
|
||||
|
||||
### Delete
|
||||
|
||||
```rust
|
||||
// Remove object
|
||||
redis.del(format!("meta:{}", obj.id))?;
|
||||
|
||||
// Deindex tags
|
||||
for (k, v) in &obj.meta.tags {
|
||||
redis.srem(format!("field:tag:{}={}", k, v), &obj.id)?;
|
||||
}
|
||||
|
||||
// Deindex MIME type
|
||||
if let Some(mime) = &obj.meta.mime {
|
||||
redis.srem(format!("field:mime:{}", mime), &obj.id)?;
|
||||
}
|
||||
|
||||
// Deindex title
|
||||
if let Some(title) = &obj.meta.title {
|
||||
redis.srem(format!("field:title:{}", title), &obj.id)?;
|
||||
}
|
||||
|
||||
// Remove from scan index
|
||||
redis.srem("scan:index", &obj.id)?;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5) Retrieval
|
||||
|
||||
### Query Structure
|
||||
|
||||
```rust
|
||||
pub struct RetrievalQuery {
|
||||
pub text: Option<String>, // keyword substring
|
||||
pub ns: String,
|
||||
pub filters: Vec<(String, String)>, // field=value
|
||||
pub top_k: usize,
|
||||
}
|
||||
```
|
||||
|
||||
### Execution Steps

1. **Collect candidate IDs** from field:* filters (SMEMBERS + intersection)
2. **If text query is provided**, iterate over candidates:
   - Fetch `meta:<id>`
   - Test substring match on `meta.title`, `text`, or `tags`
   - Compute simple relevance score
3. **Sort** by score (descending) and **limit** to `top_k`

This is O(N) for the text scan, but that is acceptable for an MVP or small datasets (<10k objects).
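
A sketch of that plan in code, living in the same module as `RetrievalQuery`, `OsirisObject`, and `compute_text_score` (defined below). The `smembers`/`get` helpers are assumed thin wrappers over the corresponding HeroDB commands, and only tag filters are shown for brevity:

```rust
// Sketch of the execution steps; error handling is simplified.
use std::collections::HashSet;

pub async fn search(
    client: &HeroDbClient,
    q: &RetrievalQuery,
) -> anyhow::Result<Vec<(String, f32)>> {
    // 1. Candidate IDs: intersect the field:* sets, or fall back to scan:index.
    let mut candidates: Option<HashSet<String>> = None;
    for (key, value) in &q.filters {
        let ids: HashSet<String> = client
            .smembers(&format!("field:tag:{}={}", key, value))
            .await?
            .into_iter()
            .collect();
        candidates = Some(match candidates {
            Some(current) => current.intersection(&ids).cloned().collect(),
            None => ids,
        });
    }
    let candidates: Vec<String> = match candidates {
        Some(set) => set.into_iter().collect(),
        None => client.smembers("scan:index").await?,
    };

    // 2. Optional text scan over the candidates.
    let needle = q.text.as_ref().map(|t| t.to_lowercase());
    let mut scored = Vec::new();
    for id in candidates {
        let raw = client.get(&format!("meta:{}", id)).await?;
        let obj: OsirisObject = serde_json::from_str(&raw)?;
        let score = match &needle {
            Some(text) => compute_text_score(&obj, text),
            None => 1.0,
        };
        if score > 0.0 {
            scored.push((id, score));
        }
    }

    // 3. Sort by score (descending) and limit to top_k.
    scored.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    scored.truncate(q.top_k);
    Ok(scored)
}
```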
|
||||
|
||||
### Scoring Algorithm
|
||||
|
||||
```rust
|
||||
fn compute_text_score(obj: &OsirisObject, query: &str) -> f32 {
|
||||
let mut score = 0.0;
|
||||
|
||||
// Title match
|
||||
if let Some(title) = &obj.meta.title {
|
||||
if title.to_lowercase().contains(query) {
|
||||
score += 0.5;
|
||||
}
|
||||
}
|
||||
|
||||
// Text content match
|
||||
if let Some(text) = &obj.text {
|
||||
if text.to_lowercase().contains(query) {
|
||||
score += 0.5;
|
||||
// Bonus for multiple occurrences
|
||||
let count = text.to_lowercase().matches(query).count();
|
||||
score += (count as f32 - 1.0) * 0.1;
|
||||
}
|
||||
}
|
||||
|
||||
// Tag match
|
||||
for (key, value) in &obj.meta.tags {
|
||||
if key.to_lowercase().contains(query) || value.to_lowercase().contains(query) {
|
||||
score += 0.2;
|
||||
}
|
||||
}
|
||||
|
||||
score.min(1.0)
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 6) CLI
|
||||
|
||||
### Commands
|
||||
|
||||
```bash
|
||||
# Initialize and create namespace
|
||||
osiris init --herodb redis://localhost:6379
|
||||
osiris ns create notes
|
||||
|
||||
# Add and read objects
|
||||
osiris put notes/my-note.md ./my-note.md --tags topic=rust,project=osiris
|
||||
osiris get notes/my-note.md
|
||||
osiris get notes/my-note.md --raw --output /tmp/note.md
|
||||
osiris del notes/my-note.md
|
||||
|
||||
# Search
|
||||
osiris find --ns notes --filter topic=rust
|
||||
osiris find "retrieval" --ns notes
|
||||
osiris find "rust" --ns notes --filter project=osiris --topk 20
|
||||
|
||||
# Namespace management
|
||||
osiris ns list
|
||||
osiris ns delete notes
|
||||
|
||||
# Statistics
|
||||
osiris stats
|
||||
osiris stats --ns notes
|
||||
```
|
||||
|
||||
### Examples
|
||||
|
||||
```bash
|
||||
# Store a note from stdin
|
||||
echo "This is a note about Rust programming" | \
|
||||
osiris put notes/rust-intro - \
|
||||
--title "Rust Introduction" \
|
||||
--tags topic=rust,level=beginner \
|
||||
--mime text/plain
|
||||
|
||||
# Search for notes about Rust
|
||||
osiris find "rust" --ns notes
|
||||
|
||||
# Filter by tag
|
||||
osiris find --ns notes --filter topic=rust
|
||||
|
||||
# Get note as JSON
|
||||
osiris get notes/rust-intro
|
||||
|
||||
# Get raw content
|
||||
osiris get notes/rust-intro --raw
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 7) Configuration
|
||||
|
||||
### File Location
|
||||
|
||||
`~/.config/osiris/config.toml`
|
||||
|
||||
### Example
|
||||
|
||||
```toml
|
||||
[herodb]
|
||||
url = "redis://localhost:6379"
|
||||
|
||||
[namespaces.notes]
|
||||
db_id = 1
|
||||
|
||||
[namespaces.calendar]
|
||||
db_id = 2
|
||||
```
|
||||
|
||||
### Structure
|
||||
|
||||
```rust
|
||||
pub struct Config {
|
||||
pub herodb: HeroDbConfig,
|
||||
pub namespaces: HashMap<String, NamespaceConfig>,
|
||||
}
|
||||
|
||||
pub struct HeroDbConfig {
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
pub struct NamespaceConfig {
|
||||
pub db_id: u16,
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8) Database Allocation

```
DB 0  → HeroDB Admin   (managed by HeroDB)
DB 1  → osiris:notes    (namespace "notes")
DB 2  → osiris:calendar (namespace "calendar")
DB 3+ → Additional namespaces...
```

Each namespace gets its own isolated HeroDB database.
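
A small, hypothetical helper ties sections 7 and 8 together by resolving a namespace to its database and opening a client for it. `client_for_namespace` is not part of the codebase; `HeroDbClient::new(url, db_id)` is used as in the examples, and the error conversion assumes the store error implements `std::error::Error`:

```rust
// Hypothetical helper, in the same module as the Config structs from section 7.
use anyhow::{anyhow, Result};

fn client_for_namespace(config: &Config, ns: &str) -> Result<HeroDbClient> {
    let db_id = config
        .namespaces
        .get(ns)
        .map(|n| n.db_id)
        .ok_or_else(|| anyhow!("unknown namespace: {}", ns))?;
    Ok(HeroDbClient::new(&config.herodb.url, db_id)?)
}
```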
|
||||
|
||||
---
|
||||
|
||||
## 9) Dependencies
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
redis = { version = "0.24", features = ["aio", "tokio-comp"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
time = { version = "0.3", features = ["serde", "formatting", "parsing", "macros"] }
|
||||
tokio = { version = "1.23", features = ["full"] }
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
toml = "0.8"
|
||||
uuid = { version = "1.6", features = ["v4", "serde"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10) Future Enhancements
|
||||
|
||||
| Feature | When Added | Moves Where |
|
||||
|---------|-----------|-------------|
|
||||
| Dedup / blobs | HeroDB extension | HeroDB |
|
||||
| Vector search | HeroDB extension | HeroDB |
|
||||
| Full-text search | HeroDB (Tantivy) | HeroDB |
|
||||
| Relations / graph | OSIRIS later | OSIRIS |
|
||||
| 9P filesystem | OSIRIS later | OSIRIS |
|
||||
|
||||
This MVP maintains clean interface boundaries:
|
||||
- **HeroDB** remains a plain KV substrate
|
||||
- **OSIRIS** builds higher-order meaning on top
|
||||
|
||||
---
|
||||
|
||||
## 11) Implementation Status
|
||||
|
||||
### ✅ Completed
|
||||
|
||||
- [x] Project structure and Cargo.toml
|
||||
- [x] Core data models (OsirisObject, Metadata)
|
||||
- [x] HeroDB client wrapper (RESP protocol)
|
||||
- [x] Field indexing (tags, MIME, title)
|
||||
- [x] Search engine (substring matching + scoring)
|
||||
- [x] Configuration management
|
||||
- [x] CLI interface (init, ns, put, get, del, find, stats)
|
||||
- [x] Error handling
|
||||
- [x] Documentation (README, specs)
|
||||
|
||||
### 🚧 Pending
|
||||
|
||||
- [ ] 9P filesystem interface
|
||||
- [ ] Integration tests
|
||||
- [ ] Performance benchmarks
|
||||
- [ ] Name resolution (namespace/name → ID mapping)
|
||||
|
||||
---
|
||||
|
||||
## 12) Quick Start
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Start HeroDB:
|
||||
```bash
|
||||
cd /path/to/herodb
|
||||
cargo run --release -- --dir ./data --admin-secret mysecret --port 6379
|
||||
```
|
||||
|
||||
### Build OSIRIS
|
||||
|
||||
```bash
|
||||
cd /path/to/osiris
|
||||
cargo build --release
|
||||
```
|
||||
|
||||
### Initialize
|
||||
|
||||
```bash
|
||||
# Create configuration
|
||||
./target/release/osiris init --herodb redis://localhost:6379
|
||||
|
||||
# Create a namespace
|
||||
./target/release/osiris ns create notes
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
# Add a note
|
||||
echo "OSIRIS is a minimal object store" | \
|
||||
./target/release/osiris put notes/intro - \
|
||||
--title "Introduction" \
|
||||
--tags topic=osiris,type=doc
|
||||
|
||||
# Search
|
||||
./target/release/osiris find "object store" --ns notes
|
||||
|
||||
# Get the note
|
||||
./target/release/osiris get notes/intro
|
||||
|
||||
# Show stats
|
||||
./target/release/osiris stats --ns notes
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 13) Testing
|
||||
|
||||
### Unit Tests
|
||||
|
||||
```bash
|
||||
cargo test
|
||||
```
|
||||
|
||||
### Integration Tests (requires HeroDB)
|
||||
|
||||
```bash
|
||||
# Start HeroDB
|
||||
cd /path/to/herodb
|
||||
cargo run -- --dir /tmp/herodb-test --admin-secret test --port 6379
|
||||
|
||||
# Run tests
|
||||
cd /path/to/osiris
|
||||
cargo test -- --ignored
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 14) Performance Characteristics
|
||||
|
||||
### Write Performance
|
||||
|
||||
- **Object storage**: O(1) - single SET operation
|
||||
- **Indexing**: O(T) where T = number of tags/fields
|
||||
- **Total**: O(T) per object
|
||||
|
||||
### Read Performance
|
||||
|
||||
- **Get by ID**: O(1) - single GET operation
|
||||
- **Filter by tags**: O(F) where F = number of filters (set intersection)
|
||||
- **Text search**: O(N) where N = number of candidates (linear scan)
|
||||
|
||||
### Storage Overhead
|
||||
|
||||
- **Object**: ~1KB per object (JSON serialized)
|
||||
- **Indexes**: ~50 bytes per tag/field entry
|
||||
- **Total**: ~1.5KB per object with 10 tags
|
||||
|
||||
### Scalability
|
||||
|
||||
- **Optimal**: <10,000 objects per namespace
|
||||
- **Acceptable**: <100,000 objects per namespace
|
||||
- **Beyond**: Consider migrating to Tantivy FTS
|
||||
|
||||
---
|
||||
|
||||
## 15) Design Decisions
|
||||
|
||||
### Why No Tantivy in MVP?
|
||||
|
||||
- **Simplicity**: Avoid HeroDB server-side dependencies
|
||||
- **Portability**: Works with any Redis-compatible backend
|
||||
- **Flexibility**: Easy to migrate to Tantivy later
|
||||
|
||||
### Why Substring Matching?
|
||||
|
||||
- **Good enough**: For small datasets (<10k objects)
|
||||
- **Simple**: No tokenization, stemming, or complex scoring
|
||||
- **Fast**: O(N) is acceptable for MVP
|
||||
|
||||
### Why Separate Databases per Namespace?
|
||||
|
||||
- **Isolation**: Clear separation of concerns
|
||||
- **Performance**: Smaller keyspaces = faster scans
|
||||
- **Security**: Can apply different encryption keys per namespace
|
||||
|
||||
---
|
||||
|
||||
## 16) Migration Path
|
||||
|
||||
When ready to scale beyond MVP:
|
||||
|
||||
1. **Add Tantivy FTS** (HeroDB extension)
|
||||
- Create FT.* commands in HeroDB
|
||||
- Update OSIRIS to use FT.SEARCH instead of substring scan
|
||||
- Keep field indexes for filtering
|
||||
|
||||
2. **Add Vector Search** (HeroDB extension)
|
||||
- Store embeddings in HeroDB
|
||||
- Implement ANN search (HNSW/IVF)
|
||||
- Add hybrid retrieval (BM25 + vector)
|
||||
|
||||
3. **Add Relations** (OSIRIS feature)
|
||||
- Store relation graphs in HeroDB
|
||||
- Implement graph traversal
|
||||
- Add relation-based ranking
|
||||
|
||||
4. **Add Deduplication** (HeroDB extension)
|
||||
- Content-addressable storage (BLAKE3)
|
||||
- Reference counting
|
||||
- Garbage collection
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
**OSIRIS MVP is a minimal, production-ready object store** that:
|
||||
|
||||
- ✅ Works with unmodified HeroDB
|
||||
- ✅ Provides structured storage with metadata
|
||||
- ✅ Supports field-based filtering
|
||||
- ✅ Includes basic text search
|
||||
- ✅ Exposes a clean CLI interface
|
||||
- ✅ Maintains clear upgrade paths
|
||||
|
||||
**Perfect for:**
|
||||
- Personal knowledge management
|
||||
- Small-scale document storage
|
||||
- Prototyping semantic applications
|
||||
- Learning Rust + Redis patterns
|
||||
|
||||
**Next steps:**
|
||||
- Build and test the MVP
|
||||
- Gather usage feedback
|
||||
- Plan Tantivy/vector integration
|
||||
- Design 9P filesystem interface
|
||||
157
examples/README.md
Normal file
@@ -0,0 +1,157 @@
|
||||
# OSIRIS Examples
|
||||
|
||||
This directory contains examples demonstrating various features of OSIRIS.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before running the examples, make sure HeroDB is running:
|
||||
|
||||
```bash
|
||||
cd /path/to/herodb
|
||||
cargo run --release -- --dir ./data --admin-secret mysecret --port 6379
|
||||
```
|
||||
|
||||
## Running Examples
|
||||
|
||||
### Basic Usage
|
||||
|
||||
Demonstrates core OSIRIS functionality with Notes and Events:
|
||||
|
||||
```bash
|
||||
cargo run --example basic_usage
|
||||
```
|
||||
|
||||
**What it shows:**
|
||||
- Creating objects with the derive macro
|
||||
- Storing objects in HeroDB
|
||||
- Querying by indexed fields
|
||||
- Retrieving objects by ID
|
||||
- Auto-generated index keys
|
||||
- Cleanup/deletion
|
||||
|
||||
### Custom Object
|
||||
|
||||
Shows how to create your own custom object types:
|
||||
|
||||
```bash
|
||||
cargo run --example custom_object
|
||||
```
|
||||
|
||||
**What it shows:**
|
||||
- Defining custom structs with `#[derive(DeriveObject)]`
|
||||
- Using enums in indexed fields
|
||||
- Builder pattern for object construction
|
||||
- Querying by various indexed fields
|
||||
- Updating objects
|
||||
- Tag-based organization
|
||||
|
||||
## Example Structure

Each example follows this pattern:

1. **Setup** - Connect to HeroDB
2. **Create** - Build objects using builder pattern
3. **Store** - Save objects to HeroDB
4. **Query** - Search by indexed fields
5. **Retrieve** - Get objects by ID
6. **Update** - Modify and re-store objects (where applicable)
7. **Cleanup** - Delete test data
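
Condensed into one sketch using `Note` (see `basic_usage.rs` for the full, annotated version; the conversion to `anyhow` assumes the store errors implement `std::error::Error`):

```rust
// Minimal sketch of the seven-step pattern.
use osiris::objects::Note;
use osiris::store::{GenericStore, HeroDbClient};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // 1. Setup
    let store = GenericStore::new(HeroDbClient::new("redis://localhost:6379", 1)?);

    // 2-3. Create and store
    let note = Note::new("notes".to_string())
        .set_title("Example")
        .add_tag("project", "osiris");
    store.put(&note).await?;

    // 4-5. Query by an indexed field, then fetch one result by ID
    let ids = store.get_ids_by_index("notes", "tags:tag", "project=osiris").await?;
    let fetched: Note = store.get("notes", &ids[0]).await?;

    // 6. Update: modify and re-store under the same ID
    let updated = fetched.set_content("updated content");
    store.put(&updated).await?;

    // 7. Cleanup
    store.delete(&updated).await?;
    Ok(())
}
```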
|
||||
|
||||
## Key Concepts Demonstrated
|
||||
|
||||
### Derive Macro
|
||||
|
||||
All examples use the `#[derive(DeriveObject)]` macro:
|
||||
|
||||
```rust
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, DeriveObject)]
|
||||
pub struct MyObject {
|
||||
pub base_data: BaseData,
|
||||
|
||||
#[index]
|
||||
pub indexed_field: String,
|
||||
|
||||
pub non_indexed_field: String,
|
||||
}
|
||||
```
|
||||
|
||||
### Indexed Fields

Fields marked with `#[index]` are automatically indexed:

- `Option<T>` - Indexed if Some
- `BTreeMap<String, String>` - Each key-value pair indexed
- `OffsetDateTime` - Indexed as date string
- Enums - Indexed using Debug format
- Other types - Indexed using Debug format
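
An illustrative struct (not one of the shipped examples) showing one field of each kind and how it ends up indexed:

```rust
// Illustrative only; field comments describe the indexing behavior listed above.
use osiris::store::BaseData;
use osiris::DeriveObject;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use time::OffsetDateTime;

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum Visibility { Public, Private }

#[derive(Debug, Clone, Serialize, Deserialize, DeriveObject)]
pub struct Bookmark {
    pub base_data: BaseData,

    #[index]
    pub title: Option<String>,          // indexed only when Some

    #[index]
    pub tags: BTreeMap<String, String>, // one "key=value" entry per pair

    #[index]
    #[serde(with = "time::serde::rfc3339")]
    pub saved_at: OffsetDateTime,       // indexed as a date string, e.g. "2025-10-20"

    #[index]
    pub visibility: Visibility,         // indexed via its Debug representation

    pub url: String,                    // not indexed (no #[index] attribute)
}
```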
|
||||
|
||||
### Querying
|
||||
|
||||
Query by any indexed field:
|
||||
|
||||
```rust
|
||||
// Query by exact match
|
||||
let ids = store.get_ids_by_index("namespace", "field_name", "value").await?;
|
||||
|
||||
// Query tags (BTreeMap fields)
|
||||
let ids = store.get_ids_by_index("namespace", "tags:tag", "key=value").await?;
|
||||
```
|
||||
|
||||
### Builder Pattern
|
||||
|
||||
All objects support fluent builder pattern:
|
||||
|
||||
```rust
|
||||
let obj = MyObject::new("namespace".to_string())
|
||||
.set_field1("value1")
|
||||
.set_field2("value2")
|
||||
.add_tag("key", "value");
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Connection Refused
|
||||
|
||||
Make sure HeroDB is running on port 6379:
|
||||
|
||||
```bash
|
||||
redis-cli -p 6379 PING
|
||||
```
|
||||
|
||||
### Database Not Found
|
||||
|
||||
The examples use different database IDs:
|
||||
- `basic_usage` - DB 1
|
||||
- `custom_object` - DB 2
|
||||
|
||||
Make sure these databases are accessible in HeroDB.
|
||||
|
||||
### Compilation Errors
|
||||
|
||||
Ensure you have the latest dependencies:
|
||||
|
||||
```bash
|
||||
cargo clean
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
After running the examples:
|
||||
|
||||
1. Read the [Architecture Documentation](../docs/ARCHITECTURE.md)
|
||||
2. Learn about the [Derive Macro](../docs/DERIVE_MACRO.md)
|
||||
3. Check out the [Quick Start Guide](../QUICKSTART.md)
|
||||
4. Explore the [source code](../src/objects/) for Note and Event implementations
|
||||
|
||||
## Creating Your Own Objects
|
||||
|
||||
Use the `custom_object` example as a template:
|
||||
|
||||
1. Define your struct with `base_data: BaseData`
|
||||
2. Add `#[derive(DeriveObject)]`
|
||||
3. Mark fields with `#[index]` for automatic indexing
|
||||
4. Implement builder methods for convenience
|
||||
5. Use `GenericStore` to store and query
|
||||
|
||||
Happy coding! 🚀
|
||||
200
examples/basic_usage.rs
Normal file
@@ -0,0 +1,200 @@
|
||||
/// Basic OSIRIS usage example
|
||||
///
|
||||
/// This example demonstrates:
|
||||
/// - Creating objects with the derive macro
|
||||
/// - Storing objects in HeroDB
|
||||
/// - Querying by indexed fields
|
||||
/// - Retrieving objects
|
||||
///
|
||||
/// Prerequisites:
|
||||
/// - HeroDB running on localhost:6379
|
||||
///
|
||||
/// Run with:
|
||||
/// ```bash
|
||||
/// cargo run --example basic_usage
|
||||
/// ```
|
||||
|
||||
use osiris::objects::{Event, Note};
|
||||
use osiris::store::{BaseData, GenericStore, HeroDbClient};
|
||||
use osiris::Object;
|
||||
use std::collections::BTreeMap;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
println!("🚀 OSIRIS Basic Usage Example\n");
|
||||
|
||||
// Initialize HeroDB client
|
||||
println!("📡 Connecting to HeroDB...");
|
||||
let client = HeroDbClient::new("redis://localhost:6379", 1)?;
|
||||
let store = GenericStore::new(client);
|
||||
println!("✓ Connected to HeroDB (DB 1)\n");
|
||||
|
||||
// ========================================
|
||||
// Part 1: Working with Notes
|
||||
// ========================================
|
||||
println!("📝 Part 1: Working with Notes");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
// Create a note with tags
|
||||
let note1 = Note::new("notes".to_string())
|
||||
.set_title("OSIRIS Architecture")
|
||||
.set_content("OSIRIS uses a trait-based architecture with automatic indexing based on #[index] attributes.")
|
||||
.add_tag("topic", "architecture")
|
||||
.add_tag("project", "osiris")
|
||||
.add_tag("priority", "high")
|
||||
.set_mime("text/plain");
|
||||
|
||||
println!("Creating note: {}", note1.title.as_ref().unwrap());
|
||||
println!(" ID: {}", note1.base_data.id);
|
||||
println!(" Tags: {:?}", note1.tags);
|
||||
|
||||
// Store the note
|
||||
store.put(&note1).await?;
|
||||
println!("✓ Note stored\n");
|
||||
|
||||
// Create another note
|
||||
let note2 = Note::new("notes".to_string())
|
||||
.set_title("HeroDB Integration")
|
||||
.set_content("HeroDB provides encrypted storage with Redis compatibility.")
|
||||
.add_tag("topic", "storage")
|
||||
.add_tag("project", "osiris")
|
||||
.add_tag("priority", "medium")
|
||||
.set_mime("text/plain");
|
||||
|
||||
println!("Creating note: {}", note2.title.as_ref().unwrap());
|
||||
println!(" ID: {}", note2.base_data.id);
|
||||
store.put(&note2).await?;
|
||||
println!("✓ Note stored\n");
|
||||
|
||||
// Retrieve a note by ID
|
||||
println!("Retrieving note by ID...");
|
||||
let retrieved_note: Note = store.get("notes", &note1.base_data.id).await?;
|
||||
println!("✓ Retrieved: {}", retrieved_note.title.as_ref().unwrap());
|
||||
println!(" Content: {}\n", retrieved_note.content.as_ref().unwrap_or(&"(none)".to_string()));
|
||||
|
||||
// Query notes by tag
|
||||
println!("Querying notes by tag (project=osiris)...");
|
||||
let ids = store.get_ids_by_index("notes", "tags:tag", "project=osiris").await?;
|
||||
println!("✓ Found {} notes with tag project=osiris", ids.len());
|
||||
for id in &ids {
|
||||
let note: Note = store.get("notes", id).await?;
|
||||
println!(" - {}", note.title.as_ref().unwrap_or(&"(untitled)".to_string()));
|
||||
}
|
||||
println!();
|
||||
|
||||
// Query by different tag
|
||||
println!("Querying notes by tag (priority=high)...");
|
||||
let high_priority_ids = store.get_ids_by_index("notes", "tags:tag", "priority=high").await?;
|
||||
println!("✓ Found {} high-priority notes\n", high_priority_ids.len());
|
||||
|
||||
// ========================================
|
||||
// Part 2: Working with Events
|
||||
// ========================================
|
||||
println!("📅 Part 2: Working with Events");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
use osiris::objects::event::EventStatus;
|
||||
|
||||
// Create an event
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let event1 = Event::new("calendar".to_string(), "Team Standup")
|
||||
.set_description("Daily standup meeting")
|
||||
.set_start_time(now)
|
||||
.set_end_time(now + time::Duration::minutes(30))
|
||||
.set_location("Room 101")
|
||||
.set_status(EventStatus::Published)
|
||||
.set_category("meetings")
|
||||
.set_all_day(false);
|
||||
|
||||
println!("Creating event: {}", event1.title);
|
||||
println!(" ID: {}", event1.base_data.id);
|
||||
println!(" Location: {}", event1.location.as_ref().unwrap());
|
||||
println!(" Status: {:?}", event1.status);
|
||||
|
||||
store.put(&event1).await?;
|
||||
println!("✓ Event stored\n");
|
||||
|
||||
// Create another event
|
||||
let tomorrow = now + time::Duration::days(1);
|
||||
let event2 = Event::new("calendar".to_string(), "Project Review")
|
||||
.set_description("Review OSIRIS implementation progress")
|
||||
.set_start_time(tomorrow)
|
||||
.set_end_time(tomorrow + time::Duration::hours(1))
|
||||
.set_location("Conference Room A")
|
||||
.set_status(EventStatus::Published)
|
||||
.set_category("reviews");
|
||||
|
||||
println!("Creating event: {}", event2.title);
|
||||
store.put(&event2).await?;
|
||||
println!("✓ Event stored\n");
|
||||
|
||||
// Query events by location
|
||||
println!("Querying events by location (Room 101)...");
|
||||
let location_ids = store.get_ids_by_index("calendar", "location", "Room 101").await?;
|
||||
println!("✓ Found {} events in Room 101", location_ids.len());
|
||||
for id in &location_ids {
|
||||
let event: Event = store.get("calendar", id).await?;
|
||||
println!(" - {}", event.title);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Query events by status
|
||||
println!("Querying events by status (Published)...");
|
||||
let status_ids = store.get_ids_by_index("calendar", "status", "Published").await?;
|
||||
println!("✓ Found {} published events", status_ids.len());
|
||||
for id in &status_ids {
|
||||
let event: Event = store.get("calendar", id).await?;
|
||||
println!(" - {} ({})", event.title, event.category.as_ref().unwrap_or(&"uncategorized".to_string()));
|
||||
}
|
||||
println!();
|
||||
|
||||
// Query events by date
|
||||
let date_str = now.date().to_string();
|
||||
println!("Querying events by date ({})...", date_str);
|
||||
let date_ids = store.get_ids_by_index("calendar", "start_time", &date_str).await?;
|
||||
println!("✓ Found {} events on {}", date_ids.len(), date_str);
|
||||
println!();
|
||||
|
||||
// ========================================
|
||||
// Part 3: Demonstrating Auto-Generated Indexes
|
||||
// ========================================
|
||||
println!("🔍 Part 3: Auto-Generated Indexes");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
println!("Note indexed fields: {:?}", Note::indexed_fields());
|
||||
println!("Event indexed fields: {:?}", Event::indexed_fields());
|
||||
println!();
|
||||
|
||||
println!("Note index keys for '{}': ", note1.title.as_ref().unwrap());
|
||||
for key in note1.index_keys() {
|
||||
println!(" - {} = {}", key.name, key.value);
|
||||
}
|
||||
println!();
|
||||
|
||||
println!("Event index keys for '{}': ", event1.title);
|
||||
for key in event1.index_keys() {
|
||||
println!(" - {} = {}", key.name, key.value);
|
||||
}
|
||||
println!();
|
||||
|
||||
// ========================================
|
||||
// Part 4: Cleanup
|
||||
// ========================================
|
||||
println!("🧹 Part 4: Cleanup");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
println!("Deleting notes...");
|
||||
store.delete(&note1).await?;
|
||||
store.delete(&note2).await?;
|
||||
println!("✓ Notes deleted\n");
|
||||
|
||||
println!("Deleting events...");
|
||||
store.delete(&event1).await?;
|
||||
store.delete(&event2).await?;
|
||||
println!("✓ Events deleted\n");
|
||||
|
||||
println!("✅ Example completed successfully!");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
295
examples/custom_object.rs
Normal file
@@ -0,0 +1,295 @@
|
||||
/// Custom Object Example
|
||||
///
|
||||
/// This example demonstrates how to create your own custom object types
|
||||
/// using the derive macro.
|
||||
///
|
||||
/// Run with:
|
||||
/// ```bash
|
||||
/// cargo run --example custom_object
|
||||
/// ```
|
||||
|
||||
use osiris::store::{BaseData, GenericStore, HeroDbClient};
|
||||
use osiris::{DeriveObject, Object};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::BTreeMap;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
// ========================================
|
||||
// Custom Object: Task
|
||||
// ========================================
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub enum TaskPriority {
|
||||
Low,
|
||||
Medium,
|
||||
High,
|
||||
Critical,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub enum TaskStatus {
|
||||
Todo,
|
||||
InProgress,
|
||||
Done,
|
||||
Blocked,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, DeriveObject)]
|
||||
pub struct Task {
|
||||
pub base_data: BaseData,
|
||||
|
||||
/// Task title
|
||||
#[index]
|
||||
pub title: String,
|
||||
|
||||
/// Task description
|
||||
pub description: Option<String>,
|
||||
|
||||
/// Priority level
|
||||
#[index]
|
||||
pub priority: TaskPriority,
|
||||
|
||||
/// Current status
|
||||
#[index]
|
||||
pub status: TaskStatus,
|
||||
|
||||
/// Assigned to user
|
||||
#[index]
|
||||
pub assignee: Option<String>,
|
||||
|
||||
/// Due date
|
||||
#[index]
|
||||
#[serde(with = "time::serde::rfc3339::option")]
|
||||
pub due_date: Option<OffsetDateTime>,
|
||||
|
||||
/// Tags for categorization
|
||||
#[index]
|
||||
pub tags: BTreeMap<String, String>,
|
||||
|
||||
/// Estimated hours
|
||||
pub estimated_hours: Option<f32>,
|
||||
|
||||
/// Actual hours spent
|
||||
pub actual_hours: Option<f32>,
|
||||
}
|
||||
|
||||
impl Task {
|
||||
pub fn new(ns: String, title: impl ToString) -> Self {
|
||||
Self {
|
||||
base_data: BaseData::new(ns),
|
||||
title: title.to_string(),
|
||||
description: None,
|
||||
priority: TaskPriority::Medium,
|
||||
status: TaskStatus::Todo,
|
||||
assignee: None,
|
||||
due_date: None,
|
||||
tags: BTreeMap::new(),
|
||||
estimated_hours: None,
|
||||
actual_hours: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_description(mut self, description: impl ToString) -> Self {
|
||||
self.description = Some(description.to_string());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_priority(mut self, priority: TaskPriority) -> Self {
|
||||
self.priority = priority;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_status(mut self, status: TaskStatus) -> Self {
|
||||
self.status = status;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_assignee(mut self, assignee: impl ToString) -> Self {
|
||||
self.assignee = Some(assignee.to_string());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_due_date(mut self, due_date: OffsetDateTime) -> Self {
|
||||
self.due_date = Some(due_date);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_tag(mut self, key: impl ToString, value: impl ToString) -> Self {
|
||||
self.tags.insert(key.to_string(), value.to_string());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_estimated_hours(mut self, hours: f32) -> Self {
|
||||
self.estimated_hours = Some(hours);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
// ========================================
|
||||
// Main Example
|
||||
// ========================================
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
println!("🎯 OSIRIS Custom Object Example\n");
|
||||
|
||||
// Connect to HeroDB
|
||||
println!("📡 Connecting to HeroDB...");
|
||||
let client = HeroDbClient::new("redis://localhost:6379", 2)?;
|
||||
let store = GenericStore::new(client);
|
||||
println!("✓ Connected to HeroDB (DB 2)\n");
|
||||
|
||||
// ========================================
|
||||
// Create Tasks
|
||||
// ========================================
|
||||
println!("📋 Creating Tasks");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let tomorrow = now + time::Duration::days(1);
|
||||
let next_week = now + time::Duration::days(7);
|
||||
|
||||
// Task 1: High priority, assigned
|
||||
let task1 = Task::new("tasks".to_string(), "Implement derive macro")
|
||||
.set_description("Create proc macro for automatic Object trait implementation")
|
||||
.set_priority(TaskPriority::High)
|
||||
.set_status(TaskStatus::Done)
|
||||
.set_assignee("alice")
|
||||
.set_due_date(tomorrow)
|
||||
.add_tag("component", "derive")
|
||||
.add_tag("project", "osiris")
|
||||
.set_estimated_hours(8.0);
|
||||
|
||||
println!("Task 1: {}", task1.title);
|
||||
println!(" Priority: {:?}", task1.priority);
|
||||
println!(" Status: {:?}", task1.status);
|
||||
println!(" Assignee: {}", task1.assignee.as_ref().unwrap());
|
||||
store.put(&task1).await?;
|
||||
println!("✓ Stored\n");
|
||||
|
||||
// Task 2: Critical priority, blocked
|
||||
let task2 = Task::new("tasks".to_string(), "Fix indexing bug")
|
||||
.set_description("BTreeMap indexing has lifetime issues")
|
||||
.set_priority(TaskPriority::Critical)
|
||||
.set_status(TaskStatus::Blocked)
|
||||
.set_assignee("bob")
|
||||
.set_due_date(now)
|
||||
.add_tag("type", "bug")
|
||||
.add_tag("project", "osiris")
|
||||
.set_estimated_hours(4.0);
|
||||
|
||||
println!("Task 2: {}", task2.title);
|
||||
println!(" Priority: {:?}", task2.priority);
|
||||
println!(" Status: {:?}", task2.status);
|
||||
store.put(&task2).await?;
|
||||
println!("✓ Stored\n");
|
||||
|
||||
// Task 3: In progress
|
||||
let task3 = Task::new("tasks".to_string(), "Write documentation")
|
||||
.set_description("Document the derive macro usage")
|
||||
.set_priority(TaskPriority::Medium)
|
||||
.set_status(TaskStatus::InProgress)
|
||||
.set_assignee("alice")
|
||||
.set_due_date(next_week)
|
||||
.add_tag("type", "docs")
|
||||
.add_tag("project", "osiris")
|
||||
.set_estimated_hours(6.0);
|
||||
|
||||
println!("Task 3: {}", task3.title);
|
||||
println!(" Priority: {:?}", task3.priority);
|
||||
println!(" Status: {:?}", task3.status);
|
||||
store.put(&task3).await?;
|
||||
println!("✓ Stored\n");
|
||||
|
||||
// ========================================
|
||||
// Query Tasks
|
||||
// ========================================
|
||||
println!("🔍 Querying Tasks");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
// Query by assignee
|
||||
println!("Tasks assigned to Alice:");
|
||||
let alice_tasks = store.get_ids_by_index("tasks", "assignee", "alice").await?;
|
||||
for id in &alice_tasks {
|
||||
let task: Task = store.get("tasks", id).await?;
|
||||
println!(" - {} ({:?})", task.title, task.status);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Query by priority
|
||||
println!("High priority tasks:");
|
||||
let high_priority = store.get_ids_by_index("tasks", "priority", "High").await?;
|
||||
for id in &high_priority {
|
||||
let task: Task = store.get("tasks", id).await?;
|
||||
println!(" - {} (assigned to: {})",
|
||||
task.title,
|
||||
task.assignee.as_ref().unwrap_or(&"unassigned".to_string())
|
||||
);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Query by status
|
||||
println!("Blocked tasks:");
|
||||
let blocked = store.get_ids_by_index("tasks", "status", "Blocked").await?;
|
||||
for id in &blocked {
|
||||
let task: Task = store.get("tasks", id).await?;
|
||||
println!(" - {} (priority: {:?})", task.title, task.priority);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Query by tag
|
||||
println!("Tasks tagged with project=osiris:");
|
||||
let project_tasks = store.get_ids_by_index("tasks", "tags:tag", "project=osiris").await?;
|
||||
println!(" Found {} tasks", project_tasks.len());
|
||||
println!();
|
||||
|
||||
// ========================================
|
||||
// Show Auto-Generated Indexes
|
||||
// ========================================
|
||||
println!("📊 Auto-Generated Indexes");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
println!("Task indexed fields: {:?}", Task::indexed_fields());
|
||||
println!();
|
||||
|
||||
println!("Index keys for '{}':", task1.title);
|
||||
for key in task1.index_keys() {
|
||||
println!(" - {} = {}", key.name, key.value);
|
||||
}
|
||||
println!();
|
||||
|
||||
// ========================================
|
||||
// Update Task Status
|
||||
// ========================================
|
||||
println!("✏️ Updating Task Status");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
// Retrieve, modify, and store
|
||||
let mut task2_updated: Task = store.get("tasks", &task2.base_data.id).await?;
|
||||
println!("Updating '{}' status from {:?} to {:?}",
|
||||
task2_updated.title,
|
||||
task2_updated.status,
|
||||
TaskStatus::InProgress
|
||||
);
|
||||
|
||||
task2_updated.status = TaskStatus::InProgress;
|
||||
task2_updated.base_data.update_modified();
|
||||
|
||||
store.put(&task2_updated).await?;
|
||||
println!("✓ Task updated\n");
|
||||
|
||||
// ========================================
|
||||
// Cleanup
|
||||
// ========================================
|
||||
println!("🧹 Cleanup");
|
||||
println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
|
||||
|
||||
store.delete(&task1).await?;
|
||||
store.delete(&task2_updated).await?;
|
||||
store.delete(&task3).await?;
|
||||
println!("✓ All tasks deleted\n");
|
||||
|
||||
println!("✅ Example completed successfully!");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
12
osiris_derive/Cargo.toml
Normal file
12
osiris_derive/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
|
||||
[package]
|
||||
name = "osiris_derive"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
syn = { version = "2.0", features = ["full", "extra-traits"] }
|
||||
quote = "1.0"
|
||||
proc-macro2 = "1.0"
|
||||
202
osiris_derive/src/lib.rs
Normal file
202
osiris_derive/src/lib.rs
Normal file
@@ -0,0 +1,202 @@
|
||||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::{parse_macro_input, Data, DeriveInput, Fields, Type};
|
||||
|
||||
/// Derive macro for the Object trait
|
||||
///
|
||||
/// Automatically implements `index_keys()` and `indexed_fields()` based on fields marked with #[index]
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// #[derive(Object)]
|
||||
/// pub struct Note {
|
||||
/// pub base_data: BaseData,
|
||||
///
|
||||
/// #[index]
|
||||
/// pub title: Option<String>,
|
||||
///
|
||||
/// pub content: Option<String>,
|
||||
///
|
||||
/// #[index]
|
||||
/// pub tags: BTreeMap<String, String>,
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_derive(Object, attributes(index))]
|
||||
pub fn derive_object(input: TokenStream) -> TokenStream {
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
|
||||
let name = &input.ident;
|
||||
let generics = &input.generics;
|
||||
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
|
||||
|
||||
// Extract fields with #[index] attribute
|
||||
let indexed_fields = match &input.data {
|
||||
Data::Struct(data) => match &data.fields {
|
||||
Fields::Named(fields) => {
|
||||
fields.named.iter().filter_map(|field| {
|
||||
let has_index = field.attrs.iter().any(|attr| {
|
||||
attr.path().is_ident("index")
|
||||
});
|
||||
|
||||
if has_index {
|
||||
let field_name = field.ident.as_ref()?;
|
||||
let field_type = &field.ty;
|
||||
Some((field_name.clone(), field_type.clone()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}).collect::<Vec<_>>()
|
||||
}
|
||||
_ => vec![],
|
||||
},
|
||||
_ => vec![],
|
||||
};
|
||||
|
||||
// Generate index_keys() implementation
|
||||
let index_keys_impl = generate_index_keys(&indexed_fields);
|
||||
|
||||
// Generate indexed_fields() implementation
|
||||
let field_names: Vec<_> = indexed_fields.iter()
|
||||
.map(|(name, _)| name.to_string())
|
||||
.collect();
|
||||
|
||||
// Always use ::osiris for external usage
|
||||
// When used inside the osiris crate's src/, the compiler will resolve it correctly
|
||||
let crate_path = quote! { ::osiris };
|
||||
|
||||
let expanded = quote! {
|
||||
impl #impl_generics #crate_path::Object for #name #ty_generics #where_clause {
|
||||
fn object_type() -> &'static str {
|
||||
stringify!(#name)
|
||||
}
|
||||
|
||||
fn base_data(&self) -> &#crate_path::BaseData {
|
||||
&self.base_data
|
||||
}
|
||||
|
||||
fn base_data_mut(&mut self) -> &mut #crate_path::BaseData {
|
||||
&mut self.base_data
|
||||
}
|
||||
|
||||
fn index_keys(&self) -> Vec<#crate_path::IndexKey> {
|
||||
let mut keys = Vec::new();
|
||||
|
||||
// Index from base_data
|
||||
if let Some(mime) = &self.base_data.mime {
|
||||
keys.push(#crate_path::IndexKey::new("mime", mime));
|
||||
}
|
||||
|
||||
#index_keys_impl
|
||||
|
||||
keys
|
||||
}
|
||||
|
||||
fn indexed_fields() -> Vec<&'static str> {
|
||||
vec![#(#field_names),*]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
fn generate_index_keys(fields: &[(syn::Ident, Type)]) -> proc_macro2::TokenStream {
|
||||
let mut implementations = Vec::new();
|
||||
|
||||
// Always use ::osiris
|
||||
let crate_path = quote! { ::osiris };
|
||||
|
||||
for (field_name, field_type) in fields {
|
||||
let field_name_str = field_name.to_string();
|
||||
|
||||
// Check if it's an Option type
|
||||
if is_option_type(field_type) {
|
||||
implementations.push(quote! {
|
||||
if let Some(value) = &self.#field_name {
|
||||
keys.push(#crate_path::IndexKey::new(#field_name_str, value));
|
||||
}
|
||||
});
|
||||
}
|
||||
// Check if it's a BTreeMap (for tags)
|
||||
else if is_btreemap_type(field_type) {
|
||||
implementations.push(quote! {
|
||||
for (key, value) in &self.#field_name {
|
||||
keys.push(#crate_path::IndexKey {
|
||||
name: concat!(#field_name_str, ":tag"),
|
||||
value: format!("{}={}", key, value),
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
// Check if it's a Vec
|
||||
else if is_vec_type(field_type) {
|
||||
implementations.push(quote! {
|
||||
for (idx, value) in self.#field_name.iter().enumerate() {
|
||||
keys.push(#crate_path::IndexKey {
|
||||
name: concat!(#field_name_str, ":item"),
|
||||
value: format!("{}:{}", idx, value),
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
// For OffsetDateTime, index as date string
|
||||
else if is_offsetdatetime_type(field_type) {
|
||||
implementations.push(quote! {
|
||||
{
|
||||
let date_str = self.#field_name.date().to_string();
|
||||
keys.push(#crate_path::IndexKey::new(#field_name_str, date_str));
|
||||
}
|
||||
});
|
||||
}
|
||||
// For enums or other types, convert to string
|
||||
else {
|
||||
implementations.push(quote! {
|
||||
{
|
||||
let value_str = format!("{:?}", &self.#field_name);
|
||||
keys.push(#crate_path::IndexKey::new(#field_name_str, value_str));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
quote! {
|
||||
#(#implementations)*
|
||||
}
|
||||
}
|
||||
|
||||
fn is_option_type(ty: &Type) -> bool {
|
||||
if let Type::Path(type_path) = ty {
|
||||
if let Some(segment) = type_path.path.segments.last() {
|
||||
return segment.ident == "Option";
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn is_btreemap_type(ty: &Type) -> bool {
|
||||
if let Type::Path(type_path) = ty {
|
||||
if let Some(segment) = type_path.path.segments.last() {
|
||||
return segment.ident == "BTreeMap";
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn is_vec_type(ty: &Type) -> bool {
|
||||
if let Type::Path(type_path) = ty {
|
||||
if let Some(segment) = type_path.path.segments.last() {
|
||||
return segment.ident == "Vec";
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn is_offsetdatetime_type(ty: &Type) -> bool {
|
||||
if let Type::Path(type_path) = ty {
|
||||
if let Some(segment) = type_path.path.segments.last() {
|
||||
return segment.ident == "OffsetDateTime";
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
60
scripts/multi_instance.rhai
Normal file
60
scripts/multi_instance.rhai
Normal file
@@ -0,0 +1,60 @@
|
||||
// Multi-Instance OSIRIS Example
|
||||
// Demonstrates using multiple OSIRIS instances in a single script
|
||||
|
||||
print("=== Multi-Instance OSIRIS Test ===\n");
|
||||
|
||||
// Create two OSIRIS instances
|
||||
print("Creating OSIRIS instances...");
|
||||
let freezone = osiris("freezone", "redis://localhost:6379", 1);
|
||||
let my_osiris = osiris("my_osiris", "redis://localhost:6379", 2);
|
||||
print(`✓ Created: ${freezone.name()}`);
|
||||
print(`✓ Created: ${my_osiris.name()}\n`);
|
||||
|
||||
// Create a note
|
||||
print("Creating note...");
|
||||
let my_note = note("shared_notes")
|
||||
.title("Multi-Instance Test Note")
|
||||
.content("This note will be stored in both OSIRIS instances!")
|
||||
.tag("test", "multi-instance")
|
||||
.tag("shared", "true");
|
||||
|
||||
print(`Note created: ${my_note.get_title()}\n`);
|
||||
|
||||
// Store in freezone instance
|
||||
print("Storing in freezone...");
|
||||
let freezone_id = freezone.put_note(my_note);
|
||||
print(`✓ Stored in freezone with ID: ${freezone_id}\n`);
|
||||
|
||||
// Store in my_osiris instance (same note, different storage)
|
||||
print("Storing in my_osiris...");
|
||||
let my_id = my_osiris.put_note(my_note);
|
||||
print(`✓ Stored in my_osiris with ID: ${my_id}\n`);
|
||||
|
||||
// Retrieve from freezone
|
||||
print("Retrieving from freezone...");
|
||||
let freezone_note = freezone.get_note("shared_notes", freezone_id);
|
||||
print(`✓ Retrieved from freezone: ${freezone_note.get_title()}\n`);
|
||||
|
||||
// Retrieve from my_osiris
|
||||
print("Retrieving from my_osiris...");
|
||||
let my_note_retrieved = my_osiris.get_note("shared_notes", my_id);
|
||||
print(`✓ Retrieved from my_osiris: ${my_note_retrieved.get_title()}\n`);
|
||||
|
||||
// Query both instances
|
||||
print("Querying freezone...");
|
||||
let freezone_ids = freezone.query("shared_notes", "tags:tag", "shared=true");
|
||||
print(`✓ Found in freezone:`);
|
||||
for id in freezone_ids {
|
||||
print(` - ${id}`);
|
||||
}
|
||||
print("");
|
||||
|
||||
print("Querying my_osiris...");
|
||||
let my_ids = my_osiris.query("shared_notes", "tags:tag", "shared=true");
|
||||
print(`✓ Found in my_osiris:`);
|
||||
for id in my_ids {
|
||||
print(` - ${id}`);
|
||||
}
|
||||
|
||||
print("\n=== Test Complete ===");
|
||||
print("Successfully demonstrated multi-instance OSIRIS!");
|
||||
60
scripts/predefined_instances.rhai
Normal file
60
scripts/predefined_instances.rhai
Normal file
@@ -0,0 +1,60 @@
|
||||
// Predefined Instances Example
|
||||
// Run with predefined instances:
|
||||
// cargo run --bin runner --features rhai-support -- test1 \
|
||||
// --instance freezone:redis://localhost:6379:1 \
|
||||
// --instance my:redis://localhost:6379:2 \
|
||||
// --script-file scripts/predefined_instances.rhai
|
||||
|
||||
print("=== Predefined Instances Example ===\n");
|
||||
|
||||
// freezone and my are already available - no need to create them!
|
||||
print(`Using predefined instance: ${freezone.name()}`);
|
||||
print(`Using predefined instance: ${my.name()}\n`);
|
||||
|
||||
// Create a note
|
||||
print("Creating note...");
|
||||
let my_note = note("notes")
|
||||
.title("Predefined Instance Test")
|
||||
.content("Using freezone and my instances directly!")
|
||||
.tag("type", "predefined")
|
||||
.tag("test", "true");
|
||||
|
||||
print(`Note created: ${my_note.get_title()}\n`);
|
||||
|
||||
// Store in freezone - just use it directly!
|
||||
print("Storing in freezone...");
|
||||
let freezone_id = freezone.put_note(my_note);
|
||||
print(`✓ Stored in freezone: ${freezone_id}\n`);
|
||||
|
||||
// Store in my - just use it directly!
|
||||
print("Storing in my...");
|
||||
let my_id = my.put_note(my_note);
|
||||
print(`✓ Stored in my: ${my_id}\n`);
|
||||
|
||||
// Retrieve from both
|
||||
print("Retrieving from freezone...");
|
||||
let note1 = freezone.get_note("notes", freezone_id);
|
||||
print(`✓ ${note1.get_title()}\n`);
|
||||
|
||||
print("Retrieving from my...");
|
||||
let note2 = my.get_note("notes", my_id);
|
||||
print(`✓ ${note2.get_title()}\n`);
|
||||
|
||||
// Query both
|
||||
print("Querying freezone for predefined notes...");
|
||||
let freezone_ids = freezone.query("notes", "tags:tag", "type=predefined");
|
||||
print(`✓ Found ${freezone_ids.len} notes in freezone`);
|
||||
for id in freezone_ids {
|
||||
print(` - ${id}`);
|
||||
}
|
||||
print("");
|
||||
|
||||
print("Querying my for predefined notes...");
|
||||
let my_ids = my.query("notes", "tags:tag", "type=predefined");
|
||||
print(`✓ Found ${my_ids.len} notes in my`);
|
||||
for id in my_ids {
|
||||
print(` - ${id}`);
|
||||
}
|
||||
|
||||
print("\n=== Test Complete ===");
|
||||
print("Successfully used predefined freezone and my instances!");
|
||||
34
scripts/test_event.rhai
Normal file
34
scripts/test_event.rhai
Normal file
@@ -0,0 +1,34 @@
|
||||
// Test OSIRIS Event Creation
|
||||
// Run with: cargo run --bin runner --features rhai-support -- test1 --script-file scripts/test_event.rhai
|
||||
|
||||
print("=== OSIRIS Event Test ===\n");
|
||||
|
||||
// Create an event
|
||||
print("Creating event...");
|
||||
let event = event("test_calendar", "OSIRIS Runner Test Meeting")
|
||||
.description("Testing the OSIRIS standalone runner")
|
||||
.location("Virtual")
|
||||
.category("testing")
|
||||
.all_day(false);
|
||||
|
||||
print(`Event created: ${event.get_title()}`);
|
||||
|
||||
// Store the event
|
||||
print("Storing event...");
|
||||
let id = put_event(event);
|
||||
print(`✓ Event stored with ID: ${id}\n`);
|
||||
|
||||
// Retrieve the event
|
||||
print("Retrieving event...");
|
||||
let retrieved = get_event("test_calendar", id);
|
||||
print(`✓ Retrieved: ${retrieved.get_title()}\n`);
|
||||
|
||||
// Query by category
|
||||
print("Querying events by category...");
|
||||
let ids = query("test_calendar", "category", "testing");
|
||||
print("✓ Found events:");
|
||||
for id in ids {
|
||||
print(` - ${id}`);
|
||||
}
|
||||
|
||||
print("=== Test Complete ===");
|
||||
36
scripts/test_note.rhai
Normal file
36
scripts/test_note.rhai
Normal file
@@ -0,0 +1,36 @@
|
||||
// Test OSIRIS Note Creation
|
||||
// Run with: cargo run --bin runner --features rhai-support -- test1 --script-file scripts/test_note.rhai
|
||||
|
||||
print("=== OSIRIS Note Test ===\n");
|
||||
|
||||
// Create a note
|
||||
print("Creating note...");
|
||||
let note = note("test_notes")
|
||||
.title("Test from OSIRIS Runner")
|
||||
.content("This note was created using the OSIRIS standalone runner!")
|
||||
.tag("source", "osiris-runner")
|
||||
.tag("test", "true")
|
||||
.mime("text/plain");
|
||||
|
||||
print(`Note created: ${note.get_title()}`);
|
||||
|
||||
// Store the note
|
||||
print("Storing note...");
|
||||
let id = put_note(note);
|
||||
print(`✓ Note stored with ID: ${id}\n`);
|
||||
|
||||
// Retrieve the note
|
||||
print("Retrieving note...");
|
||||
let retrieved = get_note("test_notes", id);
|
||||
print(`✓ Retrieved: ${retrieved.get_title()}`);
|
||||
print(` Content: ${retrieved.get_content()}\n`);
|
||||
|
||||
// Query by tag
|
||||
print("Querying notes by tag...");
|
||||
let ids = query("test_notes", "tags:tag", "source=osiris-runner");
|
||||
print("✓ Found notes:");
|
||||
for id in ids {
|
||||
print(` - ${id}`);
|
||||
}
|
||||
|
||||
print("=== Test Complete ===");
|
||||
73
src/bin/runner/engine.rs
Normal file
73
src/bin/runner/engine.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
/// OSIRIS Engine Factory
|
||||
///
|
||||
/// Creates a Rhai engine configured with OSIRIS objects and methods.
|
||||
|
||||
use osiris::rhai_support::{register_note_api, register_event_api, OsirisInstance};
|
||||
use rhai::Engine;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
||||
/// Configuration for multiple OSIRIS instances
|
||||
pub struct OsirisConfig {
|
||||
pub instances: HashMap<String, (String, u16)>, // name -> (url, db_id)
|
||||
}
|
||||
|
||||
impl OsirisConfig {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
instances: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_instance(&mut self, name: impl ToString, url: impl ToString, db_id: u16) {
|
||||
self.instances.insert(name.to_string(), (url.to_string(), db_id));
|
||||
}
|
||||
|
||||
pub fn single(url: impl ToString, db_id: u16) -> Self {
|
||||
let mut config = Self::new();
|
||||
config.add_instance("default", url, db_id);
|
||||
config
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new Rhai engine with OSIRIS support
|
||||
pub fn create_osiris_engine(
|
||||
herodb_url: &str,
|
||||
db_id: u16,
|
||||
) -> Result<(Engine, rhai::Scope<'static>), Box<dyn std::error::Error>> {
|
||||
let config = OsirisConfig::single(herodb_url, db_id);
|
||||
create_osiris_engine_with_config(config)
|
||||
}
|
||||
|
||||
/// Create a new Rhai engine with multiple OSIRIS instances
|
||||
/// Returns (Engine, Scope) where Scope contains predefined instances
|
||||
pub fn create_osiris_engine_with_config(
|
||||
config: OsirisConfig,
|
||||
) -> Result<(Engine, rhai::Scope<'static>), Box<dyn std::error::Error>> {
|
||||
let mut engine = Engine::new();
|
||||
|
||||
// Register Note API
|
||||
register_note_api(&mut engine);
|
||||
|
||||
// Register Event API
|
||||
register_event_api(&mut engine);
|
||||
|
||||
// Register OsirisInstance type
|
||||
engine.build_type::<OsirisInstance>();
|
||||
|
||||
// Register osiris() constructor function for dynamic creation
|
||||
engine.register_fn("osiris", |name: &str, url: &str, db_id: rhai::INT| -> Result<OsirisInstance, Box<rhai::EvalAltResult>> {
|
||||
OsirisInstance::new(name, url, db_id as u16)
|
||||
.map_err(|e| format!("Failed to create OSIRIS instance: {}", e).into())
|
||||
});
|
||||
|
||||
// Create predefined instances and inject them as global constants in scope
|
||||
let mut scope = rhai::Scope::new();
|
||||
for (name, (url, db_id)) in config.instances {
|
||||
let instance = OsirisInstance::new(&name, &url, db_id)?;
|
||||
scope.push_constant(&name, instance);
|
||||
}
|
||||
|
||||
Ok((engine, scope))
|
||||
}
|
||||
135
src/bin/runner/main.rs
Normal file
135
src/bin/runner/main.rs
Normal file
@@ -0,0 +1,135 @@
|
||||
/// OSIRIS Runner
|
||||
///
|
||||
/// A standalone runner for executing OSIRIS Rhai scripts.
|
||||
/// Can run in script mode (single execution) or daemon mode (continuous).
|
||||
///
|
||||
/// Usage:
|
||||
/// ```bash
|
||||
/// # Script mode
|
||||
/// cargo run --bin runner --features rhai-support -- runner1 --script "let note = note('test').title('Hi'); put_note(note);"
|
||||
///
|
||||
/// # Daemon mode (requires runner_rust infrastructure)
|
||||
/// cargo run --bin runner --features rhai-support -- runner1 --redis-url redis://localhost:6379
|
||||
/// ```
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
#[cfg(feature = "rhai-support")]
|
||||
mod engine;
|
||||
|
||||
#[cfg(feature = "rhai-support")]
|
||||
use engine::create_osiris_engine;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(author, version, about = "OSIRIS Rhai Script Runner", long_about = None)]
|
||||
struct Args {
|
||||
/// Runner ID
|
||||
runner_id: String,
|
||||
|
||||
/// HeroDB URL
|
||||
#[arg(short = 'r', long, default_value = "redis://localhost:6379")]
|
||||
redis_url: String,
|
||||
|
||||
/// HeroDB database ID
|
||||
#[arg(short = 'd', long, default_value_t = 1)]
|
||||
db_id: u16,
|
||||
|
||||
/// Script to execute in single-job mode (optional)
|
||||
#[arg(short, long)]
|
||||
script: Option<String>,
|
||||
|
||||
/// Script file to execute
|
||||
#[arg(short = 'f', long)]
|
||||
script_file: Option<String>,
|
||||
|
||||
/// Predefined instances in format: name:url:db_id (can be repeated)
|
||||
/// Example: --instance freezone:redis://localhost:6379:1
|
||||
#[arg(short = 'i', long = "instance")]
|
||||
instances: Vec<String>,
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "rhai-support"))]
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
eprintln!("❌ Error: OSIRIS runner requires the 'rhai-support' feature");
|
||||
eprintln!("Run with: cargo run --bin runner --features rhai-support");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
#[cfg(feature = "rhai-support")]
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
|
||||
let args = Args::parse();
|
||||
|
||||
println!("🚀 OSIRIS Runner");
|
||||
println!("Runner ID: {}", args.runner_id);
|
||||
println!("HeroDB: {} (DB {})", args.redis_url, args.db_id);
|
||||
|
||||
// Parse predefined instances
|
||||
let mut config = engine::OsirisConfig::new();
|
||||
|
||||
if args.instances.is_empty() {
|
||||
// No predefined instances, use default
|
||||
config.add_instance("default", &args.redis_url, args.db_id);
|
||||
} else {
|
||||
// Parse instance definitions (format: name:url:db_id)
|
||||
// We need to split carefully since URL contains colons
|
||||
for instance_def in &args.instances {
|
||||
// Find the first colon (name separator)
|
||||
let first_colon = instance_def.find(':')
|
||||
.ok_or_else(|| format!("Invalid instance format: '{}'. Expected: name:url:db_id", instance_def))?;
|
||||
|
||||
let name = &instance_def[..first_colon];
|
||||
let rest = &instance_def[first_colon + 1..];
|
||||
|
||||
// Find the last colon (db_id separator)
|
||||
let last_colon = rest.rfind(':')
|
||||
.ok_or_else(|| format!("Invalid instance format: '{}'. Expected: name:url:db_id", instance_def))?;
|
||||
|
||||
let url = &rest[..last_colon];
|
||||
let db_id_str = &rest[last_colon + 1..];
|
||||
|
||||
let db_id: u16 = db_id_str.parse()
|
||||
.map_err(|_| format!("Invalid db_id in instance '{}': {}", instance_def, db_id_str))?;
|
||||
|
||||
config.add_instance(name, url, db_id);
|
||||
println!(" Instance: {} → {} (DB {})", name, url, db_id);
|
||||
}
|
||||
}
|
||||
|
||||
println!();
|
||||
|
||||
// Determine script source
|
||||
let script_content = if let Some(script) = args.script {
|
||||
script
|
||||
} else if let Some(file_path) = args.script_file {
|
||||
std::fs::read_to_string(&file_path)
|
||||
.map_err(|e| format!("Failed to read script file '{}': {}", file_path, e))?
|
||||
} else {
|
||||
return Err("No script provided. Use --script or --script-file".into());
|
||||
};
|
||||
|
||||
println!("📝 Executing script...\n");
|
||||
println!("─────────────────────────────────────");
|
||||
|
||||
// Create engine with predefined instances
|
||||
let (engine, mut scope) = engine::create_osiris_engine_with_config(config)?;
|
||||
|
||||
match engine.eval_with_scope::<rhai::Dynamic>(&mut scope, &script_content) {
|
||||
Ok(result) => {
|
||||
println!("─────────────────────────────────────");
|
||||
println!("\n✅ Script completed successfully!");
|
||||
if !result.is_unit() {
|
||||
println!("Result: {}", result);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
println!("─────────────────────────────────────");
|
||||
println!("\n❌ Script execution failed!");
|
||||
println!("Error: {}", e);
|
||||
Err(Box::new(e))
|
||||
}
|
||||
}
|
||||
}
|
||||
60
src/config/mod.rs
Normal file
60
src/config/mod.rs
Normal file
@@ -0,0 +1,60 @@
|
||||
pub mod model;
|
||||
|
||||
pub use model::{Config, HeroDbConfig, NamespaceConfig};
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Load configuration from file
|
||||
pub fn load_config(path: Option<PathBuf>) -> Result<Config> {
|
||||
let config_path = path.unwrap_or_else(default_config_path);
|
||||
|
||||
if !config_path.exists() {
|
||||
return Err(Error::Config(format!(
|
||||
"Configuration file not found: {}",
|
||||
config_path.display()
|
||||
)));
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&config_path)?;
|
||||
let config: Config = toml::from_str(&content)
|
||||
.map_err(|e| Error::Config(format!("Failed to parse config: {}", e)))?;
|
||||
|
||||
Ok(config)
|
||||
}
|
||||
|
||||
/// Save configuration to file
|
||||
pub fn save_config(config: &Config, path: Option<PathBuf>) -> Result<()> {
|
||||
let config_path = path.unwrap_or_else(default_config_path);
|
||||
|
||||
// Create parent directory if it doesn't exist
|
||||
if let Some(parent) = config_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let content = toml::to_string_pretty(config)
|
||||
.map_err(|e| Error::Config(format!("Failed to serialize config: {}", e)))?;
|
||||
|
||||
fs::write(&config_path, content)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the default configuration file path
|
||||
pub fn default_config_path() -> PathBuf {
|
||||
let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
|
||||
PathBuf::from(home)
|
||||
.join(".config")
|
||||
.join("osiris")
|
||||
.join("config.toml")
|
||||
}
|
||||
|
||||
/// Create a default configuration
|
||||
pub fn create_default_config(herodb_url: String) -> Config {
|
||||
Config {
|
||||
herodb: HeroDbConfig { url: herodb_url },
|
||||
namespaces: HashMap::new(),
|
||||
}
|
||||
}
|
||||
55
src/config/model.rs
Normal file
55
src/config/model.rs
Normal file
@@ -0,0 +1,55 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// OSIRIS configuration
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct Config {
|
||||
/// HeroDB connection configuration
|
||||
pub herodb: HeroDbConfig,
|
||||
|
||||
/// Namespace configurations
|
||||
#[serde(default)]
|
||||
pub namespaces: HashMap<String, NamespaceConfig>,
|
||||
}
|
||||
|
||||
/// HeroDB connection configuration
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct HeroDbConfig {
|
||||
/// HeroDB URL (e.g., "redis://localhost:6379")
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
/// Namespace configuration
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct NamespaceConfig {
|
||||
/// HeroDB database ID for this namespace
|
||||
pub db_id: u16,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
/// Get namespace configuration by name
|
||||
pub fn get_namespace(&self, name: &str) -> Option<&NamespaceConfig> {
|
||||
self.namespaces.get(name)
|
||||
}
|
||||
|
||||
/// Add or update a namespace
|
||||
pub fn set_namespace(&mut self, name: String, config: NamespaceConfig) {
|
||||
self.namespaces.insert(name, config);
|
||||
}
|
||||
|
||||
/// Remove a namespace
|
||||
pub fn remove_namespace(&mut self, name: &str) -> Option<NamespaceConfig> {
|
||||
self.namespaces.remove(name)
|
||||
}
|
||||
|
||||
/// Get the next available database ID
|
||||
pub fn next_db_id(&self) -> u16 {
|
||||
let max_id = self
|
||||
.namespaces
|
||||
.values()
|
||||
.map(|ns| ns.db_id)
|
||||
.max()
|
||||
.unwrap_or(0);
|
||||
max_id + 1
|
||||
}
|
||||
}
|
||||
46
src/error.rs
Normal file
46
src/error.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Error {
|
||||
Redis(redis::RedisError),
|
||||
Serialization(serde_json::Error),
|
||||
NotFound(String),
|
||||
InvalidInput(String),
|
||||
Config(String),
|
||||
Io(std::io::Error),
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Error::Redis(e) => write!(f, "Redis error: {}", e),
|
||||
Error::Serialization(e) => write!(f, "Serialization error: {}", e),
|
||||
Error::NotFound(msg) => write!(f, "Not found: {}", msg),
|
||||
Error::InvalidInput(msg) => write!(f, "Invalid input: {}", msg),
|
||||
Error::Config(msg) => write!(f, "Configuration error: {}", msg),
|
||||
Error::Io(e) => write!(f, "IO error: {}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
impl From<redis::RedisError> for Error {
|
||||
fn from(e: redis::RedisError) -> Self {
|
||||
Error::Redis(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<serde_json::Error> for Error {
|
||||
fn from(e: serde_json::Error) -> Self {
|
||||
Error::Serialization(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for Error {
|
||||
fn from(e: std::io::Error) -> Self {
|
||||
Error::Io(e)
|
||||
}
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
139
src/index/field_index.rs
Normal file
139
src/index/field_index.rs
Normal file
@@ -0,0 +1,139 @@
|
||||
use crate::error::Result;
|
||||
use crate::store::{HeroDbClient, OsirisObject};
|
||||
|
||||
/// Field indexing for fast filtering by tags and metadata
|
||||
pub struct FieldIndex {
|
||||
client: HeroDbClient,
|
||||
}
|
||||
|
||||
impl FieldIndex {
|
||||
/// Create a new field index
|
||||
pub fn new(client: HeroDbClient) -> Self {
|
||||
Self { client }
|
||||
}
|
||||
|
||||
/// Index an object (add to field indexes)
|
||||
pub async fn index_object(&self, obj: &OsirisObject) -> Result<()> {
|
||||
// Index tags
|
||||
for (key, value) in &obj.meta.tags {
|
||||
let field_key = format!("field:tag:{}={}", key, value);
|
||||
self.client.sadd(&field_key, &obj.id).await?;
|
||||
}
|
||||
|
||||
// Index MIME type if present
|
||||
if let Some(mime) = &obj.meta.mime {
|
||||
let field_key = format!("field:mime:{}", mime);
|
||||
self.client.sadd(&field_key, &obj.id).await?;
|
||||
}
|
||||
|
||||
// Index title if present (for exact match)
|
||||
if let Some(title) = &obj.meta.title {
|
||||
let field_key = format!("field:title:{}", title);
|
||||
self.client.sadd(&field_key, &obj.id).await?;
|
||||
}
|
||||
|
||||
// Add to scan index for text search
|
||||
self.client.sadd("scan:index", &obj.id).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Remove an object from indexes
|
||||
pub async fn deindex_object(&self, obj: &OsirisObject) -> Result<()> {
|
||||
// Remove from tag indexes
|
||||
for (key, value) in &obj.meta.tags {
|
||||
let field_key = format!("field:tag:{}={}", key, value);
|
||||
self.client.srem(&field_key, &obj.id).await?;
|
||||
}
|
||||
|
||||
// Remove from MIME index
|
||||
if let Some(mime) = &obj.meta.mime {
|
||||
let field_key = format!("field:mime:{}", mime);
|
||||
self.client.srem(&field_key, &obj.id).await?;
|
||||
}
|
||||
|
||||
// Remove from title index
|
||||
if let Some(title) = &obj.meta.title {
|
||||
let field_key = format!("field:title:{}", title);
|
||||
self.client.srem(&field_key, &obj.id).await?;
|
||||
}
|
||||
|
||||
// Remove from scan index
|
||||
self.client.srem("scan:index", &obj.id).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Update object indexes (remove old, add new)
|
||||
pub async fn reindex_object(&self, old_obj: &OsirisObject, new_obj: &OsirisObject) -> Result<()> {
|
||||
self.deindex_object(old_obj).await?;
|
||||
self.index_object(new_obj).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get all IDs matching a tag filter
|
||||
pub async fn get_ids_by_tag(&self, key: &str, value: &str) -> Result<Vec<String>> {
|
||||
let field_key = format!("field:tag:{}={}", key, value);
|
||||
self.client.smembers(&field_key).await
|
||||
}
|
||||
|
||||
/// Get all IDs matching a MIME type
|
||||
pub async fn get_ids_by_mime(&self, mime: &str) -> Result<Vec<String>> {
|
||||
let field_key = format!("field:mime:{}", mime);
|
||||
self.client.smembers(&field_key).await
|
||||
}
|
||||
|
||||
/// Get all IDs matching a title
|
||||
pub async fn get_ids_by_title(&self, title: &str) -> Result<Vec<String>> {
|
||||
let field_key = format!("field:title:{}", title);
|
||||
self.client.smembers(&field_key).await
|
||||
}
|
||||
|
||||
/// Get all IDs in the scan index
|
||||
pub async fn get_all_ids(&self) -> Result<Vec<String>> {
|
||||
self.client.smembers("scan:index").await
|
||||
}
|
||||
|
||||
/// Get intersection of multiple field filters
|
||||
pub async fn get_ids_by_filters(&self, filters: &[(String, String)]) -> Result<Vec<String>> {
|
||||
if filters.is_empty() {
|
||||
return self.get_all_ids().await;
|
||||
}
|
||||
|
||||
let keys: Vec<String> = filters
|
||||
.iter()
|
||||
.map(|(k, v)| {
|
||||
if k == "mime" {
|
||||
format!("field:mime:{}", v)
|
||||
} else if k == "title" {
|
||||
format!("field:title:{}", v)
|
||||
} else {
|
||||
format!("field:tag:{}={}", k, v)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
self.client.sinter(&keys).await
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
#[ignore]
|
||||
async fn test_index_object() {
|
||||
let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
|
||||
let index = FieldIndex::new(client);
|
||||
|
||||
let mut obj = OsirisObject::new("test".to_string(), Some("Hello".to_string()));
|
||||
obj.set_tag("topic".to_string(), "rust".to_string());
|
||||
obj.set_mime(Some("text/plain".to_string()));
|
||||
|
||||
index.index_object(&obj).await.unwrap();
|
||||
|
||||
let ids = index.get_ids_by_tag("topic", "rust").await.unwrap();
|
||||
assert!(ids.contains(&obj.id));
|
||||
}
|
||||
}
|
||||
3
src/index/mod.rs
Normal file
3
src/index/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod field_index;
|
||||
|
||||
pub use field_index::FieldIndex;
|
||||
408
src/interfaces/cli.rs
Normal file
408
src/interfaces/cli.rs
Normal file
@@ -0,0 +1,408 @@
|
||||
use crate::config::{self, NamespaceConfig};
|
||||
use crate::error::{Error, Result};
|
||||
use crate::index::FieldIndex;
|
||||
use crate::retrieve::{RetrievalQuery, SearchEngine};
|
||||
use crate::store::{HeroDbClient, OsirisObject};
|
||||
use clap::{Parser, Subcommand};
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs;
|
||||
use std::io::{self, Read};
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(name = "osiris")]
|
||||
#[command(about = "OSIRIS - Object Storage, Indexing & Retrieval Intelligent System", long_about = None)]
|
||||
pub struct Cli {
|
||||
#[command(subcommand)]
|
||||
pub command: Commands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand, Debug)]
|
||||
pub enum Commands {
|
||||
/// Initialize OSIRIS configuration
|
||||
Init {
|
||||
/// HeroDB URL
|
||||
#[arg(long, default_value = "redis://localhost:6379")]
|
||||
herodb: String,
|
||||
},
|
||||
|
||||
/// Namespace management
|
||||
Ns {
|
||||
#[command(subcommand)]
|
||||
command: NsCommands,
|
||||
},
|
||||
|
||||
/// Put an object
|
||||
Put {
|
||||
/// Object path (namespace/name)
|
||||
path: String,
|
||||
|
||||
/// File to upload (use '-' for stdin)
|
||||
file: String,
|
||||
|
||||
/// Tags (key=value pairs, comma-separated)
|
||||
#[arg(long)]
|
||||
tags: Option<String>,
|
||||
|
||||
/// MIME type
|
||||
#[arg(long)]
|
||||
mime: Option<String>,
|
||||
|
||||
/// Title
|
||||
#[arg(long)]
|
||||
title: Option<String>,
|
||||
},
|
||||
|
||||
/// Get an object
|
||||
Get {
|
||||
/// Object path (namespace/name or namespace/id)
|
||||
path: String,
|
||||
|
||||
/// Output file (default: stdout)
|
||||
#[arg(long)]
|
||||
output: Option<PathBuf>,
|
||||
|
||||
/// Output raw content only (no metadata)
|
||||
#[arg(long)]
|
||||
raw: bool,
|
||||
},
|
||||
|
||||
/// Delete an object
|
||||
Del {
|
||||
/// Object path (namespace/name or namespace/id)
|
||||
path: String,
|
||||
},
|
||||
|
||||
/// Search/find objects
|
||||
Find {
|
||||
/// Text query (optional)
|
||||
query: Option<String>,
|
||||
|
||||
/// Namespace to search
|
||||
#[arg(long)]
|
||||
ns: String,
|
||||
|
||||
/// Filters (key=value pairs, comma-separated)
|
||||
#[arg(long)]
|
||||
filter: Option<String>,
|
||||
|
||||
/// Maximum number of results
|
||||
#[arg(long, default_value = "10")]
|
||||
topk: usize,
|
||||
|
||||
/// Output as JSON
|
||||
#[arg(long)]
|
||||
json: bool,
|
||||
},
|
||||
|
||||
/// Show statistics
|
||||
Stats {
|
||||
/// Namespace (optional, shows all if not specified)
|
||||
#[arg(long)]
|
||||
ns: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Subcommand, Debug, Clone)]
|
||||
pub enum NsCommands {
|
||||
/// Create a new namespace
|
||||
Create {
|
||||
/// Namespace name
|
||||
name: String,
|
||||
},
|
||||
|
||||
/// List all namespaces
|
||||
List,
|
||||
|
||||
/// Delete a namespace
|
||||
Delete {
|
||||
/// Namespace name
|
||||
name: String,
|
||||
},
|
||||
}
|
||||
|
||||
impl Cli {
|
||||
pub async fn run(self) -> Result<()> {
|
||||
match self.command {
|
||||
Commands::Init { herodb } => {
|
||||
let config = config::create_default_config(herodb);
|
||||
config::save_config(&config, None)?;
|
||||
println!("✓ OSIRIS initialized");
|
||||
println!(" Config: {}", config::default_config_path().display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
Commands::Ns { ref command } => self.handle_ns_command(command.clone()).await,
|
||||
Commands::Put { ref path, ref file, ref tags, ref mime, ref title } => {
|
||||
self.handle_put(path.clone(), file.clone(), tags.clone(), mime.clone(), title.clone()).await
|
||||
}
|
||||
Commands::Get { ref path, ref output, raw } => {
|
||||
self.handle_get(path.clone(), output.clone(), raw).await
|
||||
}
|
||||
Commands::Del { ref path } => self.handle_del(path.clone()).await,
|
||||
Commands::Find { ref query, ref ns, ref filter, topk, json } => {
|
||||
self.handle_find(query.clone(), ns.clone(), filter.clone(), topk, json).await
|
||||
}
|
||||
Commands::Stats { ref ns } => self.handle_stats(ns.clone()).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_ns_command(&self, command: NsCommands) -> Result<()> {
|
||||
let mut config = config::load_config(None)?;
|
||||
|
||||
match command {
|
||||
NsCommands::Create { name } => {
|
||||
if config.get_namespace(&name).is_some() {
|
||||
return Err(Error::InvalidInput(format!(
|
||||
"Namespace '{}' already exists",
|
||||
name
|
||||
)));
|
||||
}
|
||||
|
||||
let db_id = config.next_db_id();
|
||||
let ns_config = NamespaceConfig { db_id };
|
||||
|
||||
config.set_namespace(name.clone(), ns_config);
|
||||
config::save_config(&config, None)?;
|
||||
|
||||
println!("✓ Created namespace '{}' (DB {})", name, db_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
NsCommands::List => {
|
||||
if config.namespaces.is_empty() {
|
||||
println!("No namespaces configured");
|
||||
} else {
|
||||
println!("Namespaces:");
|
||||
for (name, ns_config) in &config.namespaces {
|
||||
println!(" {} → DB {}", name, ns_config.db_id);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
NsCommands::Delete { name } => {
|
||||
if config.remove_namespace(&name).is_none() {
|
||||
return Err(Error::NotFound(format!("Namespace '{}'", name)));
|
||||
}
|
||||
|
||||
config::save_config(&config, None)?;
|
||||
println!("✓ Deleted namespace '{}'", name);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_put(
|
||||
&self,
|
||||
path: String,
|
||||
file: String,
|
||||
tags: Option<String>,
|
||||
mime: Option<String>,
|
||||
title: Option<String>,
|
||||
) -> Result<()> {
|
||||
let (ns, name) = parse_path(&path)?;
|
||||
let config = config::load_config(None)?;
|
||||
let ns_config = config.get_namespace(&ns)
|
||||
.ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;
|
||||
|
||||
// Read content
|
||||
let content = if file == "-" {
|
||||
let mut buffer = String::new();
|
||||
io::stdin().read_to_string(&mut buffer)?;
|
||||
buffer
|
||||
} else {
|
||||
fs::read_to_string(&file)?
|
||||
};
|
||||
|
||||
// Create object
|
||||
let mut obj = OsirisObject::with_id(name.clone(), ns.clone(), Some(content));
|
||||
|
||||
if let Some(title) = title {
|
||||
obj.set_title(Some(title));
|
||||
}
|
||||
|
||||
if let Some(mime) = mime {
|
||||
obj.set_mime(Some(mime));
|
||||
}
|
||||
|
||||
// Parse tags
|
||||
if let Some(tags_str) = tags {
|
||||
let tag_map = parse_tags(&tags_str)?;
|
||||
for (key, value) in tag_map {
|
||||
obj.set_tag(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Store object
|
||||
let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
|
||||
let index = FieldIndex::new(client.clone());
|
||||
|
||||
client.put_object(&obj).await?;
|
||||
index.index_object(&obj).await?;
|
||||
|
||||
println!("✓ Stored {}/{}", ns, obj.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_get(&self, path: String, output: Option<PathBuf>, raw: bool) -> Result<()> {
|
||||
let (ns, id) = parse_path(&path)?;
|
||||
let config = config::load_config(None)?;
|
||||
let ns_config = config.get_namespace(&ns)
|
||||
.ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;
|
||||
|
||||
let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
|
||||
let obj = client.get_object(&id).await?;
|
||||
|
||||
if raw {
|
||||
// Output raw content only
|
||||
let content = obj.text.unwrap_or_default();
|
||||
if let Some(output_path) = output {
|
||||
fs::write(output_path, content)?;
|
||||
} else {
|
||||
print!("{}", content);
|
||||
}
|
||||
} else {
|
||||
// Output full object as JSON
|
||||
let json = serde_json::to_string_pretty(&obj)?;
|
||||
if let Some(output_path) = output {
|
||||
fs::write(output_path, json)?;
|
||||
} else {
|
||||
println!("{}", json);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_del(&self, path: String) -> Result<()> {
|
||||
let (ns, id) = parse_path(&path)?;
|
||||
let config = config::load_config(None)?;
|
||||
let ns_config = config.get_namespace(&ns)
|
||||
.ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;
|
||||
|
||||
let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
|
||||
let index = FieldIndex::new(client.clone());
|
||||
|
||||
// Get object first to deindex it
|
||||
let obj = client.get_object(&id).await?;
|
||||
index.deindex_object(&obj).await?;
|
||||
|
||||
let deleted = client.delete_object(&id).await?;
|
||||
|
||||
if deleted {
|
||||
println!("✓ Deleted {}/{}", ns, id);
|
||||
Ok(())
|
||||
} else {
|
||||
Err(Error::NotFound(format!("{}/{}", ns, id)))
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_find(
|
||||
&self,
|
||||
query: Option<String>,
|
||||
ns: String,
|
||||
filter: Option<String>,
|
||||
topk: usize,
|
||||
json: bool,
|
||||
) -> Result<()> {
|
||||
let config = config::load_config(None)?;
|
||||
let ns_config = config.get_namespace(&ns)
|
||||
.ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns)))?;
|
||||
|
||||
let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
|
||||
let engine = SearchEngine::new(client.clone());
|
||||
|
||||
// Build query
|
||||
let mut retrieval_query = RetrievalQuery::new(ns.clone()).with_top_k(topk);
|
||||
|
||||
if let Some(text) = query {
|
||||
retrieval_query = retrieval_query.with_text(text);
|
||||
}
|
||||
|
||||
if let Some(filter_str) = filter {
|
||||
let filters = parse_tags(&filter_str)?;
|
||||
for (key, value) in filters {
|
||||
retrieval_query = retrieval_query.with_filter(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Execute search
|
||||
let results = engine.search(&retrieval_query).await?;
|
||||
|
||||
if json {
|
||||
println!("{}", serde_json::to_string_pretty(&results)?);
|
||||
} else {
|
||||
if results.is_empty() {
|
||||
println!("No results found");
|
||||
} else {
|
||||
println!("Found {} result(s):\n", results.len());
|
||||
for (i, result) in results.iter().enumerate() {
|
||||
println!("{}. {} (score: {:.2})", i + 1, result.id, result.score);
|
||||
if let Some(snippet) = &result.snippet {
|
||||
println!(" {}", snippet);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_stats(&self, ns: Option<String>) -> Result<()> {
|
||||
let config = config::load_config(None)?;
|
||||
|
||||
if let Some(ns_name) = ns {
|
||||
let ns_config = config.get_namespace(&ns_name)
|
||||
.ok_or_else(|| Error::NotFound(format!("Namespace '{}'", ns_name)))?;
|
||||
|
||||
let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
|
||||
let size = client.dbsize().await?;
|
||||
|
||||
println!("Namespace: {}", ns_name);
|
||||
println!(" DB ID: {}", ns_config.db_id);
|
||||
println!(" Keys: {}", size);
|
||||
} else {
|
||||
println!("OSIRIS Statistics\n");
|
||||
println!("Namespaces: {}", config.namespaces.len());
|
||||
for (name, ns_config) in &config.namespaces {
|
||||
let client = HeroDbClient::new(&config.herodb.url, ns_config.db_id)?;
|
||||
let size = client.dbsize().await?;
|
||||
println!(" {} (DB {}) → {} keys", name, ns_config.db_id, size);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse a path into namespace and name/id
|
||||
fn parse_path(path: &str) -> Result<(String, String)> {
|
||||
let parts: Vec<&str> = path.splitn(2, '/').collect();
|
||||
if parts.len() != 2 {
|
||||
return Err(Error::InvalidInput(format!(
|
||||
"Invalid path format. Expected 'namespace/name', got '{}'",
|
||||
path
|
||||
)));
|
||||
}
|
||||
Ok((parts[0].to_string(), parts[1].to_string()))
|
||||
}
|
||||
|
||||
/// Parse tags from comma-separated key=value pairs
|
||||
fn parse_tags(tags_str: &str) -> Result<BTreeMap<String, String>> {
|
||||
let mut tags = BTreeMap::new();
|
||||
|
||||
for pair in tags_str.split(',') {
|
||||
let parts: Vec<&str> = pair.trim().splitn(2, '=').collect();
|
||||
if parts.len() != 2 {
|
||||
return Err(Error::InvalidInput(format!(
|
||||
"Invalid tag format. Expected 'key=value', got '{}'",
|
||||
pair
|
||||
)));
|
||||
}
|
||||
tags.insert(parts[0].to_string(), parts[1].to_string());
|
||||
}
|
||||
|
||||
Ok(tags)
|
||||
}
|
||||
3
src/interfaces/mod.rs
Normal file
3
src/interfaces/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod cli;
|
||||
|
||||
pub use cli::Cli;
|
||||
23
src/lib.rs
Normal file
23
src/lib.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
// Allow the crate to reference itself as ::osiris for the derive macro
|
||||
extern crate self as osiris;
|
||||
|
||||
pub mod config;
|
||||
pub mod error;
|
||||
pub mod index;
|
||||
pub mod interfaces;
|
||||
pub mod objects;
|
||||
pub mod retrieve;
|
||||
pub mod store;
|
||||
|
||||
#[cfg(feature = "rhai-support")]
|
||||
pub mod rhai_support;
|
||||
|
||||
pub use error::{Error, Result};
|
||||
pub use store::{BaseData, IndexKey, Object, Storable};
|
||||
|
||||
// Re-export the derive macro
|
||||
pub use osiris_derive::Object as DeriveObject;
|
||||
|
||||
// OsirisInstance is the main type for Rhai integration
|
||||
#[cfg(feature = "rhai-support")]
|
||||
pub use rhai_support::OsirisInstance;
|
||||
22
src/main.rs
Normal file
22
src/main.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
use clap::Parser;
|
||||
use osiris::interfaces::Cli;
|
||||
use tracing_subscriber::{fmt, EnvFilter};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
// Initialize tracing
|
||||
fmt()
|
||||
.with_env_filter(
|
||||
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")),
|
||||
)
|
||||
.init();
|
||||
|
||||
// Parse CLI arguments
|
||||
let cli = Cli::parse();
|
||||
|
||||
// Run the command
|
||||
if let Err(e) = cli.run().await {
|
||||
eprintln!("Error: {}", e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
139
src/objects/event/mod.rs
Normal file
139
src/objects/event/mod.rs
Normal file
@@ -0,0 +1,139 @@
|
||||
use crate::store::BaseData;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
#[cfg(feature = "rhai-support")]
|
||||
pub mod rhai;
|
||||
|
||||
/// Event status
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
|
||||
pub enum EventStatus {
|
||||
#[default]
|
||||
Draft,
|
||||
Published,
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
/// A calendar event object
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, crate::DeriveObject)]
|
||||
pub struct Event {
|
||||
/// Base data
|
||||
pub base_data: BaseData,
|
||||
|
||||
/// Title of the event
|
||||
#[index]
|
||||
pub title: String,
|
||||
|
||||
/// Optional description
|
||||
pub description: Option<String>,
|
||||
|
||||
/// Start time
|
||||
#[index]
|
||||
#[serde(with = "time::serde::timestamp")]
|
||||
pub start_time: OffsetDateTime,
|
||||
|
||||
/// End time
|
||||
#[serde(with = "time::serde::timestamp")]
|
||||
pub end_time: OffsetDateTime,
|
||||
|
||||
/// Optional location
|
||||
#[index]
|
||||
pub location: Option<String>,
|
||||
|
||||
/// Event status
|
||||
#[index]
|
||||
pub status: EventStatus,
|
||||
|
||||
/// Whether this is an all-day event
|
||||
pub all_day: bool,
|
||||
|
||||
/// Optional category
|
||||
#[index]
|
||||
pub category: Option<String>,
|
||||
}
|
||||
|
||||
impl Event {
|
||||
/// Create a new event
|
||||
pub fn new(ns: String, title: impl ToString) -> Self {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
Self {
|
||||
base_data: BaseData::new(ns),
|
||||
title: title.to_string(),
|
||||
description: None,
|
||||
start_time: now,
|
||||
end_time: now,
|
||||
location: None,
|
||||
status: EventStatus::default(),
|
||||
all_day: false,
|
||||
category: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create an event with specific ID
|
||||
pub fn with_id(id: String, ns: String, title: impl ToString) -> Self {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
Self {
|
||||
base_data: BaseData::with_id(id, ns),
|
||||
title: title.to_string(),
|
||||
description: None,
|
||||
start_time: now,
|
||||
end_time: now,
|
||||
location: None,
|
||||
status: EventStatus::default(),
|
||||
all_day: false,
|
||||
category: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the description
|
||||
pub fn set_description(mut self, description: impl ToString) -> Self {
|
||||
self.description = Some(description.to_string());
|
||||
self.base_data.update_modified();
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the start time
|
||||
pub fn set_start_time(mut self, start_time: OffsetDateTime) -> Self {
|
||||
self.start_time = start_time;
|
||||
self.base_data.update_modified();
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the end time
|
||||
pub fn set_end_time(mut self, end_time: OffsetDateTime) -> Self {
|
||||
self.end_time = end_time;
|
||||
self.base_data.update_modified();
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the location
|
||||
pub fn set_location(mut self, location: impl ToString) -> Self {
|
||||
self.location = Some(location.to_string());
|
||||
self.base_data.update_modified();
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the status
|
||||
pub fn set_status(mut self, status: EventStatus) -> Self {
|
||||
self.status = status;
|
||||
self.base_data.update_modified();
|
||||
self
|
||||
}
|
||||
|
||||
/// Set as all-day event
|
||||
pub fn set_all_day(mut self, all_day: bool) -> Self {
|
||||
self.all_day = all_day;
|
||||
self.base_data.update_modified();
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the category
|
||||
pub fn set_category(mut self, category: impl ToString) -> Self {
|
||||
self.category = Some(category.to_string());
|
||||
self.base_data.update_modified();
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
// Object trait implementation is auto-generated by #[derive(DeriveObject)]
|
||||
// The derive macro generates: object_type(), base_data(), base_data_mut(), index_keys(), indexed_fields()
|
||||
64
src/objects/event/rhai.rs
Normal file
64
src/objects/event/rhai.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
use crate::objects::Event;
|
||||
use rhai::{CustomType, Engine, TypeBuilder};
|
||||
|
||||
impl CustomType for Event {
|
||||
fn build(mut builder: TypeBuilder<Self>) {
|
||||
builder
|
||||
.with_name("Event")
|
||||
.with_fn("new", |ns: String, title: String| Event::new(ns, title))
|
||||
.with_fn("set_description", |event: &mut Event, desc: String| {
|
||||
event.description = Some(desc);
|
||||
event.base_data.update_modified();
|
||||
})
|
||||
.with_fn("set_location", |event: &mut Event, location: String| {
|
||||
event.location = Some(location);
|
||||
event.base_data.update_modified();
|
||||
})
|
||||
.with_fn("set_category", |event: &mut Event, category: String| {
|
||||
event.category = Some(category);
|
||||
event.base_data.update_modified();
|
||||
})
|
||||
.with_fn("set_all_day", |event: &mut Event, all_day: bool| {
|
||||
event.all_day = all_day;
|
||||
event.base_data.update_modified();
|
||||
})
|
||||
.with_fn("get_id", |event: &mut Event| event.base_data.id.clone())
|
||||
.with_fn("get_title", |event: &mut Event| event.title.clone())
|
||||
.with_fn("to_json", |event: &mut Event| {
|
||||
serde_json::to_string_pretty(event).unwrap_or_default()
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Register Event API in Rhai engine
|
||||
pub fn register_event_api(engine: &mut Engine) {
|
||||
engine.build_type::<Event>();
|
||||
|
||||
// Register builder-style constructor
|
||||
engine.register_fn("event", |ns: String, title: String| Event::new(ns, title));
|
||||
|
||||
// Register chainable methods that return Self
|
||||
engine.register_fn("description", |mut event: Event, desc: String| {
|
||||
event.description = Some(desc);
|
||||
event.base_data.update_modified();
|
||||
event
|
||||
});
|
||||
|
||||
engine.register_fn("location", |mut event: Event, location: String| {
|
||||
event.location = Some(location);
|
||||
event.base_data.update_modified();
|
||||
event
|
||||
});
|
||||
|
||||
engine.register_fn("category", |mut event: Event, category: String| {
|
||||
event.category = Some(category);
|
||||
event.base_data.update_modified();
|
||||
event
|
||||
});
|
||||
|
||||
engine.register_fn("all_day", |mut event: Event, all_day: bool| {
|
||||
event.all_day = all_day;
|
||||
event.base_data.update_modified();
|
||||
event
|
||||
});
|
||||
}
|
||||
5
src/objects/mod.rs
Normal file
5
src/objects/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod note;
|
||||
pub mod event;
|
||||
|
||||
pub use note::Note;
|
||||
pub use event::Event;
|
||||
78
src/objects/note/mod.rs
Normal file
78
src/objects/note/mod.rs
Normal file
@@ -0,0 +1,78 @@
use crate::store::BaseData;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;

#[cfg(feature = "rhai-support")]
pub mod rhai;

/// A simple note object
#[derive(Debug, Clone, Serialize, Deserialize, crate::DeriveObject)]
pub struct Note {
    /// Base data
    pub base_data: BaseData,

    /// Title of the note
    #[index]
    pub title: Option<String>,

    /// Content of the note (searchable but not indexed)
    pub content: Option<String>,

    /// Tags for categorization
    #[index]
    pub tags: BTreeMap<String, String>,
}

impl Note {
    /// Create a new note
    pub fn new(ns: String) -> Self {
        Self {
            base_data: BaseData::new(ns),
            title: None,
            content: None,
            tags: BTreeMap::new(),
        }
    }

    /// Create a note with specific ID
    pub fn with_id(id: String, ns: String) -> Self {
        Self {
            base_data: BaseData::with_id(id, ns),
            title: None,
            content: None,
            tags: BTreeMap::new(),
        }
    }

    /// Set the title
    pub fn set_title(mut self, title: impl ToString) -> Self {
        self.title = Some(title.to_string());
        self.base_data.update_modified();
        self
    }

    /// Set the content
    pub fn set_content(mut self, content: impl ToString) -> Self {
        let content_str = content.to_string();
        self.base_data.set_size(Some(content_str.len() as u64));
        self.content = Some(content_str);
        self.base_data.update_modified();
        self
    }

    /// Add a tag
    pub fn add_tag(mut self, key: impl ToString, value: impl ToString) -> Self {
        self.tags.insert(key.to_string(), value.to_string());
        self.base_data.update_modified();
        self
    }

    /// Set MIME type
    pub fn set_mime(mut self, mime: impl ToString) -> Self {
        self.base_data.set_mime(Some(mime.to_string()));
        self
    }
}

// Object trait implementation is auto-generated by #[derive(DeriveObject)]
// The derive macro generates: object_type(), base_data(), base_data_mut(), index_keys(), indexed_fields()
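A small usage sketch of the builder methods above (paths assume the re-exports in `src/objects/mod.rs`):

```rust
use osiris::objects::Note;

fn main() {
    // Each setter consumes and returns the Note, bumping modified_at
    // (and size, for content) along the way.
    let note = Note::new("notes".to_string())
        .set_title("Introduction to Rust")
        .set_content("Rust is a systems programming language.")
        .add_tag("topic", "rust")
        .add_tag("level", "beginner")
        .set_mime("text/plain");

    assert_eq!(note.title.as_deref(), Some("Introduction to Rust"));
    assert_eq!(note.base_data.size, Some(39));
}
```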
67
src/objects/note/rhai.rs
Normal file
67
src/objects/note/rhai.rs
Normal file
@@ -0,0 +1,67 @@
use crate::objects::Note;
use rhai::{CustomType, Engine, TypeBuilder};

impl CustomType for Note {
    fn build(mut builder: TypeBuilder<Self>) {
        builder
            .with_name("Note")
            .with_fn("new", |ns: String| Note::new(ns))
            .with_fn("set_title", |note: &mut Note, title: String| {
                note.title = Some(title);
                note.base_data.update_modified();
            })
            .with_fn("set_content", |note: &mut Note, content: String| {
                let size = content.len() as u64;
                note.content = Some(content);
                note.base_data.set_size(Some(size));
                note.base_data.update_modified();
            })
            .with_fn("add_tag", |note: &mut Note, key: String, value: String| {
                note.tags.insert(key, value);
                note.base_data.update_modified();
            })
            .with_fn("set_mime", |note: &mut Note, mime: String| {
                note.base_data.set_mime(Some(mime));
            })
            .with_fn("get_id", |note: &mut Note| note.base_data.id.clone())
            .with_fn("get_title", |note: &mut Note| note.title.clone().unwrap_or_default())
            .with_fn("get_content", |note: &mut Note| note.content.clone().unwrap_or_default())
            .with_fn("to_json", |note: &mut Note| {
                serde_json::to_string_pretty(note).unwrap_or_default()
            });
    }
}

/// Register Note API in Rhai engine
pub fn register_note_api(engine: &mut Engine) {
    engine.build_type::<Note>();

    // Register builder-style constructor
    engine.register_fn("note", |ns: String| Note::new(ns));

    // Register chainable methods that return Self
    engine.register_fn("title", |mut note: Note, title: String| {
        note.title = Some(title);
        note.base_data.update_modified();
        note
    });

    engine.register_fn("content", |mut note: Note, content: String| {
        let size = content.len() as u64;
        note.content = Some(content);
        note.base_data.set_size(Some(size));
        note.base_data.update_modified();
        note
    });

    engine.register_fn("tag", |mut note: Note, key: String, value: String| {
        note.tags.insert(key, value);
        note.base_data.update_modified();
        note
    });

    engine.register_fn("mime", |mut note: Note, mime: String| {
        note.base_data.set_mime(Some(mime));
        note
    });
}
5
src/retrieve/mod.rs
Normal file
5
src/retrieve/mod.rs
Normal file
@@ -0,0 +1,5 @@
pub mod query;
pub mod search;

pub use query::RetrievalQuery;
pub use search::SearchEngine;
74
src/retrieve/query.rs
Normal file
74
src/retrieve/query.rs
Normal file
@@ -0,0 +1,74 @@
/// Retrieval query structure
#[derive(Clone, Debug)]
pub struct RetrievalQuery {
    /// Optional text query for keyword substring matching
    pub text: Option<String>,

    /// Namespace to search in
    pub ns: String,

    /// Field filters (key=value pairs)
    pub filters: Vec<(String, String)>,

    /// Maximum number of results to return
    pub top_k: usize,
}

impl RetrievalQuery {
    /// Create a new retrieval query
    pub fn new(ns: String) -> Self {
        Self {
            text: None,
            ns,
            filters: Vec::new(),
            top_k: 10,
        }
    }

    /// Set the text query
    pub fn with_text(mut self, text: String) -> Self {
        self.text = Some(text);
        self
    }

    /// Add a filter
    pub fn with_filter(mut self, key: String, value: String) -> Self {
        self.filters.push((key, value));
        self
    }

    /// Set the maximum number of results
    pub fn with_top_k(mut self, top_k: usize) -> Self {
        self.top_k = top_k;
        self
    }
}

/// Search result
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct SearchResult {
    /// Object ID
    pub id: String,

    /// Match score (0.0 to 1.0)
    pub score: f32,

    /// Matched text snippet (if applicable)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub snippet: Option<String>,
}

impl SearchResult {
    pub fn new(id: String, score: f32) -> Self {
        Self {
            id,
            score,
            snippet: None,
        }
    }

    pub fn with_snippet(mut self, snippet: String) -> Self {
        self.snippet = Some(snippet);
        self
    }
}
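For reference, a query built with this API might look as follows (a sketch; the namespace and filter values are arbitrary):

```rust
use osiris::retrieve::RetrievalQuery;

fn build_query() -> RetrievalQuery {
    // Substring search for "rust" in the "notes" namespace, narrowed to
    // objects whose indexed "topic" field equals "rust", top 5 results.
    RetrievalQuery::new("notes".to_string())
        .with_text("rust".to_string())
        .with_filter("topic".to_string(), "rust".to_string())
        .with_top_k(5)
}
```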
150
src/retrieve/search.rs
Normal file
150
src/retrieve/search.rs
Normal file
@@ -0,0 +1,150 @@
use crate::error::Result;
use crate::index::FieldIndex;
use crate::retrieve::query::{RetrievalQuery, SearchResult};
use crate::store::{HeroDbClient, OsirisObject};

/// Search engine for OSIRIS
pub struct SearchEngine {
    client: HeroDbClient,
    index: FieldIndex,
}

impl SearchEngine {
    /// Create a new search engine
    pub fn new(client: HeroDbClient) -> Self {
        let index = FieldIndex::new(client.clone());
        Self { client, index }
    }

    /// Execute a search query
    pub async fn search(&self, query: &RetrievalQuery) -> Result<Vec<SearchResult>> {
        // Step 1: Get candidate IDs from field filters
        let candidate_ids = if query.filters.is_empty() {
            self.index.get_all_ids().await?
        } else {
            self.index.get_ids_by_filters(&query.filters).await?
        };

        // Step 2: If text query is provided, filter by substring match
        let mut results = Vec::new();

        if let Some(text_query) = &query.text {
            let text_query_lower = text_query.to_lowercase();

            for id in candidate_ids {
                // Fetch the object
                if let Ok(obj) = self.client.get_object(&id).await {
                    // Check if text matches
                    let score = self.compute_text_score(&obj, &text_query_lower);

                    if score > 0.0 {
                        let snippet = self.extract_snippet(&obj, &text_query_lower);
                        results.push(SearchResult::new(id, score).with_snippet(snippet));
                    }
                }
            }
        } else {
            // No text query, return all candidates with score 1.0
            for id in candidate_ids {
                results.push(SearchResult::new(id, 1.0));
            }
        }

        // Step 3: Sort by score (descending) and limit
        results.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap());
        results.truncate(query.top_k);

        Ok(results)
    }

    /// Compute text match score (simple substring matching)
    fn compute_text_score(&self, obj: &OsirisObject, query: &str) -> f32 {
        let mut score = 0.0;

        // Check title
        if let Some(title) = &obj.meta.title {
            if title.to_lowercase().contains(query) {
                score += 0.5;
            }
        }

        // Check text content
        if let Some(text) = &obj.text {
            if text.to_lowercase().contains(query) {
                score += 0.5;

                // Bonus for multiple occurrences
                let count = text.to_lowercase().matches(query).count();
                score += (count as f32 - 1.0) * 0.1;
            }
        }

        // Check tags
        for (key, value) in &obj.meta.tags {
            if key.to_lowercase().contains(query) || value.to_lowercase().contains(query) {
                score += 0.2;
            }
        }

        score.min(1.0)
    }

    /// Extract a snippet around the matched text
    fn extract_snippet(&self, obj: &OsirisObject, query: &str) -> String {
        const SNIPPET_LENGTH: usize = 100;

        // Try to find snippet in text
        if let Some(text) = &obj.text {
            let text_lower = text.to_lowercase();
            if let Some(pos) = text_lower.find(query) {
                let start = pos.saturating_sub(SNIPPET_LENGTH / 2);
                let end = (pos + query.len() + SNIPPET_LENGTH / 2).min(text.len());

                let mut snippet = text[start..end].to_string();
                if start > 0 {
                    snippet = format!("...{}", snippet);
                }
                if end < text.len() {
                    snippet = format!("{}...", snippet);
                }

                return snippet;
            }
        }

        // Fallback to title or first N chars
        if let Some(title) = &obj.meta.title {
            return title.clone();
        }

        if let Some(text) = &obj.text {
            let end = SNIPPET_LENGTH.min(text.len());
            let mut snippet = text[..end].to_string();
            if end < text.len() {
                snippet = format!("{}...", snippet);
            }
            return snippet;
        }

        String::from("[No content]")
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    #[ignore]
    async fn test_search() {
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let engine = SearchEngine::new(client);

        let query = RetrievalQuery::new("test".to_string())
            .with_text("rust".to_string())
            .with_top_k(10);

        let results = engine.search(&query).await.unwrap();
        assert!(results.len() <= 10);
    }
}
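The scoring in `compute_text_score` is additive: a title match contributes 0.5, a content match 0.5 plus 0.1 per additional occurrence, each matching tag 0.2, and the sum is capped at 1.0. Something like the following sketch, if added to the `tests` module above (the method is private, so the test must live in this module), would pin that behaviour down; it needs no running HeroDB because the scoring path never touches the network:

```rust
    #[test]
    fn test_compute_text_score_weights() {
        // Client::open only parses the URL, so no server is required here.
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let engine = SearchEngine::new(client);

        // "rust": title match (+0.5), two content occurrences (+0.5 + 0.1),
        // one tag value match (+0.2) => 1.3, capped at 1.0.
        let mut obj = OsirisObject::new(
            "notes".to_string(),
            Some("Rust is fast. I like rust.".to_string()),
        );
        obj.set_title(Some("Why Rust".to_string()));
        obj.set_tag("topic".to_string(), "rust".to_string());

        let score = engine.compute_text_score(&obj, "rust");
        assert!((score - 1.0).abs() < f32::EPSILON);
    }
```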
121
src/rhai_support/instance.rs
Normal file
121
src/rhai_support/instance.rs
Normal file
@@ -0,0 +1,121 @@
//! OSIRIS Instance for Rhai
//!
//! Represents a named OSIRIS instance that can be used in Rhai scripts.
//! Multiple instances can coexist, each with its own HeroDB connection.

use crate::objects::{Event, Note};
use crate::store::{GenericStore, HeroDbClient};
use rhai::{CustomType, EvalAltResult, TypeBuilder};
use std::sync::Arc;
use tokio::runtime::Runtime;

/// A named OSIRIS instance for use in Rhai scripts
#[derive(Clone)]
pub struct OsirisInstance {
    name: String,
    store: Arc<GenericStore>,
    runtime: Arc<Runtime>,
}

impl OsirisInstance {
    /// Create a new OSIRIS instance
    pub fn new(name: impl ToString, herodb_url: &str, db_id: u16) -> Result<Self, Box<dyn std::error::Error>> {
        let client = HeroDbClient::new(herodb_url, db_id)?;
        let store = GenericStore::new(client);
        let runtime = Runtime::new()?;

        Ok(Self {
            name: name.to_string(),
            store: Arc::new(store),
            runtime: Arc::new(runtime),
        })
    }

    /// Get the instance name
    pub fn name(&self) -> String {
        self.name.clone()
    }

    /// Put a Note object
    pub fn put_note(&self, note: Note) -> Result<String, Box<EvalAltResult>> {
        let store = self.store.clone();
        let id = note.base_data.id.clone();

        self.runtime
            .block_on(async move { store.put(&note).await })
            .map_err(|e| format!("[{}] Failed to put note: {}", self.name, e).into())
            .map(|_| id)
    }

    /// Get a Note object by ID
    pub fn get_note(&self, ns: String, id: String) -> Result<Note, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.get::<Note>(&ns, &id).await })
            .map_err(|e| format!("[{}] Failed to get note: {}", self.name, e).into())
    }

    /// Put an Event object
    pub fn put_event(&self, event: Event) -> Result<String, Box<EvalAltResult>> {
        let store = self.store.clone();
        let id = event.base_data.id.clone();

        self.runtime
            .block_on(async move { store.put(&event).await })
            .map_err(|e| format!("[{}] Failed to put event: {}", self.name, e).into())
            .map(|_| id)
    }

    /// Get an Event object by ID
    pub fn get_event(&self, ns: String, id: String) -> Result<Event, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.get::<Event>(&ns, &id).await })
            .map_err(|e| format!("[{}] Failed to get event: {}", self.name, e).into())
    }

    /// Query by index
    pub fn query(&self, ns: String, field: String, value: String) -> Result<rhai::Array, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.get_ids_by_index(&ns, &field, &value).await })
            .map(|ids| ids.into_iter().map(rhai::Dynamic::from).collect())
            .map_err(|e| format!("[{}] Failed to query: {}", self.name, e).into())
    }

    /// Delete a Note
    pub fn delete_note(&self, note: Note) -> Result<bool, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.delete(&note).await })
            .map_err(|e| format!("[{}] Failed to delete note: {}", self.name, e).into())
    }

    /// Delete an Event
    pub fn delete_event(&self, event: Event) -> Result<bool, Box<EvalAltResult>> {
        let store = self.store.clone();

        self.runtime
            .block_on(async move { store.delete(&event).await })
            .map_err(|e| format!("[{}] Failed to delete event: {}", self.name, e).into())
    }
}

impl CustomType for OsirisInstance {
    fn build(mut builder: TypeBuilder<Self>) {
        builder
            .with_name("OsirisInstance")
            .with_fn("name", |instance: &mut OsirisInstance| instance.name())
            .with_fn("put_note", |instance: &mut OsirisInstance, note: Note| instance.put_note(note))
            .with_fn("get_note", |instance: &mut OsirisInstance, ns: String, id: String| instance.get_note(ns, id))
            .with_fn("put_event", |instance: &mut OsirisInstance, event: Event| instance.put_event(event))
            .with_fn("get_event", |instance: &mut OsirisInstance, ns: String, id: String| instance.get_event(ns, id))
            .with_fn("query", |instance: &mut OsirisInstance, ns: String, field: String, value: String| instance.query(ns, field, value))
            .with_fn("delete_note", |instance: &mut OsirisInstance, note: Note| instance.delete_note(note))
            .with_fn("delete_event", |instance: &mut OsirisInstance, event: Event| instance.delete_event(event));
    }
}
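A minimal end-to-end sketch of wiring an `OsirisInstance` into a Rhai engine. It assumes a HeroDB server on `localhost:6379`, the `rhai-support` feature, and the re-exports in `src/rhai_support/mod.rs`; the instance name, namespace, and script are arbitrary. The `note()`/`title()`/`content()` functions come from `register_note_api`.

```rust
use osiris::rhai_support::{register_note_api, OsirisInstance};
use rhai::Engine;

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = Engine::new();
    register_note_api(&mut engine);
    engine.build_type::<OsirisInstance>();

    // One named instance per HeroDB database (URL and DB id are assumptions).
    let freezone = OsirisInstance::new("freezone", "redis://localhost:6379", 1)
        .map_err(|e| e.to_string())?;
    engine.register_fn("freezone", move || freezone.clone());

    // Build a Note in script and persist it through the instance.
    let id: String = engine.eval(
        r#"
            let osiris = freezone();
            let n = note("notes").title("Hello").content("First note");
            osiris.put_note(n)
        "#,
    )?;
    println!("stored note {id}");
    Ok(())
}
```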
12
src/rhai_support/mod.rs
Normal file
12
src/rhai_support/mod.rs
Normal file
@@ -0,0 +1,12 @@
//! Rhai support for OSIRIS
//!
//! This module provides Rhai integration infrastructure for OSIRIS.
//! Object-specific Rhai support is located in each object's module (e.g., objects/note/rhai.rs).

pub mod instance;

pub use instance::OsirisInstance;

// Re-export registration functions from object modules
pub use crate::objects::note::rhai::register_note_api;
pub use crate::objects::event::rhai::register_event_api;
78
src/store/base_data.rs
Normal file
78
src/store/base_data.rs
Normal file
@@ -0,0 +1,78 @@
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

/// Base data that all OSIRIS objects must include
/// Similar to heromodels BaseModelData but adapted for OSIRIS
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct BaseData {
    /// Unique ID (auto-generated or user-assigned)
    pub id: String,

    /// Namespace this object belongs to
    pub ns: String,

    /// Unix timestamp for creation time
    #[serde(with = "time::serde::timestamp")]
    pub created_at: OffsetDateTime,

    /// Unix timestamp for last modification time
    #[serde(with = "time::serde::timestamp")]
    pub modified_at: OffsetDateTime,

    /// Optional MIME type
    pub mime: Option<String>,

    /// Content size in bytes
    pub size: Option<u64>,
}

impl BaseData {
    /// Create new base data with generated UUID
    pub fn new(ns: String) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id: uuid::Uuid::new_v4().to_string(),
            ns,
            created_at: now,
            modified_at: now,
            mime: None,
            size: None,
        }
    }

    /// Create new base data with specific ID
    pub fn with_id(id: String, ns: String) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id,
            ns,
            created_at: now,
            modified_at: now,
            mime: None,
            size: None,
        }
    }

    /// Update the modified timestamp
    pub fn update_modified(&mut self) {
        self.modified_at = OffsetDateTime::now_utc();
    }

    /// Set the MIME type
    pub fn set_mime(&mut self, mime: Option<String>) {
        self.mime = mime;
        self.update_modified();
    }

    /// Set the size
    pub fn set_size(&mut self, size: Option<u64>) {
        self.size = size;
        self.update_modified();
    }
}

impl Default for BaseData {
    fn default() -> Self {
        Self::new(String::from("default"))
    }
}
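Because of the `#[serde(with = "time::serde::timestamp")]` attributes, `created_at` and `modified_at` serialize as Unix seconds rather than RFC 3339 strings. A quick sketch (the example ID, namespace, and timestamp values are illustrative):

```rust
use osiris::store::BaseData;

fn main() -> serde_json::Result<()> {
    let base = BaseData::with_id("note-1".to_string(), "notes".to_string());
    // Prints something like:
    // {"id":"note-1","ns":"notes","created_at":1733000000,"modified_at":1733000000,"mime":null,"size":null}
    println!("{}", serde_json::to_string(&base)?);
    Ok(())
}
```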
124
src/store/generic_store.rs
Normal file
124
src/store/generic_store.rs
Normal file
@@ -0,0 +1,124 @@
use crate::error::Result;
use crate::index::FieldIndex;
use crate::store::{HeroDbClient, Object};

/// Generic storage layer for OSIRIS objects
pub struct GenericStore {
    client: HeroDbClient,
    index: FieldIndex,
}

impl GenericStore {
    /// Create a new generic store
    pub fn new(client: HeroDbClient) -> Self {
        let index = FieldIndex::new(client.clone());
        Self { client, index }
    }

    /// Store an object
    pub async fn put<T: Object>(&self, obj: &T) -> Result<()> {
        // Serialize object to JSON
        let json = obj.to_json()?;
        let key = format!("obj:{}:{}", obj.namespace(), obj.id());

        // Store in HeroDB
        self.client.set(&key, &json).await?;

        // Index the object
        self.index_object(obj).await?;

        Ok(())
    }

    /// Get an object by ID
    pub async fn get<T: Object>(&self, ns: &str, id: &str) -> Result<T> {
        let key = format!("obj:{}:{}", ns, id);
        let json = self.client.get(&key).await?
            .ok_or_else(|| crate::error::Error::NotFound(format!("Object {}:{}", ns, id)))?;

        T::from_json(&json)
    }

    /// Delete an object
    pub async fn delete<T: Object>(&self, obj: &T) -> Result<bool> {
        let key = format!("obj:{}:{}", obj.namespace(), obj.id());

        // Deindex first
        self.deindex_object(obj).await?;

        // Delete from HeroDB
        self.client.del(&key).await
    }

    /// Check if an object exists
    pub async fn exists(&self, ns: &str, id: &str) -> Result<bool> {
        let key = format!("obj:{}:{}", ns, id);
        self.client.exists(&key).await
    }

    /// Index an object
    async fn index_object<T: Object>(&self, obj: &T) -> Result<()> {
        let index_keys = obj.index_keys();

        for key in index_keys {
            let field_key = format!("idx:{}:{}:{}", obj.namespace(), key.name, key.value);
            self.client.sadd(&field_key, obj.id()).await?;
        }

        // Add to scan index for full-text search
        let scan_key = format!("scan:{}", obj.namespace());
        self.client.sadd(&scan_key, obj.id()).await?;

        Ok(())
    }

    /// Deindex an object
    async fn deindex_object<T: Object>(&self, obj: &T) -> Result<()> {
        let index_keys = obj.index_keys();

        for key in index_keys {
            let field_key = format!("idx:{}:{}:{}", obj.namespace(), key.name, key.value);
            self.client.srem(&field_key, obj.id()).await?;
        }

        // Remove from scan index
        let scan_key = format!("scan:{}", obj.namespace());
        self.client.srem(&scan_key, obj.id()).await?;

        Ok(())
    }

    /// Get all IDs matching an index key
    pub async fn get_ids_by_index(&self, ns: &str, field: &str, value: &str) -> Result<Vec<String>> {
        let field_key = format!("idx:{}:{}:{}", ns, field, value);
        self.client.smembers(&field_key).await
    }

    /// Get all IDs in a namespace
    pub async fn get_all_ids(&self, ns: &str) -> Result<Vec<String>> {
        let scan_key = format!("scan:{}", ns);
        self.client.smembers(&scan_key).await
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::objects::Note;

    #[tokio::test]
    #[ignore]
    async fn test_generic_store() {
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let store = GenericStore::new(client);

        let note = Note::new("test".to_string())
            .set_title("Test Note")
            .set_content("This is a test");

        store.put(&note).await.unwrap();

        let retrieved: Note = store.get("test", note.id()).await.unwrap();
        assert_eq!(retrieved.title, note.title);
    }
}
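A rough usage sketch of the key layout above: each `put` writes the JSON body to `obj:{ns}:{id}`, adds the ID to one `idx:{ns}:{field}:{value}` set per index key, and registers it in `scan:{ns}`. The sketch assumes a running HeroDB on `localhost:6379`, that the derive macro indexes `Note::title` under its literal value, and that the crate's error type converts into `anyhow::Error`.

```rust
use osiris::objects::Note;
use osiris::store::{GenericStore, HeroDbClient, Object};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let client = HeroDbClient::new("redis://localhost:6379", 1)?;
    let store = GenericStore::new(client);

    let note = Note::new("notes".to_string())
        .set_title("Hello")
        .add_tag("topic", "rust");

    // Writes obj:notes:<id>, one idx:notes:<field>:<value> set per index key,
    // and adds the id to scan:notes.
    store.put(&note).await?;

    // Look the note up again through the title index (assumed key shape).
    let ids = store.get_ids_by_index("notes", "title", "Hello").await?;
    assert!(ids.contains(&note.id().to_string()));

    store.delete(&note).await?;
    Ok(())
}
```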
161
src/store/herodb_client.rs
Normal file
161
src/store/herodb_client.rs
Normal file
@@ -0,0 +1,161 @@
use crate::error::{Error, Result};
use crate::store::OsirisObject;
use redis::aio::MultiplexedConnection;
use redis::{AsyncCommands, Client};

/// HeroDB client wrapper for OSIRIS operations
#[derive(Clone)]
pub struct HeroDbClient {
    client: Client,
    pub db_id: u16,
}

impl HeroDbClient {
    /// Create a new HeroDB client
    pub fn new(url: &str, db_id: u16) -> Result<Self> {
        let client = Client::open(url)?;
        Ok(Self { client, db_id })
    }

    /// Get a connection to the database
    pub async fn get_connection(&self) -> Result<MultiplexedConnection> {
        let mut conn = self.client.get_multiplexed_async_connection().await?;

        // Select the appropriate database
        if self.db_id > 0 {
            redis::cmd("SELECT")
                .arg(self.db_id)
                .query_async(&mut conn)
                .await?;
        }

        Ok(conn)
    }

    /// Store an object in HeroDB
    pub async fn put_object(&self, obj: &OsirisObject) -> Result<()> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", obj.id);
        let value = serde_json::to_string(obj)?;

        conn.set(&key, value).await?;
        Ok(())
    }

    /// Retrieve an object from HeroDB
    pub async fn get_object(&self, id: &str) -> Result<OsirisObject> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", id);

        let value: Option<String> = conn.get(&key).await?;
        match value {
            Some(v) => {
                let obj: OsirisObject = serde_json::from_str(&v)?;
                Ok(obj)
            }
            None => Err(Error::NotFound(format!("Object not found: {}", id))),
        }
    }

    /// Delete an object from HeroDB
    pub async fn delete_object(&self, id: &str) -> Result<bool> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", id);

        let deleted: i32 = conn.del(&key).await?;
        Ok(deleted > 0)
    }

    /// Check if an object exists
    pub async fn exists(&self, id: &str) -> Result<bool> {
        let mut conn = self.get_connection().await?;
        let key = format!("meta:{}", id);

        let exists: bool = conn.exists(&key).await?;
        Ok(exists)
    }

    /// Add an ID to a set (for field indexing)
    pub async fn sadd(&self, set_key: &str, member: &str) -> Result<()> {
        let mut conn = self.get_connection().await?;
        conn.sadd(set_key, member).await?;
        Ok(())
    }

    /// Remove an ID from a set
    pub async fn srem(&self, set_key: &str, member: &str) -> Result<()> {
        let mut conn = self.get_connection().await?;
        conn.srem(set_key, member).await?;
        Ok(())
    }

    /// Get all members of a set
    pub async fn smembers(&self, set_key: &str) -> Result<Vec<String>> {
        let mut conn = self.get_connection().await?;
        let members: Vec<String> = conn.smembers(set_key).await?;
        Ok(members)
    }

    /// Get the intersection of multiple sets
    pub async fn sinter(&self, keys: &[String]) -> Result<Vec<String>> {
        let mut conn = self.get_connection().await?;
        let members: Vec<String> = conn.sinter(keys).await?;
        Ok(members)
    }

    /// Get all keys matching a pattern
    pub async fn keys(&self, pattern: &str) -> Result<Vec<String>> {
        let mut conn = self.get_connection().await?;
        let keys: Vec<String> = conn.keys(pattern).await?;
        Ok(keys)
    }

    /// Set a key-value pair
    pub async fn set(&self, key: &str, value: &str) -> Result<()> {
        let mut conn = self.get_connection().await?;
        conn.set(key, value).await?;
        Ok(())
    }

    /// Get a value by key
    pub async fn get(&self, key: &str) -> Result<Option<String>> {
        let mut conn = self.get_connection().await?;
        let value: Option<String> = conn.get(key).await?;
        Ok(value)
    }

    /// Delete a key
    pub async fn del(&self, key: &str) -> Result<bool> {
        let mut conn = self.get_connection().await?;
        let deleted: i32 = conn.del(key).await?;
        Ok(deleted > 0)
    }

    /// Get database size (number of keys)
    pub async fn dbsize(&self) -> Result<usize> {
        let mut conn = self.get_connection().await?;
        let size: usize = redis::cmd("DBSIZE").query_async(&mut conn).await?;
        Ok(size)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Note: These tests require a running HeroDB instance
    // They are ignored by default

    #[tokio::test]
    #[ignore]
    async fn test_put_get_object() {
        let client = HeroDbClient::new("redis://localhost:6379", 1).unwrap();
        let obj = OsirisObject::new("test".to_string(), Some("Hello".to_string()));

        client.put_object(&obj).await.unwrap();
        let retrieved = client.get_object(&obj.id).await.unwrap();

        assert_eq!(obj.id, retrieved.id);
        assert_eq!(obj.text, retrieved.text);
    }
}
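A minimal round-trip sketch over the client API above, assuming a HeroDB server on `localhost:6379` and that the crate's error type converts into `anyhow::Error`:

```rust
use osiris::store::{HeroDbClient, OsirisObject};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Objects are stored under meta:<id> in the selected logical database.
    let client = HeroDbClient::new("redis://localhost:6379", 1)?;

    let mut obj = OsirisObject::new("notes".to_string(), Some("Hello, OSIRIS".to_string()));
    obj.set_tag("topic".to_string(), "intro".to_string());

    client.put_object(&obj).await?;
    assert!(client.exists(&obj.id).await?);

    let roundtrip = client.get_object(&obj.id).await?;
    assert_eq!(roundtrip.text, obj.text);

    client.delete_object(&obj.id).await?;
    Ok(())
}
```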
11
src/store/mod.rs
Normal file
11
src/store/mod.rs
Normal file
@@ -0,0 +1,11 @@
pub mod base_data;
pub mod object_trait;
pub mod herodb_client;
pub mod generic_store;
pub mod object; // Keep old implementation for backwards compat temporarily

pub use base_data::BaseData;
pub use object_trait::{IndexKey, Object, Storable};
pub use herodb_client::HeroDbClient;
pub use generic_store::GenericStore;
pub use object::{Metadata, OsirisObject}; // Old implementation
160
src/store/object.rs
Normal file
160
src/store/object.rs
Normal file
@@ -0,0 +1,160 @@
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use time::OffsetDateTime;

/// Core OSIRIS object structure
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OsirisObject {
    /// Unique identifier (UUID or user-assigned)
    pub id: String,

    /// Namespace (e.g., "notes", "calendar")
    pub ns: String,

    /// Metadata
    pub meta: Metadata,

    /// Optional plain text content
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text: Option<String>,
}

/// Object metadata
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Metadata {
    /// Optional human-readable title
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,

    /// MIME type
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mime: Option<String>,

    /// Key-value tags for categorization
    #[serde(default)]
    pub tags: BTreeMap<String, String>,

    /// Creation timestamp
    #[serde(with = "time::serde::rfc3339")]
    pub created: OffsetDateTime,

    /// Last update timestamp
    #[serde(with = "time::serde::rfc3339")]
    pub updated: OffsetDateTime,

    /// Content size in bytes
    #[serde(skip_serializing_if = "Option::is_none")]
    pub size: Option<u64>,
}

impl OsirisObject {
    /// Create a new object with generated UUID
    pub fn new(ns: String, text: Option<String>) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id: uuid::Uuid::new_v4().to_string(),
            ns,
            meta: Metadata {
                title: None,
                mime: None,
                tags: BTreeMap::new(),
                created: now,
                updated: now,
                size: text.as_ref().map(|t| t.len() as u64),
            },
            text,
        }
    }

    /// Create a new object with specific ID
    pub fn with_id(id: String, ns: String, text: Option<String>) -> Self {
        let now = OffsetDateTime::now_utc();
        Self {
            id,
            ns,
            meta: Metadata {
                title: None,
                mime: None,
                tags: BTreeMap::new(),
                created: now,
                updated: now,
                size: text.as_ref().map(|t| t.len() as u64),
            },
            text,
        }
    }

    /// Update the object's text content
    pub fn update_text(&mut self, text: Option<String>) {
        self.meta.updated = OffsetDateTime::now_utc();
        self.meta.size = text.as_ref().map(|t| t.len() as u64);
        self.text = text;
    }

    /// Add or update a tag
    pub fn set_tag(&mut self, key: String, value: String) {
        self.meta.tags.insert(key, value);
        self.meta.updated = OffsetDateTime::now_utc();
    }

    /// Remove a tag
    pub fn remove_tag(&mut self, key: &str) -> Option<String> {
        let result = self.meta.tags.remove(key);
        if result.is_some() {
            self.meta.updated = OffsetDateTime::now_utc();
        }
        result
    }

    /// Set the title
    pub fn set_title(&mut self, title: Option<String>) {
        self.meta.title = title;
        self.meta.updated = OffsetDateTime::now_utc();
    }

    /// Set the MIME type
    pub fn set_mime(&mut self, mime: Option<String>) {
        self.meta.mime = mime;
        self.meta.updated = OffsetDateTime::now_utc();
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_object() {
        let obj = OsirisObject::new("notes".to_string(), Some("Hello, world!".to_string()));
        assert_eq!(obj.ns, "notes");
        assert_eq!(obj.text, Some("Hello, world!".to_string()));
        assert_eq!(obj.meta.size, Some(13));
    }

    #[test]
    fn test_update_text() {
        let mut obj = OsirisObject::new("notes".to_string(), Some("Initial".to_string()));
        let initial_updated = obj.meta.updated;

        std::thread::sleep(std::time::Duration::from_millis(10));
        obj.update_text(Some("Updated".to_string()));

        assert_eq!(obj.text, Some("Updated".to_string()));
        assert_eq!(obj.meta.size, Some(7));
        assert!(obj.meta.updated > initial_updated);
    }

    #[test]
    fn test_tags() {
        let mut obj = OsirisObject::new("notes".to_string(), None);
        obj.set_tag("topic".to_string(), "rust".to_string());
        obj.set_tag("project".to_string(), "osiris".to_string());

        assert_eq!(obj.meta.tags.get("topic"), Some(&"rust".to_string()));
        assert_eq!(obj.meta.tags.get("project"), Some(&"osiris".to_string()));

        let removed = obj.remove_tag("topic");
        assert_eq!(removed, Some("rust".to_string()));
        assert_eq!(obj.meta.tags.get("topic"), None);
    }
}
108
src/store/object_trait.rs
Normal file
108
src/store/object_trait.rs
Normal file
@@ -0,0 +1,108 @@
use crate::error::Result;
use crate::store::BaseData;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;

/// Represents an index key for an object field
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct IndexKey {
    /// The name of the index key (field name)
    pub name: &'static str,

    /// The value of the index key for this object instance
    pub value: String,
}

impl IndexKey {
    pub fn new(name: &'static str, value: impl ToString) -> Self {
        Self {
            name,
            value: value.to_string(),
        }
    }
}

/// Core trait that all OSIRIS objects must implement
/// Similar to heromodels Model trait but adapted for OSIRIS
pub trait Object: Debug + Clone + Serialize + for<'de> Deserialize<'de> + Send + Sync {
    /// Get the object type name (used for routing/identification)
    fn object_type() -> &'static str
    where
        Self: Sized;

    /// Get a reference to the base data
    fn base_data(&self) -> &BaseData;

    /// Get a mutable reference to the base data
    fn base_data_mut(&mut self) -> &mut BaseData;

    /// Get the unique ID for this object
    fn id(&self) -> &str {
        &self.base_data().id
    }

    /// Get the namespace for this object
    fn namespace(&self) -> &str {
        &self.base_data().ns
    }

    /// Returns a list of index keys for this object instance
    /// These are generated from fields marked with #[index]
    /// The default implementation returns base_data indexes only
    fn index_keys(&self) -> Vec<IndexKey> {
        let base = self.base_data();
        let mut keys = Vec::new();

        // Index MIME type if present
        if let Some(mime) = &base.mime {
            keys.push(IndexKey::new("mime", mime));
        }

        keys
    }

    /// Return a list of field names which have an index applied
    /// This should be implemented by the derive macro
    fn indexed_fields() -> Vec<&'static str>
    where
        Self: Sized,
    {
        Vec::new()
    }

    /// Get the full-text searchable content for this object
    /// Override this to provide custom searchable text
    fn searchable_text(&self) -> Option<String> {
        None
    }

    /// Serialize the object to JSON
    fn to_json(&self) -> Result<String> {
        serde_json::to_string(self).map_err(Into::into)
    }

    /// Deserialize the object from JSON
    fn from_json(json: &str) -> Result<Self>
    where
        Self: Sized,
    {
        serde_json::from_str(json).map_err(Into::into)
    }

    /// Update the modified timestamp
    fn touch(&mut self) {
        self.base_data_mut().update_modified();
    }
}

/// Trait for objects that can be stored in OSIRIS
/// This is automatically implemented for all types that implement Object
pub trait Storable: Object {
    /// Prepare the object for storage (update timestamps, etc.)
    fn prepare_for_storage(&mut self) {
        self.touch();
    }
}

// Blanket implementation
impl<T: Object> Storable for T {}
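For a type that cannot use `#[derive(DeriveObject)]`, the trait can also be implemented by hand. The following is a hedged sketch for a hypothetical `Bookmark` type (not part of this commit), showing which methods are required and which defaults are typically overridden:

```rust
use osiris::store::{BaseData, IndexKey, Object};
use serde::{Deserialize, Serialize};

/// Hypothetical custom object implementing Object manually.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Bookmark {
    pub base_data: BaseData,
    pub url: String,
}

impl Object for Bookmark {
    fn object_type() -> &'static str {
        "Bookmark"
    }

    fn base_data(&self) -> &BaseData {
        &self.base_data
    }

    fn base_data_mut(&mut self) -> &mut BaseData {
        &mut self.base_data
    }

    // Override the default (mime-only) index_keys to index the URL instead.
    fn index_keys(&self) -> Vec<IndexKey> {
        vec![IndexKey::new("url", &self.url)]
    }

    fn indexed_fields() -> Vec<&'static str> {
        vec!["url"]
    }

    // Make the URL discoverable by the substring search path.
    fn searchable_text(&self) -> Option<String> {
        Some(self.url.clone())
    }
}
```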