move rhailib to herolib

rhailib/_archive/dispatcher/.gitignore (new file, 1 line, vendored)
@@ -0,0 +1 @@
/target
							
								
								
									
rhailib/_archive/dispatcher/Cargo.toml (new file, 24 lines)
@@ -0,0 +1,24 @@
[package]
name = "rhai_dispatcher"
version = "0.1.0"
edition = "2021"

[[bin]]
name = "dispatcher"
path = "cmd/dispatcher.rs"

[dependencies]
clap = { version = "4.4", features = ["derive"] }
env_logger = "0.10"
redis = { version = "0.25.0", features = ["tokio-comp"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
uuid = { version = "1.6", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
log = "0.4"
tokio = { version = "1", features = ["macros", "rt-multi-thread"] } # For async main in examples, and general async
colored = "2.0"

[dev-dependencies] # For examples later
env_logger = "0.10"
rhai = "1.18.0" # For examples that might need to show engine setup
							
								
								
									
rhailib/_archive/dispatcher/README.md (new file, 107 lines)
@@ -0,0 +1,107 @@
# Rhai Dispatcher

The `rhai_dispatcher` crate provides a fluent, builder-based interface for submitting Rhai scripts to a distributed task execution system over Redis. It enables applications to offload Rhai script execution to one or more worker services and await the results.

## Features

-   **Fluent Builder API**: A `RhaiDispatcherBuilder` for easy client configuration and a `PlayRequestBuilder` for constructing and submitting script execution requests.
-   **Asynchronous Operations**: Built with `tokio` for non-blocking I/O.
-   **Request-Reply Pattern**: Submits tasks and awaits results on a dedicated reply queue, eliminating the need for polling.
-   **Configurable Timeouts**: Set how long the client should wait for a task to complete.
-   **Direct-to-Worker-Queue Submission**: Tasks are sent to a queue named after the `worker_id`, allowing for direct and clear task routing.
-   **Manual Status Check**: Provides an option to manually check the status of a task by its ID.

## Core Components

-   **`RhaiDispatcherBuilder`**: A builder to construct a `RhaiDispatcher`. Requires a `caller_id` and Redis URL.
-   **`RhaiDispatcher`**: The main client for interacting with the task system. It is used to create `PlayRequestBuilder` instances.
-   **`PlayRequestBuilder`**: A fluent builder for creating and dispatching a script execution request. You can set:
    -   `worker_id`: The ID of the worker queue to send the task to.
    -   `script` or `script_path`: The Rhai script to execute.
    -   `request_id`: An optional unique ID for the request.
    -   `timeout`: How long to wait for a result.
-   **Submission Methods** (see the sketch after this list):
    -   `submit()`: Submits the request and returns immediately (fire-and-forget).
    -   `await_response()`: Submits the request and waits for the result or a timeout.
-   **`RhaiTaskDetails`**: A struct representing the details of a task, including its script, status (`pending`, `processing`, `completed`, `error`), output, and error messages.
-   **`RhaiDispatcherError`**: An enum covering Redis errors, serialization issues, task timeouts, and related failures.
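
A minimal sketch of the fire-and-forget path, assuming the builder API above (the IDs are placeholders):

```rust
use rhai_dispatcher::RhaiDispatcherBuilder;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = RhaiDispatcherBuilder::new()
        .caller_id("my-app")
        .redis_url("redis://127.0.0.1/")
        .build()?;

    // submit() enqueues the task and returns without waiting for a result.
    client
        .new_play_request()
        .worker_id("worker-1")
        .context_id("context-1")
        .script(r#"print("fire and forget");"#)
        .submit()
        .await?;

    Ok(())
}
```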

## How It Works

1.  A `RhaiDispatcher` is created using the `RhaiDispatcherBuilder`, configured with a `caller_id` and Redis URL.
2.  A `PlayRequestBuilder` is created from the client.
3.  The script, `worker_id`, and an optional `timeout` are configured on the builder.
4.  When `await_response()` is called:
    a.  A unique `task_id` (UUID v4) is generated.
    b.  Task details are stored in a Redis hash with a key like `rhailib:<task_id>`.
    c.  The `task_id` is pushed to the worker's queue, named `rhailib:<worker_id>`.
    d.  The client performs a blocking pop (`BLPOP`) on a dedicated reply queue (`rhailib:reply:<task_id>`), waiting for the worker to send the result.
5.  A `rhai-worker` process, listening on the `rhailib:<worker_id>` queue, picks up the task, executes it, and pushes the final `RhaiTaskDetails` to the reply queue.
6.  The client receives the result from the reply queue and returns it to the caller (see the inspection sketch below).
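
The stored task state can be observed directly in Redis. As a hedged illustration (this helper is not part of the crate; the key name follows the pattern in step 4b), the `redis` crate can read a task's status field back:

```rust
use redis::AsyncCommands;

/// Read the status field of a task hash at `rhailib:<task_id>`.
/// Purely illustrative; task ids are generated by the dispatcher.
async fn inspect_task(redis_url: &str, task_id: &str) -> redis::RedisResult<()> {
    let client = redis::Client::open(redis_url)?;
    let mut conn = client.get_multiplexed_async_connection().await?;

    let status: Option<String> = conn
        .hget(format!("rhailib:{}", task_id), "status")
        .await?;
    println!("status = {:?}", status); // "pending", "processing", "completed", or "error"
    Ok(())
}
```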

## Prerequisites

-   A running Redis instance accessible by the client and the worker services.

## Usage Example

The following example demonstrates how to build a client, submit a script, and wait for the result.

```rust
use rhai_dispatcher::{RhaiDispatcherBuilder, RhaiDispatcherError};
use std::time::Duration;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    env_logger::init();

    // 1. Build the client
    let client = RhaiDispatcherBuilder::new()
        .caller_id("my-app-instance-1")
        .redis_url("redis://127.0.0.1/")
        .build()?;

    // 2. Define the script and target worker
    let script = r#" "Hello, " + worker_id + "!" "#;
    let worker_id = "worker-1";

    // 3. Use the PlayRequestBuilder to configure and submit the request
    let result = client
        .new_play_request()
        .worker_id(worker_id)
        .context_id("my-context") // a context (circle) ID is required before dispatch
        .script(script)
        .timeout(Duration::from_secs(5))
        .await_response()
        .await;

    match result {
        Ok(details) => {
            log::info!("Task completed successfully!");
            log::info!("Status: {}", details.status);
            if let Some(output) = details.output {
                log::info!("Output: {}", output);
            }
        }
        Err(RhaiDispatcherError::Timeout(task_id)) => {
            log::error!("Task {} timed out.", task_id);
        }
        Err(e) => {
            log::error!("An unexpected error occurred: {}", e);
        }
    }

    Ok(())
}
```

Refer to the `examples/` directory for more specific use cases, such as `timeout_example.rs`, which exercises the timeout mechanism.

## Building and Running Examples

To run an example (e.g., `timeout_example`):

```bash
cd rhailib/_archive/dispatcher # (or wherever this client's Cargo.toml is)
cargo run --example timeout_example
```

Ensure a Redis server is running and accessible at `redis://127.0.0.1/`.
							
								
								
									
rhailib/_archive/dispatcher/cmd/README.md (new file, 157 lines)
@@ -0,0 +1,157 @@
# Rhai Dispatcher Binary

A command-line client for executing Rhai scripts on remote workers via Redis.

## Binary: `dispatcher`

### Installation

Build the binary:
```bash
cargo build --bin dispatcher --release
```

### Usage

```bash
# Basic usage - requires caller and circle keys
dispatcher --caller-key <CALLER_KEY> --circle-key <CIRCLE_KEY>

# Execute inline script
dispatcher -c <CALLER_KEY> -k <CIRCLE_KEY> --script "print('Hello World!')"

# Execute script from file
dispatcher -c <CALLER_KEY> -k <CIRCLE_KEY> --file script.rhai

# Use specific worker (defaults to circle key)
dispatcher -c <CALLER_KEY> -k <CIRCLE_KEY> -w <WORKER_KEY> --script "2 + 2"

# Custom Redis and timeout
dispatcher -c <CALLER_KEY> -k <CIRCLE_KEY> --redis-url redis://localhost:6379/1 --timeout 60

# Remove timestamps from logs
dispatcher -c <CALLER_KEY> -k <CIRCLE_KEY> --no-timestamp

# Increase verbosity
dispatcher -c <CALLER_KEY> -k <CIRCLE_KEY> -v --script "debug_info()"
```

### Command-Line Options

| Option | Short | Default | Description |
|--------|-------|---------|-------------|
| `--caller-key` | `-c` | **Required** | Caller public key (your identity) |
| `--circle-key` | `-k` | **Required** | Circle public key (execution context) |
| `--worker-key` | `-w` | `circle-key` | Worker public key (target worker) |
| `--redis-url` | `-r` | `redis://localhost:6379` | Redis connection URL |
| `--script` | `-s` | | Rhai script to execute |
| `--file` | `-f` | | Path to Rhai script file |
| `--timeout` | `-t` | `30` | Timeout for script execution (seconds) |
| `--no-timestamp` | | `false` | Remove timestamps from log output |
| `--verbose` | `-v` | | Increase verbosity (stackable) |

### Execution Modes

#### Inline Script Execution
```bash
# Execute a simple calculation
dispatcher -c caller_123 -k circle_456 -s "let result = 2 + 2; print(result);"

# Execute with specific worker
dispatcher -c caller_123 -k circle_456 -w worker_789 -s "get_user_data()"
```

#### Script File Execution
```bash
# Execute script from file
dispatcher -c caller_123 -k circle_456 -f examples/data_processing.rhai

# Execute with custom timeout
dispatcher -c caller_123 -k circle_456 -f long_running_script.rhai -t 120
```

#### Interactive Mode
```bash
# Enter interactive REPL mode (when no script or file is provided)
dispatcher -c caller_123 -k circle_456

# Interactive mode with verbose logging
dispatcher -c caller_123 -k circle_456 -v --no-timestamp
```

### Interactive Mode

When no script (`-s`) or file (`-f`) is provided, the client enters interactive mode:

```
🔗 Starting Rhai Dispatcher
📋 Configuration:
   Caller ID: caller_123
   Context ID: circle_456
   Worker ID: circle_456
   Redis URL: redis://localhost:6379
   Timeout: 30s

✅ Connected to Redis at redis://localhost:6379
🎮 Entering interactive mode
Type Rhai scripts and press Enter to execute. Type 'exit' or 'quit' to close.
rhai> let x = 42; print(x);
42
rhai> exit
👋 Goodbye!
```

### Configuration Examples

#### Development Usage
```bash
# Simple development client
dispatcher -c dev_user -k dev_circle

# Development with clean logs
dispatcher -c dev_user -k dev_circle --no-timestamp -v
```

#### Production Usage
```bash
# Production client with specific worker
dispatcher \
  --caller-key prod_user_123 \
  --circle-key prod_circle_456 \
  --worker-key prod_worker_789 \
  --redis-url redis://redis-cluster:6379/0 \
  --timeout 300 \
  --file production_script.rhai
```

#### Batch Processing
```bash
# Process multiple scripts
for script in scripts/*.rhai; do
  dispatcher -c batch_user -k batch_circle -f "$script" --no-timestamp
done
```

### Key Concepts

- **Caller Key**: Your identity, used for authentication and tracking
- **Circle Key**: Execution context, defining the environment and permissions
- **Worker Key**: Target worker that should execute the script (defaults to the circle key)

### Error Handling

The client provides clear error messages for:
- Missing required keys
- Redis connection failures
- Script execution timeouts
- Worker unavailability
- Script syntax errors

### Dependencies

- `rhai_dispatcher`: Core client library for Redis-based script execution
- `redis`: Redis client for task queue communication
- `clap`: Command-line argument parsing
- `env_logger`: Logging infrastructure
- `tokio`: Async runtime
							
								
								
									
rhailib/_archive/dispatcher/cmd/dispatcher.rs (new file, 207 lines)
@@ -0,0 +1,207 @@
use clap::Parser;
use colored::Colorize;
use log::{error, info};
use rhai_dispatcher::{RhaiDispatcher, RhaiDispatcherBuilder};
use std::io::{self, Write};
use std::time::Duration;

#[derive(Parser, Debug)]
#[command(author, version, about = "Rhai Dispatcher - Script execution client", long_about = None)]
struct Args {
    /// Caller public key (caller ID)
    #[arg(short = 'c', long = "caller-key", help = "Caller public key (your identity)")]
    caller_id: String,

    /// Circle public key (context ID)
    #[arg(short = 'k', long = "circle-key", help = "Circle public key (execution context)")]
    context_id: String,

    /// Worker public key (defaults to the circle public key if not provided)
    #[arg(short = 'w', long = "worker-key", help = "Worker public key (defaults to circle key)")]
    worker_id: Option<String>,

    /// Redis URL
    #[arg(short, long, default_value = "redis://localhost:6379", help = "Redis connection URL")]
    redis_url: String,

    /// Rhai script to execute
    #[arg(short, long, help = "Rhai script to execute")]
    script: Option<String>,

    /// Path to Rhai script file
    #[arg(short, long, help = "Path to Rhai script file")]
    file: Option<String>,

    /// Timeout for script execution (in seconds)
    #[arg(short, long, default_value = "30", help = "Timeout for script execution in seconds")]
    timeout: u64,

    /// Increase verbosity (can be used multiple times)
    #[arg(short, long, action = clap::ArgAction::Count, help = "Increase verbosity (-v for debug, -vv for trace)")]
    verbose: u8,

    /// Disable timestamps in log output
    #[arg(long, help = "Remove timestamps from log output")]
    no_timestamp: bool,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Args::parse();

    // The worker key defaults to the circle (context) key when not provided.
    let worker_id = args
        .worker_id
        .clone()
        .unwrap_or_else(|| args.context_id.clone());

    // Configure logging based on verbosity level
    let log_config = match args.verbose {
        0 => "warn,rhai_dispatcher=warn",
        1 => "info,rhai_dispatcher=info",
        2 => "debug,rhai_dispatcher=debug",
        _ => "trace,rhai_dispatcher=trace",
    };

    std::env::set_var("RUST_LOG", log_config);

    // Configure env_logger with or without timestamps
    if args.no_timestamp {
        env_logger::Builder::from_default_env()
            .format_timestamp(None)
            .init();
    } else {
        env_logger::init();
    }

    if args.verbose > 0 {
        info!("🔗 Starting Rhai Dispatcher");
        info!("📋 Configuration:");
        info!("   Caller ID: {}", args.caller_id);
        info!("   Context ID: {}", args.context_id);
        info!("   Worker ID: {}", worker_id);
        info!("   Redis URL: {}", args.redis_url);
        info!("   Timeout: {}s", args.timeout);
        info!("");
    }

    // Create the Rhai client
    let client = RhaiDispatcherBuilder::new()
        .caller_id(&args.caller_id)
        .worker_id(&worker_id)
        .context_id(&args.context_id)
        .redis_url(&args.redis_url)
        .build()?;

    if args.verbose > 0 {
        info!("✅ Connected to Redis at {}", args.redis_url);
    }

    // Determine execution mode
    if let Some(script_content) = args.script {
        // Execute inline script
        if args.verbose > 0 {
            info!("📜 Executing inline script");
        }
        execute_script(&client, script_content, args.timeout).await?;
    } else if let Some(file_path) = args.file {
        // Execute script from file
        if args.verbose > 0 {
            info!("📁 Loading script from file: {}", file_path);
        }
        let script_content = std::fs::read_to_string(&file_path)
            .map_err(|e| format!("Failed to read script file '{}': {}", file_path, e))?;
        execute_script(&client, script_content, args.timeout).await?;
    } else {
        // Interactive mode
        info!("🎮 Entering interactive mode");
        info!("Type Rhai scripts and press Enter to execute. Type 'exit' or 'quit' to close.");
        run_interactive_mode(&client, args.timeout, args.verbose).await?;
    }

    Ok(())
}

async fn execute_script(
    client: &RhaiDispatcher,
    script: String,
    timeout_secs: u64,
) -> Result<(), Box<dyn std::error::Error>> {
    info!("⚡ Executing script: {:.50}...", script);

    let timeout = Duration::from_secs(timeout_secs);

    match client
        .new_play_request()
        .script(&script)
        .timeout(timeout)
        .await_response()
        .await
    {
        Ok(result) => {
            info!("✅ Script execution completed");
            println!("Status: {}", result.status);
            if let Some(output) = result.output {
                println!("Output: {}", output);
            }
            if let Some(error) = result.error {
                println!("Error: {}", error);
            }
        }
        Err(e) => {
            error!("❌ Script execution failed: {}", e);
            return Err(Box::new(e));
        }
    }

    Ok(())
}

async fn run_interactive_mode(
    client: &RhaiDispatcher,
    timeout_secs: u64,
    verbose: u8,
) -> Result<(), Box<dyn std::error::Error>> {
    let timeout = Duration::from_secs(timeout_secs);

    loop {
        print!("rhai> ");
        io::stdout().flush()?;

        let mut input = String::new();
        io::stdin().read_line(&mut input)?;

        let input = input.trim();

        if input.is_empty() {
            continue;
        }

        if input == "exit" || input == "quit" {
            info!("👋 Goodbye!");
            break;
        }

        if verbose > 0 {
            info!("⚡ Executing: {}", input);
        }

        match client
            .new_play_request()
            .script(input)
            .timeout(timeout)
            .await_response()
            .await
        {
            Ok(result) => {
                if let Some(output) = result.output {
                    println!("{}", output.green());
                }
                if let Some(error) = result.error {
                    println!("{}", format!("error: {}", error).red());
                }
            }
            Err(e) => {
                println!("{}", format!("error: {}", e).red());
            }
        }

        println!(); // Blank line for readability
    }

    Ok(())
}
							
								
								
									
rhailib/_archive/dispatcher/docs/ARCHITECTURE.md (new file, 190 lines)
@@ -0,0 +1,190 @@
# Architecture of the `rhai_dispatcher` Crate

The `rhai_dispatcher` crate provides a Redis-based client library for submitting Rhai scripts to distributed worker services and awaiting their execution results. It implements a request-reply pattern using Redis as the message broker.

## Core Architecture

The client follows a builder pattern design with clear separation of concerns:

```mermaid
graph TD
    A[RhaiDispatcherBuilder] --> B[RhaiDispatcher]
    B --> C[PlayRequestBuilder]
    C --> D[PlayRequest]
    D --> E[Redis Task Queue]
    E --> F[Worker Service]
    F --> G[Redis Reply Queue]
    G --> H[Client Response]

    subgraph "Client Components"
        A
        B
        C
        D
    end

    subgraph "Redis Infrastructure"
        E
        G
    end

    subgraph "External Services"
        F
    end
```

## Key Components

### 1. RhaiDispatcherBuilder

A builder pattern implementation for constructing `RhaiDispatcher` instances with proper configuration validation.

**Responsibilities:**
- Configure Redis connection URL
- Set caller ID for task attribution
- Validate configuration before building client

**Key Methods:**
- `caller_id(id: &str)` - Sets the caller identifier
- `redis_url(url: &str)` - Configures Redis connection
- `build()` - Creates the final `RhaiDispatcher` instance

### 2. RhaiDispatcher

The main client interface that manages Redis connections and provides factory methods for creating play requests.

**Responsibilities:**
- Maintain Redis connection pool
- Provide factory methods for request builders
- Handle low-level Redis operations
- Manage task status queries

**Key Methods:**
- `new_play_request()` - Creates a new `PlayRequestBuilder`
- `get_task_status(task_id)` - Queries task status from Redis
- Internal methods for Redis operations

### 3. PlayRequestBuilder

A fluent builder for constructing and submitting script execution requests.

**Responsibilities:**
- Configure script execution parameters
- Handle script loading from files or strings
- Manage request timeouts
- Provide submission methods (fire-and-forget vs. await-response)

**Key Methods:**
- `worker_id(id: &str)` - Target worker queue (determines which worker processes the task)
- `context_id(id: &str)` - Target context ID (determines execution context/circle)
- `script(content: &str)` - Set script content directly
- `script_path(path: &str)` - Load script from file
- `timeout(duration: Duration)` - Set execution timeout
- `submit()` - Fire-and-forget submission
- `await_response()` - Submit and wait for result

**Architecture Note:** The decoupling of `worker_id` and `context_id` allows a single worker to process tasks for multiple contexts (circles), providing greater deployment flexibility.
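
As a hedged sketch of that decoupling (using the builder API documented above; the IDs are placeholders), a single worker queue can receive requests for two different circles:

```rust
use rhai_dispatcher::{RhaiDispatcher, RhaiDispatcherError, RhaiTaskDetails};

// Both requests land on the same worker queue, but each carries its own
// context id, so the worker executes them in different circles.
async fn fan_out(
    client: &RhaiDispatcher,
) -> Result<(RhaiTaskDetails, RhaiTaskDetails), RhaiDispatcherError> {
    let in_circle_a = client
        .new_play_request()
        .worker_id("worker-1")
        .context_id("circle-a")
        .script("40 + 2")
        .await_response()
        .await?;

    let in_circle_b = client
        .new_play_request()
        .worker_id("worker-1")
        .context_id("circle-b")
        .script("2 + 2")
        .await_response()
        .await?;

    Ok((in_circle_a, in_circle_b))
}
```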

### 4. Data Structures

#### RhaiTaskDetails
Represents the complete state of a task throughout its lifecycle.

```rust
pub struct RhaiTaskDetails {
    pub task_id: String,
    pub script: String,
    pub status: String,        // "pending", "processing", "completed", "error"
    pub output: Option<String>,
    pub error: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    pub caller_id: String,
    pub context_id: String,
    pub worker_id: String,
}
```

#### RhaiDispatcherError
Comprehensive error handling for various failure scenarios (handled in the sketch after this list):
- `RedisError` - Redis connection/operation failures
- `SerializationError` - JSON serialization/deserialization issues
- `Timeout` - Task execution timeouts
- `TaskNotFound` - Missing tasks after submission
- `ContextIdMissing` / `CallerIdMissing` - required IDs absent at dispatch time
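
A hedged sketch of handling each variant (`client` is assumed to be an already-built `RhaiDispatcher`):

```rust
use rhai_dispatcher::{RhaiDispatcher, RhaiDispatcherError};

async fn run_once(client: &RhaiDispatcher) {
    match client
        .new_play_request()
        .worker_id("worker-1")
        .context_id("circle-a")
        .script("1 + 1")
        .await_response()
        .await
    {
        Ok(details) => println!("status: {}", details.status),
        Err(RhaiDispatcherError::Timeout(task_id)) => eprintln!("task {} timed out", task_id),
        Err(RhaiDispatcherError::RedisError(e)) => eprintln!("redis failure: {}", e),
        Err(RhaiDispatcherError::SerializationError(e)) => eprintln!("bad reply payload: {}", e),
        Err(other) => eprintln!("dispatch failed: {}", other),
    }
}
```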

## Communication Protocol

### Task Submission Flow

1. **Task Creation**: Client generates unique UUID for task identification
2. **Task Storage**: Task details stored in Redis hash: `rhailib:<task_id>`
3. **Queue Submission**: Task ID pushed to worker queue: `rhailib:<worker_id>`
4. **Reply Queue Setup**: Client listens on: `rhailib:reply:<task_id>`

### Redis Key Patterns

- **Task Storage**: `rhailib:<task_id>` (Redis Hash)
- **Worker Queues**: `rhailib:<worker_id>` (Redis List)
- **Reply Queues**: `rhailib:reply:<task_id>` (Redis List)
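
Illustrative helper functions (not exported by the crate) that mirror these key patterns; `NAMESPACE_PREFIX` is the `"rhailib:"` constant from the crate source:

```rust
const NAMESPACE_PREFIX: &str = "rhailib:";

/// Hash holding the task's fields (script, status, timestamps, ...).
fn task_key(task_id: &str) -> String {
    format!("{NAMESPACE_PREFIX}{task_id}")
}

/// List the target worker pops task ids from. The crate normalizes the
/// worker id (spaces to underscores, lowercased) when building this key.
fn worker_queue_key(worker_id: &str) -> String {
    format!("{NAMESPACE_PREFIX}{}", worker_id.replace(' ', "_").to_lowercase())
}

/// List the client blocks on for the final result.
fn reply_queue_key(task_id: &str) -> String {
    format!("{NAMESPACE_PREFIX}reply:{task_id}")
}
```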

### Message Flow Diagram

```mermaid
sequenceDiagram
    participant C as Client
    participant R as Redis
    participant W as Worker

    C->>R: HSET rhailib:task_id (task details)
    C->>R: LPUSH rhailib:worker_id task_id
    C->>R: BLPOP rhailib:reply:task_id (blocking)

    W->>R: BRPOP rhailib:worker_id (blocking)
    W->>W: Execute Rhai Script
    W->>R: LPUSH rhailib:reply:task_id (result)

    R->>C: Return result from BLPOP
    C->>R: DEL rhailib:reply:task_id (cleanup)
```
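
The client-side half of this exchange reduces to a blocking pop followed by cleanup. A hedged sketch (mirroring, not reproducing, the crate's internal logic; `conn` is a `redis::aio::MultiplexedConnection`):

```rust
use redis::AsyncCommands;
use rhai_dispatcher::{RhaiDispatcherError, RhaiTaskDetails};

async fn wait_for_reply(
    conn: &mut redis::aio::MultiplexedConnection,
    task_id: &str,
    timeout_secs: f64,
) -> Result<RhaiTaskDetails, RhaiDispatcherError> {
    let reply_key = format!("rhailib:reply:{}", task_id);

    // BLPOP returns None on timeout, or (queue, payload) on success.
    let popped: Option<(String, String)> = conn.blpop(&reply_key, timeout_secs).await?;
    // Best-effort cleanup of the reply queue either way.
    let _: redis::RedisResult<i32> = conn.del(&reply_key).await;

    match popped {
        Some((_queue, payload)) => Ok(serde_json::from_str(&payload)?),
        None => Err(RhaiDispatcherError::Timeout(task_id.to_string())),
    }
}
```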

## Concurrency and Async Design

The client is built on `tokio` for asynchronous operations:

- **Connection Pooling**: Uses Redis multiplexed connections for efficiency
- **Non-blocking Operations**: All Redis operations are async
- **Timeout Handling**: Configurable timeouts with proper cleanup
- **Error Propagation**: Comprehensive error handling with context

## Configuration and Deployment

### Prerequisites
- Redis server accessible to both client and workers
- Proper network connectivity between components
- Sufficient Redis memory for task storage

### Configuration Options
- **Redis URL**: Connection string for Redis instance
- **Caller ID**: Unique identifier for client instance
- **Timeouts**: Per-request timeout configuration
- **Worker Targeting**: Direct worker queue addressing

## Security Considerations

- **Task Isolation**: Each task uses unique identifiers
- **Queue Separation**: Worker-specific queues prevent cross-contamination
- **Cleanup**: Automatic cleanup of reply queues after completion
- **Error Handling**: Secure error propagation without sensitive data leakage

## Performance Characteristics

- **Scalability**: Horizontal scaling through multiple worker instances
- **Throughput**: Limited by Redis performance and network latency
- **Memory Usage**: Efficient with connection pooling and cleanup
- **Latency**: Low latency for local Redis deployments

## Integration Points

The client integrates with:
- **Worker Services**: Via the Redis queue protocol
- **Monitoring Systems**: Through structured logging
- **Application Code**: Via the builder pattern API
- **Configuration Systems**: Through environment variables and builders
							
								
								
									
rhailib/_archive/dispatcher/examples/timeout_example.rs (new file, 90 lines)
@@ -0,0 +1,90 @@
use log::info;
use rhai_dispatcher::{RhaiDispatcherBuilder, RhaiDispatcherError};
use std::time::{Duration, Instant};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    env_logger::builder()
        .filter_level(log::LevelFilter::Info)
        .init();

    // Build the client using the builder pattern
    let client = RhaiDispatcherBuilder::new()
        .caller_id("timeout-example-runner")
        .context_id("timeout-example-context") // a context id is required before dispatch
        .redis_url("redis://127.0.0.1/")
        .build()?;
    info!("RhaiDispatcher created.");

    let script_content = r#"
        // This script will never be executed by a worker because the recipient does not exist.
        let x = 10;
        let y = x + 32;
        y
    "#;

    // The worker_id points to a worker queue that doesn't have a worker.
    let non_existent_recipient = "non_existent_worker_for_timeout_test";
    let very_short_timeout = Duration::from_secs(2);

    info!(
        "Submitting script to non-existent recipient '{}' with a timeout of {:?}...",
        non_existent_recipient, very_short_timeout
    );

    let start_time = Instant::now();

    // Use the PlayRequestBuilder
    let result = client
        .new_play_request()
        .worker_id(non_existent_recipient)
        .script(script_content)
        .timeout(very_short_timeout)
        .await_response()
        .await;

    match result {
        Ok(details) => {
            log::error!(
                "Timeout Example FAILED: Expected a timeout, but got Ok: {:?}",
                details
            );
            Err("Expected timeout, but task completed successfully.".into())
        }
        Err(e) => {
            let elapsed = start_time.elapsed();
            info!("Timeout Example: Received error as expected: {}", e);
            info!("Elapsed time: {:?}", elapsed);

            match e {
                RhaiDispatcherError::Timeout(task_id) => {
                    info!("Timeout Example PASSED: Correctly received RhaiDispatcherError::Timeout for task_id: {}", task_id);
                    // Ensure the elapsed time is close to the timeout duration,
                    // allowing some buffer for processing.
                    assert!(
                        elapsed >= very_short_timeout
                            && elapsed < very_short_timeout + Duration::from_secs(1),
                        "Elapsed time {:?} should be close to timeout {:?}",
                        elapsed,
                        very_short_timeout
                    );
                    info!(
                        "Elapsed time {:?} is consistent with timeout duration {:?}.",
                        elapsed, very_short_timeout
                    );
                    Ok(())
                }
                other_error => {
                    log::error!(
                        "Timeout Example FAILED: Expected RhaiDispatcherError::Timeout, but got another error: {:?}",
                        other_error
                    );
                    Err(format!(
                        "Expected RhaiDispatcherError::Timeout, got other error: {:?}",
                        other_error
                    )
                    .into())
                }
            }
        }
    }
}
							
								
								
									
rhailib/_archive/dispatcher/src/lib.rs (new file, 638 lines)
@@ -0,0 +1,638 @@
//! # Rhai Client Library
//!
//! A Redis-based client library for submitting Rhai scripts to distributed worker services
//! and awaiting their execution results. This crate implements a request-reply pattern
//! using Redis as the message broker.
//!
//! ## Quick Start
//!
//! ```rust
//! use rhai_dispatcher::{RhaiDispatcherBuilder, RhaiDispatcherError};
//! use std::time::Duration;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Build the client
//!     let client = RhaiDispatcherBuilder::new()
//!         .caller_id("my-app-instance-1")
//!         .redis_url("redis://127.0.0.1/")
//!         .build()?;
//!
//!     // Submit a script and await the result
//!     let result = client
//!         .new_play_request()
//!         .worker_id("worker-1")
//!         .context_id("context-1") // required before dispatch
//!         .script(r#""Hello, World!""#)
//!         .timeout(Duration::from_secs(5))
//!         .await_response()
//!         .await?;
//!
//!     println!("Result: {:?}", result);
//!     Ok(())
//! }
//! ```

use chrono::Utc;
use log::{debug, error, info, warn};
use redis::AsyncCommands;
use serde::{Deserialize, Serialize};
use std::time::Duration;
use uuid::Uuid;

/// Redis namespace prefix for all rhailib-related keys
const NAMESPACE_PREFIX: &str = "rhailib:";

/// Represents the complete details and state of a Rhai task execution.
///
/// This structure contains all information about a task throughout its lifecycle,
/// from submission to completion. It's used for both storing task state in Redis
/// and returning results to clients.
///
/// # Fields
///
/// * `task_id` - Unique identifier for the task (UUID)
/// * `script` - The Rhai script content to execute
/// * `status` - Current execution status: "pending", "processing", "completed", or "error"
/// * `output` - Script execution output (if successful)
/// * `error` - Error message (if execution failed)
/// * `created_at` - Timestamp when the task was created
/// * `updated_at` - Timestamp when the task was last modified
/// * `caller_id` - Identifier of the client that submitted the task
/// * `context_id` - Identifier of the context (circle) the task runs in
/// * `worker_id` - Identifier of the worker queue the task was sent to
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct RhaiTaskDetails {
    #[serde(rename = "taskId")] // Ensure consistent naming with other fields
    pub task_id: String,
    pub script: String,
    pub status: String, // "pending", "processing", "completed", "error"
    // Unknown fields in worker responses are ignored by serde's default behavior.
    pub output: Option<String>,
    pub error: Option<String>,
    #[serde(rename = "createdAt")]
    pub created_at: chrono::DateTime<chrono::Utc>,
    #[serde(rename = "updatedAt")]
    pub updated_at: chrono::DateTime<chrono::Utc>,
    #[serde(rename = "callerId")]
    pub caller_id: String,
    #[serde(rename = "contextId")]
    pub context_id: String,
    #[serde(rename = "workerId")]
    pub worker_id: String,
}

/// Comprehensive error type for all possible failures in the Rhai client.
///
/// This enum covers all error scenarios that can occur during client operations,
/// from Redis connectivity issues to task execution timeouts.
#[derive(Debug)]
pub enum RhaiDispatcherError {
    /// Redis connection or operation error
    RedisError(redis::RedisError),
    /// JSON serialization/deserialization error
    SerializationError(serde_json::Error),
    /// Task execution timeout - contains the task_id that timed out
    Timeout(String),
    /// Task not found after submission - contains the task_id (rare occurrence)
    TaskNotFound(String),
    /// Context ID is missing
    ContextIdMissing,
    /// Caller ID is missing
    CallerIdMissing,
}

impl From<redis::RedisError> for RhaiDispatcherError {
    fn from(err: redis::RedisError) -> Self {
        RhaiDispatcherError::RedisError(err)
    }
}

impl From<serde_json::Error> for RhaiDispatcherError {
    fn from(err: serde_json::Error) -> Self {
        RhaiDispatcherError::SerializationError(err)
    }
}

impl std::fmt::Display for RhaiDispatcherError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            RhaiDispatcherError::RedisError(e) => write!(f, "Redis error: {}", e),
            RhaiDispatcherError::SerializationError(e) => write!(f, "Serialization error: {}", e),
            RhaiDispatcherError::Timeout(task_id) => {
                write!(f, "Timeout waiting for task {} to complete", task_id)
            }
            RhaiDispatcherError::TaskNotFound(task_id) => {
                write!(f, "Task {} not found after submission", task_id)
            }
            RhaiDispatcherError::ContextIdMissing => {
                write!(f, "Context ID is missing")
            }
            RhaiDispatcherError::CallerIdMissing => {
                write!(f, "Caller ID is missing")
            }
        }
    }
}

impl std::error::Error for RhaiDispatcherError {}

/// The main client for interacting with the Rhai task execution system.
///
/// This client manages Redis connections and provides factory methods for creating
/// script execution requests. It maintains a caller ID for task attribution and
/// handles all low-level Redis operations.
///
/// # Example
///
/// ```rust
/// use rhai_dispatcher::RhaiDispatcherBuilder;
///
/// let client = RhaiDispatcherBuilder::new()
///     .caller_id("my-service")
///     .redis_url("redis://localhost/")
///     .build()?;
/// ```
pub struct RhaiDispatcher {
    redis_client: redis::Client,
    caller_id: String,
    worker_id: String,
    context_id: String,
}

/// Builder for constructing `RhaiDispatcher` instances with proper configuration.
///
/// This builder ensures that all required configuration is provided before
/// creating a client instance. It validates the configuration and provides
/// sensible defaults where appropriate.
///
/// # Required Configuration
///
/// - `caller_id`: A unique identifier for this client instance
///
/// # Optional Configuration
///
/// - `redis_url`: Redis connection URL (defaults to "redis://127.0.0.1/")
pub struct RhaiDispatcherBuilder {
    redis_url: Option<String>,
    caller_id: String,
    worker_id: String,
    context_id: String,
}

impl RhaiDispatcherBuilder {
    /// Creates a new `RhaiDispatcherBuilder` with default settings.
    ///
    /// The builder starts with no Redis URL (which defaults to "redis://127.0.0.1/")
    /// and an empty caller ID (which must be set before dispatching).
    pub fn new() -> Self {
        Self {
            redis_url: None,
            caller_id: "".to_string(),
            worker_id: "".to_string(),
            context_id: "".to_string(),
        }
    }

    /// Sets the caller ID for this client instance.
    ///
    /// The caller ID identifies which client submitted a task and is included
    /// in task metadata. Dispatching a script fails if it is not provided.
    ///
    /// # Arguments
    ///
    /// * `caller_id` - A unique identifier for this client instance
    pub fn caller_id(mut self, caller_id: &str) -> Self {
        self.caller_id = caller_id.to_string();
        self
    }

    /// Sets the context (circle) ID for this client instance.
    ///
    /// The context ID identifies which circle's context a task should be executed in.
    /// It is required at the time the client dispatches a script, but can be set on
    /// construction or on script dispatch.
    ///
    /// # Arguments
    ///
    /// * `context_id` - The context (circle) in which scripts will execute
    pub fn context_id(mut self, context_id: &str) -> Self {
        self.context_id = context_id.to_string();
        self
    }

    /// Sets the worker ID for this client instance.
    ///
    /// The worker ID identifies which worker a task should be executed on.
    /// It is required at the time the client dispatches a script, but can be set on
    /// construction or on script dispatch.
    ///
    /// # Arguments
    ///
    /// * `worker_id` - The worker queue scripts will be sent to
    pub fn worker_id(mut self, worker_id: &str) -> Self {
        self.worker_id = worker_id.to_string();
        self
    }

    /// Sets the Redis connection URL.
    ///
    /// If not provided, defaults to "redis://127.0.0.1/".
    ///
    /// # Arguments
    ///
    /// * `url` - Redis connection URL (e.g., "redis://localhost:6379/0")
    pub fn redis_url(mut self, url: &str) -> Self {
        self.redis_url = Some(url.to_string());
        self
    }

    /// Builds the final `RhaiDispatcher` instance.
    ///
    /// This method creates the Redis client from the configured URL and returns
    /// an error if the client cannot be created.
    ///
    /// # Returns
    ///
    /// * `Ok(RhaiDispatcher)` - Successfully configured client
    /// * `Err(RhaiDispatcherError)` - Configuration or connection error
    pub fn build(self) -> Result<RhaiDispatcher, RhaiDispatcherError> {
        let url = self
            .redis_url
            .unwrap_or_else(|| "redis://127.0.0.1/".to_string());
        let client = redis::Client::open(url)?;
        Ok(RhaiDispatcher {
            redis_client: client,
            caller_id: self.caller_id,
            worker_id: self.worker_id,
            context_id: self.context_id,
        })
    }
}

/// Representation of a script execution request.
///
/// This structure contains all the information needed to execute a Rhai script
/// on a worker service, including the script content, target worker, and timeout.
#[derive(Debug, Clone)]
pub struct PlayRequest {
    pub id: String,
    pub worker_id: String,
    pub context_id: String,
    pub script: String,
    pub timeout: Duration,
}

/// Builder for constructing and submitting script execution requests.
///
/// This builder provides a fluent interface for configuring script execution
/// parameters and offers two submission modes: fire-and-forget (`submit()`)
/// and request-reply (`await_response()`).
///
/// # Example
///
/// ```rust
/// use std::time::Duration;
///
/// let result = client
///     .new_play_request()
///     .worker_id("worker-1")
///     .script(r#"print("Hello, World!");"#)
///     .timeout(Duration::from_secs(30))
///     .await_response()
///     .await?;
/// ```
pub struct PlayRequestBuilder<'a> {
    client: &'a RhaiDispatcher,
    request_id: String,
    worker_id: String,
    context_id: String,
    caller_id: String,
    script: String,
    timeout: Duration,
    retries: u32,
}

impl<'a> PlayRequestBuilder<'a> {
    pub fn new(client: &'a RhaiDispatcher) -> Self {
        Self {
            client,
            request_id: "".to_string(),
            worker_id: client.worker_id.clone(),
            context_id: client.context_id.clone(),
            caller_id: client.caller_id.clone(),
            script: "".to_string(),
            timeout: Duration::from_secs(5),
            retries: 0,
        }
    }

    pub fn request_id(mut self, request_id: &str) -> Self {
        self.request_id = request_id.to_string();
        self
    }

    pub fn worker_id(mut self, worker_id: &str) -> Self {
        self.worker_id = worker_id.to_string();
        self
    }

    pub fn context_id(mut self, context_id: &str) -> Self {
        self.context_id = context_id.to_string();
        self
    }

    pub fn script(mut self, script: &str) -> Self {
        self.script = script.to_string();
        self
    }

    /// Loads the script from a file. Panics if the file cannot be read;
    /// read the file yourself and use `script` if you need to handle errors.
    pub fn script_path(mut self, script_path: &str) -> Self {
        self.script = std::fs::read_to_string(script_path).unwrap();
        self
    }

    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = timeout;
        self
    }

    pub fn build(self) -> Result<PlayRequest, RhaiDispatcherError> {
        let request_id = if self.request_id.is_empty() {
            // Generate a UUID for the request_id
            Uuid::new_v4().to_string()
        } else {
            self.request_id.clone()
        };

        if self.context_id.is_empty() {
            return Err(RhaiDispatcherError::ContextIdMissing);
        }

        if self.caller_id.is_empty() {
            return Err(RhaiDispatcherError::CallerIdMissing);
        }

        let play_request = PlayRequest {
            id: request_id,
            worker_id: self.worker_id.clone(),
            context_id: self.context_id.clone(),
            script: self.script.clone(),
            timeout: self.timeout,
        };
        Ok(play_request)
    }

    pub async fn submit(self) -> Result<(), RhaiDispatcherError> {
        // Build the request and submit using self.client
        debug!(
            "Submitting request {} with timeout {:?}",
            self.request_id, self.timeout
        );
        self.client.submit_play_request(&self.build()?).await?;
        Ok(())
    }

    pub async fn await_response(self) -> Result<RhaiTaskDetails, RhaiDispatcherError> {
        // Build the request and submit using self.client
        self.client
            .submit_play_request_and_await_result(&self.build()?)
            .await
    }
}
|  | ||||
| impl RhaiDispatcher { | ||||
|     pub fn new_play_request(&self) -> PlayRequestBuilder { | ||||
|         PlayRequestBuilder::new(self) | ||||
|     } | ||||
|  | ||||
|     // Internal helper to submit script details and push to work queue | ||||
|     async fn submit_play_request_using_connection( | ||||
|         &self, | ||||
|         conn: &mut redis::aio::MultiplexedConnection, | ||||
|         play_request: &PlayRequest, | ||||
|     ) -> Result<(), RhaiDispatcherError> { | ||||
|         let now = Utc::now(); | ||||
|  | ||||
|         let task_key = format!("{}{}", NAMESPACE_PREFIX, play_request.id); | ||||
|  | ||||
|         let worker_queue_key = format!( | ||||
|             "{}{}", | ||||
|             NAMESPACE_PREFIX, | ||||
|             play_request.worker_id.replace(" ", "_").to_lowercase() | ||||
|         ); | ||||
|  | ||||
|         debug!( | ||||
|             "Submitting play request: {} to worker: {} with namespace prefix: {}", | ||||
|             play_request.id, play_request.worker_id, NAMESPACE_PREFIX | ||||
|         ); | ||||
|  | ||||
|         let hset_args: Vec<(String, String)> = vec![ | ||||
|             ("taskId".to_string(), play_request.id.to_string()), // Add taskId | ||||
|             ("script".to_string(), play_request.script.clone()), // script is moved here | ||||
|             ("callerId".to_string(), self.caller_id.clone()),    // script is moved here | ||||
|             ("contextId".to_string(), play_request.context_id.clone()), // script is moved here | ||||
|             ("status".to_string(), "pending".to_string()), | ||||
|             ("createdAt".to_string(), now.to_rfc3339()), | ||||
|             ("updatedAt".to_string(), now.to_rfc3339()), | ||||
|         ]; | ||||
|  | ||||
|         // Ensure hset_args is a slice of tuples (String, String) | ||||
|         // The redis crate's hset_multiple expects &[(K, V)] | ||||
|         // conn.hset_multiple::<_, String, String, ()>(&task_key, &hset_args).await?; | ||||
|         // Simpler: | ||||
|         // Explicitly type K, F, V for hset_multiple if inference is problematic. | ||||
|         // RV (return value of the command itself) is typically () for HSET type commands. | ||||
|         conn.hset_multiple::<_, _, _, ()>(&task_key, &hset_args) | ||||
|             .await?; | ||||
|  | ||||
|         // lpush also infers its types, RV is typically i64 (length of list) or () depending on exact command variant | ||||
|         // For `redis::AsyncCommands::lpush`, it's `RedisResult<R>` where R: FromRedisValue | ||||
|         // Often this is the length of the list. Let's allow inference or specify if needed. | ||||
|         let _: redis::RedisResult<i64> = | ||||
|             conn.lpush(&worker_queue_key, play_request.id.clone()).await; | ||||
|  | ||||
|         Ok(()) | ||||
|     } | ||||
|  | ||||
|     // Internal helper to await response from worker | ||||
|     async fn await_response_from_connection( | ||||
|         &self, | ||||
|         conn: &mut redis::aio::MultiplexedConnection, | ||||
|         task_key: &String, | ||||
|         reply_queue_key: &String, | ||||
|         timeout: Duration, | ||||
|     ) -> Result<RhaiTaskDetails, RhaiDispatcherError> { | ||||
|         // BLPOP on the reply queue | ||||
|         // The timeout for BLPOP is in seconds (integer) | ||||
|         let blpop_timeout_secs = timeout.as_secs().max(1); // Ensure at least 1 second for BLPOP timeout | ||||
|  | ||||
|         match conn | ||||
|             .blpop::<&String, Option<(String, String)>>(reply_queue_key, blpop_timeout_secs as f64) | ||||
|             .await | ||||
|         { | ||||
|             Ok(Some((_queue, result_message_str))) => { | ||||
|                 // The worker pushes a JSON-serialized RhaiTaskDetails onto the | ||||
|                 // reply queue; deserialize it directly. | ||||
|                 match serde_json::from_str::<RhaiTaskDetails>(&result_message_str) { | ||||
|                     Ok(details) => { | ||||
|                         info!( | ||||
|                             "Task {} finished with status: {}", | ||||
|                             details.task_id, details.status | ||||
|                         ); | ||||
|                         // Clean up the one-shot reply queue. | ||||
|                         let _: redis::RedisResult<i32> = conn.del(&reply_queue_key).await; | ||||
|                         Ok(details) | ||||
|                     } | ||||
|                     Err(e) => { | ||||
|                         error!( | ||||
|                             "Failed to deserialize result message from reply queue: {}", | ||||
|                             e | ||||
|                         ); | ||||
|                         // Clean up the reply queue before returning the error. | ||||
|                         let _: redis::RedisResult<i32> = conn.del(&reply_queue_key).await; | ||||
|                         Err(RhaiDispatcherError::SerializationError(e)) | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|             Ok(None) => { | ||||
|                 // BLPOP timed out | ||||
|                 warn!( | ||||
|                     "Timeout waiting for result on reply queue {} for task {}", | ||||
|                     reply_queue_key, task_key | ||||
|                 ); | ||||
|                 // Clean up the reply queue before reporting the timeout. | ||||
|                 let _: redis::RedisResult<i32> = conn.del(&reply_queue_key).await; | ||||
|                 Err(RhaiDispatcherError::Timeout(task_key.clone())) | ||||
|             } | ||||
|             Err(e) => { | ||||
|                 // Redis error | ||||
|                 error!( | ||||
|                     "Redis error on BLPOP for reply queue {}: {}", | ||||
|                     reply_queue_key, e | ||||
|                 ); | ||||
|                 // Clean up the reply queue before propagating the Redis error. | ||||
|                 let _: redis::RedisResult<i32> = conn.del(&reply_queue_key).await; | ||||
|                 Err(RhaiDispatcherError::RedisError(e)) | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /// Submits the play request without waiting for a result. | ||||
|     pub async fn submit_play_request( | ||||
|         &self, | ||||
|         play_request: &PlayRequest, | ||||
|     ) -> Result<(), RhaiDispatcherError> { | ||||
|         let mut conn = self.redis_client.get_multiplexed_async_connection().await?; | ||||
|  | ||||
|         self.submit_play_request_using_connection(&mut conn, play_request) | ||||
|             .await?; | ||||
|         Ok(()) | ||||
|     } | ||||
|  | ||||
|     /// Submits the play request and awaits the result on a dedicated reply queue. | ||||
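|     /// | ||||
|     /// A sketch of intended usage (illustrative names; assumes a built dispatcher | ||||
|     /// and a constructed `PlayRequest` in `req`): | ||||
|     /// ```ignore | ||||
|     /// let details = dispatcher.submit_play_request_and_await_result(&req).await?; | ||||
|     /// println!("Task {} finished: {}", details.task_id, details.status); | ||||
|     /// ``` | ||||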
|     pub async fn submit_play_request_and_await_result( | ||||
|         &self, | ||||
|         play_request: &PlayRequest, | ||||
|     ) -> Result<RhaiTaskDetails, RhaiDispatcherError> { | ||||
|         let mut conn = self.redis_client.get_multiplexed_async_connection().await?; | ||||
|  | ||||
|         let reply_queue_key = format!("{}:reply:{}", NAMESPACE_PREFIX, play_request.id); // One reply queue per task, keyed by its UUID | ||||
|  | ||||
|         self.submit_play_request_using_connection(&mut conn, play_request) | ||||
|             .await?; | ||||
|  | ||||
|         info!( | ||||
|             "Task {} submitted. Waiting for result on queue {} with timeout {:?}...", | ||||
|             play_request.id, // This is the UUID | ||||
|             reply_queue_key, | ||||
|             play_request.timeout | ||||
|         ); | ||||
|  | ||||
|         self.await_response_from_connection( | ||||
|             &mut conn, | ||||
|             &play_request.id, | ||||
|             &reply_queue_key, | ||||
|             play_request.timeout, | ||||
|         ) | ||||
|         .await | ||||
|     } | ||||
|  | ||||
|     /// Fetches a task's details from its Redis hash, if the task exists. | ||||
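|     /// | ||||
|     /// Sketch (illustrative): | ||||
|     /// ```ignore | ||||
|     /// if let Some(details) = dispatcher.get_task_status("some-task-uuid").await? { | ||||
|     ///     println!("Status: {}", details.status); | ||||
|     /// } | ||||
|     /// ``` | ||||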
|     pub async fn get_task_status( | ||||
|         &self, | ||||
|         task_id: &str, | ||||
|     ) -> Result<Option<RhaiTaskDetails>, RhaiDispatcherError> { | ||||
|         let mut conn = self.redis_client.get_multiplexed_async_connection().await?; | ||||
|         let task_key = format!("{}{}", NAMESPACE_PREFIX, task_id); | ||||
|  | ||||
|         let result_map: Option<std::collections::HashMap<String, String>> = | ||||
|             conn.hgetall(&task_key).await?; | ||||
|  | ||||
|         match result_map { | ||||
|             Some(map) => { | ||||
|                 // Reconstruct RhaiTaskDetails from HashMap | ||||
|                 let details = RhaiTaskDetails { | ||||
|                     task_id: task_id.to_string(), // Use the task_id parameter passed to the function | ||||
|                     script: map.get("script").cloned().unwrap_or_else(|| { | ||||
|                         warn!("Task {}: 'script' field missing from Redis hash, defaulting to empty.", task_id); | ||||
|                         String::new() | ||||
|                     }), | ||||
|                     status: map.get("status").cloned().unwrap_or_else(|| { | ||||
|                         warn!("Task {}: 'status' field missing from Redis hash, defaulting to empty.", task_id); | ||||
|                         String::new() | ||||
|                     }), | ||||
|                     // client_rpc_id is no longer a field in RhaiTaskDetails | ||||
|                     output: map.get("output").cloned(), | ||||
|                     error: map.get("error").cloned(), | ||||
|                     created_at: map.get("createdAt") | ||||
|                                     .and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok()) | ||||
|                                     .map(|dt| dt.with_timezone(&Utc)) | ||||
|                                     .unwrap_or_else(|| { | ||||
|                                         warn!("Task {}: 'createdAt' field missing or invalid in Redis hash, defaulting to Utc::now().", task_id); | ||||
|                                         Utc::now() | ||||
|                                     }), | ||||
|                     updated_at: map.get("updatedAt") | ||||
|                                     .and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok()) | ||||
|                                     .map(|dt| dt.with_timezone(&Utc)) | ||||
|                                     .unwrap_or_else(|| { | ||||
|                                         warn!("Task {}: 'updatedAt' field missing or invalid in Redis hash, defaulting to Utc::now().", task_id); | ||||
|                                         Utc::now() | ||||
|                                     }), | ||||
|                     caller_id: map.get("callerId").cloned().unwrap_or_else(|| { | ||||
|                         warn!("Task {}: 'callerId' field missing from Redis hash, defaulting to empty.", task_id); | ||||
|                         String::new() | ||||
|                     }), | ||||
|                     worker_id: map.get("workerId").cloned().unwrap_or_else(|| { | ||||
|                         warn!("Task {}: 'workerId' field missing from Redis hash, defaulting to empty.", task_id); | ||||
|                         String::new() | ||||
|                     }), | ||||
|                     context_id: map.get("contextId").cloned().unwrap_or_else(|| { | ||||
|                         warn!("Task {}: 'contextId' field missing from Redis hash, defaulting to empty.", task_id); | ||||
|                         String::new() | ||||
|                     }), | ||||
|                 }; | ||||
|                 // Sanity-check that the 'taskId' stored in the hash matches the requested | ||||
|                 // task_id; the struct above is built from the requested id either way. | ||||
|                 if let Some(redis_task_id) = map.get("taskId") { | ||||
|                     if redis_task_id != task_id { | ||||
|                         warn!("Task {}: Mismatch between requested task_id and taskId found in Redis hash ('{}'). Proceeding with requested task_id.", task_id, redis_task_id); | ||||
|                     } | ||||
|                 } else { | ||||
|                     warn!("Task {}: 'taskId' field missing from Redis hash.", task_id); | ||||
|                 } | ||||
|                 Ok(Some(details)) | ||||
|             } | ||||
|             None => Ok(None), | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     // use super::*; | ||||
|     // Basic tests can be added later, especially once examples are in place. | ||||
|     // For now, ensuring it compiles is the priority. | ||||
|     #[test] | ||||
|     fn it_compiles() { | ||||
|         assert_eq!(2 + 2, 4); | ||||
|     } | ||||
| } | ||||
							
								
								
									
38	rhailib/_archive/engine/Cargo.toml	Normal file
| @@ -0,0 +1,38 @@ | ||||
| [package] | ||||
| name = "rhailib_engine" | ||||
| version = "0.1.0" | ||||
| edition = "2021" | ||||
| description = "Central Rhai engine for heromodels" | ||||
|  | ||||
| [dependencies] | ||||
| rhai = { version = "1.21.0", features = ["std", "sync", "decimal", "internals"] } | ||||
| heromodels = { path = "../../../db/heromodels", features = ["rhai"] } | ||||
| heromodels_core = { path = "../../../db/heromodels_core" } | ||||
| chrono = "0.4" | ||||
| heromodels-derive = { path = "../../../db/heromodels-derive" } | ||||
| rhailib_dsl = { path = "../dsl" } | ||||
|  | ||||
| [features] | ||||
| default = ["calendar", "finance"] | ||||
| calendar = [] | ||||
| finance = [] | ||||
| # Flow module is now updated to use our approach to Rhai engine registration | ||||
| flow = [] | ||||
| legal = [] | ||||
| projects = [] | ||||
| biz = [] | ||||
|  | ||||
| [[example]] | ||||
| name = "calendar_example" | ||||
| path = "examples/calendar/example.rs" | ||||
| required-features = ["calendar"] | ||||
|  | ||||
| [[example]] | ||||
| name = "flow_example" | ||||
| path = "examples/flow/example.rs" | ||||
| required-features = ["flow"] | ||||
|  | ||||
| [[example]] | ||||
| name = "finance" | ||||
| path = "examples/finance/example.rs" | ||||
| required-features = ["finance"] | ||||
							
								
								
									
135	rhailib/_archive/engine/README.md	Normal file
| @@ -0,0 +1,135 @@ | ||||
| # HeroModels Rhai Engine (`engine`) | ||||
|  | ||||
| The `engine` crate provides a central Rhai scripting engine for the HeroModels project. It offers a unified way to interact with various HeroModels modules (like Calendar, Flow, Legal, etc.) through Rhai scripts, leveraging a shared database connection. | ||||
|  | ||||
| ## Overview | ||||
|  | ||||
| This crate facilitates: | ||||
|  | ||||
| 1.  **Centralized Engine Creation**: A function `create_heromodels_engine` to instantiate a Rhai engine pre-configured with common settings and all enabled HeroModels modules. | ||||
| 2.  **Modular Registration**: HeroModels modules (Calendar, Flow, etc.) can be registered with a Rhai engine based on feature flags. | ||||
| 3.  **Script Evaluation Utilities**: Helper functions for compiling Rhai scripts into Abstract Syntax Trees (ASTs) and for evaluating scripts or ASTs. | ||||
| 4.  **Mock Database**: Includes a `mock_db` module for testing and running examples without needing a live database. | ||||
|  | ||||
| ## Core Components & Usage | ||||
|  | ||||
| ### Library (`src/lib.rs`) | ||||
|  | ||||
| -   **`create_heromodels_engine(db: Arc<OurDB>) -> Engine`**: | ||||
|     Creates and returns a new `rhai::Engine` instance. This engine is configured with default settings (e.g., max expression depths, string/array/map sizes) and then all available HeroModels modules (controlled by feature flags) are registered with it, using the provided `db` (an `Arc<OurDB>`) instance. | ||||
|  | ||||
| -   **`register_all_modules(engine: &mut Engine, db: Arc<OurDB>)`**: | ||||
|     Registers all HeroModels modules for which features are enabled (e.g., `calendar`, `flow`, `legal`, `projects`, `biz`) with the given Rhai `engine`. Each module is passed the shared `db` instance. | ||||
|  | ||||
| -   **`eval_script(engine: &Engine, script: &str) -> Result<rhai::Dynamic, Box<rhai::EvalAltResult>>`**: | ||||
|     A utility function to directly evaluate a Rhai script string using the provided `engine`. | ||||
|  | ||||
| -   **`compile_script(engine: &Engine, script: &str) -> Result<AST, Box<rhai::EvalAltResult>>`**: | ||||
|     Compiles a Rhai script string into an `AST` (Abstract Syntax Tree) for potentially faster repeated execution. | ||||
|  | ||||
| -   **`run_ast(engine: &Engine, ast: &AST, scope: &mut Scope) -> Result<rhai::Dynamic, Box<rhai::EvalAltResult>>`**: | ||||
|     Runs a pre-compiled `AST` with a given `scope` using the provided `engine`. | ||||
|  | ||||
| -   **`mock_db` module**: | ||||
|     Provides `create_mock_db()` which returns an `Arc<OurDB>` instance suitable for testing and examples. This allows scripts that interact with database functionalities to run without external database dependencies. | ||||
|  | ||||
| ### Basic Usage | ||||
|  | ||||
| ```rust | ||||
| use std::sync::Arc; | ||||
| use engine::{create_heromodels_engine, eval_script}; | ||||
| use engine::mock_db::create_mock_db; // For example usage | ||||
| use heromodels::db::hero::OurDB; // Actual DB type | ||||
|  | ||||
| // Create a mock database (or connect to a real one) | ||||
| let db: Arc<OurDB> = create_mock_db(); | ||||
|  | ||||
| // Create the Rhai engine with all enabled modules registered | ||||
| let engine = create_heromodels_engine(db); | ||||
|  | ||||
| // Run a Rhai script | ||||
| let script = r#" | ||||
|     // Example: Assuming 'calendar' feature is enabled | ||||
|     let cal = new_calendar("My Test Calendar"); | ||||
|     cal.set_description("This is a test."); | ||||
|     print(`Created calendar: ${cal.get_name()}`); | ||||
|     cal.get_id() // Return the ID | ||||
| "#; | ||||
|  | ||||
| match eval_script(&engine, script) { | ||||
|     Ok(val) => println!("Script returned: {:?}", val), | ||||
|     Err(err) => eprintln!("Script error: {}", err), | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ### Using Specific Modules Manually | ||||
|  | ||||
| If you need more fine-grained control or only want specific modules (and prefer not to rely solely on feature flags at compile time for `create_heromodels_engine`), you can initialize an engine and register modules manually: | ||||
|  | ||||
| ```rust | ||||
| use std::sync::Arc; | ||||
| use rhai::Engine; | ||||
| use engine::mock_db::create_mock_db; // For example usage | ||||
| use heromodels::db::hero::OurDB; | ||||
| // Import the specific module registration function | ||||
| use heromodels::models::calendar::register_calendar_rhai_module; | ||||
|  | ||||
|  | ||||
| // Create a mock database | ||||
| let db: Arc<OurDB> = create_mock_db(); | ||||
|  | ||||
| // Create a new Rhai engine | ||||
| let mut engine = Engine::new(); | ||||
|  | ||||
| // Register only the calendar module | ||||
| register_calendar_rhai_module(&mut engine, db.clone()); | ||||
|  | ||||
| // Now you can use calendar-related functions in your scripts | ||||
| let result = engine.eval::<String>(r#" let c = new_calendar("Solo Cal"); c.get_name() "#); | ||||
| match result { | ||||
|     Ok(name) => println!("Calendar name: {}", name), | ||||
|     Err(err) => eprintln!("Error: {}", err), | ||||
| } | ||||
| ``` | ||||
|  | ||||
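| ### Compiling Scripts for Reuse | ||||
|  | ||||
| For scripts that run repeatedly, compile once and execute the cached AST. A minimal sketch using the utilities documented above (error handling via `expect` for brevity): | ||||
|  | ||||
| ```rust | ||||
| use std::sync::Arc; | ||||
| use rhai::Scope; | ||||
| use engine::mock_db::create_mock_db; | ||||
| use engine::{compile_script, create_heromodels_engine, run_ast}; | ||||
| use heromodels::db::hero::OurDB; | ||||
|  | ||||
| let db: Arc<OurDB> = create_mock_db(); | ||||
| let engine = create_heromodels_engine(db); | ||||
|  | ||||
| // Compile once... | ||||
| let ast = compile_script(&engine, r#" 40 + 2 "#).expect("compile failed"); | ||||
|  | ||||
| // ...then run as many times as needed, each with a fresh scope. | ||||
| let mut scope = Scope::new(); | ||||
| let result = run_ast(&engine, &ast, &mut scope).expect("run failed"); | ||||
| println!("Result: {:?}", result); | ||||
| ``` | ||||
|  | ||||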
| ## Examples | ||||
|  | ||||
| This crate includes several examples demonstrating how to use different HeroModels modules with Rhai. Each example typically requires its corresponding feature to be enabled. | ||||
|  | ||||
| -   `calendar_example`: Working with calendars, events, and attendees (requires `calendar` feature). | ||||
| -   `flow_example`: Working with flows, steps, and signature requirements (requires `flow` feature). | ||||
| -   `finance`: Working with financial models (requires `finance` feature; note this example target is named `finance` in `Cargo.toml`, not `finance_example`). | ||||
| -   *(Additional examples for `legal`, `projects`, `biz` would follow the same pattern if present).* | ||||
|  | ||||
| To run an example (e.g., `calendar_example`): | ||||
|  | ||||
| ```bash | ||||
| cargo run --example calendar_example --features calendar | ||||
| ``` | ||||
| *(Note: each example declares its `required-features` in `Cargo.toml`. Because `calendar` and `finance` are default features, their examples run without extra flags; non-default modules such as `flow` still need an explicit `--features flow`.)* | ||||
|  | ||||
| ## Features | ||||
|  | ||||
| The crate uses feature flags to control which HeroModels modules are compiled and registered: | ||||
|  | ||||
| -   `calendar`: Enables the Calendar module. | ||||
| -   `finance`: Enables the Finance module. | ||||
| -   `flow`: Enables the Flow module. | ||||
| -   `legal`: Enables the Legal module. | ||||
| -   `projects`: Enables the Projects module. | ||||
| -   `biz`: Enables the Business module. | ||||
|  | ||||
| The `default` features are `["calendar", "finance"]`. You can enable other modules by specifying them during the build or in your project's `Cargo.toml` if this `engine` crate is a dependency. | ||||
|  | ||||
| ## Dependencies | ||||
|  | ||||
| Key dependencies include: | ||||
| -   `rhai`: The Rhai scripting engine. | ||||
| -   `heromodels`: Provides the core data models and database interaction logic, including the Rhai registration functions for each module. | ||||
| -   `heromodels_core`: Core utilities for HeroModels. | ||||
| -   `chrono`: For date/time utilities. | ||||
| -   `heromodels-derive`: Procedural macros used by HeroModels. | ||||
|  | ||||
| ## License | ||||
|  | ||||
| This crate is part of the HeroModels project and shares its license. | ||||
							
								
								
									
16	rhailib/_archive/engine/build.rs	Normal file
| @@ -0,0 +1,16 @@ | ||||
| fn main() { | ||||
|     // Re-run this build script whenever any of the model `rhai.rs` files change. | ||||
|     for module in ["calendar", "flow", "legal", "projects", "biz"] { | ||||
|         println!( | ||||
|             "cargo:rerun-if-changed=../heromodels/src/models/{}/rhai.rs", | ||||
|             module | ||||
|         ); | ||||
|     } | ||||
| } | ||||
							
								
								
									
331	rhailib/_archive/engine/docs/ARCHITECTURE.md	Normal file
| @@ -0,0 +1,331 @@ | ||||
| # Architecture of the `rhailib_engine` Crate | ||||
|  | ||||
| The `rhailib_engine` crate serves as the central Rhai scripting engine for the heromodels ecosystem. It provides a unified interface for creating, configuring, and executing Rhai scripts with access to all business domain modules through a feature-based architecture. | ||||
|  | ||||
| ## Core Architecture | ||||
|  | ||||
| The engine acts as an orchestration layer that brings together the DSL modules and provides execution utilities: | ||||
|  | ||||
| ```mermaid | ||||
| graph TD | ||||
|     A[rhailib_engine] --> B[Engine Creation] | ||||
|     A --> C[Script Execution] | ||||
|     A --> D[Mock Database] | ||||
|     A --> E[Feature Management] | ||||
|      | ||||
|     B --> B1[create_heromodels_engine] | ||||
|     B --> B2[Engine Configuration] | ||||
|     B --> B3[DSL Registration] | ||||
|      | ||||
|     C --> C1[eval_script] | ||||
|     C --> C2[eval_file] | ||||
|     C --> C3[compile_script] | ||||
|     C --> C4[run_ast] | ||||
|      | ||||
|     D --> D1[create_mock_db] | ||||
|     D --> D2[seed_mock_db] | ||||
|     D --> D3[Domain Data Seeding] | ||||
|      | ||||
|     E --> E1[calendar] | ||||
|     E --> E2[finance] | ||||
|     E --> E3[flow] | ||||
|     E --> E4[legal] | ||||
|     E --> E5[projects] | ||||
|     E --> E6[biz] | ||||
|      | ||||
|     B3 --> F[rhailib_dsl] | ||||
|     F --> G[All Domain Modules] | ||||
| ``` | ||||
|  | ||||
| ## Core Components | ||||
|  | ||||
| ### 1. Engine Factory (`create_heromodels_engine`) | ||||
|  | ||||
| The primary entry point for creating a fully configured Rhai engine: | ||||
|  | ||||
| ```rust | ||||
| pub fn create_heromodels_engine() -> Engine | ||||
| ``` | ||||
|  | ||||
| **Responsibilities:** | ||||
| - Creates a new Rhai engine instance | ||||
| - Configures engine limits and settings | ||||
| - Registers all available DSL modules | ||||
| - Returns a ready-to-use engine | ||||
|  | ||||
| **Configuration Settings:** | ||||
| - **Expression Depth**: 128 levels for both expressions and functions | ||||
| - **String Size Limit**: 10 MB maximum string size | ||||
| - **Array Size Limit**: 10,000 elements maximum | ||||
| - **Map Size Limit**: 10,000 key-value pairs maximum | ||||
|  | ||||
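| A sketch of how these limits map onto `rhai`'s engine setters (the values mirror the list above; the real factory applies them during engine creation): | ||||
|  | ||||
| ```rust | ||||
| use rhai::Engine; | ||||
|  | ||||
| let mut engine = Engine::new(); | ||||
| engine.set_max_expr_depths(128, 128);         // expression / function nesting depth | ||||
| engine.set_max_string_size(10 * 1024 * 1024); // 10 MB strings | ||||
| engine.set_max_array_size(10_000);            // array element cap | ||||
| engine.set_max_map_size(10_000);              // map entry cap | ||||
| ``` | ||||
|  | ||||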
| ### 2. Script Execution Utilities | ||||
|  | ||||
| #### Direct Script Evaluation | ||||
| ```rust | ||||
| pub fn eval_script(engine: &Engine, script: &str) -> Result<Dynamic, Box<EvalAltResult>> | ||||
| ``` | ||||
| Executes Rhai script strings directly with immediate results. | ||||
|  | ||||
| #### File-Based Script Execution | ||||
| ```rust | ||||
| pub fn eval_file(engine: &Engine, file_path: &Path) -> Result<Dynamic, Box<EvalAltResult>> | ||||
| ``` | ||||
| Loads and executes Rhai scripts from filesystem with proper error handling. | ||||
|  | ||||
| #### Compiled Script Execution | ||||
| ```rust | ||||
| pub fn compile_script(engine: &Engine, script: &str) -> Result<AST, Box<EvalAltResult>> | ||||
| pub fn run_ast(engine: &Engine, ast: &AST, scope: &mut Scope) -> Result<Dynamic, Box<EvalAltResult>> | ||||
| ``` | ||||
| Provides compilation and execution of scripts for performance optimization. | ||||
|  | ||||
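| A minimal compile-once / run-many sketch (assuming the no-argument factory shown above; error handling elided): | ||||
|  | ||||
| ```rust | ||||
| use rhai::Scope; | ||||
|  | ||||
| let engine = create_heromodels_engine(); | ||||
| let ast = compile_script(&engine, "21 * 2")?;     // compile once | ||||
|  | ||||
| let mut scope = Scope::new(); | ||||
| let result = run_ast(&engine, &ast, &mut scope)?; // run as often as needed | ||||
| ``` | ||||
|  | ||||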
| ### 3. Mock Database System | ||||
|  | ||||
| #### Database Creation | ||||
| ```rust | ||||
| pub fn create_mock_db() -> Arc<OurDB> | ||||
| ``` | ||||
| Creates an in-memory database instance for testing and examples. | ||||
|  | ||||
| #### Data Seeding | ||||
| ```rust | ||||
| pub fn seed_mock_db(db: Arc<OurDB>) | ||||
| ``` | ||||
| Populates the mock database with representative data across all domains. | ||||
|  | ||||
| ## Feature-Based Architecture | ||||
|  | ||||
| The engine uses Cargo features to control which domain modules are included: | ||||
|  | ||||
| ### Available Features | ||||
|  | ||||
| - **`calendar`** (default): Calendar and event management | ||||
| - **`finance`** (default): Financial accounts, assets, and marketplace | ||||
| - **`flow`**: Workflow and approval processes | ||||
| - **`legal`**: Contract and legal document management | ||||
| - **`projects`**: Project and task management | ||||
| - **`biz`**: Business operations and entities | ||||
|  | ||||
| ### Feature Integration Pattern | ||||
|  | ||||
| ```rust | ||||
| #[cfg(feature = "calendar")] | ||||
| use heromodels::models::calendar::*; | ||||
|  | ||||
| #[cfg(feature = "finance")] | ||||
| use heromodels::models::finance::*; | ||||
| ``` | ||||
|  | ||||
| This allows for: | ||||
| - **Selective Compilation**: Only include needed functionality | ||||
| - **Reduced Binary Size**: Exclude unused domain modules | ||||
| - **Modular Deployment**: Different configurations for different use cases | ||||
|  | ||||
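| For example, a build that only needs the finance domain can disable defaults and opt in explicitly: | ||||
|  | ||||
| ```bash | ||||
| cargo build --no-default-features --features finance | ||||
| ``` | ||||
|  | ||||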
| ## Mock Database Architecture | ||||
|  | ||||
| ### Database Structure | ||||
|  | ||||
| The mock database provides a complete testing environment: | ||||
|  | ||||
| ```mermaid | ||||
| graph LR | ||||
|     A[Mock Database] --> B[Calendar Data] | ||||
|     A --> C[Finance Data] | ||||
|     A --> D[Flow Data] | ||||
|     A --> E[Legal Data] | ||||
|     A --> F[Projects Data] | ||||
|      | ||||
|     B --> B1[Calendars] | ||||
|     B --> B2[Events] | ||||
|     B --> B3[Attendees] | ||||
|      | ||||
|     C --> C1[Accounts] | ||||
|     C --> C2[Assets - ERC20/ERC721] | ||||
|     C --> C3[Marketplace Listings] | ||||
|      | ||||
|     D --> D1[Flows] | ||||
|     D --> D2[Flow Steps] | ||||
|     D --> D3[Signature Requirements] | ||||
|      | ||||
|     E --> E1[Contracts] | ||||
|     E --> E2[Contract Revisions] | ||||
|     E --> E3[Contract Signers] | ||||
|      | ||||
|     F --> F1[Projects] | ||||
|     F --> F2[Project Members] | ||||
|     F --> F3[Project Tags] | ||||
| ``` | ||||
|  | ||||
| ### Seeding Strategy | ||||
|  | ||||
| Each domain has its own seeding function that creates realistic test data: | ||||
|  | ||||
| #### Calendar Seeding | ||||
| - Creates work calendars with descriptions | ||||
| - Adds team meetings with attendees | ||||
| - Sets up recurring events | ||||
|  | ||||
| #### Finance Seeding | ||||
| - Creates demo trading accounts | ||||
| - Generates ERC20 tokens and ERC721 NFTs | ||||
| - Sets up marketplace listings with metadata | ||||
|  | ||||
| #### Flow Seeding (Feature-Gated) | ||||
| - Creates document approval workflows | ||||
| - Defines multi-step approval processes | ||||
| - Sets up signature requirements | ||||
|  | ||||
| #### Legal Seeding (Feature-Gated) | ||||
| - Creates service agreements | ||||
| - Adds contract revisions and versions | ||||
| - Defines contract signers and roles | ||||
|  | ||||
| #### Projects Seeding (Feature-Gated) | ||||
| - Creates project instances with status tracking | ||||
| - Assigns team members and priorities | ||||
| - Adds project tags and categorization | ||||
|  | ||||
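| As an illustration, a calendar seeding helper might look like the following (a sketch modeled on the crate's example `mock.rs`; names and error handling are simplified): | ||||
|  | ||||
| ```rust | ||||
| use std::sync::Arc; | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::{Collection, Db}; | ||||
| use heromodels::models::calendar::Calendar; | ||||
|  | ||||
| fn seed_calendar(db: Arc<OurDB>) { | ||||
|     let calendar = Calendar::new(None, "Work Calendar".to_string()) | ||||
|         .description("Seeded for tests".to_string()); | ||||
|     db.collection::<Calendar>() | ||||
|         .expect("Failed to get Calendar collection") | ||||
|         .set(&calendar) | ||||
|         .expect("Failed to store calendar"); | ||||
| } | ||||
| ``` | ||||
|  | ||||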
| ## Error Handling Architecture | ||||
|  | ||||
| ### Comprehensive Error Propagation | ||||
|  | ||||
| ```rust | ||||
| Result<Dynamic, Box<EvalAltResult>> | ||||
| ``` | ||||
|  | ||||
| All functions return proper Rhai error types that include: | ||||
| - **Script Compilation Errors**: Syntax and parsing issues | ||||
| - **Runtime Errors**: Execution failures and exceptions | ||||
| - **File System Errors**: File reading and path resolution issues | ||||
| - **Database Errors**: Mock database operation failures | ||||
|  | ||||
| ### Error Context Enhancement | ||||
|  | ||||
| File operations include enhanced error context: | ||||
| ```rust | ||||
| Err(Box::new(EvalAltResult::ErrorSystem( | ||||
|     format!("Failed to read script file: {}", file_path.display()), | ||||
|     Box::new(io_err), | ||||
| ))) | ||||
| ``` | ||||
|  | ||||
| ## Performance Considerations | ||||
|  | ||||
| ### Engine Configuration | ||||
|  | ||||
| Optimized settings for production use: | ||||
| - **Memory Limits**: Prevent runaway script execution | ||||
| - **Depth Limits**: Avoid stack overflow from deep recursion | ||||
| - **Size Limits**: Control memory usage for large data structures | ||||
|  | ||||
| ### Compilation Strategy | ||||
|  | ||||
| - **AST Caching**: Compile once, execute multiple times | ||||
| - **Scope Management**: Efficient variable scope handling | ||||
| - **Module Registration**: One-time registration at engine creation | ||||
|  | ||||
| ### Mock Database Performance | ||||
|  | ||||
| - **In-Memory Storage**: Fast access for testing scenarios | ||||
| - **Temporary Directories**: Automatic cleanup after use | ||||
| - **Lazy Loading**: Data seeded only when needed | ||||
|  | ||||
| ## Integration Patterns | ||||
|  | ||||
| ### Script Development Workflow | ||||
|  | ||||
| ```rust | ||||
| // 1. Create engine with all modules | ||||
| let engine = create_heromodels_engine(); | ||||
|  | ||||
| // 2. Execute business logic scripts | ||||
| let result = eval_script(&engine, r#" | ||||
|     let company = new_company() | ||||
|         .name("Tech Startup") | ||||
|         .business_type("startup"); | ||||
|     save_company(company) | ||||
| "#)?; | ||||
|  | ||||
| // 3. Handle results and errors | ||||
| match result { | ||||
|     Ok(value) => println!("Success: {:?}", value), | ||||
|     Err(error) => eprintln!("Error: {}", error), | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ### Testing Integration | ||||
|  | ||||
| ```rust | ||||
| // 1. Create mock database | ||||
| let db = create_mock_db(); | ||||
| seed_mock_db(db.clone()); | ||||
|  | ||||
| // 2. Create engine | ||||
| let engine = create_heromodels_engine(); | ||||
|  | ||||
| // 3. Test scripts against seeded data | ||||
| let script = r#" | ||||
|     let calendars = list_calendars(); | ||||
|     calendars.len() | ||||
| "#; | ||||
| let count = eval_script(&engine, script)?; | ||||
| ``` | ||||
|  | ||||
| ### File-Based Script Execution | ||||
|  | ||||
| ```rust | ||||
| // Execute scripts from files | ||||
| let result = eval_file(&engine, Path::new("scripts/business_logic.rhai"))?; | ||||
| ``` | ||||
|  | ||||
| ## Deployment Configurations | ||||
|  | ||||
| ### Minimal Configuration | ||||
| ```toml | ||||
| [dependencies] | ||||
| rhailib_engine = { version = "0.1.0", default-features = false, features = ["calendar"] } | ||||
| ``` | ||||
|  | ||||
| ### Full Configuration | ||||
| ```toml | ||||
| [dependencies] | ||||
| rhailib_engine = { version = "0.1.0", features = ["calendar", "finance", "flow", "legal", "projects", "biz"] } | ||||
| ``` | ||||
|  | ||||
| ### Custom Configuration | ||||
| ```toml | ||||
| [dependencies] | ||||
| rhailib_engine = { version = "0.1.0", default-features = false, features = ["finance", "biz"] } | ||||
| ``` | ||||
|  | ||||
| ## Security Considerations | ||||
|  | ||||
| ### Script Execution Limits | ||||
| - **Resource Limits**: Prevent resource exhaustion attacks | ||||
| - **Execution Time**: Configurable timeouts for long-running scripts | ||||
| - **Memory Bounds**: Controlled memory allocation | ||||
|  | ||||
| ### Database Access | ||||
| - **Mock Environment**: Safe testing without production data exposure | ||||
| - **Temporary Storage**: Automatic cleanup prevents data persistence | ||||
| - **Isolated Execution**: Each test run gets fresh database state | ||||
|  | ||||
| ## Extensibility | ||||
|  | ||||
| ### Adding New Domains | ||||
| 1. Create new feature flag in `Cargo.toml` | ||||
| 2. Add conditional imports for new models | ||||
| 3. Implement seeding function for test data | ||||
| 4. Register with DSL module system | ||||
|  | ||||
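| For step 1, the feature flag addition might look like this (the `inventory` domain is hypothetical): | ||||
|  | ||||
| ```toml | ||||
| [features] | ||||
| default = ["calendar", "finance"] | ||||
| inventory = []  # new domain, off by default | ||||
| ``` | ||||
|  | ||||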
| ### Custom Engine Configuration | ||||
| ```rust | ||||
| let mut engine = Engine::new(); | ||||
| // Custom configuration | ||||
| engine.set_max_expr_depths(256, 256); | ||||
| // Register specific modules | ||||
| rhailib_dsl::register_dsl_modules(&mut engine); | ||||
| ``` | ||||
|  | ||||
| This architecture provides a robust, feature-rich foundation for Rhai script execution while maintaining flexibility, performance, and security. | ||||
							
								
								
									
101	rhailib/_archive/engine/examples/calendar/calendar_script.rhai	Normal file
| @@ -0,0 +1,101 @@ | ||||
| // calendar_script.rhai | ||||
| // Example Rhai script for working with Calendar models | ||||
|  | ||||
| // Constants for AttendanceStatus | ||||
| const NO_RESPONSE = "NoResponse"; | ||||
| const ACCEPTED = "Accepted"; | ||||
| const DECLINED = "Declined"; | ||||
| const TENTATIVE = "Tentative"; | ||||
|  | ||||
| // Create a new calendar using builder pattern | ||||
| let my_calendar = new_calendar() | ||||
|     .name("Team Calendar") | ||||
|     .description("Calendar for team events and meetings"); | ||||
|  | ||||
| print(`Created calendar: ${my_calendar.name} (${my_calendar.id})`); | ||||
|  | ||||
|  | ||||
| // Create attendees to add to the event below | ||||
| let alice = new_attendee() | ||||
|     .with_contact_id(1) | ||||
|     .with_status(NO_RESPONSE); | ||||
| let bob = new_attendee() | ||||
|     .with_contact_id(2) | ||||
|     .with_status(ACCEPTED); | ||||
| let charlie = new_attendee() | ||||
|     .with_contact_id(3) | ||||
|     .with_status(TENTATIVE); | ||||
|  | ||||
|  | ||||
| // Create a new event using builder pattern | ||||
| // Note: Timestamps are in seconds since epoch | ||||
| let now = timestamp_now(); | ||||
| let one_hour = 60 * 60; | ||||
| let meeting = new_event() | ||||
|     .title("Weekly Sync") | ||||
|     .reschedule(now, now + one_hour) | ||||
|     .location("Conference Room A") | ||||
|     .description("Regular team sync meeting") | ||||
|     .add_attendee(alice) | ||||
|     .add_attendee(bob) | ||||
|     .add_attendee(charlie) | ||||
|     .save_event(); | ||||
|  | ||||
| print(`Created event: ${meeting.title}`); | ||||
|  | ||||
| // Demonstrate deletion; the local `meeting` object remains usable afterwards | ||||
| // and is re-saved when it is added to the calendar below. | ||||
| meeting.delete_event(); | ||||
|  | ||||
| print(`Deleted event: ${meeting.title}`); | ||||
|  | ||||
| // Inspect the attendees attached to the event | ||||
| let attendees = meeting.attendees; | ||||
| print(`Event has ${attendees.len()} attendees`); | ||||
|  | ||||
| // Update Charlie's attendee status directly | ||||
| meeting.update_attendee_status(3, ACCEPTED); | ||||
| print(`Updated Charlie's status to: ${ACCEPTED}`); | ||||
|  | ||||
| // Add the event to the calendar | ||||
| my_calendar.add_event_to_calendar(meeting); | ||||
| // Print events info | ||||
| print(`Added event to calendar`); | ||||
|  | ||||
| // Save the calendar to the database | ||||
| let saved_calendar = my_calendar.save_calendar(); | ||||
| print(`Calendar saved to database with ID: ${saved_calendar.id}`); | ||||
|  | ||||
| // Retrieve the calendar from the database using the ID from the saved calendar | ||||
| let retrieved_calendar = get_calendar_by_id(saved_calendar.id); | ||||
| if retrieved_calendar != () { | ||||
|     print(`Retrieved calendar: ${retrieved_calendar.name}`); | ||||
|     print(`Retrieved calendar successfully`); | ||||
| } else { | ||||
|     print("Failed to retrieve calendar from database"); | ||||
| } | ||||
|  | ||||
| // List all calendars in the database | ||||
| let all_calendars = list_calendars(); | ||||
| print("\nListing all calendars in database:"); | ||||
| let calendar_count = 0; | ||||
| for calendar in all_calendars { | ||||
|     print(`  - Calendar: ${calendar.name} (ID: ${calendar.id})`); | ||||
|     calendar_count += 1; | ||||
| } | ||||
| print(`Total calendars: ${calendar_count}`); | ||||
|  | ||||
| // List all events in the database | ||||
| let all_events = list_events(); | ||||
| print("\nListing all events in database:"); | ||||
| let event_count = 0; | ||||
| for event in all_events { | ||||
|     print(`  - Event: ${event.title} (ID: ${event.id})`); | ||||
|     event_count += 1; | ||||
| } | ||||
| print(`Total events: ${event_count}`); | ||||
|  | ||||
| // Helper function to get current timestamp | ||||
| fn timestamp_now() { | ||||
|     // This would typically be provided by the host application | ||||
|     // For this example, we'll use a fixed timestamp | ||||
|     1685620800 // June 1, 2023, 12:00 PM | ||||
| } | ||||
							
								
								
									
70	rhailib/_archive/engine/examples/calendar/example.rs	Normal file
| @@ -0,0 +1,70 @@ | ||||
| use engine::mock_db::create_mock_db; | ||||
| use engine::{create_heromodels_engine, eval_file}; | ||||
| use rhai::Engine; | ||||
|  | ||||
| mod mock; | ||||
| use mock::seed_calendar_data; | ||||
|  | ||||
| fn main() -> Result<(), Box<dyn std::error::Error>> { | ||||
|     println!("Calendar Rhai Example"); | ||||
|     println!("====================="); | ||||
|  | ||||
|     // Create a mock database | ||||
|     let db = create_mock_db(); | ||||
|  | ||||
|     // Seed the database with some initial data | ||||
|     seed_calendar_data(db.clone()); | ||||
|  | ||||
|     // Create the Rhai engine using our central engine creator | ||||
|     let mut engine = create_heromodels_engine(db.clone()); | ||||
|  | ||||
|     // Register timestamp helper functions | ||||
|     register_timestamp_helpers(&mut engine); | ||||
|  | ||||
|     // Get the path to the script | ||||
|     let manifest_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")); | ||||
|     let script_path = manifest_dir | ||||
|         .join("examples") | ||||
|         .join("calendar") | ||||
|         .join("calendar_script.rhai"); | ||||
|  | ||||
|     println!("\nRunning script: {}", script_path.display()); | ||||
|     println!("---------------------"); | ||||
|  | ||||
|     // Run the script | ||||
|     match eval_file(&engine, &script_path) { | ||||
|         Ok(result) => { | ||||
|             if !result.is_unit() { | ||||
|                 println!("\nScript returned: {:?}", result); | ||||
|             } | ||||
|             println!("\nScript executed successfully!"); | ||||
|             Ok(()) | ||||
|         } | ||||
|         Err(err) => { | ||||
|             eprintln!("\nError running script: {}", err); | ||||
|             Err(Box::new(std::io::Error::new( | ||||
|                 std::io::ErrorKind::Other, | ||||
|                 err.to_string(), | ||||
|             ))) | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| // Register timestamp helper functions with the engine | ||||
| fn register_timestamp_helpers(engine: &mut Engine) { | ||||
|     use chrono::{TimeZone, Utc}; | ||||
|  | ||||
|     // Function to get current timestamp | ||||
|     engine.register_fn("timestamp_now", || Utc::now().timestamp() as i64); | ||||
|  | ||||
|     // Function to format a timestamp | ||||
|     engine.register_fn("format_timestamp", |ts: i64| { | ||||
|         let dt = Utc | ||||
|             .timestamp_opt(ts, 0) | ||||
|             .single() | ||||
|             .expect("Invalid timestamp"); | ||||
|         dt.format("%Y-%m-%d %H:%M:%S UTC").to_string() | ||||
|     }); | ||||
|  | ||||
|     println!("Timestamp helper functions registered successfully."); | ||||
| } | ||||
							
								
								
									
60	rhailib/_archive/engine/examples/calendar/mock.rs	Normal file
| @@ -0,0 +1,60 @@ | ||||
| use chrono::Utc; | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::{Collection, Db}; | ||||
| use heromodels::models::calendar::{Calendar, Event}; | ||||
| use heromodels_core::Model; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| /// Seed the mock database with calendar data | ||||
| pub fn seed_calendar_data(db: Arc<OurDB>) { | ||||
|     // Create a calendar | ||||
|     let calendar = Calendar::new(None, "Work Calendar".to_string()) | ||||
|         .description("My work schedule".to_string()); | ||||
|  | ||||
|     // Store the calendar in the database | ||||
|     let (calendar_id, mut saved_calendar) = db | ||||
|         .collection::<Calendar>() | ||||
|         .expect("Failed to get Calendar collection") | ||||
|         .set(&calendar) | ||||
|         .expect("Failed to store calendar"); | ||||
|  | ||||
|     // Create an event | ||||
|     let now = Utc::now().timestamp(); | ||||
|     let end_time = now + 3600; // Add 1 hour in seconds | ||||
|  | ||||
|     let event = Event::new() | ||||
|         .title("Team Meeting".to_string()) | ||||
|         .reschedule(now, end_time) | ||||
|         .location("Conference Room A".to_string()) | ||||
|         .description("Weekly sync".to_string()) | ||||
|         .build(); | ||||
|  | ||||
|     // Store the event in the database first to get its ID | ||||
|     let (event_id, saved_event) = db | ||||
|         .collection() | ||||
|         .expect("Failed to get Event collection") | ||||
|         .set(&event) | ||||
|         .expect("Failed to store event"); | ||||
|  | ||||
|     // Add the event ID to the calendar | ||||
|     saved_calendar = saved_calendar.add_event(event_id as i64); | ||||
|  | ||||
|     // Store the updated calendar in the database | ||||
|     let (_calendar_id, final_calendar) = db | ||||
|         .collection::<Calendar>() | ||||
|         .expect("Failed to get Calendar collection") | ||||
|         .set(&saved_calendar) | ||||
|         .expect("Failed to store calendar"); | ||||
|  | ||||
|     println!("Mock database seeded with calendar data:"); | ||||
|     println!( | ||||
|         "  - Added calendar: {} (ID: {})", | ||||
|         final_calendar.name, | ||||
|         final_calendar.get_id() | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added event: {} (ID: {})", | ||||
|         saved_event.title, | ||||
|         saved_event.get_id() | ||||
|     ); | ||||
| } | ||||
							
								
								
									
70	rhailib/_archive/engine/examples/finance/example.rs	Normal file
| @@ -0,0 +1,70 @@ | ||||
| use engine::mock_db::create_mock_db; | ||||
| use engine::{create_heromodels_engine, eval_file}; | ||||
| use rhai::Engine; | ||||
| use std::path::Path; | ||||
|  | ||||
| mod mock; | ||||
| use mock::seed_finance_data; | ||||
|  | ||||
| fn main() -> Result<(), Box<dyn std::error::Error>> { | ||||
|     println!("Finance Rhai Example"); | ||||
|     println!("==================="); | ||||
|  | ||||
|     // Create a mock database | ||||
|     let db = create_mock_db(); | ||||
|  | ||||
|     // Seed the database with some initial data | ||||
|     seed_finance_data(db.clone()); | ||||
|  | ||||
|     // Create the Rhai engine using our central engine creator | ||||
|     let mut engine = create_heromodels_engine(db.clone()); | ||||
|  | ||||
|     // Register timestamp helper functions | ||||
|     register_timestamp_helpers(&mut engine); | ||||
|  | ||||
|     // Get the path to the script | ||||
|     let script_path = Path::new(file!()) | ||||
|         .parent() | ||||
|         .unwrap() | ||||
|         .join("finance_script.rhai"); | ||||
|  | ||||
|     println!("\nRunning script: {}", script_path.display()); | ||||
|     println!("---------------------"); | ||||
|  | ||||
|     // Run the script | ||||
|     match eval_file(&engine, &script_path) { | ||||
|         Ok(result) => { | ||||
|             if !result.is_unit() { | ||||
|                 println!("\nScript returned: {:?}", result); | ||||
|             } | ||||
|             println!("\nScript executed successfully!"); | ||||
|             Ok(()) | ||||
|         } | ||||
|         Err(err) => { | ||||
|             eprintln!("\nError running script: {}", err); | ||||
|             Err(Box::new(std::io::Error::new( | ||||
|                 std::io::ErrorKind::Other, | ||||
|                 err.to_string(), | ||||
|             ))) | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| // Register timestamp helper functions with the engine | ||||
| fn register_timestamp_helpers(engine: &mut Engine) { | ||||
|     use chrono::{TimeZone, Utc}; | ||||
|  | ||||
|     // Function to get current timestamp | ||||
|     engine.register_fn("timestamp_now", || Utc::now().timestamp() as i64); | ||||
|  | ||||
|     // Function to format a timestamp | ||||
|     engine.register_fn("format_timestamp", |ts: i64| { | ||||
|         let dt = Utc | ||||
|             .timestamp_opt(ts, 0) | ||||
|             .single() | ||||
|             .expect("Invalid timestamp"); | ||||
|         dt.format("%Y-%m-%d %H:%M:%S UTC").to_string() | ||||
|     }); | ||||
|  | ||||
|     println!("Timestamp helper functions registered successfully."); | ||||
| } | ||||
							
								
								
									
202	rhailib/_archive/engine/examples/finance/finance_script.rhai	Normal file
| @@ -0,0 +1,202 @@ | ||||
| // finance_script.rhai | ||||
| // Example Rhai script for working with Finance models | ||||
|  | ||||
| // Constants for AssetType | ||||
| const NATIVE = "Native"; | ||||
| const ERC20 = "Erc20"; | ||||
| const ERC721 = "Erc721"; | ||||
| const ERC1155 = "Erc1155"; | ||||
|  | ||||
| // Constants for ListingStatus | ||||
| const ACTIVE = "Active"; | ||||
| const SOLD = "Sold"; | ||||
| const CANCELLED = "Cancelled"; | ||||
| const EXPIRED = "Expired"; | ||||
|  | ||||
| // Constants for ListingType | ||||
| const FIXED_PRICE = "FixedPrice"; | ||||
| const AUCTION = "Auction"; | ||||
| const EXCHANGE = "Exchange"; | ||||
|  | ||||
| // Constants for BidStatus | ||||
| const BID_ACTIVE = "Active"; | ||||
| const BID_ACCEPTED = "Accepted"; | ||||
| const BID_REJECTED = "Rejected"; | ||||
| const BID_CANCELLED = "Cancelled"; | ||||
|  | ||||
| // Create a new account using builder pattern | ||||
| let alice_account = new_account() | ||||
|     .name("Alice's Account") | ||||
|     .user_id(101) | ||||
|     .description("Alice's primary trading account") | ||||
|     .ledger("ethereum") | ||||
|     .address("0x1234567890abcdef1234567890abcdef12345678") | ||||
|     .pubkey("0xabcdef1234567890abcdef1234567890abcdef12"); | ||||
|  | ||||
| print(`Created account: ${alice_account.get_name()} (User ID: ${alice_account.get_user_id()})`); | ||||
|  | ||||
| // Save the account to the database | ||||
| let saved_alice = set_account(alice_account); | ||||
| print(`Account saved to database with ID: ${saved_alice.get_id()}`); | ||||
|  | ||||
| // Create a new asset using builder pattern | ||||
| let token_asset = new_asset() | ||||
|     .name("HERO Token") | ||||
|     .description("Herocode governance token") | ||||
|     .amount(1000.0) | ||||
|     .address("0x9876543210abcdef9876543210abcdef98765432") | ||||
|     .asset_type(ERC20) | ||||
|     .decimals(18); | ||||
|  | ||||
| print(`Created asset: ${token_asset.get_name()} (${token_asset.get_amount()} ${token_asset.get_asset_type()})`); | ||||
|  | ||||
| // Save the asset to the database | ||||
| let saved_token = set_asset(token_asset); | ||||
| print(`Asset saved to database with ID: ${saved_token.get_id()}`); | ||||
|  | ||||
| // Add the asset to Alice's account | ||||
| saved_alice = saved_alice.add_asset(saved_token.get_id()); | ||||
| saved_alice = set_account(saved_alice); | ||||
| print(`Added asset ${saved_token.get_name()} to ${saved_alice.get_name()}`); | ||||
|  | ||||
| // Create a new NFT asset | ||||
| let nft_asset = new_asset() | ||||
|     .name("Herocode #42") | ||||
|     .description("Unique digital collectible") | ||||
|     .amount(1.0) | ||||
|     .address("0xabcdef1234567890abcdef1234567890abcdef12") | ||||
|     .asset_type(ERC721) | ||||
|     .decimals(0); | ||||
|  | ||||
| // Save the NFT to the database | ||||
| let saved_nft = set_asset(nft_asset); | ||||
| print(`NFT saved to database with ID: ${saved_nft.get_id()}`); | ||||
|  | ||||
| // Create Bob's account | ||||
| let bob_account = new_account() | ||||
|     .name("Bob's Account") | ||||
|     .user_id(102) | ||||
|     .description("Bob's trading account") | ||||
|     .ledger("ethereum") | ||||
|     .address("0xfedcba0987654321fedcba0987654321fedcba09") | ||||
|     .pubkey("0x654321fedcba0987654321fedcba0987654321fe"); | ||||
|  | ||||
| // Save Bob's account | ||||
| let saved_bob = set_account(bob_account); | ||||
| print(`Created and saved Bob's account with ID: ${saved_bob.get_id()}`); | ||||
|  | ||||
| // Create a listing for the NFT | ||||
| let nft_listing = new_listing() | ||||
|     .seller_id(saved_alice.get_id()) | ||||
|     .asset_id(saved_nft.get_id()) | ||||
|     .price(0.5) | ||||
|     .currency("ETH") | ||||
|     .listing_type(AUCTION) | ||||
|     .title("Rare Herocode NFT") | ||||
|     .description("One of a kind digital collectible") | ||||
|     .image_url("https://example.com/nft/42.png") | ||||
|     .expires_at(timestamp_now() + 86400) // 24 hours from now | ||||
|     .add_tag("rare") | ||||
|     .add_tag("collectible") | ||||
|     .add_tag("digital art") | ||||
|     .set_listing(); | ||||
|  | ||||
| // The listing was already persisted by set_listing() above | ||||
| print(`Created listing: ${nft_listing.get_title()} (ID: ${nft_listing.get_id()})`); | ||||
| print(`Listing status: ${nft_listing.get_status()}, Type: ${nft_listing.get_listing_type()}`); | ||||
| print(`Listing price: ${nft_listing.get_price()} ${nft_listing.get_currency()}`); | ||||
|  | ||||
| // Create a bid from Bob | ||||
| let bob_bid = new_bid() | ||||
|     .listing_id(nft_listing.get_id().to_string()) | ||||
|     .bidder_id(saved_bob.get_id()) | ||||
|     .amount(1.5) | ||||
|     .currency("ETH") | ||||
|     .set_bid(); | ||||
|  | ||||
| // The bid was already persisted by set_bid() above | ||||
| print(`Created bid from ${saved_bob.get_name()} for ${bob_bid.get_amount()} ${bob_bid.get_currency()}`); | ||||
|  | ||||
| // Add the bid to the listing | ||||
| nft_listing.add_bid(bob_bid); | ||||
| nft_listing.set_listing(); | ||||
| print(`Added bid to listing ${nft_listing.get_title()}`); | ||||
|  | ||||
| // Create another bid with higher amount | ||||
| let charlie_account = new_account() | ||||
|     .name("Charlie's Account") | ||||
|     .user_id(103) | ||||
|     .description("Charlie's trading account") | ||||
|     .ledger("ethereum") | ||||
|     .address("0x1122334455667788991122334455667788990011") | ||||
|     .pubkey("0x8877665544332211887766554433221188776655"); | ||||
|  | ||||
| let saved_charlie = set_account(charlie_account); | ||||
| print(`Created and saved Charlie's account with ID: ${saved_charlie.get_id()}`); | ||||
|  | ||||
| let charlie_bid = new_bid() | ||||
|     .listing_id(nft_listing.get_id().to_string()) | ||||
|     .bidder_id(saved_charlie.get_id()) | ||||
|     .amount(2.5) | ||||
|     .currency("ETH") | ||||
|     .set_bid(); | ||||
|  | ||||
| print(`Created higher bid from ${saved_charlie.get_name()} for ${charlie_bid.get_amount()} ${charlie_bid.get_currency()}`); | ||||
|  | ||||
| // Add the higher bid to the listing | ||||
| nft_listing.add_bid(charlie_bid) | ||||
|     .set_listing(); | ||||
|  | ||||
| print(`Added higher bid to listing ${nft_listing.get_title()}`); | ||||
|  | ||||
| nft_listing.sale_price(2.5) | ||||
|     .set_listing(); | ||||
|  | ||||
| // Complete the sale to the highest bidder (Charlie) | ||||
| nft_listing.complete_sale(saved_charlie.get_id()) | ||||
|     .set_listing(); | ||||
|  | ||||
| print(`Completed sale of ${nft_listing.get_title()} to ${saved_charlie.get_name()}`); | ||||
| print(`New listing status: ${nft_listing.get_status()}`); | ||||
|  | ||||
| // Retrieve the listing from the database | ||||
| let retrieved_listing = get_listing_by_id(nft_listing.get_id()); | ||||
| print(`Retrieved listing: ${retrieved_listing.get_title()} (Status: ${retrieved_listing.get_status()})`); | ||||
|  | ||||
| // Create a fixed price listing | ||||
| let token_listing = new_listing() | ||||
|     .seller_id(saved_alice.get_id()) | ||||
|     .asset_id(saved_token.get_id()) | ||||
|     .price(100.0) | ||||
|     .currency("USDC") | ||||
|     .listing_type(FIXED_PRICE) | ||||
|     .title("HERO Tokens for Sale") | ||||
|     .description("100 HERO tokens at fixed price") | ||||
|     .set_listing(); | ||||
|  | ||||
| // The fixed price listing was already persisted by set_listing() above | ||||
| print(`Created fixed price listing: ${token_listing.get_title()} (ID: ${token_listing.get_id()})`); | ||||
|  | ||||
| // Cancel the listing | ||||
| token_listing.cancel(); | ||||
| token_listing.set_listing(); | ||||
| print(`Cancelled listing: ${token_listing.get_title()}`); | ||||
| print(`Listing status: ${token_listing.get_status()}`); | ||||
|  | ||||
| // Print summary of all accounts | ||||
| print("\nAccount Summary:"); | ||||
| print(`Alice (ID: ${saved_alice.get_id()}): ${saved_alice.get_assets().len()} assets`); | ||||
| print(`Bob (ID: ${saved_bob.get_id()}): ${saved_bob.get_assets().len()} assets`); | ||||
| print(`Charlie (ID: ${saved_charlie.get_id()}): ${saved_charlie.get_assets().len()} assets`); | ||||
|  | ||||
| // Print summary of all listings | ||||
| print("\nListing Summary:"); | ||||
| print(`NFT Auction (ID: ${nft_listing.get_id()}): ${nft_listing.get_status()}`); | ||||
| print(`Token Sale (ID: ${token_listing.get_id()}): ${token_listing.get_status()}`); | ||||
|  | ||||
| // Print summary of all bids | ||||
| print("\nBid Summary:"); | ||||
| print(`Bob's bid: ${bob_bid.get_amount()} ${bob_bid.get_currency()} (Status: ${bob_bid.get_status()})`); | ||||
| print(`Charlie's bid: ${charlie_bid.get_amount()} ${charlie_bid.get_currency()} (Status: ${charlie_bid.get_status()})`); | ||||
							
								
								
									
111	rhailib/_archive/engine/examples/finance/mock.rs	Normal file
| @@ -0,0 +1,111 @@ | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::{Collection, Db}; | ||||
| use heromodels::models::finance::account::Account; | ||||
| use heromodels::models::finance::asset::{Asset, AssetType}; | ||||
| use heromodels::models::finance::marketplace::{Listing, ListingType}; | ||||
| use heromodels_core::Model; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| /// Seed the mock database with finance data | ||||
| pub fn seed_finance_data(db: Arc<OurDB>) { | ||||
|     // Create a user account | ||||
|     let account = Account::new() | ||||
|         .name("Demo Account") | ||||
|         .user_id(1) | ||||
|         .description("Demo trading account") | ||||
|         .ledger("ethereum") | ||||
|         .address("0x1234567890abcdef1234567890abcdef12345678") | ||||
|         .pubkey("0xabcdef1234567890abcdef1234567890abcdef12"); | ||||
|  | ||||
|     // Store the account in the database | ||||
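|     // set() persists the model and returns the assigned ID together with the stored copy | ||||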
|     let (account_id, mut updated_account) = db | ||||
|         .collection::<Account>() | ||||
|         .expect("Failed to get Account collection") | ||||
|         .set(&account) | ||||
|         .expect("Failed to store account"); | ||||
|  | ||||
|     // Create an ERC20 token asset | ||||
|     let token_asset = Asset::new() | ||||
|         .name("HERO Token") | ||||
|         .description("Herocode governance token") | ||||
|         .amount(1000.0) | ||||
|         .address("0x9876543210abcdef9876543210abcdef98765432") | ||||
|         .asset_type(AssetType::Erc20) | ||||
|         .decimals(18); | ||||
|  | ||||
|     // Store the token asset in the database | ||||
|     let (token_id, updated_token) = db | ||||
|         .collection::<Asset>() | ||||
|         .expect("Failed to get Asset collection") | ||||
|         .set(&token_asset) | ||||
|         .expect("Failed to store token asset"); | ||||
|  | ||||
|     // Create an NFT asset | ||||
|     let nft_asset = Asset::new() | ||||
|         .name("Herocode #1") | ||||
|         .description("Unique digital collectible") | ||||
|         .amount(1.0) | ||||
|         .address("0xabcdef1234567890abcdef1234567890abcdef12") | ||||
|         .asset_type(AssetType::Erc721) | ||||
|         .decimals(0); | ||||
|  | ||||
|     // Store the NFT asset in the database | ||||
|     let (nft_id, updated_nft) = db | ||||
|         .collection::<Asset>() | ||||
|         .expect("Failed to get Asset collection") | ||||
|         .set(&nft_asset) | ||||
|         .expect("Failed to store NFT asset"); | ||||
|  | ||||
|     // Add assets to the account | ||||
|     updated_account = updated_account.add_asset(token_id); | ||||
|     updated_account = updated_account.add_asset(nft_id); | ||||
|  | ||||
|     // Update the account in the database | ||||
|     let (_, final_account) = db | ||||
|         .collection::<Account>() | ||||
|         .expect("Failed to get Account collection") | ||||
|         .set(&updated_account) | ||||
|         .expect("Failed to store updated account"); | ||||
|  | ||||
|     // Create a listing for the NFT | ||||
|     let listing = Listing::new() | ||||
|         .seller_id(account_id) | ||||
|         .asset_id(nft_id) | ||||
|         .price(0.5) | ||||
|         .currency("ETH") | ||||
|         .listing_type(ListingType::Auction) | ||||
|         .title("Rare Herocode NFT".to_string()) | ||||
|         .description("One of a kind digital collectible".to_string()) | ||||
|         .image_url(Some("https://example.com/nft/1.png".to_string())) | ||||
|         .add_tag("rare".to_string()) | ||||
|         .add_tag("collectible".to_string()); | ||||
|  | ||||
|     // Store the listing in the database | ||||
|     let (_listing_id, updated_listing) = db | ||||
|         .collection::<Listing>() | ||||
|         .expect("Failed to get Listing collection") | ||||
|         .set(&listing) | ||||
|         .expect("Failed to store listing"); | ||||
|  | ||||
|     println!("Mock database seeded with finance data:"); | ||||
|     println!( | ||||
|         "  - Added account: {} (ID: {})", | ||||
|         final_account.name, | ||||
|         final_account.get_id() | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added token asset: {} (ID: {})", | ||||
|         updated_token.name, | ||||
|         updated_token.get_id() | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added NFT asset: {} (ID: {})", | ||||
|         updated_nft.name, | ||||
|         updated_nft.get_id() | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added listing: {} (ID: {})", | ||||
|         updated_listing.title, | ||||
|         updated_listing.get_id() | ||||
|     ); | ||||
| } | ||||
rhailib/_archive/engine/examples/flow/example.rs (new file, 162 lines)
							| @@ -0,0 +1,162 @@ | ||||
| use engine::mock_db::create_mock_db; | ||||
| use engine::{create_heromodels_engine, eval_file}; | ||||
| use heromodels::models::flow::{Flow, FlowStep, SignatureRequirement}; | ||||
| use heromodels_core::Model; | ||||
| use rhai::Scope; | ||||
| use std::path::Path; | ||||
|  | ||||
| mod mock; | ||||
| use mock::seed_flow_data; | ||||
|  | ||||
| fn main() -> Result<(), Box<dyn std::error::Error>> { | ||||
|     println!("Flow Rhai Example"); | ||||
|     println!("================="); | ||||
|  | ||||
|     // Create a mock database | ||||
|     let db = create_mock_db(); | ||||
|  | ||||
|     // Seed the database with initial data | ||||
|     seed_flow_data(db.clone()); | ||||
|  | ||||
|     // Create the Rhai engine with all modules registered | ||||
|     let engine = create_heromodels_engine(); | ||||
|  | ||||
|     // Get the path to the script | ||||
|     let script_path = Path::new(file!()) | ||||
|         .parent() | ||||
|         .unwrap() | ||||
|         .join("flow_script.rhai"); | ||||
|  | ||||
|     println!("\nRunning script: {}", script_path.display()); | ||||
|     println!("---------------------"); | ||||
|  | ||||
|     // Run the script | ||||
|     match eval_file(&engine, &script_path) { | ||||
|         Ok(result) => { | ||||
|             if !result.is_unit() { | ||||
|                 println!("\nScript returned: {:?}", result); | ||||
|             } | ||||
|             println!("\nScript executed successfully!"); | ||||
|         } | ||||
|         Err(err) => { | ||||
|             eprintln!("\nError running script: {}", err); | ||||
|             return Err(Box::new(std::io::Error::new( | ||||
|                 std::io::ErrorKind::Other, | ||||
|                 err.to_string(), | ||||
|             ))); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Demonstrate direct Rust interaction with the Rhai-exposed flow functionality | ||||
|     println!("\nDirect Rust interaction with Rhai-exposed flow functionality"); | ||||
|     println!("----------------------------------------------------------"); | ||||
|  | ||||
|     // Create a new scope | ||||
|     let mut scope = Scope::new(); | ||||
|  | ||||
|     // Create a new flow using the Rhai function | ||||
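|     // eval::<Flow> downcasts the script's return value to the registered Rust type; | ||||
|     // a mismatch is reported as an evaluation error rather than a panic | ||||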
|     let result = engine.eval::<Flow>("new_flow(0, \"Direct Rust Flow\")"); | ||||
|     match result { | ||||
|         Ok(mut flow) => { | ||||
|             println!( | ||||
|                 "Created flow from Rust: {} (ID: {})", | ||||
|                 flow.name, | ||||
|                 flow.get_id() | ||||
|             ); | ||||
|  | ||||
|             // Set flow status using the builder pattern | ||||
|             flow = flow.status("active".to_string()); | ||||
|             println!("Set flow status to: {}", flow.status); | ||||
|  | ||||
|             // Create a new flow step using the Rhai function | ||||
|             let result = engine.eval::<FlowStep>("new_flow_step(0, 1)"); | ||||
|  | ||||
|             match result { | ||||
|                 Ok(mut step) => { | ||||
|                     println!( | ||||
|                         "Created flow step from Rust: Step Order {} (ID: {})", | ||||
|                         step.step_order, | ||||
|                         step.get_id() | ||||
|                     ); | ||||
|  | ||||
|                     // Set step description | ||||
|                     step = step.description("Direct Rust Step".to_string()); | ||||
|                     println!( | ||||
|                         "Set step description to: {}", | ||||
|                         step.description | ||||
|                             .clone() | ||||
|                             .unwrap_or_else(|| "None".to_string()) | ||||
|                     ); | ||||
|  | ||||
|                     // Create a signature requirement using the Rhai function | ||||
|                     let result = engine.eval::<SignatureRequirement>( | ||||
|                         "new_signature_requirement(0, 1, \"Direct Rust Signer\", \"Please sign this document\")" | ||||
|                     ); | ||||
|  | ||||
|                     match result { | ||||
|                         Ok(req) => { | ||||
|                             println!( | ||||
|                                 "Created signature requirement from Rust: Public Key {} (ID: {})", | ||||
|                                 req.public_key, | ||||
|                                 req.get_id() | ||||
|                             ); | ||||
|  | ||||
|                             // Add the step to the flow using the builder pattern | ||||
|                             flow = flow.add_step(step); | ||||
|                             println!( | ||||
|                                 "Added step to flow. Flow now has {} steps", | ||||
|                                 flow.steps.len() | ||||
|                             ); | ||||
|  | ||||
|                             // Save the flow to the database using the Rhai function | ||||
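|                             // (call_fn only invokes functions defined in the compiled AST, | ||||
|                             // so a one-line wrapper script is compiled to reach db::save_flow) | ||||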
|                             let save_flow_script = "fn save_it(f) { return db::save_flow(f); }"; | ||||
|                             let save_flow_ast = engine.compile(save_flow_script).unwrap(); | ||||
|                             let result = engine.call_fn::<Flow>( | ||||
|                                 &mut scope, | ||||
|                                 &save_flow_ast, | ||||
|                                 "save_it", | ||||
|                                 (flow,), | ||||
|                             ); | ||||
|                             match result { | ||||
|                                 Ok(saved_flow) => { | ||||
|                                     println!( | ||||
|                                         "Saved flow to database with ID: {}", | ||||
|                                         saved_flow.get_id() | ||||
|                                     ); | ||||
|                                 } | ||||
|                                 Err(err) => eprintln!("Error saving flow: {}", err), | ||||
|                             } | ||||
|  | ||||
|                             // Save the signature requirement to the database using the Rhai function | ||||
|                             let save_req_script = | ||||
|                                 "fn save_it(r) { return db::save_signature_requirement(r); }"; | ||||
|                             let save_req_ast = engine.compile(save_req_script).unwrap(); | ||||
|                             let result = engine.call_fn::<SignatureRequirement>( | ||||
|                                 &mut scope, | ||||
|                                 &save_req_ast, | ||||
|                                 "save_it", | ||||
|                                 (req,), | ||||
|                             ); | ||||
|                             match result { | ||||
|                                 Ok(saved_req) => { | ||||
|                                     println!( | ||||
|                                         "Saved signature requirement to database with ID: {}", | ||||
|                                         saved_req.get_id() | ||||
|                                     ); | ||||
|                                 } | ||||
|                                 Err(err) => { | ||||
|                                     eprintln!("Error saving signature requirement: {}", err) | ||||
|                                 } | ||||
|                             } | ||||
|                         } | ||||
|                         Err(err) => eprintln!("Error creating signature requirement: {}", err), | ||||
|                     } | ||||
|                 } | ||||
|                 Err(err) => eprintln!("Error creating flow step: {}", err), | ||||
|             } | ||||
|         } | ||||
|         Err(err) => eprintln!("Error creating flow: {}", err), | ||||
|     } | ||||
|  | ||||
|     Ok(()) | ||||
| } | ||||
rhailib/_archive/engine/examples/flow/flow_script.rhai (new file, 111 lines)
							| @@ -0,0 +1,111 @@ | ||||
| // flow_script.rhai | ||||
| // Example Rhai script for working with Flow models | ||||
|  | ||||
| // Constants for Flow status | ||||
| const STATUS_DRAFT = "draft"; | ||||
| const STATUS_ACTIVE = "active"; | ||||
| const STATUS_COMPLETED = "completed"; | ||||
| const STATUS_CANCELLED = "cancelled"; | ||||
|  | ||||
| // Create a new flow using builder pattern | ||||
| let my_flow = new_flow(0, "flow-123"); | ||||
| name(my_flow, "Document Approval Flow"); | ||||
| status(my_flow, STATUS_DRAFT); | ||||
|  | ||||
| print(`Created flow: ${get_flow_name(my_flow)} (ID: ${get_flow_id(my_flow)})`); | ||||
| print(`Status: ${get_flow_status(my_flow)}`); | ||||
|  | ||||
| // Create flow steps using builder pattern | ||||
| let step1 = new_flow_step(0, 1); | ||||
| description(step1, "Initial review by legal team"); | ||||
| status(step1, STATUS_DRAFT); | ||||
|  | ||||
| let step2 = new_flow_step(0, 2); | ||||
| description(step2, "Approval by department head"); | ||||
| status(step2, STATUS_DRAFT); | ||||
|  | ||||
| let step3 = new_flow_step(0, 3); | ||||
| description(step3, "Final signature by CEO"); | ||||
| status(step3, STATUS_DRAFT); | ||||
|  | ||||
| // Create signature requirements using builder pattern | ||||
| let req1 = new_signature_requirement(0, get_flow_step_id(step1), "legal@example.com", "Please review this document"); | ||||
| signed_by(req1, "Legal Team"); | ||||
| status(req1, STATUS_DRAFT); | ||||
|  | ||||
| let req2 = new_signature_requirement(0, get_flow_step_id(step2), "dept@example.com", "Department approval needed"); | ||||
| signed_by(req2, "Department Head"); | ||||
| status(req2, STATUS_DRAFT); | ||||
|  | ||||
| let req3 = new_signature_requirement(0, get_flow_step_id(step3), "ceo@example.com", "Final approval required"); | ||||
| signed_by(req3, "CEO"); | ||||
| status(req3, STATUS_DRAFT); | ||||
|  | ||||
| print(`Created flow steps with signature requirements`); | ||||
|  | ||||
| // Add steps to the flow | ||||
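| // (add_step is registered with a &mut receiver, so each call below updates flow_with_steps in place) | ||||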
| let flow_with_steps = my_flow; | ||||
| add_step(flow_with_steps, step1); | ||||
| add_step(flow_with_steps, step2); | ||||
| add_step(flow_with_steps, step3); | ||||
|  | ||||
| print(`Added steps to flow. Flow now has ${get_flow_steps(flow_with_steps).len()} steps`); | ||||
|  | ||||
| // Activate the flow | ||||
| let active_flow = flow_with_steps; | ||||
| status(active_flow, STATUS_ACTIVE); | ||||
| print(`Updated flow status to: ${get_flow_status(active_flow)}`); | ||||
|  | ||||
| // Save the flow to the database | ||||
| let saved_flow = db::save_flow(active_flow); | ||||
| print(`Flow saved to database with ID: ${get_flow_id(saved_flow)}`); | ||||
|  | ||||
| // Save signature requirements to the database | ||||
| let saved_req1 = db::save_signature_requirement(req1); | ||||
| let saved_req2 = db::save_signature_requirement(req2); | ||||
| let saved_req3 = db::save_signature_requirement(req3); | ||||
| print(`Signature requirements saved to database with IDs: ${get_signature_requirement_id(saved_req1)}, ${get_signature_requirement_id(saved_req2)}, ${get_signature_requirement_id(saved_req3)}`); | ||||
|  | ||||
| // Retrieve the flow from the database | ||||
| let retrieved_flow = db::get_flow_by_id(get_flow_id(saved_flow)); | ||||
| print(`Retrieved flow: ${get_flow_name(retrieved_flow)}`); | ||||
| print(`It has ${get_flow_steps(retrieved_flow).len()} steps`); | ||||
|  | ||||
| // Complete the flow | ||||
| let completed_flow = retrieved_flow; | ||||
| status(completed_flow, STATUS_COMPLETED); | ||||
| print(`Updated retrieved flow status to: ${get_flow_status(completed_flow)}`); | ||||
|  | ||||
| // Save the updated flow | ||||
| db::save_flow(completed_flow); | ||||
| print("Updated flow saved to database"); | ||||
|  | ||||
| // List all flows in the database | ||||
| let all_flows = db::list_flows(); | ||||
| print("\nListing all flows in database:"); | ||||
| let flow_count = 0; | ||||
| for flow in all_flows { | ||||
|     print(`  - Flow: ${get_flow_name(flow)} (ID: ${get_flow_id(flow)})`); | ||||
|     flow_count += 1; | ||||
| } | ||||
| print(`Total flows: ${flow_count}`); | ||||
|  | ||||
| // List all signature requirements | ||||
| let all_reqs = db::list_signature_requirements(); | ||||
| print("\nListing all signature requirements in database:"); | ||||
| let req_count = 0; | ||||
| for req in all_reqs { | ||||
|     print(`  - Requirement for step ${get_signature_requirement_flow_step_id(req)} (ID: ${get_signature_requirement_id(req)})`); | ||||
|     req_count += 1; | ||||
| } | ||||
| print(`Total signature requirements: ${req_count}`); | ||||
|  | ||||
| // Clean up - delete the flow | ||||
| db::delete_flow(get_flow_id(completed_flow)); | ||||
| print(`Deleted flow with ID: ${get_flow_id(completed_flow)}`); | ||||
|  | ||||
| // Clean up - delete signature requirements | ||||
| db::delete_signature_requirement(get_signature_requirement_id(saved_req1)); | ||||
| db::delete_signature_requirement(get_signature_requirement_id(saved_req2)); | ||||
| db::delete_signature_requirement(get_signature_requirement_id(saved_req3)); | ||||
| print("Deleted all signature requirements"); | ||||
rhailib/_archive/engine/examples/flow/mock.rs (new file, 65 lines)
							| @@ -0,0 +1,65 @@ | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::{Collection, Db}; | ||||
| use heromodels::models::flow::{Flow, FlowStep, SignatureRequirement}; | ||||
| use heromodels_core::Model; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| /// Seed the mock database with flow data | ||||
| #[cfg(feature = "flow")] | ||||
| pub fn seed_flow_data(db: Arc<OurDB>) { | ||||
|     // Create a flow | ||||
|     let flow = Flow::new(None, "Onboarding Flow".to_string()) | ||||
|         .description("New employee onboarding process".to_string()) | ||||
|         .status("active".to_string()); | ||||
|  | ||||
|     // Create a signature requirement first | ||||
|     let sig_req = SignatureRequirement::new( | ||||
|         None, | ||||
|         1, | ||||
|         "hr_manager_pubkey".to_string(), | ||||
|         "Please sign the employment contract".to_string(), | ||||
|     ); | ||||
|     let (sig_req_id, saved_sig_req) = db | ||||
|         .collection::<SignatureRequirement>() | ||||
|         .expect("Failed to get SignatureRequirement collection") | ||||
|         .set(&sig_req) | ||||
|         .expect("Failed to store signature requirement"); | ||||
|  | ||||
|     // Create a flow step and add the signature requirement | ||||
|     let step = FlowStep::new(None, 1) | ||||
|         .description("Complete HR paperwork".to_string()) | ||||
|         .add_signature_requirement(sig_req_id); | ||||
|  | ||||
|     let (step_id, saved_step) = db | ||||
|         .collection::<FlowStep>() | ||||
|         .expect("Failed to get FlowStep collection") | ||||
|         .set(&step) | ||||
|         .expect("Failed to store flow step"); | ||||
|  | ||||
|     // Add the step to the flow | ||||
|     let flow_with_step = flow.add_step(step_id); | ||||
|  | ||||
|     // Store the flow | ||||
|     let (_flow_id, saved_flow) = db | ||||
|         .collection::<Flow>() | ||||
|         .expect("Failed to get Flow collection") | ||||
|         .set(&flow_with_step) | ||||
|         .expect("Failed to store flow"); | ||||
|  | ||||
|     println!("Mock database seeded with flow data:"); | ||||
|     println!( | ||||
|         "  - Added flow: {} (ID: {})", | ||||
|         saved_flow.name, | ||||
|         saved_flow.get_id() | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added step with order: {} (ID: {})", | ||||
|         saved_step.step_order, | ||||
|         saved_step.get_id() | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added signature requirement for: {} (ID: {})", | ||||
|         saved_sig_req.public_key, | ||||
|         saved_sig_req.get_id() | ||||
|     ); | ||||
| } | ||||
rhailib/_archive/engine/src/lib.rs (new file, 305 lines)
							| @@ -0,0 +1,305 @@ | ||||
| //! # Rhailib Engine | ||||
| //! | ||||
| //! The central Rhai scripting engine for the heromodels ecosystem. This crate provides | ||||
| //! a unified interface for creating, configuring, and executing Rhai scripts with access | ||||
| //! to all business domain modules. | ||||
| //! | ||||
| //! ## Features | ||||
| //! | ||||
| //! - **Unified Engine Creation**: Pre-configured Rhai engine with all DSL modules | ||||
| //! - **Script Execution Utilities**: Direct evaluation, file-based execution, and AST compilation | ||||
| //! - **Mock Database System**: Complete testing environment with seeded data | ||||
| //! - **Feature-Based Architecture**: Modular compilation based on required domains | ||||
| //! | ||||
| //! ## Quick Start | ||||
| //! | ||||
| //! ```rust | ||||
| //! use rhailib_engine::{create_heromodels_engine, eval_script}; | ||||
| //! | ||||
| //! // Create a fully configured engine | ||||
| //! let engine = create_heromodels_engine(); | ||||
| //! | ||||
| //! // Execute a business logic script | ||||
| //! let result = eval_script(&engine, r#" | ||||
| //!     let company = new_company() | ||||
| //!         .name("Acme Corp") | ||||
| //!         .business_type("global"); | ||||
| //!     company.name | ||||
| //! "#)?; | ||||
| //! | ||||
| //! println!("Company name: {}", result.as_string().unwrap()); | ||||
| //! ``` | ||||
| //! | ||||
| //! ## Available Features | ||||
| //! | ||||
| //! - `calendar` (default): Calendar and event management | ||||
| //! - `finance` (default): Financial accounts, assets, and marketplace | ||||
| //! - `flow`: Workflow and approval processes | ||||
| //! - `legal`: Contract and legal document management | ||||
| //! - `projects`: Project and task management | ||||
| //! - `biz`: Business operations and entities | ||||
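| //! | ||||
| //! A downstream crate can keep compile times down by enabling only the domains | ||||
| //! it needs. A hypothetical manifest entry (version number assumed) might be: | ||||
| //! | ||||
| //! ```toml | ||||
| //! [dependencies] | ||||
| //! rhailib_engine = { version = "0.1", default-features = false, features = ["calendar", "flow"] } | ||||
| //! ``` | ||||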
|  | ||||
| use rhai::{Engine, EvalAltResult, Scope, AST}; | ||||
| use rhailib_dsl; | ||||
| use std::fs; | ||||
| use std::path::Path; | ||||
|  | ||||
| /// Mock database module for testing and examples | ||||
| pub mod mock_db; | ||||
|  | ||||
| /// Creates a fully configured Rhai engine with all available DSL modules. | ||||
| /// | ||||
| /// This function creates a new Rhai engine instance, configures it with appropriate | ||||
| /// limits and settings, and registers all available business domain modules based | ||||
| /// on enabled features. | ||||
| /// | ||||
| /// # Engine Configuration | ||||
| /// | ||||
| /// The engine is configured with the following limits: | ||||
| /// - **Expression Depth**: 128 levels for both expressions and functions | ||||
| /// - **String Size**: 10 MB maximum | ||||
| /// - **Array Size**: 10,240 elements maximum | ||||
| /// - **Map Size**: 10,240 key-value pairs maximum | ||||
| /// | ||||
| /// # Registered Modules | ||||
| /// | ||||
| /// All enabled DSL modules are automatically registered, including: | ||||
| /// - Business operations (companies, products, sales, shareholders) | ||||
| /// - Financial models (accounts, assets, marketplace) | ||||
| /// - Content management (collections, images, PDFs, books) | ||||
| /// - Workflow management (flows, steps, signatures) | ||||
| /// - And more based on enabled features | ||||
| /// | ||||
| /// # Returns | ||||
| /// | ||||
| /// A fully configured `Engine` instance ready for script execution. | ||||
| /// | ||||
| /// # Example | ||||
| /// | ||||
| /// ```rust | ||||
| /// use rhailib_engine::create_heromodels_engine; | ||||
| /// | ||||
| /// let engine = create_heromodels_engine(); | ||||
| /// | ||||
| /// // Engine is now ready to execute scripts with access to all DSL functions | ||||
| /// let result = engine.eval::<String>(r#" | ||||
| ///     let company = new_company().name("Test Corp"); | ||||
| ///     company.name | ||||
| /// "#).unwrap(); | ||||
| /// assert_eq!(result, "Test Corp"); | ||||
| /// ``` | ||||
| pub fn create_heromodels_engine() -> Engine { | ||||
|     let mut engine = Engine::new(); | ||||
|  | ||||
|     // Configure engine settings | ||||
|     engine.set_max_expr_depths(128, 128); | ||||
|     engine.set_max_string_size(10 * 1024 * 1024); // 10 MB | ||||
|     engine.set_max_array_size(10 * 1024); // 10K elements | ||||
|     engine.set_max_map_size(10 * 1024); // 10K elements | ||||
|  | ||||
|     // Register all heromodels Rhai modules | ||||
|     rhailib_dsl::register_dsl_modules(&mut engine); | ||||
|  | ||||
|     engine | ||||
| } | ||||
|  | ||||
| // /// Register all heromodels Rhai modules with the engine | ||||
| // pub fn register_all_modules(engine: &mut Engine, db: Arc<OurDB>) { | ||||
| //     // Register the calendar module if the feature is enabled | ||||
| //     heromodels::models::access::register_access_rhai_module(engine, db.clone()); | ||||
| //     #[cfg(feature = "calendar")] | ||||
| //     heromodels::models::calendar::register_calendar_rhai_module(engine, db.clone()); | ||||
| //     heromodels::models::contact::register_contact_rhai_module(engine, db.clone()); | ||||
| //     heromodels::models::library::register_library_rhai_module(engine, db.clone()); | ||||
| //     heromodels::models::circle::register_circle_rhai_module(engine, db.clone()); | ||||
|  | ||||
| //     // Register the flow module if the feature is enabled | ||||
| //     #[cfg(feature = "flow")] | ||||
| //     heromodels::models::flow::register_flow_rhai_module(engine, db.clone()); | ||||
|  | ||||
| //     // // Register the finance module if the feature is enabled | ||||
| //     // #[cfg(feature = "finance")] | ||||
| //     // heromodels::models::finance::register_finance_rhai_module(engine, db.clone()); | ||||
|  | ||||
| //     // Register the legal module if the feature is enabled | ||||
| //     #[cfg(feature = "legal")] | ||||
| //     heromodels::models::legal::register_legal_rhai_module(engine, db.clone()); | ||||
|  | ||||
| //     // Register the projects module if the feature is enabled | ||||
| //     #[cfg(feature = "projects")] | ||||
| //     heromodels::models::projects::register_projects_rhai_module(engine, db.clone()); | ||||
|  | ||||
| //     // Register the biz module if the feature is enabled | ||||
| //     #[cfg(feature = "biz")] | ||||
| //     heromodels::models::biz::register_biz_rhai_module(engine, db.clone()); | ||||
|  | ||||
| //     println!("Heromodels Rhai modules registered successfully."); | ||||
| // } | ||||
|  | ||||
| /// Evaluates a Rhai script string and returns the result. | ||||
| /// | ||||
| /// This function provides a convenient way to execute Rhai script strings directly | ||||
| /// using the provided engine. It's suitable for one-off script execution or when | ||||
| /// the script content is dynamically generated. | ||||
| /// | ||||
| /// # Arguments | ||||
| /// | ||||
| /// * `engine` - The Rhai engine to use for script execution | ||||
| /// * `script` - The Rhai script content as a string | ||||
| /// | ||||
| /// # Returns | ||||
| /// | ||||
| /// * `Ok(Dynamic)` - The result of script execution | ||||
| /// * `Err(Box<EvalAltResult>)` - Script compilation or execution error | ||||
| /// | ||||
| /// # Example | ||||
| /// | ||||
| /// ```rust | ||||
| /// use rhailib_engine::{create_heromodels_engine, eval_script}; | ||||
| /// | ||||
| /// let engine = create_heromodels_engine(); | ||||
| /// let result = eval_script(&engine, r#" | ||||
| ///     let x = 42; | ||||
| ///     let y = 8; | ||||
| ///     x + y | ||||
| /// "#)?; | ||||
| /// assert_eq!(result.as_int().unwrap(), 50); | ||||
| /// ``` | ||||
| pub fn eval_script( | ||||
|     engine: &Engine, | ||||
|     script: &str, | ||||
| ) -> Result<rhai::Dynamic, Box<rhai::EvalAltResult>> { | ||||
|     engine.eval::<rhai::Dynamic>(script) | ||||
| } | ||||
|  | ||||
| /// Evaluates a Rhai script from a file and returns the result. | ||||
| /// | ||||
| /// This function reads a Rhai script from the filesystem and executes it using | ||||
| /// the provided engine. It handles file reading errors gracefully and provides | ||||
| /// meaningful error messages. | ||||
| /// | ||||
| /// # Arguments | ||||
| /// | ||||
| /// * `engine` - The Rhai engine to use for script execution | ||||
| /// * `file_path` - Path to the Rhai script file | ||||
| /// | ||||
| /// # Returns | ||||
| /// | ||||
| /// * `Ok(Dynamic)` - The result of script execution | ||||
| /// * `Err(Box<EvalAltResult>)` - File reading, compilation, or execution error | ||||
| /// | ||||
| /// # Example | ||||
| /// | ||||
| /// ```rust | ||||
| /// use rhailib_engine::{create_heromodels_engine, eval_file}; | ||||
| /// use std::path::Path; | ||||
| /// | ||||
| /// let engine = create_heromodels_engine(); | ||||
| /// let result = eval_file(&engine, Path::new("scripts/business_logic.rhai"))?; | ||||
| /// println!("Script result: {:?}", result); | ||||
| /// ``` | ||||
| /// | ||||
| /// # Error Handling | ||||
| /// | ||||
| /// File reading errors are converted to Rhai `ErrorSystem` variants with | ||||
| /// descriptive messages including the file path that failed to load. | ||||
| pub fn eval_file( | ||||
|     engine: &Engine, | ||||
|     file_path: &Path, | ||||
| ) -> Result<rhai::Dynamic, Box<rhai::EvalAltResult>> { | ||||
|     match fs::read_to_string(file_path) { | ||||
|         Ok(script_content) => engine.eval::<rhai::Dynamic>(&script_content), | ||||
|         Err(io_err) => Err(Box::new(EvalAltResult::ErrorSystem( | ||||
|             format!("Failed to read script file: {}", file_path.display()), | ||||
|             Box::new(io_err), | ||||
|         ))), | ||||
|     } | ||||
| } | ||||
|  | ||||
| /// Compiles a Rhai script string into an Abstract Syntax Tree (AST). | ||||
| /// | ||||
| /// This function compiles a Rhai script into an AST that can be executed multiple | ||||
| /// times with different scopes. This is more efficient than re-parsing the script | ||||
| /// for each execution when the same script needs to be run repeatedly. | ||||
| /// | ||||
| /// # Arguments | ||||
| /// | ||||
| /// * `engine` - The Rhai engine to use for compilation | ||||
| /// * `script` - The Rhai script content as a string | ||||
| /// | ||||
| /// # Returns | ||||
| /// | ||||
| /// * `Ok(AST)` - The compiled Abstract Syntax Tree | ||||
| /// * `Err(Box<EvalAltResult>)` - Script compilation error | ||||
| /// | ||||
| /// # Example | ||||
| /// | ||||
| /// ```rust | ||||
| /// use rhailib_engine::{create_heromodels_engine, compile_script, run_ast}; | ||||
| /// use rhai::Scope; | ||||
| /// | ||||
| /// let engine = create_heromodels_engine(); | ||||
| /// let ast = compile_script(&engine, r#" | ||||
| ///     let company = new_company().name(company_name); | ||||
| ///     save_company(company) | ||||
| /// "#)?; | ||||
| /// | ||||
| /// // Execute the compiled script multiple times with different variables | ||||
| /// let mut scope1 = Scope::new(); | ||||
| /// scope1.push("company_name", "Acme Corp"); | ||||
| /// let result1 = run_ast(&engine, &ast, &mut scope1)?; | ||||
| /// | ||||
| /// let mut scope2 = Scope::new(); | ||||
| /// scope2.push("company_name", "Tech Startup"); | ||||
| /// let result2 = run_ast(&engine, &ast, &mut scope2)?; | ||||
| /// ``` | ||||
| pub fn compile_script(engine: &Engine, script: &str) -> Result<AST, Box<rhai::EvalAltResult>> { | ||||
|     Ok(engine.compile(script)?) | ||||
| } | ||||
|  | ||||
| /// Executes a compiled Rhai script AST with the provided scope. | ||||
| /// | ||||
| /// This function runs a pre-compiled AST using the provided engine and scope. | ||||
| /// The scope can contain variables and functions that will be available to | ||||
| /// the script during execution. | ||||
| /// | ||||
| /// # Arguments | ||||
| /// | ||||
| /// * `engine` - The Rhai engine to use for execution | ||||
| /// * `ast` - The compiled Abstract Syntax Tree to execute | ||||
| /// * `scope` - Mutable scope containing variables and functions for the script | ||||
| /// | ||||
| /// # Returns | ||||
| /// | ||||
| /// * `Ok(Dynamic)` - The result of script execution | ||||
| /// * `Err(Box<EvalAltResult>)` - Script execution error | ||||
| /// | ||||
| /// # Example | ||||
| /// | ||||
| /// ```rust | ||||
| /// use rhailib_engine::{create_heromodels_engine, compile_script, run_ast}; | ||||
| /// use rhai::Scope; | ||||
| /// | ||||
| /// let engine = create_heromodels_engine(); | ||||
| /// let ast = compile_script(&engine, "x + y")?; | ||||
| /// | ||||
| /// let mut scope = Scope::new(); | ||||
| /// scope.push("x", 10_i64); | ||||
| /// scope.push("y", 32_i64); | ||||
| /// | ||||
| /// let result = run_ast(&engine, &ast, &mut scope)?; | ||||
| /// assert_eq!(result.as_int().unwrap(), 42); | ||||
| /// ``` | ||||
| /// | ||||
| /// # Performance Notes | ||||
| /// | ||||
| /// Using compiled ASTs is significantly more efficient than re-parsing scripts | ||||
| /// for repeated execution, especially for complex scripts or when executing | ||||
| /// the same logic with different input parameters. | ||||
| pub fn run_ast( | ||||
|     engine: &Engine, | ||||
|     ast: &AST, | ||||
|     scope: &mut Scope, | ||||
| ) -> Result<rhai::Dynamic, Box<rhai::EvalAltResult>> { | ||||
|     engine.eval_ast_with_scope(scope, ast) | ||||
| } | ||||
rhailib/_archive/engine/src/mock_db.rs (new file, 374 lines)
							| @@ -0,0 +1,374 @@ | ||||
| use chrono::Utc; | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::{Collection, Db}; // Import both Db and Collection traits | ||||
| use heromodels::models::calendar::{Calendar, Event}; | ||||
| use heromodels_core::Model; // Import Model trait to use build method | ||||
| use std::env; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| // Import finance models | ||||
| use heromodels::models::finance::account::Account; | ||||
| use heromodels::models::finance::asset::{Asset, AssetType}; | ||||
| use heromodels::models::finance::marketplace::{Listing, ListingType}; | ||||
|  | ||||
| // Conditionally import other modules based on features | ||||
| #[cfg(feature = "flow")] | ||||
| use heromodels::models::flow::{Flow, FlowStep, SignatureRequirement}; | ||||
|  | ||||
| #[cfg(feature = "legal")] | ||||
| use heromodels::models::legal::{ | ||||
|     Contract, ContractRevision, ContractSigner, ContractStatus, SignerStatus, | ||||
| }; | ||||
|  | ||||
| #[cfg(feature = "projects")] | ||||
| use heromodels::models::projects::{ItemType, Priority, Project, Status as ProjectStatus}; | ||||
|  | ||||
| /// Create a mock in-memory database for examples | ||||
| pub fn create_mock_db() -> Arc<OurDB> { | ||||
|     // Create a temporary directory for the database files | ||||
|     let temp_dir = env::temp_dir().join("engine_examples"); | ||||
|     std::fs::create_dir_all(&temp_dir).expect("Failed to create temp directory"); | ||||
|  | ||||
|     // Create a new OurDB instance with reset=true to ensure it's clean | ||||
|     let db = OurDB::new(temp_dir, true).expect("Failed to create OurDB instance"); | ||||
|  | ||||
|     Arc::new(db) | ||||
| } | ||||
|  | ||||
| /// Seed the mock database with some initial data for all modules | ||||
| pub fn seed_mock_db(db: Arc<OurDB>) { | ||||
|     // Seed calendar data | ||||
|     seed_calendar_data(db.clone()); | ||||
|  | ||||
|     // Seed finance data | ||||
|     seed_finance_data(db.clone()); | ||||
|  | ||||
|     // Seed flow data if the feature is enabled | ||||
|     #[cfg(feature = "flow")] | ||||
|     seed_flow_data(db.clone()); | ||||
|  | ||||
|     // Seed legal data if the feature is enabled | ||||
|     #[cfg(feature = "legal")] | ||||
|     seed_legal_data(db.clone()); | ||||
|  | ||||
|     // Seed projects data if the feature is enabled | ||||
|     #[cfg(feature = "projects")] | ||||
|     seed_projects_data(db.clone()); | ||||
|  | ||||
|     println!("Mock database seeded with initial data for all enabled modules."); | ||||
| } | ||||
|  | ||||
| /// Seed the mock database with calendar data | ||||
| fn seed_calendar_data(db: Arc<OurDB>) { | ||||
|     // Create a calendar | ||||
|     let mut calendar = Calendar::new(None, "Work Calendar".to_string()); | ||||
|     calendar.description = Some("My work schedule".to_string()); | ||||
|  | ||||
|     // Store the calendar in the database | ||||
|     let (_calendar_id, _updated_calendar) = db | ||||
|         .collection::<Calendar>() | ||||
|         .expect("Failed to get Calendar collection") | ||||
|         .set(&calendar) | ||||
|         .expect("Failed to store calendar"); | ||||
|  | ||||
|     // Create an event | ||||
|     let now = Utc::now().timestamp(); | ||||
|     let end_time = now + 3600; // Add 1 hour in seconds | ||||
|  | ||||
|     // Use the builder pattern for Event | ||||
|     let event = Event::new() | ||||
|         .title("Team Meeting".to_string()) | ||||
|         .reschedule(now, end_time) | ||||
|         .location("Conference Room A".to_string()) | ||||
|         .description("Weekly sync".to_string()) | ||||
|         // .add_attendee(Attendee::new(1)) | ||||
|         // .add_attendee(Attendee::new(2)) | ||||
|         .build(); | ||||
|  | ||||
|     // // Add attendees to the event using the builder pattern | ||||
|     // let attendee1 = Attendee::new(1); | ||||
|     // let attendee2 = Attendee::new(2); | ||||
|  | ||||
|     // // Add attendees using the builder pattern | ||||
|     // event = event.add_attendee(attendee1); | ||||
|     // event = event.add_attendee(attendee2); | ||||
|  | ||||
|     // Call build and capture the returned value | ||||
|     // let event = event.build(); | ||||
|  | ||||
|     // Store the event in the database first to get its ID | ||||
|     let (event_id, updated_event) = db | ||||
|         .collection() | ||||
|         .expect("Failed to get Event collection") | ||||
|         .set(&event) | ||||
|         .expect("Failed to store event"); | ||||
|  | ||||
|     // Add the event ID to the calendar | ||||
|     calendar = calendar.add_event(event_id as i64); | ||||
|  | ||||
|     // Store the calendar in the database | ||||
|     let (_calendar_id, updated_calendar) = db | ||||
|         .collection::<Calendar>() | ||||
|         .expect("Failed to get Calendar collection") | ||||
|         .set(&calendar) | ||||
|         .expect("Failed to store calendar"); | ||||
|  | ||||
|     println!("Mock database seeded with calendar data:"); | ||||
|     println!( | ||||
|         "  - Added calendar: {} (ID: {})", | ||||
|         updated_calendar.name, updated_calendar.base_data.id | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added event: {} (ID: {})", | ||||
|         updated_event.title, updated_event.base_data.id | ||||
|     ); | ||||
| } | ||||
|  | ||||
| /// Seed the mock database with flow data | ||||
| #[cfg(feature = "flow")] | ||||
| fn seed_flow_data(db: Arc<OurDB>) { | ||||
|     // Create a flow | ||||
|     let mut flow = Flow::new(0, "Document Approval".to_string()); | ||||
|  | ||||
|     // Set flow properties using the builder pattern | ||||
|     flow = flow.status("draft".to_string()); | ||||
|     flow = flow.name("Document Approval Flow".to_string()); | ||||
|  | ||||
|     // Create flow steps | ||||
|     let mut step1 = FlowStep::new(0, 1); | ||||
|     step1 = step1.description("Initial review by legal team".to_string()); | ||||
|     step1 = step1.status("pending".to_string()); | ||||
|  | ||||
|     let mut step2 = FlowStep::new(0, 2); | ||||
|     step2 = step2.description("Approval by department head".to_string()); | ||||
|     step2 = step2.status("pending".to_string()); | ||||
|  | ||||
|     // Add signature requirements | ||||
|     let req1 = SignatureRequirement::new( | ||||
|         0, | ||||
|         1, | ||||
|         "Legal Team".to_string(), | ||||
|         "Please review this document".to_string(), | ||||
|     ); | ||||
|     let req2 = SignatureRequirement::new( | ||||
|         0, | ||||
|         2, | ||||
|         "Department Head".to_string(), | ||||
|         "Please approve this document".to_string(), | ||||
|     ); | ||||
|  | ||||
|     // Add steps to flow | ||||
|     flow = flow.add_step(step1); | ||||
|     flow = flow.add_step(step2); | ||||
|  | ||||
|     // Store in the database | ||||
|     let (_, updated_flow) = db | ||||
|         .collection::<Flow>() | ||||
|         .expect("Failed to get Flow collection") | ||||
|         .set(&flow) | ||||
|         .expect("Failed to store flow"); | ||||
|  | ||||
|     // Store signature requirements in the database | ||||
|     let (_, updated_req1) = db | ||||
|         .collection::<SignatureRequirement>() | ||||
|         .expect("Failed to get SignatureRequirement collection") | ||||
|         .set(&req1) | ||||
|         .expect("Failed to store signature requirement"); | ||||
|  | ||||
|     let (_, updated_req2) = db | ||||
|         .collection::<SignatureRequirement>() | ||||
|         .expect("Failed to get SignatureRequirement collection") | ||||
|         .set(&req2) | ||||
|         .expect("Failed to store signature requirement"); | ||||
|  | ||||
|     println!("Mock database seeded with flow data:"); | ||||
|     println!( | ||||
|         "  - Added flow: {} (ID: {})", | ||||
|         updated_flow.name, updated_flow.base_data.id | ||||
|     ); | ||||
|     println!("  - Added {} steps", updated_flow.steps.len()); | ||||
|     println!( | ||||
|         "  - Added signature requirements with IDs: {} and {}", | ||||
|         updated_req1.base_data.id, updated_req2.base_data.id | ||||
|     ); | ||||
| } | ||||
|  | ||||
| /// Seed the mock database with legal data | ||||
| #[cfg(feature = "legal")] | ||||
| fn seed_legal_data(db: Arc<OurDB>) { | ||||
|     // Create a contract | ||||
|     let mut contract = Contract::new(None, "Service Agreement".to_string()); | ||||
|     contract.description = Some("Agreement for software development services".to_string()); | ||||
|     contract.status = ContractStatus::Draft; | ||||
|  | ||||
|     // Create a revision | ||||
|     let revision = ContractRevision::new( | ||||
|         None, | ||||
|         "Initial draft".to_string(), | ||||
|         "https://example.com/contract/v1".to_string(), | ||||
|     ); | ||||
|  | ||||
|     // Create signers | ||||
|     let signer1 = ContractSigner::new(None, 1, "Client".to_string()); | ||||
|     let signer2 = ContractSigner::new(None, 2, "Provider".to_string()); | ||||
|  | ||||
|     // Add revision and signers to contract | ||||
|     contract.add_revision(revision); | ||||
|     contract.add_signer(signer1); | ||||
|     contract.add_signer(signer2); | ||||
|  | ||||
|     // Store in the database | ||||
|     let (_, updated_contract) = db | ||||
|         .collection::<Contract>() | ||||
|         .expect("Failed to get Contract collection") | ||||
|         .set(&contract) | ||||
|         .expect("Failed to store contract"); | ||||
|  | ||||
|     println!("Mock database seeded with legal data:"); | ||||
|     println!( | ||||
|         "  - Added contract: {} (ID: {})", | ||||
|         updated_contract.name, updated_contract.base_data.id | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added {} revisions and {} signers", | ||||
|         updated_contract.revisions.len(), | ||||
|         updated_contract.signers.len() | ||||
|     ); | ||||
| } | ||||
|  | ||||
| /// Seed the mock database with projects data | ||||
| #[cfg(feature = "projects")] | ||||
| fn seed_projects_data(db: Arc<OurDB>) { | ||||
|     // Create a project | ||||
|     let mut project = Project::new(None, "Website Redesign".to_string()); | ||||
|     project.description = Some("Redesign the company website".to_string()); | ||||
|     project.status = ProjectStatus::InProgress; | ||||
|     project.priority = Priority::High; | ||||
|  | ||||
|     // Add members and tags | ||||
|     project.add_member_id(1); | ||||
|     project.add_member_id(2); | ||||
|     project.add_tag("design".to_string()); | ||||
|     project.add_tag("web".to_string()); | ||||
|  | ||||
|     // Store in the database | ||||
|     let (_, updated_project) = db | ||||
|         .collection::<Project>() | ||||
|         .expect("Failed to get Project collection") | ||||
|         .set(&project) | ||||
|         .expect("Failed to store project"); | ||||
|  | ||||
|     println!("Mock database seeded with projects data:"); | ||||
|     println!( | ||||
|         "  - Added project: {} (ID: {})", | ||||
|         updated_project.name, updated_project.base_data.id | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Status: {}, Priority: {}", | ||||
|         updated_project.status, updated_project.priority | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added {} members and {} tags", | ||||
|         updated_project.member_ids.len(), | ||||
|         updated_project.tags.len() | ||||
|     ); | ||||
| } | ||||
| /// Seed the mock database with finance data | ||||
| fn seed_finance_data(db: Arc<OurDB>) { | ||||
|     // Create a user account | ||||
|     let mut account = Account::new() | ||||
|         .name("Demo Account") | ||||
|         .user_id(1) | ||||
|         .description("Demo trading account") | ||||
|         .ledger("ethereum") | ||||
|         .address("0x1234567890abcdef1234567890abcdef12345678") | ||||
|         .pubkey("0xabcdef1234567890abcdef1234567890abcdef12"); | ||||
|  | ||||
|     // Store the account in the database | ||||
|     let (account_id, updated_account) = db | ||||
|         .collection::<Account>() | ||||
|         .expect("Failed to get Account collection") | ||||
|         .set(&account) | ||||
|         .expect("Failed to store account"); | ||||
|  | ||||
|     // Create an ERC20 token asset | ||||
|     let token_asset = Asset::new() | ||||
|         .name("HERO Token") | ||||
|         .description("Herocode governance token") | ||||
|         .amount(1000.0) | ||||
|         .address("0x9876543210abcdef9876543210abcdef98765432") | ||||
|         .asset_type(AssetType::Erc20) | ||||
|         .decimals(18); | ||||
|  | ||||
|     // Store the token asset in the database | ||||
|     let (token_id, updated_token) = db | ||||
|         .collection::<Asset>() | ||||
|         .expect("Failed to get Asset collection") | ||||
|         .set(&token_asset) | ||||
|         .expect("Failed to store token asset"); | ||||
|  | ||||
|     // Create an NFT asset | ||||
|     let nft_asset = Asset::new() | ||||
|         .name("Herocode #1") | ||||
|         .description("Unique digital collectible") | ||||
|         .amount(1.0) | ||||
|         .address("0xabcdef1234567890abcdef1234567890abcdef12") | ||||
|         .asset_type(AssetType::Erc721) | ||||
|         .decimals(0); | ||||
|  | ||||
|     // Store the NFT asset in the database | ||||
|     let (nft_id, updated_nft) = db | ||||
|         .collection::<Asset>() | ||||
|         .expect("Failed to get Asset collection") | ||||
|         .set(&nft_asset) | ||||
|         .expect("Failed to store NFT asset"); | ||||
|  | ||||
|     // Add assets to the account | ||||
|     account = updated_account.add_asset(token_id); | ||||
|     account = account.add_asset(nft_id); | ||||
|  | ||||
|     // Update the account in the database | ||||
|     let (_, updated_account) = db | ||||
|         .collection::<Account>() | ||||
|         .expect("Failed to get Account collection") | ||||
|         .set(&account) | ||||
|         .expect("Failed to store updated account"); | ||||
|  | ||||
|     // Create a listing for the NFT | ||||
|     let listing = Listing::new() | ||||
|         .seller_id(account_id) | ||||
|         .asset_id(nft_id) | ||||
|         .price(0.5) | ||||
|         .currency("ETH") | ||||
|         .listing_type(ListingType::Auction) | ||||
|         .title("Rare Herocode NFT".to_string()) | ||||
|         .description("One of a kind digital collectible".to_string()) | ||||
|         .image_url(Some("hcttps://example.com/nft/1.png".to_string())) | ||||
|         .add_tag("rare".to_string()) | ||||
|         .add_tag("collectible".to_string()); | ||||
|  | ||||
|     // Store the listing in the database | ||||
|     let (_listing_id, updated_listing) = db | ||||
|         .collection::<Listing>() | ||||
|         .expect("Failed to get Listing collection") | ||||
|         .set(&listing) | ||||
|         .expect("Failed to store listing"); | ||||
|  | ||||
|     println!("Mock database seeded with finance data:"); | ||||
|     println!( | ||||
|         "  - Added account: {} (ID: {})", | ||||
|         updated_account.name, updated_account.base_data.id | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added token asset: {} (ID: {})", | ||||
|         updated_token.name, updated_token.base_data.id | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added NFT asset: {} (ID: {})", | ||||
|         updated_nft.name, updated_nft.base_data.id | ||||
|     ); | ||||
|     println!( | ||||
|         "  - Added listing: {} (ID: {})", | ||||
|         updated_listing.title, updated_listing.base_data.id | ||||
|     ); | ||||
| } | ||||
rhailib/_archive/flow/flow.rs (new file, 97 lines)
							| @@ -0,0 +1,97 @@ | ||||
| use heromodels::db::Db; | ||||
| use macros::{ | ||||
|     register_authorized_create_by_id_fn, register_authorized_delete_by_id_fn, | ||||
|     register_authorized_get_by_id_fn, | ||||
| }; | ||||
| use rhai::plugin::*; | ||||
| use rhai::{Array, Dynamic, Engine, EvalAltResult, Module, INT}; | ||||
| use std::mem; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::Collection; | ||||
| use heromodels::models::flow::flow::Flow; | ||||
| use heromodels::models::flow::flow_step::FlowStep; | ||||
|  | ||||
| type RhaiFlow = Flow; | ||||
| type RhaiFlowStep = FlowStep; | ||||
|  | ||||
| #[export_module] | ||||
| mod rhai_flow_module { | ||||
|     use super::{Array, Dynamic, RhaiFlow, RhaiFlowStep, INT}; | ||||
|  | ||||
|     #[rhai_fn(name = "new_flow", return_raw)] | ||||
|     pub fn new_flow() -> Result<RhaiFlow, Box<EvalAltResult>> { | ||||
|         Ok(Flow::new()) | ||||
|     } | ||||
|  | ||||
|     // --- Setters --- | ||||
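|     // Pattern: take the flow out of the &mut slot with mem::take, apply the | ||||
|     // consuming builder method, write the result back, and return a clone so | ||||
|     // calls can chain from Rhai. | ||||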
|     #[rhai_fn(name = "name", return_raw)] | ||||
|     pub fn set_name(flow: &mut RhaiFlow, name: String) -> Result<RhaiFlow, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(flow); | ||||
|         *flow = owned.name(name); | ||||
|         Ok(flow.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "status", return_raw)] | ||||
|     pub fn set_status(flow: &mut RhaiFlow, status: String) -> Result<RhaiFlow, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(flow); | ||||
|         *flow = owned.status(status); | ||||
|         Ok(flow.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "add_step", return_raw)] | ||||
|     pub fn add_step( | ||||
|         flow: &mut RhaiFlow, | ||||
|         step: RhaiFlowStep, | ||||
|     ) -> Result<RhaiFlow, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(flow); | ||||
|         *flow = owned.add_step(step); | ||||
|         Ok(flow.clone()) | ||||
|     } | ||||
|  | ||||
|     // --- Getters --- | ||||
|     #[rhai_fn(get = "id", pure)] | ||||
|     pub fn get_id(f: &mut RhaiFlow) -> INT { | ||||
|         f.base_data.id as INT | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(get = "name", pure)] | ||||
|     pub fn get_name(f: &mut RhaiFlow) -> String { | ||||
|         f.name.clone() | ||||
|     } | ||||
|     #[rhai_fn(get = "status", pure)] | ||||
|     pub fn get_status(f: &mut RhaiFlow) -> String { | ||||
|         f.status.clone() | ||||
|     } | ||||
|     #[rhai_fn(get = "steps", pure)] | ||||
|     pub fn get_steps(f: &mut RhaiFlow) -> Array { | ||||
|         f.steps.clone().into_iter().map(Dynamic::from).collect() | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub fn register_flow_rhai_module(engine: &mut Engine) { | ||||
|     engine.build_type::<RhaiFlow>(); | ||||
|     let mut module = exported_module!(rhai_flow_module); | ||||
|  | ||||
|     register_authorized_create_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "save_flow", | ||||
|         resource_type_str: "Flow", | ||||
|         rhai_return_rust_type: heromodels::models::flow::flow::Flow | ||||
|     ); | ||||
|     register_authorized_get_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "get_flow", | ||||
|         resource_type_str: "Flow", | ||||
|         rhai_return_rust_type: heromodels::models::flow::flow::Flow | ||||
|     ); | ||||
|     register_authorized_delete_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "delete_flow", | ||||
|         resource_type_str: "Flow", | ||||
|         rhai_return_rust_type: heromodels::models::flow::flow::Flow | ||||
|     ); | ||||
|  | ||||
|     engine.register_global_module(module.into()); | ||||
| } | ||||
rhailib/_archive/flow/flow_step.rs (new file, 86 lines)
							| @@ -0,0 +1,86 @@ | ||||
| use heromodels::db::Db; | ||||
| use macros::{ | ||||
|     register_authorized_create_by_id_fn, register_authorized_delete_by_id_fn, | ||||
|     register_authorized_get_by_id_fn, | ||||
| }; | ||||
| use rhai::plugin::*; | ||||
| use rhai::{Dynamic, Engine, EvalAltResult, Module, INT}; | ||||
| use std::mem; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::Collection; | ||||
| use heromodels::models::flow::flow_step::FlowStep; | ||||
|  | ||||
| type RhaiFlowStep = FlowStep; | ||||
|  | ||||
| #[export_module] | ||||
| mod rhai_flow_step_module { | ||||
|     use super::{RhaiFlowStep, INT}; | ||||
|  | ||||
|     #[rhai_fn(name = "new_flow_step", return_raw)] | ||||
|     pub fn new_flow_step() -> Result<RhaiFlowStep, Box<EvalAltResult>> { | ||||
|         Ok(FlowStep::default()) | ||||
|     } | ||||
|  | ||||
|     // --- Setters --- | ||||
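|     // Same take/apply/replace pattern as the Flow setters. | ||||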
|     #[rhai_fn(name = "description", return_raw)] | ||||
|     pub fn set_description( | ||||
|         step: &mut RhaiFlowStep, | ||||
|         description: String, | ||||
|     ) -> Result<RhaiFlowStep, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(step); | ||||
|         *step = owned.description(description); | ||||
|         Ok(step.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "status", return_raw)] | ||||
|     pub fn set_status( | ||||
|         step: &mut RhaiFlowStep, | ||||
|         status: String, | ||||
|     ) -> Result<RhaiFlowStep, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(step); | ||||
|         *step = owned.status(status); | ||||
|         Ok(step.clone()) | ||||
|     } | ||||
|  | ||||
|     // --- Getters --- | ||||
|     #[rhai_fn(get = "id", pure)] | ||||
|     pub fn get_id(s: &mut RhaiFlowStep) -> INT { | ||||
|         s.base_data.id as INT | ||||
|     } | ||||
|     #[rhai_fn(get = "description", pure)] | ||||
|     pub fn get_description(s: &mut RhaiFlowStep) -> Option<String> { | ||||
|         s.description.clone() | ||||
|     } | ||||
|     #[rhai_fn(get = "status", pure)] | ||||
|     pub fn get_status(s: &mut RhaiFlowStep) -> String { | ||||
|         s.status.clone() | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub fn register_flow_step_rhai_module(engine: &mut Engine) { | ||||
|     engine.build_type::<RhaiFlowStep>(); | ||||
|     let mut module = exported_module!(rhai_flow_step_module); | ||||
|  | ||||
|     register_authorized_create_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "save_flow_step", | ||||
|         resource_type_str: "FlowStep", | ||||
|         rhai_return_rust_type: heromodels::models::flow::flow_step::FlowStep | ||||
|     ); | ||||
|     register_authorized_get_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "get_flow_step", | ||||
|         resource_type_str: "FlowStep", | ||||
|         rhai_return_rust_type: heromodels::models::flow::flow_step::FlowStep | ||||
|     ); | ||||
|     register_authorized_delete_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "delete_flow_step", | ||||
|         resource_type_str: "FlowStep", | ||||
|         rhai_return_rust_type: heromodels::models::flow::flow_step::FlowStep | ||||
|     ); | ||||
|  | ||||
|     engine.register_global_module(module.into()); | ||||
| } | ||||
17  rhailib/_archive/flow/mod.rs  Normal file
							| @@ -0,0 +1,17 @@ | ||||
| use rhai::Engine; | ||||
|  | ||||
| pub mod flow; | ||||
| pub mod flow_step; | ||||
| pub mod signature_requirement; | ||||
| pub mod orchestrated_flow; | ||||
| pub mod orchestrated_flow_step; | ||||
|  | ||||
| // Re-export the orchestrated models for easy access | ||||
| pub use orchestrated_flow::{OrchestratedFlow, OrchestratorError, FlowStatus}; | ||||
| pub use orchestrated_flow_step::OrchestratedFlowStep; | ||||
|  | ||||
| pub fn register_flow_rhai_modules(engine: &mut Engine) { | ||||
|     flow::register_flow_rhai_module(engine); | ||||
|     flow_step::register_flow_step_rhai_module(engine); | ||||
|     signature_requirement::register_signature_requirement_rhai_module(engine); | ||||
| } | ||||
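
A minimal wiring sketch (not part of the archived files) tying the module together. It assumes the code lives in the same crate, so `register_flow_rhai_modules` and the script-facing functions above are in scope, and that the authorized `save_*`/`get_*`/`delete_*` functions additionally require a configured database context before they are usable:

```rust
use rhai::Engine;

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = Engine::new();
    // One call registers the flow, flow_step, and signature_requirement modules.
    register_flow_rhai_modules(&mut engine);

    engine.run(
        r#"
            // The Rust setters take `&mut` and also return the updated value,
            // so plain method calls are enough to build up a step.
            let step = new_flow_step();
            step.description("Collect signatures");
            step.status("pending");
            print(step.status); // "pending"
        "#,
    )?;
    Ok(())
}
```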
154  rhailib/_archive/flow/orchestrated_flow.rs  Normal file
							| @@ -0,0 +1,154 @@ | ||||
| //! Orchestrated Flow model for DAG-based workflow execution | ||||
|  | ||||
| use heromodels_core::BaseModelData; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use std::collections::HashSet; | ||||
| use thiserror::Error; | ||||
|  | ||||
| use super::orchestrated_flow_step::OrchestratedFlowStep; | ||||
|  | ||||
| /// Extended Flow with orchestrator-specific steps | ||||
| #[derive(Debug, Clone, Serialize, Deserialize)] | ||||
| pub struct OrchestratedFlow { | ||||
|     /// Base model data (id, created_at, updated_at) | ||||
|     pub base_data: BaseModelData, | ||||
|      | ||||
|     /// Name of the flow | ||||
|     pub name: String, | ||||
|      | ||||
|     /// Orchestrated steps with dependencies | ||||
|     pub orchestrated_steps: Vec<OrchestratedFlowStep>, | ||||
| } | ||||
|  | ||||
| impl OrchestratedFlow { | ||||
|     /// Create a new orchestrated flow | ||||
|     pub fn new(name: &str) -> Self { | ||||
|         Self { | ||||
|             base_data: BaseModelData::new(), | ||||
|             name: name.to_string(), | ||||
|             orchestrated_steps: Vec::new(), | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// Add a step to the flow | ||||
|     pub fn add_step(mut self, step: OrchestratedFlowStep) -> Self { | ||||
|         self.orchestrated_steps.push(step); | ||||
|         self | ||||
|     } | ||||
|      | ||||
|     /// Get the flow ID | ||||
|     pub fn id(&self) -> u32 { | ||||
|         self.base_data.id | ||||
|     } | ||||
|      | ||||
|     /// Validate the DAG structure (no cycles) | ||||
|     pub fn validate_dag(&self) -> Result<(), OrchestratorError> { | ||||
|         let mut visited = HashSet::new(); | ||||
|         let mut rec_stack = HashSet::new(); | ||||
|          | ||||
|         for step in &self.orchestrated_steps { | ||||
|             if !visited.contains(&step.id()) { | ||||
|                 if self.has_cycle(step.id(), &mut visited, &mut rec_stack)? { | ||||
|                     return Err(OrchestratorError::CyclicDependency); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|          | ||||
|         Ok(()) | ||||
|     } | ||||
|      | ||||
|     /// Check for cycles in the dependency graph | ||||
|     fn has_cycle( | ||||
|         &self, | ||||
|         step_id: u32, | ||||
|         visited: &mut HashSet<u32>, | ||||
|         rec_stack: &mut HashSet<u32>, | ||||
|     ) -> Result<bool, OrchestratorError> { | ||||
|         visited.insert(step_id); | ||||
|         rec_stack.insert(step_id); | ||||
|          | ||||
|         let step = self.orchestrated_steps | ||||
|             .iter() | ||||
|             .find(|s| s.id() == step_id) | ||||
|             .ok_or(OrchestratorError::StepNotFound(step_id))?; | ||||
|          | ||||
|         for &dep_id in &step.depends_on { | ||||
|             if !visited.contains(&dep_id) { | ||||
|                 if self.has_cycle(dep_id, visited, rec_stack)? { | ||||
|                     return Ok(true); | ||||
|                 } | ||||
|             } else if rec_stack.contains(&dep_id) { | ||||
|                 return Ok(true); | ||||
|             } | ||||
|         } | ||||
|          | ||||
|         rec_stack.remove(&step_id); | ||||
|         Ok(false) | ||||
|     } | ||||
| } | ||||
|  | ||||
| /// Orchestrator errors | ||||
| #[derive(Error, Debug)] | ||||
| pub enum OrchestratorError { | ||||
|     #[error("Database error: {0}")] | ||||
|     DatabaseError(String), | ||||
|      | ||||
|     #[error("Executor error: {0}")] | ||||
|     ExecutorError(String), | ||||
|      | ||||
|     #[error("No ready steps found - possible deadlock")] | ||||
|     NoReadySteps, | ||||
|      | ||||
|     #[error("Step {0} failed: {1:?}")] | ||||
|     StepFailed(u32, Option<String>), | ||||
|      | ||||
|     #[error("Cyclic dependency detected in workflow")] | ||||
|     CyclicDependency, | ||||
|      | ||||
|     #[error("Step {0} not found")] | ||||
|     StepNotFound(u32), | ||||
|      | ||||
|     #[error("Invalid dependency: step {0} depends on non-existent step {1}")] | ||||
|     InvalidDependency(u32, u32), | ||||
| } | ||||
|  | ||||
| /// Flow execution status | ||||
| #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] | ||||
| pub enum FlowStatus { | ||||
|     Pending, | ||||
|     Running, | ||||
|     Completed, | ||||
|     Failed, | ||||
| } | ||||
|  | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
|  | ||||
|     #[test] | ||||
|     fn test_orchestrated_flow_builder() { | ||||
|         let step1 = OrchestratedFlowStep::new("step1").script("let x = 1;"); | ||||
|         let step2 = OrchestratedFlowStep::new("step2").script("let y = 2;"); | ||||
|          | ||||
|         let flow = OrchestratedFlow::new("test_flow") | ||||
|             .add_step(step1) | ||||
|             .add_step(step2); | ||||
|          | ||||
|         assert_eq!(flow.name, "test_flow"); | ||||
|         assert_eq!(flow.orchestrated_steps.len(), 2); | ||||
|     } | ||||
|      | ||||
|     #[test] | ||||
|     fn test_dag_validation_no_cycle() { | ||||
|         let step1 = OrchestratedFlowStep::new("step1").script("let x = 1;"); | ||||
|         let step2 = OrchestratedFlowStep::new("step2") | ||||
|             .script("let y = 2;") | ||||
|             .depends_on(step1.id()); | ||||
|          | ||||
|         let flow = OrchestratedFlow::new("test_flow") | ||||
|             .add_step(step1) | ||||
|             .add_step(step2); | ||||
|          | ||||
|         assert!(flow.validate_dag().is_ok()); | ||||
|     } | ||||
| } | ||||
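
A hedged companion sketch for the failing case, not part of the original test module. It assumes `BaseModelData::new()` assigns step IDs in a way that lets two steps reference each other; even if the IDs collided, the graph below would still contain a cycle, so the assertion holds either way:

```rust
#[test]
fn test_dag_validation_with_cycle() {
    // step_a -> step_b -> step_a forms a cycle.
    let step_a = OrchestratedFlowStep::new("a").script("let x = 1;");
    let step_b = OrchestratedFlowStep::new("b")
        .script("let y = 2;")
        .depends_on(step_a.id());
    let step_a = step_a.depends_on(step_b.id());

    let flow = OrchestratedFlow::new("cyclic_flow")
        .add_step(step_a)
        .add_step(step_b);

    assert!(matches!(
        flow.validate_dag(),
        Err(OrchestratorError::CyclicDependency)
    ));
}
```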
124  rhailib/_archive/flow/orchestrated_flow_step.rs  Normal file
							| @@ -0,0 +1,124 @@ | ||||
| //! Orchestrated Flow Step model for DAG-based workflow execution | ||||
|  | ||||
| use heromodels_core::BaseModelData; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use std::collections::HashMap; | ||||
|  | ||||
| /// Extended FlowStep with orchestrator-specific fields | ||||
| #[derive(Debug, Clone, Serialize, Deserialize)] | ||||
| pub struct OrchestratedFlowStep { | ||||
|     /// Base model data (id, created_at, updated_at) | ||||
|     pub base_data: BaseModelData, | ||||
|      | ||||
|     /// Name of the flow step | ||||
|     pub name: String, | ||||
|      | ||||
|     /// Rhai script to execute | ||||
|     pub script: String, | ||||
|      | ||||
|     /// IDs of steps this step depends on | ||||
|     pub depends_on: Vec<u32>, | ||||
|      | ||||
|     /// Execution context (circle) | ||||
|     pub context_id: String, | ||||
|      | ||||
|     /// Target worker for execution | ||||
|     pub worker_id: String, | ||||
|      | ||||
|     /// Input parameters | ||||
|     pub inputs: HashMap<String, String>, | ||||
|      | ||||
|     /// Output results | ||||
|     pub outputs: HashMap<String, String>, | ||||
| } | ||||
|  | ||||
| impl OrchestratedFlowStep { | ||||
|     /// Create a new orchestrated flow step | ||||
|     pub fn new(name: &str) -> Self { | ||||
|         Self { | ||||
|             base_data: BaseModelData::new(), | ||||
|             name: name.to_string(), | ||||
|             script: String::new(), | ||||
|             depends_on: Vec::new(), | ||||
|             context_id: String::new(), | ||||
|             worker_id: String::new(), | ||||
|             inputs: HashMap::new(), | ||||
|             outputs: HashMap::new(), | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// Set the script content | ||||
|     pub fn script(mut self, script: &str) -> Self { | ||||
|         self.script = script.to_string(); | ||||
|         self | ||||
|     } | ||||
|      | ||||
|     /// Add a dependency on another step | ||||
|     pub fn depends_on(mut self, step_id: u32) -> Self { | ||||
|         self.depends_on.push(step_id); | ||||
|         self | ||||
|     } | ||||
|      | ||||
|     /// Set the context ID | ||||
|     pub fn context_id(mut self, context_id: &str) -> Self { | ||||
|         self.context_id = context_id.to_string(); | ||||
|         self | ||||
|     } | ||||
|      | ||||
|     /// Set the worker ID | ||||
|     pub fn worker_id(mut self, worker_id: &str) -> Self { | ||||
|         self.worker_id = worker_id.to_string(); | ||||
|         self | ||||
|     } | ||||
|      | ||||
|     /// Add an input parameter | ||||
|     pub fn input(mut self, key: &str, value: &str) -> Self { | ||||
|         self.inputs.insert(key.to_string(), value.to_string()); | ||||
|         self | ||||
|     } | ||||
|      | ||||
|     /// Get the step ID | ||||
|     pub fn id(&self) -> u32 { | ||||
|         self.base_data.id | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
|  | ||||
|     #[test] | ||||
|     fn test_orchestrated_flow_step_builder() { | ||||
|         let step = OrchestratedFlowStep::new("test_step") | ||||
|             .script("let x = 1;") | ||||
|             .context_id("test_context") | ||||
|             .worker_id("test_worker") | ||||
|             .input("key1", "value1"); | ||||
|          | ||||
|         assert_eq!(step.name, "test_step"); | ||||
|         assert_eq!(step.script, "let x = 1;"); | ||||
|         assert_eq!(step.context_id, "test_context"); | ||||
|         assert_eq!(step.worker_id, "test_worker"); | ||||
|         assert_eq!(step.inputs.get("key1"), Some(&"value1".to_string())); | ||||
|     } | ||||
| } | ||||
145  rhailib/_archive/flow/signature_requirement.rs  Normal file
							| @@ -0,0 +1,145 @@ | ||||
| use heromodels::db::Db; | ||||
| use macros::{ | ||||
|     register_authorized_create_by_id_fn, register_authorized_delete_by_id_fn, | ||||
|     register_authorized_get_by_id_fn, | ||||
| }; | ||||
| use rhai::plugin::*; | ||||
| use rhai::{Dynamic, Engine, EvalAltResult, Module, INT}; | ||||
| use std::mem; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use heromodels::db::hero::OurDB; | ||||
| use heromodels::db::Collection; | ||||
| use heromodels::models::flow::signature_requirement::SignatureRequirement; | ||||
|  | ||||
| type RhaiSignatureRequirement = SignatureRequirement; | ||||
|  | ||||
| #[export_module] | ||||
| mod rhai_signature_requirement_module { | ||||
|     use super::{RhaiSignatureRequirement, INT}; | ||||
|  | ||||
|     #[rhai_fn(name = "new_signature_requirement", return_raw)] | ||||
|     pub fn new_signature_requirement() -> Result<RhaiSignatureRequirement, Box<EvalAltResult>> { | ||||
|         Ok(SignatureRequirement::default()) | ||||
|     } | ||||
|  | ||||
|     // --- Setters --- | ||||
|     #[rhai_fn(name = "flow_step_id", return_raw)] | ||||
|     pub fn set_flow_step_id( | ||||
|         sr: &mut RhaiSignatureRequirement, | ||||
|         flow_step_id: INT, | ||||
|     ) -> Result<RhaiSignatureRequirement, Box<EvalAltResult>> { | ||||
|         let mut owned = std::mem::take(sr); | ||||
|         owned.flow_step_id = flow_step_id as u32; | ||||
|         *sr = owned; | ||||
|         Ok(sr.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "public_key", return_raw)] | ||||
|     pub fn set_public_key( | ||||
|         sr: &mut RhaiSignatureRequirement, | ||||
|         public_key: String, | ||||
|     ) -> Result<RhaiSignatureRequirement, Box<EvalAltResult>> { | ||||
|         let mut owned = std::mem::take(sr); | ||||
|         owned.public_key = public_key; | ||||
|         *sr = owned; | ||||
|         Ok(sr.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "message", return_raw)] | ||||
|     pub fn set_message( | ||||
|         sr: &mut RhaiSignatureRequirement, | ||||
|         message: String, | ||||
|     ) -> Result<RhaiSignatureRequirement, Box<EvalAltResult>> { | ||||
|         let mut owned = std::mem::take(sr); | ||||
|         owned.message = message; | ||||
|         *sr = owned; | ||||
|         Ok(sr.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "signed_by", return_raw)] | ||||
|     pub fn set_signed_by( | ||||
|         sr: &mut RhaiSignatureRequirement, | ||||
|         signed_by: String, | ||||
|     ) -> Result<RhaiSignatureRequirement, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(sr); | ||||
|         *sr = owned.signed_by(signed_by); | ||||
|         Ok(sr.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "signature", return_raw)] | ||||
|     pub fn set_signature( | ||||
|         sr: &mut RhaiSignatureRequirement, | ||||
|         signature: String, | ||||
|     ) -> Result<RhaiSignatureRequirement, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(sr); | ||||
|         *sr = owned.signature(signature); | ||||
|         Ok(sr.clone()) | ||||
|     } | ||||
|  | ||||
|     #[rhai_fn(name = "status", return_raw)] | ||||
|     pub fn set_status( | ||||
|         sr: &mut RhaiSignatureRequirement, | ||||
|         status: String, | ||||
|     ) -> Result<RhaiSignatureRequirement, Box<EvalAltResult>> { | ||||
|         let owned = std::mem::take(sr); | ||||
|         *sr = owned.status(status); | ||||
|         Ok(sr.clone()) | ||||
|     } | ||||
|  | ||||
|     // --- Getters --- | ||||
|     #[rhai_fn(get = "id", pure)] | ||||
|     pub fn get_id(s: &mut RhaiSignatureRequirement) -> INT { | ||||
|         s.base_data.id as INT | ||||
|     } | ||||
|     #[rhai_fn(get = "flow_step_id", pure)] | ||||
|     pub fn get_flow_step_id(s: &mut RhaiSignatureRequirement) -> INT { | ||||
|         s.flow_step_id as INT | ||||
|     } | ||||
|     #[rhai_fn(get = "public_key", pure)] | ||||
|     pub fn get_public_key(s: &mut RhaiSignatureRequirement) -> String { | ||||
|         s.public_key.clone() | ||||
|     } | ||||
|     #[rhai_fn(get = "message", pure)] | ||||
|     pub fn get_message(s: &mut RhaiSignatureRequirement) -> String { | ||||
|         s.message.clone() | ||||
|     } | ||||
|     #[rhai_fn(get = "signed_by", pure)] | ||||
|     pub fn get_signed_by(s: &mut RhaiSignatureRequirement) -> Option<String> { | ||||
|         s.signed_by.clone() | ||||
|     } | ||||
|     #[rhai_fn(get = "signature", pure)] | ||||
|     pub fn get_signature(s: &mut RhaiSignatureRequirement) -> Option<String> { | ||||
|         s.signature.clone() | ||||
|     } | ||||
|     #[rhai_fn(get = "status", pure)] | ||||
|     pub fn get_status(s: &mut RhaiSignatureRequirement) -> String { | ||||
|         s.status.clone() | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub fn register_signature_requirement_rhai_module(engine: &mut Engine) { | ||||
|     engine.build_type::<RhaiSignatureRequirement>(); | ||||
|     let mut module = exported_module!(rhai_signature_requirement_module); | ||||
|  | ||||
|     register_authorized_create_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "save_signature_requirement", | ||||
|         resource_type_str: "SignatureRequirement", | ||||
|         rhai_return_rust_type: heromodels::models::flow::signature_requirement::SignatureRequirement | ||||
|     ); | ||||
|     register_authorized_get_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "get_signature_requirement", | ||||
|         resource_type_str: "SignatureRequirement", | ||||
|         rhai_return_rust_type: heromodels::models::flow::signature_requirement::SignatureRequirement | ||||
|     ); | ||||
|     register_authorized_delete_by_id_fn!( | ||||
|         module: &mut module, | ||||
|         rhai_fn_name: "delete_signature_requirement", | ||||
|         resource_type_str: "SignatureRequirement", | ||||
|         rhai_return_rust_type: heromodels::models::flow::signature_requirement::SignatureRequirement | ||||
|     ); | ||||
|  | ||||
|     engine.register_global_module(module.into()); | ||||
| } | ||||
51  rhailib/_archive/orchestrator/Cargo.toml  Normal file
							| @@ -0,0 +1,51 @@ | ||||
| [package] | ||||
| name = "orchestrator" | ||||
| version = "0.1.0" | ||||
| edition = "2021" | ||||
|  | ||||
| [dependencies] | ||||
| # Core async runtime | ||||
| tokio = { version = "1", features = ["macros", "rt-multi-thread", "sync", "time"] } | ||||
| async-trait = "0.1" | ||||
| futures = "0.3" | ||||
| futures-util = "0.3" | ||||
|  | ||||
| # Serialization | ||||
| serde = { version = "1.0", features = ["derive"] } | ||||
| serde_json = "1.0" | ||||
|  | ||||
| # Error handling | ||||
| thiserror = "1.0" | ||||
|  | ||||
| # Unique identifiers | ||||
| uuid = { version = "1.6", features = ["v4", "serde"] } | ||||
|  | ||||
| # Time handling | ||||
| chrono = { version = "0.4", features = ["serde"] } | ||||
|  | ||||
| # HTTP client | ||||
| reqwest = { version = "0.11", features = ["json"] } | ||||
|  | ||||
| # WebSocket client | ||||
| tokio-tungstenite = "0.20" | ||||
|  | ||||
| # Rhai scripting | ||||
| rhai = "1.21.0" | ||||
|  | ||||
| # Database and models | ||||
| heromodels = { path = "/Users/timurgordon/code/git.ourworld.tf/herocode/db/heromodels" } | ||||
| heromodels_core = { path = "/Users/timurgordon/code/git.ourworld.tf/herocode/db/heromodels_core" } | ||||
|  | ||||
| # DSL integration for flow models | ||||
| rhailib_dsl = { path = "../dsl" } | ||||
|  | ||||
| # Dispatcher integration | ||||
| rhai_dispatcher = { path = "../dispatcher" } | ||||
|  | ||||
| # Logging | ||||
| log = "0.4" | ||||
| tracing = "0.1" | ||||
| tracing-subscriber = "0.3" | ||||
|  | ||||
| [dev-dependencies] | ||||
| tokio-test = "0.4" | ||||
320  rhailib/_archive/orchestrator/README.md  Normal file
							| @@ -0,0 +1,320 @@ | ||||
| # Rationale for Orchestrator | ||||
|  | ||||
| We may have scripts that run asynchronously, depend on human input, or depend on other scripts to complete. We want to be able to implement high-level workflows of Rhai scripts. | ||||
|  | ||||
| ## Design | ||||
|  | ||||
| Directed Acyclic Graphs (DAGs) are a natural fit for representing such workflows. | ||||
|  | ||||
| ## Requirements | ||||
|  | ||||
| 1. Uses Directed Acyclic Graphs (DAGs) to represent workflows. | ||||
| 2. Each step in the workflow defines the script to execute, the inputs to pass to it, and the outputs to expect from it.  | ||||
| 3. Simplicity: outcomes are binary (success or failure), and input/output parameters are simple key-value pairs (see the sketch after this list). | ||||
| 4. Multiple steps can depend on the same step.  | ||||
| 5. Scripts are executed using [RhaiDispatcher](../dispatcher/README.md). | ||||
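|  | ||||
| As a sketch of what this looks like with the `OrchestratedFlowStep`/`OrchestratedFlow` builders from `rhailib/_archive/flow` (the `dep_1_result` binding follows the convention used in the examples): | ||||
|  | ||||
| ```rust | ||||
| let fetch = OrchestratedFlowStep::new("fetch") | ||||
|     .script(r#"let result = 21 * 2;"#) | ||||
|     .worker_id("worker_a") | ||||
|     .input("source", "inventory"); | ||||
|  | ||||
| let report = OrchestratedFlowStep::new("report") | ||||
|     .script(r#"let result = "value: " + dep_1_result;"#) | ||||
|     .depends_on(fetch.id()) | ||||
|     .worker_id("worker_b"); | ||||
|  | ||||
| let flow = OrchestratedFlow::new("demo").add_step(fetch).add_step(report); | ||||
| flow.validate_dag()?; // rejects cyclic dependencies up front | ||||
| ``` | ||||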
|  | ||||
| ## Architecture | ||||
|  | ||||
| The Orchestrator is a simple DAG-based workflow execution system that extends the heromodels flow structures to support workflows with dependencies and distributed script execution. | ||||
|  | ||||
| ### Core Component | ||||
|  | ||||
| ```mermaid | ||||
| graph TB | ||||
|     subgraph "Orchestrator" | ||||
|         O[Orchestrator] --> RE[RhaiExecutor Trait] | ||||
|         O --> DB[(Database)] | ||||
|     end | ||||
|      | ||||
|     subgraph "Executor Implementations" | ||||
|         RE --> RD[RhaiDispatcher] | ||||
|         RE --> WS[WebSocketClient] | ||||
|         RE --> HTTP[HttpClient] | ||||
|         RE --> LOCAL[LocalExecutor] | ||||
|     end | ||||
|      | ||||
|     subgraph "Data Models (heromodels)" | ||||
|         F[Flow] --> FS[FlowStep] | ||||
|         FS --> SR[SignatureRequirement] | ||||
|     end | ||||
|      | ||||
|     subgraph "Infrastructure" | ||||
|         RD --> RQ[Redis Queues] | ||||
|         RD --> W[Workers] | ||||
|         WS --> WSS[WebSocket Server] | ||||
|         HTTP --> API[REST API] | ||||
|     end | ||||
| ``` | ||||
|  | ||||
| ### Execution Abstraction | ||||
|  | ||||
| The orchestrator uses a trait-based approach for script execution, allowing different execution backends: | ||||
|  | ||||
| #### RhaiExecutor Trait | ||||
| ```rust | ||||
| use rhai_dispatcher::{PlayRequestBuilder, RhaiTaskDetails, RhaiDispatcherError}; | ||||
|  | ||||
| #[async_trait] | ||||
| pub trait RhaiExecutor { | ||||
|     async fn call(&self, request: PlayRequestBuilder<'_>) -> Result<RhaiTaskDetails, RhaiDispatcherError>; | ||||
| } | ||||
| ``` | ||||
|  | ||||
| #### Executor Implementations | ||||
|  | ||||
| **RhaiDispatcher Implementation:** | ||||
| ```rust | ||||
| pub struct DispatcherExecutor { | ||||
|     dispatcher: RhaiDispatcher, | ||||
| } | ||||
|  | ||||
| #[async_trait] | ||||
| impl RhaiExecutor for DispatcherExecutor { | ||||
|     async fn call(&self, request: PlayRequestBuilder<'_>) -> Result<RhaiTaskDetails, RhaiDispatcherError> { | ||||
|         // Use RhaiDispatcher to execute script via Redis queues | ||||
|         request.await_response().await | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| **WebSocket Client Implementation:** | ||||
| ```rust | ||||
| pub struct WebSocketExecutor { | ||||
|     ws_client: WebSocketClient, | ||||
|     endpoint: String, | ||||
| } | ||||
|  | ||||
| #[async_trait] | ||||
| impl RhaiExecutor for WebSocketExecutor { | ||||
|     async fn call(&self, request: PlayRequestBuilder<'_>) -> Result<RhaiTaskDetails, RhaiDispatcherError> { | ||||
|         // Build the PlayRequest and send via WebSocket | ||||
|         let play_request = request.build()?; | ||||
|          | ||||
|         // Send script execution request via WebSocket | ||||
|         let ws_message = serde_json::to_string(&play_request)?; | ||||
|         self.ws_client.send(ws_message).await?; | ||||
|          | ||||
|         // Wait for response and convert to RhaiTaskDetails | ||||
|         let response = self.ws_client.receive().await?; | ||||
|         serde_json::from_str(&response).map_err(RhaiDispatcherError::from) | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| **HTTP Client Implementation:** | ||||
| ```rust | ||||
| pub struct HttpExecutor { | ||||
|     http_client: reqwest::Client, | ||||
|     base_url: String, | ||||
| } | ||||
|  | ||||
| #[async_trait] | ||||
| impl RhaiExecutor for HttpExecutor { | ||||
|     async fn call(&self, request: PlayRequestBuilder<'_>) -> Result<RhaiTaskDetails, RhaiDispatcherError> { | ||||
|         // Build the PlayRequest and send via HTTP | ||||
|         let play_request = request.build()?; | ||||
|          | ||||
|         // Send script execution request via HTTP API | ||||
|         let response = self.http_client | ||||
|             .post(&format!("{}/execute", self.base_url)) | ||||
|             .json(&play_request) | ||||
|             .send() | ||||
|             .await?; | ||||
|              | ||||
|         response.json().await.map_err(RhaiDispatcherError::from) | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| **Local Executor Implementation:** | ||||
| ```rust | ||||
| pub struct LocalExecutor { | ||||
|     engine: Engine, | ||||
| } | ||||
|  | ||||
| #[async_trait] | ||||
| impl RhaiExecutor for LocalExecutor { | ||||
|     async fn call(&self, request: PlayRequestBuilder<'_>) -> Result<RhaiTaskDetails, RhaiDispatcherError> { | ||||
|         // Build the PlayRequest and execute locally | ||||
|         let play_request = request.build()?; | ||||
|          | ||||
|         // Execute script directly in local Rhai engine | ||||
|         let result = self.engine.eval::<String>(&play_request.script); | ||||
|          | ||||
|         // Convert to RhaiTaskDetails format. Match once: calling both | ||||
|         // `result.ok()` and `result.err()` would move the result twice. | ||||
|         let (status, output, error) = match result { | ||||
|             Ok(out) => ("completed".to_string(), Some(out), None), | ||||
|             Err(e) => ("error".to_string(), None, Some(e.to_string())), | ||||
|         }; | ||||
|         let task_details = RhaiTaskDetails { | ||||
|             task_id: play_request.id, | ||||
|             script: play_request.script, | ||||
|             status, | ||||
|             output, | ||||
|             error, | ||||
|             created_at: chrono::Utc::now(), | ||||
|             updated_at: chrono::Utc::now(), | ||||
|             caller_id: "local".to_string(), | ||||
|             context_id: play_request.context_id, | ||||
|             worker_id: "local".to_string(), | ||||
|         }; | ||||
|          | ||||
|         Ok(task_details) | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ### Data Model Extensions | ||||
|  | ||||
| Simple extensions to the existing heromodels flow structures: | ||||
|  | ||||
| #### Enhanced FlowStep Model | ||||
| ```rust | ||||
| // Extends heromodels::models::flow::FlowStep | ||||
| pub struct FlowStep { | ||||
|     // ... existing heromodels::models::flow::FlowStep fields | ||||
|     pub script: String,                // Rhai script to execute | ||||
|     pub depends_on: Vec<u32>,          // IDs of steps this step depends on | ||||
|     pub context_id: String,            // Execution context (circle) | ||||
|     pub inputs: HashMap<String, String>, // Input parameters | ||||
|     pub outputs: HashMap<String, String>, // Output results | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ### Execution Flow | ||||
|  | ||||
| ```mermaid | ||||
| sequenceDiagram | ||||
|     participant Client as Client | ||||
|     participant O as Orchestrator | ||||
|     participant RE as RhaiExecutor | ||||
|     participant DB as Database | ||||
|      | ||||
|     Client->>O: Submit Flow | ||||
|     O->>DB: Store flow and steps | ||||
|     O->>O: Find steps with no dependencies | ||||
|      | ||||
|     loop Until all steps complete | ||||
|         O->>RE: Execute ready steps | ||||
|         RE-->>O: Return results | ||||
|         O->>DB: Update step status | ||||
|         O->>O: Find newly ready steps | ||||
|     end | ||||
|      | ||||
|     O->>Client: Flow completed | ||||
| ``` | ||||
|  | ||||
| ### Flexible Orchestrator Implementation | ||||
|  | ||||
| ```rust | ||||
| use rhai_dispatcher::{RhaiDispatcher, PlayRequestBuilder}; | ||||
| use std::collections::HashSet; | ||||
|  | ||||
| pub struct Orchestrator<E: RhaiExecutor> { | ||||
|     executor: E, | ||||
|     database: Arc<Database>, | ||||
| } | ||||
|  | ||||
| impl<E: RhaiExecutor> Orchestrator<E> { | ||||
|     pub fn new(executor: E, database: Arc<Database>) -> Self { | ||||
|         Self { executor, database } | ||||
|     } | ||||
|      | ||||
|     pub async fn execute_flow(&self, flow: Flow) -> Result<(), OrchestratorError> { | ||||
|         // 1. Store flow in database | ||||
|         self.database.collection::<Flow>()?.set(&flow)?; | ||||
|          | ||||
|         // 2. Find steps with no dependencies (depends_on is empty) | ||||
|         let mut pending_steps: Vec<FlowStep> = flow.steps.clone(); | ||||
|         let mut completed_steps: HashSet<u32> = HashSet::new(); | ||||
|          | ||||
|         while !pending_steps.is_empty() { | ||||
|             // Find ready steps (all dependencies completed) | ||||
|             let ready_steps: Vec<FlowStep> = pending_steps | ||||
|                 .iter() | ||||
|                 .filter(|step| { | ||||
|                     step.depends_on.iter().all(|dep_id| completed_steps.contains(dep_id)) | ||||
|                 }) | ||||
|                 .cloned() | ||||
|                 .collect(); | ||||
|              | ||||
|             if ready_steps.is_empty() { | ||||
|                 return Err(OrchestratorError::NoReadySteps); | ||||
|             } | ||||
|              | ||||
|             // Execute ready steps concurrently | ||||
|             let mut tasks = Vec::new(); | ||||
|             for step in ready_steps { | ||||
|                 let executor = &self.executor; | ||||
|                 let task = async move { | ||||
|                     // Create PlayRequestBuilder for this step | ||||
|                     let request = RhaiDispatcher::new_play_request() | ||||
|                         .script(&step.script) | ||||
|                         .context_id(&step.context_id) | ||||
|                         .worker_id(&step.worker_id); | ||||
|                      | ||||
|                     // Execute via the trait | ||||
|                     let result = executor.call(request).await?; | ||||
|                     Ok((step.base_data.id, result)) | ||||
|                 }; | ||||
|                 tasks.push(task); | ||||
|             } | ||||
|              | ||||
|             // Wait for all ready steps to complete | ||||
|             let results = futures::future::try_join_all(tasks).await?; | ||||
|              | ||||
|             // Update step status and mark as completed | ||||
|             for (step_id, task_details) in results { | ||||
|                 if task_details.status == "completed" { | ||||
|                     completed_steps.insert(step_id); | ||||
|                     // Update step status in database | ||||
|                     // self.update_step_status(step_id, "completed", task_details.output).await?; | ||||
|                 } else { | ||||
|                     return Err(OrchestratorError::StepFailed(step_id, task_details.error)); | ||||
|                 } | ||||
|             } | ||||
|              | ||||
|             // Remove completed steps from pending | ||||
|             pending_steps.retain(|step| !completed_steps.contains(&step.base_data.id)); | ||||
|         } | ||||
|          | ||||
|         Ok(()) | ||||
|     } | ||||
|      | ||||
|     pub async fn get_flow_status(&self, flow_id: u32) -> Result<FlowStatus, OrchestratorError> { | ||||
|         // Return current status of flow and all its steps | ||||
|         let flow = self.database.collection::<Flow>()?.get(flow_id)?; | ||||
|         // Implementation would check step statuses and return overall flow status | ||||
|         Ok(FlowStatus::Running) // Placeholder | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub enum OrchestratorError { | ||||
|     DatabaseError(String), | ||||
|     ExecutorError(RhaiDispatcherError), | ||||
|     NoReadySteps, | ||||
|     StepFailed(u32, Option<String>), | ||||
| } | ||||
|  | ||||
| pub enum FlowStatus { | ||||
|     Pending, | ||||
|     Running, | ||||
|     Completed, | ||||
|     Failed, | ||||
| } | ||||
|  | ||||
| // Usage examples: | ||||
| // let orchestrator = Orchestrator::new(DispatcherExecutor::new(dispatcher), db); | ||||
| // let orchestrator = Orchestrator::new(WebSocketExecutor::new(ws_client), db); | ||||
| // let orchestrator = Orchestrator::new(HttpExecutor::new(http_client), db); | ||||
| // let orchestrator = Orchestrator::new(LocalExecutor::new(engine), db); | ||||
| ``` | ||||
|  | ||||
| ### Key Features | ||||
|  | ||||
| 1. **DAG Validation**: Ensures no circular dependencies exist in the `depends_on` relationships | ||||
| 2. **Parallel Execution**: Executes independent steps concurrently via multiple workers | ||||
| 3. **Simple Dependencies**: Each step lists the step IDs it depends on | ||||
| 4. **RhaiDispatcher Integration**: Uses existing dispatcher for script execution | ||||
| 5. **Binary Outcomes**: Steps either succeed or fail (keeping it simple as per requirements) | ||||
|  | ||||
| This simple architecture provides DAG-based workflow execution while leveraging the existing rhailib infrastructure and keeping complexity minimal. | ||||
|  | ||||
|  | ||||
283  rhailib/_archive/orchestrator/examples/basic_workflow.rs  Normal file
							| @@ -0,0 +1,283 @@ | ||||
| //! Basic workflow example demonstrating orchestrator usage | ||||
|  | ||||
| use orchestrator::{ | ||||
|     interface::LocalInterface, | ||||
|     orchestrator::Orchestrator, | ||||
|     OrchestratedFlow, OrchestratedFlowStep, FlowStatus, | ||||
| }; | ||||
| use std::sync::Arc; | ||||
| use std::collections::HashMap; | ||||
|  | ||||
| #[tokio::main] | ||||
| async fn main() -> Result<(), Box<dyn std::error::Error>> { | ||||
|     // Initialize logging | ||||
|     tracing_subscriber::fmt().init(); | ||||
|      | ||||
|     // Create executor | ||||
|     let executor = Arc::new(LocalInterface::new()); | ||||
|      | ||||
|     // Create orchestrator | ||||
|     let orchestrator = Orchestrator::new(executor); | ||||
|      | ||||
|     println!("🚀 Starting basic workflow example"); | ||||
|      | ||||
|     // Example 1: Simple sequential workflow | ||||
|     println!("\n📋 Example 1: Sequential Workflow"); | ||||
|     let sequential_flow = create_sequential_workflow(); | ||||
|     let flow_id = orchestrator.execute_flow(sequential_flow).await?; | ||||
|      | ||||
|     // Wait for completion and show results | ||||
|     wait_and_show_results(&orchestrator, flow_id, "Sequential").await; | ||||
|      | ||||
|     // Example 2: Parallel workflow with convergence | ||||
|     println!("\n📋 Example 2: Parallel Workflow"); | ||||
|     let parallel_flow = create_parallel_workflow(); | ||||
|     let flow_id = orchestrator.execute_flow(parallel_flow).await?; | ||||
|      | ||||
|     // Wait for completion and show results | ||||
|     wait_and_show_results(&orchestrator, flow_id, "Parallel").await; | ||||
|      | ||||
|     // Example 3: Complex workflow with multiple dependencies | ||||
|     println!("\n📋 Example 3: Complex Workflow"); | ||||
|     let complex_flow = create_complex_workflow(); | ||||
|     let flow_id = orchestrator.execute_flow(complex_flow).await?; | ||||
|      | ||||
|     // Wait for completion and show results | ||||
|     wait_and_show_results(&orchestrator, flow_id, "Complex").await; | ||||
|      | ||||
|     // Clean up completed flows | ||||
|     orchestrator.cleanup_completed_flows().await; | ||||
|      | ||||
|     println!("\n✅ All examples completed successfully!"); | ||||
|      | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| /// Create a simple sequential workflow | ||||
| fn create_sequential_workflow() -> OrchestratedFlow { | ||||
|     let step1 = OrchestratedFlowStep::new("data_preparation") | ||||
|         .script(r#" | ||||
|             let data = [1, 2, 3, 4, 5]; | ||||
|             let sum = 0; | ||||
|             for item in data { | ||||
|                 sum += item; | ||||
|             } | ||||
|             let result = sum; | ||||
|         "#) | ||||
|         .context_id("sequential_context") | ||||
|         .worker_id("worker_1"); | ||||
|      | ||||
|     let step2 = OrchestratedFlowStep::new("data_processing") | ||||
|         .script(r#" | ||||
|             let processed_data = dep_1_result * 2; | ||||
|             let result = processed_data; | ||||
|         "#) | ||||
|         .depends_on(step1.id()) | ||||
|         .context_id("sequential_context") | ||||
|         .worker_id("worker_2"); | ||||
|      | ||||
|     let step3 = OrchestratedFlowStep::new("data_output") | ||||
|         .script(r#" | ||||
|             let final_result = "Processed value: " + dep_2_result; | ||||
|             let result = final_result; | ||||
|         "#) | ||||
|         .depends_on(step2.id()) | ||||
|         .context_id("sequential_context") | ||||
|         .worker_id("worker_3"); | ||||
|      | ||||
|     OrchestratedFlow::new("sequential_workflow") | ||||
|         .add_step(step1) | ||||
|         .add_step(step2) | ||||
|         .add_step(step3) | ||||
| } | ||||
|  | ||||
| /// Create a parallel workflow with convergence | ||||
| fn create_parallel_workflow() -> OrchestratedFlow { | ||||
|     let step1 = OrchestratedFlowStep::new("fetch_user_data") | ||||
|         .script(r#" | ||||
|             let user_id = 12345; | ||||
|             let user_name = "Alice"; | ||||
|             let result = user_name; | ||||
|         "#) | ||||
|         .context_id("parallel_context") | ||||
|         .worker_id("user_service"); | ||||
|      | ||||
|     let step2 = OrchestratedFlowStep::new("fetch_order_data") | ||||
|         .script(r#" | ||||
|             let order_id = 67890; | ||||
|             let order_total = 99.99; | ||||
|             let result = order_total; | ||||
|         "#) | ||||
|         .context_id("parallel_context") | ||||
|         .worker_id("order_service"); | ||||
|      | ||||
|     let step3 = OrchestratedFlowStep::new("fetch_inventory_data") | ||||
|         .script(r#" | ||||
|             let product_id = "ABC123"; | ||||
|             let stock_count = 42; | ||||
|             let result = stock_count; | ||||
|         "#) | ||||
|         .context_id("parallel_context") | ||||
|         .worker_id("inventory_service"); | ||||
|      | ||||
|     let step4 = OrchestratedFlowStep::new("generate_report") | ||||
|         .script(r#" | ||||
|             let report = "User: " + dep_1_result +  | ||||
|                         ", Order Total: $" + dep_2_result +  | ||||
|                         ", Stock: " + dep_3_result + " units"; | ||||
|             let result = report; | ||||
|         "#) | ||||
|         .depends_on(step1.id()) | ||||
|         .depends_on(step2.id()) | ||||
|         .depends_on(step3.id()) | ||||
|         .context_id("parallel_context") | ||||
|         .worker_id("report_service"); | ||||
|      | ||||
|     OrchestratedFlow::new("parallel_workflow") | ||||
|         .add_step(step1) | ||||
|         .add_step(step2) | ||||
|         .add_step(step3) | ||||
|         .add_step(step4) | ||||
| } | ||||
|  | ||||
| /// Create a complex workflow with multiple dependency levels | ||||
| fn create_complex_workflow() -> OrchestratedFlow { | ||||
|     // Level 1: Initial data gathering | ||||
|     let step1 = OrchestratedFlowStep::new("load_config") | ||||
|         .script(r#" | ||||
|             let config = #{ | ||||
|                 api_url: "https://api.example.com", | ||||
|                 timeout: 30, | ||||
|                 retries: 3 | ||||
|             }; | ||||
|             let result = config.api_url; | ||||
|         "#) | ||||
|         .context_id("complex_context") | ||||
|         .worker_id("config_service"); | ||||
|      | ||||
|     let step2 = OrchestratedFlowStep::new("authenticate") | ||||
|         .script(r#" | ||||
|             let token = "auth_token_12345"; | ||||
|             let expires_in = 3600; | ||||
|             let result = token; | ||||
|         "#) | ||||
|         .context_id("complex_context") | ||||
|         .worker_id("auth_service"); | ||||
|      | ||||
|     // Level 2: Data fetching (depends on config and auth) | ||||
|     let step3 = OrchestratedFlowStep::new("fetch_customers") | ||||
|         .script(r#" | ||||
|             let api_url = dep_1_result; | ||||
|             let auth_token = dep_2_result; | ||||
|             let customers = ["Customer A", "Customer B", "Customer C"]; | ||||
|             let result = customers.len(); | ||||
|         "#) | ||||
|         .depends_on(step1.id()) | ||||
|         .depends_on(step2.id()) | ||||
|         .context_id("complex_context") | ||||
|         .worker_id("customer_service"); | ||||
|      | ||||
|     let step4 = OrchestratedFlowStep::new("fetch_products") | ||||
|         .script(r#" | ||||
|             let api_url = dep_1_result; | ||||
|             let auth_token = dep_2_result; | ||||
|             let products = ["Product X", "Product Y", "Product Z"]; | ||||
|             let result = products.len(); | ||||
|         "#) | ||||
|         .depends_on(step1.id()) | ||||
|         .depends_on(step2.id()) | ||||
|         .context_id("complex_context") | ||||
|         .worker_id("product_service"); | ||||
|      | ||||
|     // Level 3: Data processing (depends on fetched data) | ||||
|     let step5 = OrchestratedFlowStep::new("calculate_metrics") | ||||
|         .script(r#" | ||||
|             let customer_count = dep_3_result; | ||||
|             let product_count = dep_4_result; | ||||
|             let ratio = customer_count / product_count; | ||||
|             let result = ratio; | ||||
|         "#) | ||||
|         .depends_on(step3.id()) | ||||
|         .depends_on(step4.id()) | ||||
|         .context_id("complex_context") | ||||
|         .worker_id("analytics_service"); | ||||
|      | ||||
|     // Level 4: Final reporting | ||||
|     let step6 = OrchestratedFlowStep::new("generate_dashboard") | ||||
|         .script(r#" | ||||
|             let customer_count = dep_3_result; | ||||
|             let product_count = dep_4_result; | ||||
|             let ratio = dep_5_result; | ||||
|             let dashboard = "Dashboard: " + customer_count + " customers, " +  | ||||
|                            product_count + " products, ratio: " + ratio; | ||||
|             let result = dashboard; | ||||
|         "#) | ||||
|         .depends_on(step3.id()) | ||||
|         .depends_on(step4.id()) | ||||
|         .depends_on(step5.id()) | ||||
|         .context_id("complex_context") | ||||
|         .worker_id("dashboard_service"); | ||||
|      | ||||
|     OrchestratedFlow::new("complex_workflow") | ||||
|         .add_step(step1) | ||||
|         .add_step(step2) | ||||
|         .add_step(step3) | ||||
|         .add_step(step4) | ||||
|         .add_step(step5) | ||||
|         .add_step(step6) | ||||
| } | ||||
|  | ||||
| /// Wait for flow completion and show results | ||||
| async fn wait_and_show_results( | ||||
|     orchestrator: &Orchestrator<LocalInterface>, | ||||
|     flow_id: u32, | ||||
|     workflow_name: &str, | ||||
| ) { | ||||
|     println!("  ⏳ Executing {} workflow (ID: {})...", workflow_name, flow_id); | ||||
|      | ||||
|     // Poll for completion | ||||
|     loop { | ||||
|         tokio::time::sleep(tokio::time::Duration::from_millis(50)).await; | ||||
|          | ||||
|         if let Some(execution) = orchestrator.get_flow_status(flow_id).await { | ||||
|             match execution.status { | ||||
|                 FlowStatus::Completed => { | ||||
|                     println!("  ✅ {} workflow completed successfully!", workflow_name); | ||||
|                     println!("     📊 Executed {} steps in {:?}",  | ||||
|                             execution.completed_steps.len(), | ||||
|                             execution.completed_at.unwrap() - execution.started_at); | ||||
|                      | ||||
|                     // Show step results | ||||
|                     for (step_id, outputs) in &execution.step_results { | ||||
|                         if let Some(result) = outputs.get("result") { | ||||
|                             let step_name = execution.flow.orchestrated_steps | ||||
|                                 .iter() | ||||
|                                 .find(|s| s.id() == *step_id) | ||||
|                                 .map(|s| s.name.as_str()) | ||||
|                                 .unwrap_or("unknown"); | ||||
|                             println!("     📝 Step '{}': {}", step_name, result); | ||||
|                         } | ||||
|                     } | ||||
|                     break; | ||||
|                 } | ||||
|                 FlowStatus::Failed => { | ||||
|                     println!("  ❌ {} workflow failed!", workflow_name); | ||||
|                     if !execution.failed_steps.is_empty() { | ||||
|                         println!("     💥 Failed steps: {:?}", execution.failed_steps); | ||||
|                     } | ||||
|                     break; | ||||
|                 } | ||||
|                 FlowStatus::Running => { | ||||
|                     print!("."); | ||||
|                     std::io::Write::flush(&mut std::io::stdout()).unwrap(); | ||||
|                 } | ||||
|                 FlowStatus::Pending => { | ||||
|                     println!("  ⏸️  {} workflow is pending...", workflow_name); | ||||
|                 } | ||||
|             } | ||||
|         } else { | ||||
|             println!("  ❓ {} workflow not found!", workflow_name); | ||||
|             break; | ||||
|         } | ||||
|     } | ||||
| } | ||||
61  rhailib/_archive/orchestrator/src/interface/dispatcher.rs  Normal file
							| @@ -0,0 +1,61 @@ | ||||
| //! Dispatcher interface implementation using RhaiDispatcher | ||||
|  | ||||
| use crate::RhaiInterface; | ||||
| use async_trait::async_trait; | ||||
| use rhai_dispatcher::{PlayRequest, RhaiDispatcher, RhaiDispatcherError}; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| /// Dispatcher-based interface using RhaiDispatcher | ||||
| pub struct DispatcherInterface { | ||||
|     dispatcher: Arc<RhaiDispatcher>, | ||||
| } | ||||
|  | ||||
| impl DispatcherInterface { | ||||
|     /// Create a new dispatcher interface | ||||
|     pub fn new(dispatcher: Arc<RhaiDispatcher>) -> Self { | ||||
|         Self { dispatcher } | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[async_trait] | ||||
| impl RhaiInterface for DispatcherInterface { | ||||
|     async fn submit_play_request(&self, play_request: &PlayRequest) -> Result<(), RhaiDispatcherError> { | ||||
|         self.dispatcher.submit_play_request(play_request).await | ||||
|     } | ||||
|      | ||||
|     async fn submit_play_request_and_await_result(&self, play_request: &PlayRequest) -> Result<String, RhaiDispatcherError> { | ||||
|         self.dispatcher.submit_play_request_and_await_result(play_request).await | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_dispatcher_interface_creation() { | ||||
|         // This test just verifies we can create the interface | ||||
|         // Note: Actual testing would require a properly configured RhaiDispatcher | ||||
|         // For now, we'll create a mock or skip the actual dispatcher creation | ||||
|          | ||||
|         // This is a placeholder test - adjust based on actual RhaiDispatcher constructor | ||||
|         // let dispatcher = Arc::new(RhaiDispatcher::new()); | ||||
|         // let interface = DispatcherInterface::new(dispatcher); | ||||
|          | ||||
|         // Just verify the test compiles for now | ||||
|         assert!(true); | ||||
|     } | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_dispatcher_interface_methods() { | ||||
|         // This test would verify the interface methods work correctly | ||||
|         // when a proper RhaiDispatcher is available | ||||
|          | ||||
|         let play_request = PlayRequest { | ||||
|             script: "let x = 5; x + 3".to_string(), | ||||
|         }; | ||||
|  | ||||
|         // Placeholder assertions - would test actual functionality with real dispatcher | ||||
|         assert_eq!(play_request.script, "let x = 5; x + 3"); | ||||
|     } | ||||
| } | ||||
111  rhailib/_archive/orchestrator/src/interface/local.rs  Normal file
							| @@ -0,0 +1,111 @@ | ||||
| //! Local interface implementation for in-process script execution | ||||
|  | ||||
| use crate::RhaiInterface; | ||||
| use async_trait::async_trait; | ||||
| use rhai_dispatcher::{PlayRequest, RhaiDispatcherError}; | ||||
|  | ||||
| /// Local interface for in-process script execution | ||||
| pub struct LocalInterface { | ||||
|     engine: rhai::Engine, | ||||
| } | ||||
|  | ||||
| impl LocalInterface { | ||||
|     /// Create a new local interface | ||||
|     pub fn new() -> Self { | ||||
|         let engine = rhai::Engine::new(); | ||||
|         Self { engine } | ||||
|     } | ||||
|      | ||||
|     /// Create a new local interface with custom engine | ||||
|     pub fn with_engine(engine: rhai::Engine) -> Self { | ||||
|         Self { engine } | ||||
|     } | ||||
| } | ||||
|  | ||||
| impl Default for LocalInterface { | ||||
|     fn default() -> Self { | ||||
|         Self::new() | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[async_trait] | ||||
| impl RhaiInterface for LocalInterface { | ||||
|     async fn submit_play_request(&self, _play_request: &PlayRequest) -> Result<(), RhaiDispatcherError> { | ||||
|         // For local interface, fire-and-forget doesn't make much sense | ||||
|         // We'll just execute and ignore the result | ||||
|         let _ = self.submit_play_request_and_await_result(_play_request).await?; | ||||
|         Ok(()) | ||||
|     } | ||||
|      | ||||
|     async fn submit_play_request_and_await_result(&self, play_request: &PlayRequest) -> Result<String, RhaiDispatcherError> { | ||||
|         let mut scope = rhai::Scope::new(); | ||||
|          | ||||
|         // Execute the script | ||||
|         let result = self | ||||
|             .engine | ||||
|             .eval_with_scope::<rhai::Dynamic>(&mut scope, &play_request.script) | ||||
|             .map_err(|e| RhaiDispatcherError::TaskNotFound(format!("Script execution error: {}", e)))?; | ||||
|          | ||||
|         // Return the result as a string | ||||
|         if result.is_unit() { | ||||
|             Ok(String::new()) | ||||
|         } else { | ||||
|             Ok(result.to_string()) | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_local_interface_basic() { | ||||
|         let interface = LocalInterface::new(); | ||||
|         let play_request = PlayRequest { | ||||
|             script: "let x = 5; x + 3".to_string(), | ||||
|         }; | ||||
|  | ||||
|         let result = interface.submit_play_request_and_await_result(&play_request).await; | ||||
|         assert!(result.is_ok()); | ||||
|          | ||||
|         let output = result.unwrap(); | ||||
|         assert_eq!(output, "8"); | ||||
|     } | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_local_interface_fire_and_forget() { | ||||
|         let interface = LocalInterface::new(); | ||||
|         let play_request = PlayRequest { | ||||
|             script: "let x = 5; x + 3".to_string(), | ||||
|         }; | ||||
|  | ||||
|         let result = interface.submit_play_request(&play_request).await; | ||||
|         assert!(result.is_ok()); | ||||
|     } | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_local_interface_with_error() { | ||||
|         let interface = LocalInterface::new(); | ||||
|         let play_request = PlayRequest { | ||||
|             script: "invalid_syntax +++".to_string(), | ||||
|         }; | ||||
|  | ||||
|         let result = interface.submit_play_request_and_await_result(&play_request).await; | ||||
|         assert!(result.is_err()); | ||||
|     } | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_local_interface_empty_result() { | ||||
|         let interface = LocalInterface::new(); | ||||
|         let play_request = PlayRequest { | ||||
|             script: "let x = 42;".to_string(), | ||||
|         }; | ||||
|  | ||||
|         let result = interface.submit_play_request_and_await_result(&play_request).await; | ||||
|         assert!(result.is_ok()); | ||||
|          | ||||
|         let output = result.unwrap(); | ||||
|         assert_eq!(output, ""); | ||||
|     } | ||||
| } | ||||

rhailib/_archive/orchestrator/src/interface/mod.rs (new file, 9 lines)
							| @@ -0,0 +1,9 @@ | ||||
| //! Interface implementations for different backends | ||||
|  | ||||
| pub mod local; | ||||
| pub mod ws; | ||||
| pub mod dispatcher; | ||||
|  | ||||
| pub use local::*; | ||||
| pub use ws::*; | ||||
| pub use dispatcher::*; | ||||

rhailib/_archive/orchestrator/src/interface/ws.rs (new file, 117 lines)
							| @@ -0,0 +1,117 @@ | ||||
| //! HTTP-based interface for remote script execution. Despite the `Ws` naming, | ||||
| //! this implementation submits scripts via plain HTTP POST requests (`reqwest`). | ||||
|  | ||||
| use crate::RhaiInterface; | ||||
| use async_trait::async_trait; | ||||
| use rhai_dispatcher::{PlayRequest, RhaiDispatcherError}; | ||||
| use reqwest::Client; | ||||
| use serde_json::json; | ||||
|  | ||||
| /// HTTP-based interface for remote script execution (kept under its original `Ws` name) | ||||
| pub struct WsInterface { | ||||
|     client: Client, | ||||
|     base_url: String, | ||||
| } | ||||
|  | ||||
| impl WsInterface { | ||||
|     /// Create a new WebSocket interface | ||||
|     pub fn new(base_url: String) -> Self { | ||||
|         Self { | ||||
|             client: Client::new(), | ||||
|             base_url, | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[async_trait] | ||||
| impl RhaiInterface for WsInterface { | ||||
|     async fn submit_play_request(&self, play_request: &PlayRequest) -> Result<(), RhaiDispatcherError> { | ||||
|         let payload = json!({ | ||||
|             "script": play_request.script | ||||
|         }); | ||||
|  | ||||
|         let response = self | ||||
|             .client | ||||
|             .post(&format!("{}/submit", self.base_url)) | ||||
|             .json(&payload) | ||||
|             .send() | ||||
|             .await | ||||
|             .map_err(|e| RhaiDispatcherError::TaskNotFound(format!("Network error: {}", e)))?; | ||||
|  | ||||
|         if response.status().is_success() { | ||||
|             Ok(()) | ||||
|         } else { | ||||
|             let error_text = response | ||||
|                 .text() | ||||
|                 .await | ||||
|                 .unwrap_or_else(|_| "Unknown error".to_string()); | ||||
|             Err(RhaiDispatcherError::TaskNotFound(format!("HTTP error: {}", error_text))) | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     async fn submit_play_request_and_await_result(&self, play_request: &PlayRequest) -> Result<String, RhaiDispatcherError> { | ||||
|         let payload = json!({ | ||||
|             "script": play_request.script | ||||
|         }); | ||||
|  | ||||
|         let response = self | ||||
|             .client | ||||
|             .post(&format!("{}/execute", self.base_url)) | ||||
|             .json(&payload) | ||||
|             .send() | ||||
|             .await | ||||
|             .map_err(|e| RhaiDispatcherError::TaskNotFound(format!("Network error: {}", e)))?; | ||||
|  | ||||
|         if response.status().is_success() { | ||||
|             let result: String = response | ||||
|                 .text() | ||||
|                 .await | ||||
|                 .map_err(|e| RhaiDispatcherError::TaskNotFound(format!("Response parsing error: {}", e)))?; | ||||
|             Ok(result) | ||||
|         } else { | ||||
|             let error_text = response | ||||
|                 .text() | ||||
|                 .await | ||||
|                 .unwrap_or_else(|_| "Unknown error".to_string()); | ||||
|             Err(RhaiDispatcherError::TaskNotFound(format!("HTTP error: {}", error_text))) | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
|  | ||||
|     #[test] | ||||
|     fn test_ws_interface_creation() { | ||||
|         let interface = WsInterface::new("http://localhost:8080".to_string()); | ||||
|         assert_eq!(interface.base_url, "http://localhost:8080"); | ||||
|     } | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_ws_interface_call_with_mock_server() { | ||||
|         // This test would require a mock HTTP server | ||||
|         // For now, just test that we can create the interface | ||||
|         let interface = WsInterface::new("http://localhost:8080".to_string()); | ||||
|          | ||||
|         let play_request = PlayRequest { | ||||
|             script: "let x = 1;".to_string(), | ||||
|         }; | ||||
|          | ||||
|         // This will fail without a real server, but that's expected in unit tests | ||||
|         let result = interface.submit_play_request_and_await_result(&play_request).await; | ||||
|         assert!(result.is_err()); // Expected to fail without server | ||||
|     } | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_ws_interface_fire_and_forget() { | ||||
|         let interface = WsInterface::new("http://localhost:8080".to_string()); | ||||
|          | ||||
|         let play_request = PlayRequest { | ||||
|             script: "let x = 1;".to_string(), | ||||
|         }; | ||||
|          | ||||
|         // This will fail without a real server, but that's expected in unit tests | ||||
|         let result = interface.submit_play_request(&play_request).await; | ||||
|         assert!(result.is_err()); // Expected to fail without server | ||||
|     } | ||||
| } | ||||

rhailib/_archive/orchestrator/src/lib.rs (new file, 35 lines)
							| @@ -0,0 +1,35 @@ | ||||
| //! # Orchestrator | ||||
| //! | ||||
| //! A simple DAG-based workflow execution system that extends the heromodels flow structures | ||||
| //! to support workflows with dependencies and distributed script execution. | ||||
|  | ||||
| use async_trait::async_trait; | ||||
| use rhai_dispatcher::{PlayRequest, RhaiDispatcherError}; | ||||
|  | ||||
| pub mod interface; | ||||
| pub mod orchestrator; | ||||
|  | ||||
| pub use interface::*; | ||||
| pub use orchestrator::*; | ||||
|  | ||||
| /// Trait for executing Rhai scripts through different backends | ||||
| /// Uses the same signature as RhaiDispatcher for consistency | ||||
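| /// | ||||
| /// A minimal usage sketch (doc-test ignored; `PlayRequest` construction is | ||||
| /// abbreviated here and may require more fields than shown): | ||||
| /// | ||||
| /// ```ignore | ||||
| /// let iface = interface::LocalInterface::new(); | ||||
| /// let req = PlayRequest { script: "1 + 1".to_string() }; | ||||
| /// let out = iface.submit_play_request_and_await_result(&req).await?; | ||||
| /// assert_eq!(out, "2"); | ||||
| /// ``` | ||||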
| #[async_trait] | ||||
| pub trait RhaiInterface { | ||||
|     /// Submit a play request without waiting for result (fire-and-forget) | ||||
|     async fn submit_play_request(&self, play_request: &PlayRequest) -> Result<(), RhaiDispatcherError>; | ||||
|      | ||||
|     /// Submit a play request and await the result | ||||
|     /// Returns just the output string on success | ||||
|     async fn submit_play_request_and_await_result(&self, play_request: &PlayRequest) -> Result<String, RhaiDispatcherError>; | ||||
| } | ||||
|  | ||||
| // Re-export the flow models from DSL | ||||
| pub use rhailib_dsl::flow::{OrchestratedFlow, OrchestratedFlowStep, OrchestratorError, FlowStatus}; | ||||
|  | ||||
| // Conversion from RhaiDispatcherError to OrchestratorError | ||||
| impl From<RhaiDispatcherError> for OrchestratorError { | ||||
|     fn from(err: RhaiDispatcherError) -> Self { | ||||
|         OrchestratorError::ExecutorError(err.to_string()) | ||||
|     } | ||||
| } | ||||

rhailib/_archive/orchestrator/src/orchestrator.rs (new file, 418 lines)
							| @@ -0,0 +1,418 @@ | ||||
| //! Main orchestrator implementation for DAG-based workflow execution | ||||
|  | ||||
| use crate::{ | ||||
|     OrchestratedFlow, OrchestratedFlowStep, OrchestratorError, FlowStatus, RhaiInterface, | ||||
| }; | ||||
| use rhai_dispatcher::PlayRequest; | ||||
| use futures::future::try_join_all; | ||||
| use std::collections::{HashMap, HashSet}; | ||||
| use std::sync::Arc; | ||||
| use tokio::sync::RwLock; | ||||
| use tracing::{debug, error, info, warn}; | ||||
|  | ||||
| /// Main orchestrator for executing DAG-based workflows | ||||
| pub struct Orchestrator<I: RhaiInterface> { | ||||
|     /// Interface for running scripts | ||||
|     interface: Arc<I>, | ||||
|      | ||||
|     /// Active flow executions | ||||
|     active_flows: Arc<RwLock<HashMap<u32, FlowExecution>>>, | ||||
| } | ||||
|  | ||||
| /// Represents an active flow execution | ||||
| #[derive(Debug, Clone)] | ||||
| pub struct FlowExecution { | ||||
|     /// The flow being executed | ||||
|     pub flow: OrchestratedFlow, | ||||
|      | ||||
|     /// Current status | ||||
|     pub status: FlowStatus, | ||||
|      | ||||
|     /// Completed step IDs | ||||
|     pub completed_steps: HashSet<u32>, | ||||
|      | ||||
|     /// Failed step IDs | ||||
|     pub failed_steps: HashSet<u32>, | ||||
|      | ||||
|     /// Step results | ||||
|     pub step_results: HashMap<u32, HashMap<String, String>>, | ||||
|      | ||||
|     /// Execution start time | ||||
|     pub started_at: chrono::DateTime<chrono::Utc>, | ||||
|      | ||||
|     /// Execution end time | ||||
|     pub completed_at: Option<chrono::DateTime<chrono::Utc>>, | ||||
| } | ||||
|  | ||||
| impl FlowExecution { | ||||
|     /// Create a new flow execution | ||||
|     pub fn new(flow: OrchestratedFlow) -> Self { | ||||
|         Self { | ||||
|             flow, | ||||
|             status: FlowStatus::Pending, | ||||
|             completed_steps: HashSet::new(), | ||||
|             failed_steps: HashSet::new(), | ||||
|             step_results: HashMap::new(), | ||||
|             started_at: chrono::Utc::now(), | ||||
|             completed_at: None, | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// Check if a step is ready to execute (all dependencies completed) | ||||
|     pub fn is_step_ready(&self, step: &OrchestratedFlowStep) -> bool { | ||||
|         if self.completed_steps.contains(&step.id()) || self.failed_steps.contains(&step.id()) { | ||||
|             return false; | ||||
|         } | ||||
|          | ||||
|         step.depends_on.iter().all(|dep_id| self.completed_steps.contains(dep_id)) | ||||
|     } | ||||
|      | ||||
|     /// Get all ready steps | ||||
|     pub fn get_ready_steps(&self) -> Vec<&OrchestratedFlowStep> { | ||||
|         self.flow | ||||
|             .orchestrated_steps | ||||
|             .iter() | ||||
|             .filter(|step| self.is_step_ready(step)) | ||||
|             .collect() | ||||
|     } | ||||
|      | ||||
|     /// Mark a step as completed | ||||
|     pub fn complete_step(&mut self, step_id: u32, outputs: HashMap<String, String>) { | ||||
|         self.completed_steps.insert(step_id); | ||||
|         self.step_results.insert(step_id, outputs); | ||||
|          | ||||
|         // Check if flow is complete | ||||
|         if self.completed_steps.len() == self.flow.orchestrated_steps.len() { | ||||
|             self.status = FlowStatus::Completed; | ||||
|             self.completed_at = Some(chrono::Utc::now()); | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// Mark a step as failed | ||||
|     pub fn fail_step(&mut self, step_id: u32) { | ||||
|         self.failed_steps.insert(step_id); | ||||
|         self.status = FlowStatus::Failed; | ||||
|         self.completed_at = Some(chrono::Utc::now()); | ||||
|     } | ||||
|      | ||||
|     /// Check if the flow execution is finished | ||||
|     pub fn is_finished(&self) -> bool { | ||||
|         matches!(self.status, FlowStatus::Completed | FlowStatus::Failed) | ||||
|     } | ||||
| } | ||||
|  | ||||
| impl<I: RhaiInterface + Send + Sync + 'static> Orchestrator<I> { | ||||
|     /// Create a new orchestrator | ||||
|     pub fn new(interface: Arc<I>) -> Self { | ||||
|         Self { | ||||
|             interface, | ||||
|             active_flows: Arc::new(RwLock::new(HashMap::new())), | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// Start executing a flow | ||||
|     pub async fn execute_flow(&self, flow: OrchestratedFlow) -> Result<u32, OrchestratorError> { | ||||
|         let flow_id = flow.id(); | ||||
|         flow.validate_dag()?; | ||||
|          | ||||
|         info!("Starting execution of flow {} with {} steps", flow_id, flow.orchestrated_steps.len()); | ||||
|          | ||||
|         // Create flow execution | ||||
|         let mut execution = FlowExecution::new(flow); | ||||
|         execution.status = FlowStatus::Running; | ||||
|          | ||||
|         // Store the execution | ||||
|         { | ||||
|             let mut active_flows = self.active_flows.write().await; | ||||
|             active_flows.insert(flow_id, execution); | ||||
|         } | ||||
|          | ||||
|         // Start execution in background | ||||
|         let orchestrator = self.clone(); | ||||
|         tokio::spawn(async move { | ||||
|             if let Err(e) = orchestrator.execute_flow_steps(flow_id).await { | ||||
|                 error!("Flow {} execution failed: {}", flow_id, e); | ||||
|                  | ||||
|                 // Mark flow as failed | ||||
|                 let mut active_flows = orchestrator.active_flows.write().await; | ||||
|                 if let Some(execution) = active_flows.get_mut(&flow_id) { | ||||
|                     execution.status = FlowStatus::Failed; | ||||
|                     execution.completed_at = Some(chrono::Utc::now()); | ||||
|                 } | ||||
|             } | ||||
|         }); | ||||
|          | ||||
|         Ok(flow_id) | ||||
|     } | ||||
|      | ||||
|     /// Execute flow steps using DAG traversal | ||||
|     async fn execute_flow_steps(&self, flow_id: u32) -> Result<(), OrchestratorError> { | ||||
|         loop { | ||||
|             let ready_steps = { | ||||
|                 let active_flows = self.active_flows.read().await; | ||||
|                 let execution = active_flows | ||||
|                     .get(&flow_id) | ||||
|                     .ok_or(OrchestratorError::StepNotFound(flow_id))?; | ||||
|                  | ||||
|                 if execution.is_finished() { | ||||
|                     info!("Flow {} execution completed with status: {:?}", flow_id, execution.status); | ||||
|                     return Ok(()); | ||||
|                 } | ||||
|                  | ||||
|                 execution.get_ready_steps().into_iter().cloned().collect::<Vec<_>>() | ||||
|             }; | ||||
|              | ||||
|             if ready_steps.is_empty() { | ||||
|                 // Check if we're deadlocked | ||||
|                 let active_flows = self.active_flows.read().await; | ||||
|                 let execution = active_flows | ||||
|                     .get(&flow_id) | ||||
|                     .ok_or(OrchestratorError::StepNotFound(flow_id))?; | ||||
|                  | ||||
|                 if !execution.is_finished() { | ||||
|                     warn!("No ready steps found for flow {} - possible deadlock", flow_id); | ||||
|                     return Err(OrchestratorError::NoReadySteps); | ||||
|                 } | ||||
|                  | ||||
|                 return Ok(()); | ||||
|             } | ||||
|              | ||||
|             debug!("Executing {} ready steps for flow {}", ready_steps.len(), flow_id); | ||||
|              | ||||
|             // Execute ready steps concurrently | ||||
|             let step_futures = ready_steps.into_iter().map(|step| { | ||||
|                 let orchestrator = self.clone(); | ||||
|                 async move { | ||||
|                     orchestrator.execute_step(flow_id, step).await | ||||
|                 } | ||||
|             }); | ||||
|              | ||||
|             // Wait for all steps to complete | ||||
|             let results = try_join_all(step_futures).await?; | ||||
|              | ||||
|             // Update execution state | ||||
|             { | ||||
|                 let mut active_flows = self.active_flows.write().await; | ||||
|                 let execution = active_flows | ||||
|                     .get_mut(&flow_id) | ||||
|                     .ok_or(OrchestratorError::StepNotFound(flow_id))?; | ||||
|                  | ||||
|                 for (step_id, outputs) in results { | ||||
|                     execution.complete_step(step_id, outputs); | ||||
|                 } | ||||
|             } | ||||
|              | ||||
|             // Small delay to prevent tight loop | ||||
|             tokio::time::sleep(tokio::time::Duration::from_millis(10)).await; | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// Execute a single step | ||||
|     async fn execute_step( | ||||
|         &self, | ||||
|         flow_id: u32, | ||||
|         step: OrchestratedFlowStep, | ||||
|     ) -> Result<(u32, HashMap<String, String>), OrchestratorError> { | ||||
|         let step_id = step.id(); | ||||
|         info!("Executing step {} for flow {}", step_id, flow_id); | ||||
|          | ||||
|         // Prepare inputs with dependency outputs | ||||
|         let mut inputs = step.inputs.clone(); | ||||
|          | ||||
|         // Add outputs from dependency steps | ||||
|         { | ||||
|             let active_flows = self.active_flows.read().await; | ||||
|             let execution = active_flows | ||||
|                 .get(&flow_id) | ||||
|                 .ok_or(OrchestratorError::StepNotFound(flow_id))?; | ||||
|              | ||||
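|             // Dependency outputs are exposed as "dep_<step_id>_<key>", e.g. the | ||||
|             // "result" output of step 1 becomes input "dep_1_result". | ||||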
|             for dep_id in &step.depends_on { | ||||
|                 if let Some(dep_outputs) = execution.step_results.get(dep_id) { | ||||
|                     for (key, value) in dep_outputs { | ||||
|                         inputs.insert(format!("dep_{}_{}", dep_id, key), value.clone()); | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|          | ||||
|         // Create play request | ||||
|         let play_request = PlayRequest { | ||||
|             id: format!("{}_{}", flow_id, step_id), | ||||
|             worker_id: step.worker_id.clone(), | ||||
|             context_id: step.context_id.clone(), | ||||
|             script: step.script.clone(), | ||||
|             timeout: std::time::Duration::from_secs(30), // Default timeout | ||||
|         }; | ||||
|          | ||||
|         // Execute the script | ||||
|         match self.interface.submit_play_request_and_await_result(&play_request).await { | ||||
|             Ok(output) => { | ||||
|                 info!("Step {} completed successfully", step_id); | ||||
|                 let mut outputs = HashMap::new(); | ||||
|                 outputs.insert("result".to_string(), output); | ||||
|                 Ok((step_id, outputs)) | ||||
|             } | ||||
|             Err(e) => { | ||||
|                 error!("Step {} failed: {}", step_id, e); | ||||
|                  | ||||
|                 // Mark step as failed | ||||
|                 { | ||||
|                     let mut active_flows = self.active_flows.write().await; | ||||
|                     if let Some(execution) = active_flows.get_mut(&flow_id) { | ||||
|                         execution.fail_step(step_id); | ||||
|                     } | ||||
|                 } | ||||
|                  | ||||
|                 Err(OrchestratorError::StepFailed(step_id, Some(e.to_string()))) | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// Get the status of a flow execution | ||||
|     pub async fn get_flow_status(&self, flow_id: u32) -> Option<FlowExecution> { | ||||
|         let active_flows = self.active_flows.read().await; | ||||
|         active_flows.get(&flow_id).cloned() | ||||
|     } | ||||
|      | ||||
|     /// Cancel a flow execution | ||||
|     pub async fn cancel_flow(&self, flow_id: u32) -> Result<(), OrchestratorError> { | ||||
|         let mut active_flows = self.active_flows.write().await; | ||||
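|         // FlowStatus in this crate has no dedicated Cancelled variant, so | ||||
|         // cancellation is recorded as Failed. | ||||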
|         if let Some(execution) = active_flows.get_mut(&flow_id) { | ||||
|             execution.status = FlowStatus::Failed; | ||||
|             execution.completed_at = Some(chrono::Utc::now()); | ||||
|             info!("Flow {} cancelled", flow_id); | ||||
|             Ok(()) | ||||
|         } else { | ||||
|             Err(OrchestratorError::StepNotFound(flow_id)) | ||||
|         } | ||||
|     } | ||||
|      | ||||
|     /// List all active flows | ||||
|     pub async fn list_active_flows(&self) -> Vec<(u32, FlowStatus)> { | ||||
|         let active_flows = self.active_flows.read().await; | ||||
|         active_flows | ||||
|             .iter() | ||||
|             .map(|(id, execution)| (*id, execution.status.clone())) | ||||
|             .collect() | ||||
|     } | ||||
|      | ||||
|     /// Clean up completed flows | ||||
|     pub async fn cleanup_completed_flows(&self) { | ||||
|         let mut active_flows = self.active_flows.write().await; | ||||
|         active_flows.retain(|_, execution| !execution.is_finished()); | ||||
|     } | ||||
| } | ||||
|  | ||||
| impl<I: RhaiInterface + Send + Sync> Clone for Orchestrator<I> { | ||||
|     fn clone(&self) -> Self { | ||||
|         Self { | ||||
|             interface: self.interface.clone(), | ||||
|             active_flows: self.active_flows.clone(), | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
|     use crate::interface::LocalInterface; | ||||
|     use std::collections::HashMap; | ||||
|  | ||||
|     #[tokio::test] | ||||
|     async fn test_simple_flow_execution() { | ||||
|         let interface = Arc::new(LocalInterface::new()); | ||||
|         let orchestrator = Orchestrator::new(interface); | ||||
|          | ||||
|         // Create a simple flow with two steps | ||||
|         let step1 = OrchestratedFlowStep::new("step1") | ||||
|             .script("let result = 10;") | ||||
|             .context_id("test") | ||||
|             .worker_id("worker1"); | ||||
|              | ||||
|         let step2 = OrchestratedFlowStep::new("step2") | ||||
|             .script("let result = dep_1_result + 5;") | ||||
|             .depends_on(step1.id()) | ||||
|             .context_id("test") | ||||
|             .worker_id("worker1"); | ||||
|          | ||||
|         let flow = OrchestratedFlow::new("test_flow") | ||||
|             .add_step(step1) | ||||
|             .add_step(step2); | ||||
|          | ||||
|         // Execute the flow | ||||
|         let flow_id = orchestrator.execute_flow(flow).await.unwrap(); | ||||
|          | ||||
|         // Wait for completion | ||||
|         tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; | ||||
|          | ||||
|         let status = orchestrator.get_flow_status(flow_id).await.unwrap(); | ||||
|         assert_eq!(status.status, FlowStatus::Completed); | ||||
|         assert_eq!(status.completed_steps.len(), 2); | ||||
|     } | ||||
|      | ||||
|     #[tokio::test] | ||||
|     async fn test_parallel_execution() { | ||||
|         let interface = Arc::new(LocalInterface::new()); | ||||
|         let orchestrator = Orchestrator::new(interface); | ||||
|          | ||||
|         // Create a flow with parallel steps | ||||
|         let step1 = OrchestratedFlowStep::new("step1") | ||||
|             .script("let result = 10;") | ||||
|             .context_id("test") | ||||
|             .worker_id("worker1"); | ||||
|              | ||||
|         let step2 = OrchestratedFlowStep::new("step2") | ||||
|             .script("let result = 20;") | ||||
|             .context_id("test") | ||||
|             .worker_id("worker2"); | ||||
|              | ||||
|         let step3 = OrchestratedFlowStep::new("step3") | ||||
|             .script("let result = dep_1_result + dep_2_result;") | ||||
|             .depends_on(step1.id()) | ||||
|             .depends_on(step2.id()) | ||||
|             .context_id("test") | ||||
|             .worker_id("worker3"); | ||||
|          | ||||
|         let flow = OrchestratedFlow::new("parallel_flow") | ||||
|             .add_step(step1) | ||||
|             .add_step(step2) | ||||
|             .add_step(step3); | ||||
|          | ||||
|         // Execute the flow | ||||
|         let flow_id = orchestrator.execute_flow(flow).await.unwrap(); | ||||
|          | ||||
|         // Wait for completion | ||||
|         tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; | ||||
|          | ||||
|         let status = orchestrator.get_flow_status(flow_id).await.unwrap(); | ||||
|         assert_eq!(status.status, FlowStatus::Completed); | ||||
|         assert_eq!(status.completed_steps.len(), 3); | ||||
|     } | ||||
|      | ||||
|     #[test] | ||||
|     fn test_flow_execution_state() { | ||||
|         let step1 = OrchestratedFlowStep::new("step1").script("let x = 1;"); | ||||
|         let step2 = OrchestratedFlowStep::new("step2") | ||||
|             .script("let y = 2;") | ||||
|             .depends_on(step1.id()); | ||||
|          | ||||
|         let flow = OrchestratedFlow::new("test_flow") | ||||
|             .add_step(step1.clone()) | ||||
|             .add_step(step2.clone()); | ||||
|          | ||||
|         let mut execution = FlowExecution::new(flow); | ||||
|          | ||||
|         // Initially, only step1 should be ready | ||||
|         assert!(execution.is_step_ready(&step1)); | ||||
|         assert!(!execution.is_step_ready(&step2)); | ||||
|          | ||||
|         // After completing step1, step2 should be ready | ||||
|         execution.complete_step(step1.id(), HashMap::new()); | ||||
|         assert!(!execution.is_step_ready(&step1)); // Already completed | ||||
|         assert!(execution.is_step_ready(&step2)); | ||||
|          | ||||
|         // After completing step2, flow should be complete | ||||
|         execution.complete_step(step2.id(), HashMap::new()); | ||||
|         assert_eq!(execution.status, FlowStatus::Completed); | ||||
|     } | ||||
| } | ||||

rhailib/_archive/orchestrator/src/services.rs (new file, 42 lines)
							| @@ -0,0 +1,42 @@ | ||||
| //! Service-level flow queries for the orchestrator (lookups via Rhai scripts). | ||||
| //! | ||||
| //! Note: this module assumes a builder-style `new_play_request()` API on the | ||||
| //! interface (as provided by the dispatcher's `PlayRequestBuilder`), which is | ||||
| //! not part of the `RhaiInterface` trait defined in `lib.rs`. | ||||
|  | ||||
| use crate::{OrchestratedFlow, Orchestrator, OrchestratorError, RhaiInterface}; | ||||
|  | ||||
| impl<I: RhaiInterface + Send + Sync + 'static> Orchestrator<I> { | ||||
|     /// Get a flow by ID. | ||||
|     pub async fn get_flow(&self, flow_id: u32) -> Result<OrchestratedFlow, OrchestratorError> { | ||||
|         let result = self | ||||
|             .interface | ||||
|             .new_play_request() | ||||
|             .script(format!("json_encode(get_flow({}))", flow_id)) | ||||
|             .submit_play_request_and_await_result() | ||||
|             .await?; | ||||
|         serde_json::from_str(&result).map_err(|e| OrchestratorError::ExecutorError(e.to_string())) | ||||
|     } | ||||
|  | ||||
|     /// Get all flows. | ||||
|     pub async fn get_flows(&self) -> Result<Vec<OrchestratedFlow>, OrchestratorError> { | ||||
|         let result = self | ||||
|             .interface | ||||
|             .new_play_request() | ||||
|             .script("json_encode(get_flows())") | ||||
|             .submit_play_request_and_await_result() | ||||
|             .await?; | ||||
|         serde_json::from_str(&result).map_err(|e| OrchestratorError::ExecutorError(e.to_string())) | ||||
|     } | ||||
|  | ||||
|     /// Get active flows. The original ran the same `get_flows()` script as | ||||
|     /// `get_flows` without filtering by status, so this simply delegates. | ||||
|     pub async fn get_active_flows(&self) -> Result<Vec<OrchestratedFlow>, OrchestratorError> { | ||||
|         self.get_flows().await | ||||
|     } | ||||
| } | ||||

rhailib/_archive/worker/.gitignore (new file, vendored, 2 lines)
							| @@ -0,0 +1,2 @@ | ||||
| /target | ||||
| worker_rhai_temp_db | ||||

rhailib/_archive/worker/Cargo.toml (new file, 29 lines)
							| @@ -0,0 +1,29 @@ | ||||
| [package] | ||||
| name = "rhailib_worker" | ||||
| version = "0.1.0" | ||||
| edition = "2021" | ||||
|  | ||||
| [lib] | ||||
| name = "rhailib_worker" # Can be different from package name, or same | ||||
| path = "src/lib.rs" | ||||
|  | ||||
| [[bin]] | ||||
| name = "worker" | ||||
| path = "cmd/worker.rs" | ||||
|  | ||||
| # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html | ||||
|  | ||||
| [dependencies] | ||||
| redis = { version = "0.25.0", features = ["tokio-comp"] } | ||||
| rhai = { version = "1.18.0", default-features = false, features = ["sync", "decimal", "std"] } # Added "decimal" for broader script support | ||||
| serde = { version = "1.0", features = ["derive"] } | ||||
| serde_json = "1.0" | ||||
| tokio = { version = "1", features = ["macros", "rt-multi-thread", "time"] } | ||||
| log = "0.4" | ||||
| env_logger = "0.10" | ||||
| clap = { version = "4.4", features = ["derive"] } | ||||
| uuid = { version = "1.6", features = ["v4", "serde"] } # Though task_id is string, uuid might be useful | ||||
| chrono = { version = "0.4", features = ["serde"] } | ||||
| rhai_dispatcher = { path = "../dispatcher" } | ||||
| rhailib_engine = { path = "../engine" } | ||||
| heromodels = { path = "../../../db/heromodels", features = ["rhai"] } | ||||

rhailib/_archive/worker/README.md (new file, 75 lines)
							| @@ -0,0 +1,75 @@ | ||||
| # Rhai Worker | ||||
|  | ||||
| The `rhailib_worker` crate implements a standalone worker service that listens for Rhai script execution tasks on a Redis queue, executes them, and posts results back to Redis. It is designed to be spawned as a separate OS process by an orchestrator like the `launcher` crate. | ||||
|  | ||||
| ## Features | ||||
|  | ||||
| -   **Redis Queue Consumption**: Listens to a specific Redis list (acting as a task queue) for incoming task IDs. The queue name is derived from the worker ID (`rhailib:<worker_id>`). | ||||
| -   **Rhai Script Execution**: Executes Rhai scripts retrieved from Redis based on task IDs. | ||||
| -   **Task State Management**: Updates task status (`processing`, `completed`, `error`) and stores results in Redis hashes. | ||||
| -   **Script Scope Injection**: Automatically makes two important constants available to the executing Rhai script (a sketch follows this list): | ||||
|     -   `CONTEXT_ID`: The context (circle) in which the script runs. | ||||
|     -   `CALLER_ID`: The public key of the entity that requested the script execution. | ||||
| -   **Asynchronous Operations**: Built with `tokio` for non-blocking Redis communication. | ||||
| -   **Graceful Error Handling**: Captures errors during script execution and stores them for the client. | ||||
|  | ||||
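| A conceptual sketch of that injection (the archive implementation actually passes | ||||
| these values through the engine's default tag as a `rhai::Map`; a plain | ||||
| scope-based version, with hypothetical variable names, would look like this): | ||||
|  | ||||
| ```rust | ||||
| // Hypothetical sketch; the real worker uses `engine.set_default_tag(...)`. | ||||
| let mut scope = rhai::Scope::new(); | ||||
| scope.push_constant("CONTEXT_ID", context_id.clone()); | ||||
| scope.push_constant("CALLER_ID", caller_id.clone()); | ||||
| let result = engine.eval_with_scope::<rhai::Dynamic>(&mut scope, &script)?; | ||||
| ``` | ||||
|  | ||||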
| ## Core Components | ||||
|  | ||||
| -   **`rhailib_worker` (Library Crate)**: | ||||
|     -   **`spawn_rhai_worker(worker_id, db_path, engine, redis_url, shutdown_rx, preserve_tasks)`**: Spawns the main asynchronous worker task, which: | ||||
|         -   Connects to Redis. | ||||
|         -   Continuously polls its designated Redis queue (`rhailib:<worker_id>`) using `BLPOP`. | ||||
|         -   Upon receiving a `task_id`, fetches the task details from a Redis hash. | ||||
|         -   Makes `CALLER_ID` and `CONTEXT_ID` (read from the task details) available to the script. | ||||
|         -   Executes the script and updates the task status in Redis with the output or error. | ||||
| -   **`worker` (Binary Crate - `cmd/worker.rs`)**: | ||||
|     -   The main executable entry point. It parses command-line arguments, initializes a performance-tuned Rhai engine, and invokes `spawn_rhai_worker`. | ||||
|  | ||||
| ## How It Works | ||||
|  | ||||
| 1.  The worker executable is launched by an external process (e.g., `launcher`), which passes the required command-line arguments. | ||||
|     ```bash | ||||
|     # This is typically done programmatically by a parent process. | ||||
|     /path/to/worker --redis-url redis://127.0.0.1/ --worker-id worker_1 | ||||
|     ``` | ||||
| 2.  The worker connects to Redis and starts listening to its designated task queue (e.g., `rhailib:worker_1`). | ||||
| 3.  A `rhai_dispatcher` submits a task by pushing a `task_id` to this queue and storing the script and other details in a Redis hash. | ||||
| 4.  The worker's `BLPOP` command picks up the `task_id`. | ||||
| 5.  The worker retrieves the script from the corresponding `rhailib:<task_id>` hash. | ||||
| 6.  It updates the task's status to "processing". | ||||
| 7.  The Rhai script is executed with both `CONTEXT_ID` and `CALLER_ID` available to it. | ||||
| 8.  After execution, the status is updated to "completed" (with output) or "error" (with an error message). | ||||
| 9.  The worker then goes back to listening for the next task. | ||||
|  | ||||
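| For illustration, the dispatcher-side half of steps 3-4 could look roughly like | ||||
| this with the `redis` crate (key names follow this README; the exact hash fields | ||||
| are assumptions): | ||||
|  | ||||
| ```rust | ||||
| use redis::AsyncCommands; | ||||
|  | ||||
| // Hypothetical sketch: store the task details, then enqueue the task ID. | ||||
| async fn enqueue_task( | ||||
|     conn: &mut redis::aio::MultiplexedConnection, | ||||
|     worker_id: &str, | ||||
|     task_id: &str, | ||||
|     script: &str, | ||||
| ) -> redis::RedisResult<()> { | ||||
|     conn.hset_multiple::<_, _, _, ()>( | ||||
|         format!("rhailib:{}", task_id), | ||||
|         &[("script", script), ("status", "pending")], | ||||
|     ) | ||||
|     .await?; | ||||
|     // The worker's BLPOP on `rhailib:<worker_id>` picks this up (step 4). | ||||
|     conn.lpush::<_, _, ()>(format!("rhailib:{}", worker_id), task_id).await?; | ||||
|     Ok(()) | ||||
| } | ||||
| ``` | ||||
|  | ||||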
| ## Prerequisites | ||||
|  | ||||
| -   A running Redis instance accessible by the worker. | ||||
| -   An orchestrator process (like `launcher`) to spawn the worker. | ||||
| -   A `rhai_dispatcher` (or another system) to populate the Redis queues. | ||||
|  | ||||
| ## Building and Running | ||||
|  | ||||
| The worker is intended to be built as a dependency and run by another program. | ||||
|  | ||||
| 1.  **Build the worker:** | ||||
|     ```bash | ||||
|     # From the root of the rhailib project | ||||
|     cargo build --package rhailib_worker | ||||
|     ``` | ||||
|     The binary will be located at `target/debug/worker`. | ||||
|  | ||||
| 2.  **Running the worker:** | ||||
|     The worker is not typically run manually. The `launcher` crate is responsible for spawning it with the correct arguments. If you need to run it manually for testing, you must provide the required arguments: | ||||
|     ```bash | ||||
|     ./target/debug/worker --redis-url redis://127.0.0.1/ --worker-id <some_worker_id> | ||||
|     ``` | ||||
|  | ||||
| ## Dependencies | ||||
|  | ||||
| Key dependencies include: | ||||
| -   `redis`: For asynchronous Redis communication. | ||||
| -   `rhai`: The Rhai script engine. | ||||
| -   `clap`: For command-line argument parsing. | ||||
| -   `tokio`: For the asynchronous runtime. | ||||
| -   `log`, `env_logger`: For logging. | ||||

rhailib/_archive/worker/cmd/README.md (new file, 113 lines)
							| @@ -0,0 +1,113 @@ | ||||
| # Rhai Worker Binary | ||||
|  | ||||
| A command-line worker for executing Rhai scripts from Redis task queues. | ||||
|  | ||||
| ## Binary: `worker` | ||||
|  | ||||
| ### Installation | ||||
|  | ||||
| Build the binary: | ||||
| ```bash | ||||
| cargo build --bin worker --release | ||||
| ``` | ||||
|  | ||||
| ### Usage | ||||
|  | ||||
| ```bash | ||||
| # Basic usage - requires a worker ID | ||||
| worker --worker-id <WORKER_ID> | ||||
|  | ||||
| # Custom Redis URL | ||||
| worker -w <WORKER_ID> --redis-url redis://localhost:6379/1 | ||||
|  | ||||
| # Custom database path | ||||
| worker -w <WORKER_ID> --db-path /tmp/worker_db | ||||
|  | ||||
| # Preserve tasks for debugging/benchmarking | ||||
| worker -w <WORKER_ID> --preserve-tasks | ||||
|  | ||||
| # Remove timestamps from logs | ||||
| worker -w <WORKER_ID> --no-timestamp | ||||
|  | ||||
| # Increase verbosity via env_logger | ||||
| RUST_LOG=debug worker -w <WORKER_ID> | ||||
| ``` | ||||
|  | ||||
| ### Command-Line Options | ||||
|  | ||||
| | Option | Short | Default | Description | | ||||
| |--------|-------|---------|-------------| | ||||
| | `--worker-id` | `-w` | **Required** | Worker ID; also selects the task queue (`rhailib:<worker_id>`) | | ||||
| | `--redis-url` | `-r` | `redis://localhost:6379` | Redis connection URL | | ||||
| | `--preserve-tasks` | | `false` | Preserve task details after completion | | ||||
| | `--db-path` | | `worker_rhai_temp_db` | Database path for Rhai engine | | ||||
| | `--no-timestamp` | | `false` | Remove timestamps from log output | | ||||
|  | ||||
| ### Features | ||||
|  | ||||
| - **Task Queue Processing**: Listens to Redis queues for Rhai script execution tasks | ||||
| - **Performance Optimized**: Configured for maximum Rhai engine performance | ||||
| - **Graceful Shutdown**: Supports shutdown signals for clean termination | ||||
| - **Flexible Logging**: Verbosity via `RUST_LOG` (env_logger) plus timestamp control | ||||
| - **Database Integration**: Uses heromodels for data persistence | ||||
| - **Task Cleanup**: Optional task preservation for debugging/benchmarking | ||||
|  | ||||
| ### How It Works | ||||
|  | ||||
| 1. **Queue Listening**: Worker listens on Redis queue `rhailib:{worker_id}` | ||||
| 2. **Task Processing**: Receives task IDs, fetches task details from Redis | ||||
| 3. **Script Execution**: Executes Rhai scripts with configured engine | ||||
| 4. **Result Handling**: Updates task status and sends results to reply queues | ||||
| 5. **Cleanup**: Optionally cleans up task details after completion | ||||
|  | ||||
| ### Configuration Examples | ||||
|  | ||||
| #### Development Worker | ||||
| ```bash | ||||
| # Simple development worker | ||||
| worker -w dev_worker_1 | ||||
|  | ||||
| # Development with debug logging (no timestamps) | ||||
| RUST_LOG=debug worker -w dev_worker_1 --no-timestamp | ||||
| ``` | ||||
|  | ||||
| #### Production Worker | ||||
| ```bash | ||||
| # Production worker with custom configuration | ||||
| worker \ | ||||
|   --worker-id prod_worker_1 \ | ||||
|   --redis-url redis://redis-server:6379/0 \ | ||||
|   --db-path /var/lib/worker/db \ | ||||
|   --preserve-tasks | ||||
| ``` | ||||
|  | ||||
| #### Benchmarking Worker | ||||
| ```bash | ||||
| # Worker optimized for benchmarking | ||||
| RUST_LOG=debug worker \ | ||||
|   --worker-id bench_worker_1 \ | ||||
|   --preserve-tasks \ | ||||
|   --no-timestamp | ||||
| ``` | ||||
|  | ||||
| ### Error Handling | ||||
|  | ||||
| The worker provides clear error messages for: | ||||
| - Missing or invalid worker ID | ||||
| - Redis connection failures | ||||
| - Script execution errors | ||||
| - Database access issues | ||||
|  | ||||
| ### Dependencies | ||||
|  | ||||
| - `rhailib_engine`: Rhai engine with heromodels integration | ||||
| - `redis`: Redis client for task queue management | ||||
| - `rhai`: Script execution engine | ||||
| - `clap`: Command-line argument parsing | ||||
| - `env_logger`: Logging infrastructure | ||||

rhailib/_archive/worker/cmd/worker.rs (new file, 95 lines)
							| @@ -0,0 +1,95 @@ | ||||
| use clap::Parser; | ||||
| use rhailib_engine::create_heromodels_engine; | ||||
| use rhailib_worker::spawn_rhai_worker; | ||||
| use tokio::sync::mpsc; | ||||
|  | ||||
| #[derive(Parser, Debug)] | ||||
| #[command(author, version, about, long_about = None)] | ||||
| struct Args { | ||||
|     /// Worker ID for identification | ||||
|     #[arg(short, long)] | ||||
|     worker_id: String, | ||||
|  | ||||
|     /// Redis URL | ||||
|     #[arg(short, long, default_value = "redis://localhost:6379")] | ||||
|     redis_url: String, | ||||
|  | ||||
|     /// Preserve task details after completion (for benchmarking) | ||||
|     #[arg(long, default_value = "false")] | ||||
|     preserve_tasks: bool, | ||||
|  | ||||
|     /// Root directory for engine database | ||||
|     #[arg(long, default_value = "worker_rhai_temp_db")] | ||||
|     db_path: String, | ||||
|  | ||||
|     /// Disable timestamps in log output | ||||
|     #[arg(long, help = "Remove timestamps from log output")] | ||||
|     no_timestamp: bool, | ||||
| } | ||||
|  | ||||
| #[tokio::main] | ||||
| async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> { | ||||
|     let args = Args::parse(); | ||||
|  | ||||
|     // Configure env_logger with or without timestamps | ||||
|     if args.no_timestamp { | ||||
|         env_logger::Builder::from_default_env() | ||||
|             .format_timestamp(None) | ||||
|             .init(); | ||||
|     } else { | ||||
|         env_logger::init(); | ||||
|     } | ||||
|  | ||||
|  | ||||
|     log::info!("Rhai Worker (binary) starting with performance-optimized engine."); | ||||
|     log::info!( | ||||
|         "Worker ID: {}, Redis: {}", | ||||
|         args.worker_id, | ||||
|         args.redis_url | ||||
|     ); | ||||
|  | ||||
|     let mut engine = create_heromodels_engine(); | ||||
|  | ||||
|     // Performance optimizations for benchmarking | ||||
|     engine.set_max_operations(0); // Unlimited operations for performance testing | ||||
|     engine.set_max_expr_depths(0, 0); // Unlimited expression depth | ||||
|     engine.set_max_string_size(0); // Unlimited string size | ||||
|     engine.set_max_array_size(0); // Unlimited array size | ||||
|     engine.set_max_map_size(0); // Unlimited map size | ||||
|  | ||||
|     // Enable full optimization for maximum performance | ||||
|     engine.set_optimization_level(rhai::OptimizationLevel::Full); | ||||
|  | ||||
|     log::info!("Engine configured for maximum performance"); | ||||
|  | ||||
|     // Create shutdown channel (for graceful shutdown, though not used in benchmarks) | ||||
|     let (_shutdown_tx, shutdown_rx) = mpsc::channel::<()>(1); | ||||
|  | ||||
|     // Spawn the worker | ||||
|     let worker_handle = spawn_rhai_worker( | ||||
|         args.worker_id, | ||||
|         args.db_path, | ||||
|         engine, | ||||
|         args.redis_url, | ||||
|         shutdown_rx, | ||||
|         args.preserve_tasks, | ||||
|     ); | ||||
|  | ||||
|     // Wait for the worker to complete | ||||
|     match worker_handle.await { | ||||
|         Ok(result) => match result { | ||||
|             Ok(_) => { | ||||
|                 log::info!("Worker completed successfully"); | ||||
|                 Ok(()) | ||||
|             } | ||||
|             Err(e) => { | ||||
|                 log::error!("Worker failed: {}", e); | ||||
|                 Err(e) | ||||
|             } | ||||
|         }, | ||||
|         Err(e) => { | ||||
|             log::error!("Worker task panicked: {}", e); | ||||
|             Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>) | ||||
|         } | ||||
|     } | ||||
| } | ||||

rhailib/_archive/worker/docs/ARCHITECTURE.md (new file, 53 lines)
							| @@ -0,0 +1,53 @@ | ||||
| # Architecture of the `rhailib_worker` Crate | ||||
|  | ||||
| The `rhailib_worker` crate implements a distributed task execution system for Rhai scripts, providing scalable, reliable script processing through Redis-based task queues. Workers are decoupled from contexts, allowing a single worker to process tasks for multiple contexts (circles). | ||||
|  | ||||
| ## Core Architecture | ||||
|  | ||||
| ```mermaid | ||||
| graph TD | ||||
|     A[Worker Process] --> B[Task Queue Processing] | ||||
|     A --> C[Script Execution Engine] | ||||
|     A --> D[Result Management] | ||||
|      | ||||
|     B --> B1[Redis Queue Monitoring] | ||||
|     B --> B2[Task Deserialization] | ||||
|     B --> B3[Priority Handling] | ||||
|      | ||||
|     C --> C1[Rhai Engine Integration] | ||||
|     C --> C2[Context Management] | ||||
|     C --> C3[Error Handling] | ||||
|      | ||||
|     D --> D1[Result Serialization] | ||||
|     D --> D2[Reply Queue Management] | ||||
|     D --> D3[Status Updates] | ||||
| ``` | ||||
|  | ||||
| ## Key Components | ||||
|  | ||||
| ### Task Processing Pipeline | ||||
| - **Queue Monitoring**: Continuous Redis queue polling for new tasks | ||||
| - **Task Execution**: Secure Rhai script execution with proper context | ||||
| - **Result Handling**: Comprehensive result and error management | ||||
|  | ||||
| ### Engine Integration | ||||
| - **Rhailib Engine**: Full integration with rhailib_engine for DSL access | ||||
| - **Context Injection**: Proper authentication and database context setup | ||||
| - **Security**: Isolated execution environment with access controls | ||||
|  | ||||
| ### Scalability Features | ||||
| - **Horizontal Scaling**: Multiple worker instances for load distribution | ||||
| - **Queue-based Architecture**: Reliable task distribution via Redis | ||||
| - **Fault Tolerance**: Robust error handling and recovery mechanisms | ||||
|  | ||||
| ## Dependencies | ||||
|  | ||||
| - **Redis Integration**: Task queue management and communication | ||||
| - **Rhai Engine**: Script execution with full DSL capabilities | ||||
| - **Client Integration**: Shared data structures with rhai_dispatcher | ||||
| - **Heromodels**: Database and business logic integration | ||||
| - **Async Runtime**: Tokio for high-performance concurrent processing | ||||
|  | ||||
| ## Deployment Patterns | ||||
|  | ||||
| Workers can be deployed as standalone processes, containerized services, or embedded components, providing flexibility for various deployment scenarios from development to production. | ||||

rhailib/_archive/worker/src/lib.rs (new file, 259 lines)
							| @@ -0,0 +1,259 @@ | ||||
| use chrono::Utc; | ||||
| use log::{debug, error, info}; | ||||
| use redis::AsyncCommands; | ||||
| use rhai::{Dynamic, Engine}; | ||||
| use rhai_dispatcher::RhaiTaskDetails; // Import for constructing the reply message | ||||
| use serde_json; // For serializing the reply message | ||||
| use std::collections::HashMap; | ||||
| use tokio::sync::mpsc; // For shutdown signal | ||||
| use tokio::task::JoinHandle; // Handle type for the spawned worker task | ||||
|  | ||||
| const NAMESPACE_PREFIX: &str = "rhailib:"; | ||||
| const BLPOP_TIMEOUT_SECONDS: usize = 5; | ||||
|  | ||||
| // This function updates specific fields in the Redis hash. | ||||
| // It doesn't need to know the full RhaiTaskDetails struct, only the field names. | ||||
| async fn update_task_status_in_redis( | ||||
|     conn: &mut redis::aio::MultiplexedConnection, | ||||
|     task_id: &str, | ||||
|     status: &str, | ||||
|     output: Option<String>, | ||||
|     error_msg: Option<String>, | ||||
| ) -> redis::RedisResult<()> { | ||||
|     let task_key = format!("{}{}", NAMESPACE_PREFIX, task_id); | ||||
|     let mut updates: Vec<(&str, String)> = vec![ | ||||
|         ("status", status.to_string()), | ||||
|         ("updatedAt", Utc::now().timestamp().to_string()), | ||||
|     ]; | ||||
|     if let Some(out) = output { | ||||
|         updates.push(("output", out)); | ||||
|     } | ||||
|     if let Some(err) = error_msg { | ||||
|         updates.push(("error", err)); | ||||
|     } | ||||
|     debug!( | ||||
|         "Updating task {} in Redis with status: {}, updates: {:?}", | ||||
|         task_id, status, updates | ||||
|     ); | ||||
|     conn.hset_multiple::<_, _, _, ()>(&task_key, &updates) | ||||
|         .await?; | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| pub fn spawn_rhai_worker( | ||||
|     worker_id: String, | ||||
|     db_path: String, | ||||
|     mut engine: Engine, | ||||
|     redis_url: String, | ||||
|     mut shutdown_rx: mpsc::Receiver<()>, // Add shutdown receiver | ||||
|     preserve_tasks: bool,                // Flag to control task cleanup | ||||
| ) -> JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>> { | ||||
|     tokio::spawn(async move { | ||||
|         let queue_key = format!("{}{}", NAMESPACE_PREFIX, worker_id); | ||||
|         info!( | ||||
|             "Rhai Worker for Worker ID '{}' starting. Connecting to Redis at {}. Listening on queue: {}. Waiting for tasks or shutdown signal.", | ||||
|             worker_id, redis_url, queue_key | ||||
|         ); | ||||
|  | ||||
|         let redis_client = match redis::Client::open(redis_url.as_str()) { | ||||
|             Ok(client) => client, | ||||
|             Err(e) => { | ||||
|                 error!( | ||||
|                     "Worker for Worker ID '{}': Failed to open Redis client: {}", | ||||
|                     worker_id, e | ||||
|                 ); | ||||
|                 return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>); | ||||
|             } | ||||
|         }; | ||||
|         let mut redis_conn = match redis_client.get_multiplexed_async_connection().await { | ||||
|             Ok(conn) => conn, | ||||
|             Err(e) => { | ||||
|                 error!( | ||||
|                     "Worker for Worker ID '{}': Failed to get Redis connection: {}", | ||||
|                     worker_id, e | ||||
|                 ); | ||||
|                 return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>); | ||||
|             } | ||||
|         }; | ||||
|         info!( | ||||
|             "Worker for Worker ID '{}' successfully connected to Redis.", | ||||
|             worker_id | ||||
|         ); | ||||
|  | ||||
|         loop { | ||||
|             let blpop_keys = vec![queue_key.clone()]; | ||||
|             tokio::select! { | ||||
|                 // Listen for shutdown signal | ||||
|                 _ = shutdown_rx.recv() => { | ||||
|                     info!("Worker for Worker ID '{}': Shutdown signal received. Terminating loop.", worker_id.clone()); | ||||
|                     break; | ||||
|                 } | ||||
|                 // Listen for tasks from Redis | ||||
|                 blpop_result = redis_conn.blpop(&blpop_keys, BLPOP_TIMEOUT_SECONDS as f64) => { | ||||
|                     debug!("Worker for Worker ID '{}': Attempting BLPOP on queue: {}", worker_id.clone(), queue_key); | ||||
|                     let response: Option<(String, String)> = match blpop_result { | ||||
|                         Ok(resp) => resp, | ||||
|                         Err(e) => { | ||||
|                             error!("Worker '{}': Redis BLPOP error on queue {}: {}. Worker for this circle might stop.", worker_id, queue_key, e); | ||||
|                             return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>); | ||||
|                         } | ||||
|                     }; | ||||
|  | ||||
|                     if let Some((_queue_name_recv, task_id)) = response { | ||||
|                         info!("Worker '{}' received task_id: {} from queue: {}", worker_id, task_id, _queue_name_recv); | ||||
|                         debug!("Worker '{}', Task {}: Processing started.", worker_id, task_id); | ||||
|  | ||||
|                         let task_details_key = format!("{}{}", NAMESPACE_PREFIX, task_id); | ||||
|                         debug!("Worker '{}', Task {}: Attempting HGETALL from key: {}", worker_id, task_id, task_details_key); | ||||
|  | ||||
|                         let task_details_map_result: Result<HashMap<String, String>, _> = | ||||
|                             redis_conn.hgetall(&task_details_key).await; | ||||
|  | ||||
|                         match task_details_map_result { | ||||
|                             Ok(details_map) => { | ||||
|                                 debug!("Worker '{}', Task {}: HGETALL successful. Details: {:?}", worker_id, task_id, details_map); | ||||
|                                 let script_content_opt = details_map.get("script").cloned(); | ||||
|                                 let created_at_str_opt = details_map.get("createdAt").cloned(); | ||||
|                                 let caller_id = details_map.get("callerId").cloned().unwrap_or_default(); | ||||
|                                 let context_id = details_map.get("contextId").cloned().unwrap_or_default(); | ||||
|  | ||||
|                                 // Treat a missing field like an empty one so a malformed task surfaces as an | ||||
|                                 // error return instead of panicking the worker via `expect`. | ||||
|                                 if context_id.is_empty() { | ||||
|                                     error!("Worker '{}', Task {}: contextId field missing or empty in Redis hash", worker_id, task_id); | ||||
|                                     return Err("contextId field missing or empty in Redis hash".into()); | ||||
|                                 } | ||||
|                                 if caller_id.is_empty() { | ||||
|                                     error!("Worker '{}', Task {}: callerId field missing or empty in Redis hash", worker_id, task_id); | ||||
|                                     return Err("callerId field missing or empty in Redis hash".into()); | ||||
|                                 } | ||||
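|                                 // NOTE: returning Err here stops this worker entirely on a malformed task; | ||||
|                                 // a more forgiving design (not implemented here) would mark the task as | ||||
|                                 // failed and continue with the next one. | ||||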
|  | ||||
|                                 if let Some(script_content) = script_content_opt { | ||||
|                                     info!("Worker for Context ID '{}' processing task_id: {}. Script: {:.50}...", context_id, task_id, script_content); | ||||
|                                     debug!("Worker for Context ID '{}', Task {}: Attempting to update status to 'processing'.", context_id, task_id); | ||||
|                                     if let Err(e) = update_task_status_in_redis(&mut redis_conn, &task_id, "processing", None, None).await { | ||||
|                                         error!("Worker for Context ID '{}', Task {}: Failed to update status to 'processing': {}", context_id, task_id, e); | ||||
|                                     } else { | ||||
|                                         debug!("Worker for Context ID '{}', Task {}: Status updated to 'processing'.", context_id, task_id); | ||||
|                                     } | ||||
|  | ||||
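|                                     // Make DB_PATH, CALLER_ID and CONTEXT_ID visible to the script run by | ||||
|                                     // storing them in the engine's default tag; registered host functions can | ||||
|                                     // read the tag from their call context during evaluation. | ||||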
|                                     let mut db_config = rhai::Map::new(); | ||||
|                                     db_config.insert("DB_PATH".into(), db_path.clone().into()); | ||||
|                                     db_config.insert("CALLER_ID".into(), caller_id.clone().into()); | ||||
|                                     db_config.insert("CONTEXT_ID".into(), context_id.clone().into()); | ||||
|                                     engine.set_default_tag(Dynamic::from(db_config)); // Or pass via CallFnOptions | ||||
|  | ||||
|                                     debug!("Worker for Context ID '{}', Task {}: Evaluating script with Rhai engine.", context_id, task_id); | ||||
|  | ||||
|                                     let mut final_status = "error".to_string(); // Default to error | ||||
|                                     let mut final_output: Option<String> = None; | ||||
|                                     let mut final_error_msg: Option<String> = None; | ||||
|  | ||||
|                                     match engine.eval::<rhai::Dynamic>(&script_content) { | ||||
|                                         Ok(result) => { | ||||
|                                             let output_str = if result.is::<String>() { | ||||
|                                                 // If the result is a string, we can unwrap it directly. | ||||
|                                                 // This moves `result`, which is fine because it's the last time we use it in this branch. | ||||
|                                                 result.into_string().unwrap() | ||||
|                                             } else { | ||||
|                                                 result.to_string() | ||||
|                                             }; | ||||
|                                             info!("Worker for Context ID '{}' task {} completed. Output: {}", context_id, task_id, output_str); | ||||
|                                             final_status = "completed".to_string(); | ||||
|                                             final_output = Some(output_str); | ||||
|                                         } | ||||
|                                         Err(e) => { | ||||
|                                             let error_str = format!("{:?}", *e); | ||||
|                                             error!("Worker for Context ID '{}' task {} script evaluation failed. Error: {}", context_id, task_id, error_str); | ||||
|                                             final_error_msg = Some(error_str); | ||||
|                                             // final_status remains "error" | ||||
|                                         } | ||||
|                                     } | ||||
|  | ||||
|                                     debug!("Worker for Context ID '{}', Task {}: Attempting to update status to '{}'.", context_id, task_id, final_status); | ||||
|                                     if let Err(e) = update_task_status_in_redis( | ||||
|                                         &mut redis_conn, | ||||
|                                         &task_id, | ||||
|                                         &final_status, | ||||
|                                         final_output.clone(), // Clone for task hash update | ||||
|                                         final_error_msg.clone(), // Clone for task hash update | ||||
|                                     ).await { | ||||
|                                         error!("Worker for Context ID '{}', Task {}: Failed to update final status to '{}': {}", context_id, task_id, final_status, e); | ||||
|                                     } else { | ||||
|                                         debug!("Worker for Context ID '{}', Task {}: Final status updated to '{}'.", context_id, task_id, final_status); | ||||
|                                     } | ||||
|  | ||||
|                                     // Send the final task details to the per-task reply queue. | ||||
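|                                     // This is the dispatcher's request-reply pattern: the full task details are | ||||
|                                     // serialized to JSON and LPUSHed onto a per-task reply queue that the | ||||
|                                     // client blocks on, so no status polling is needed. | ||||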
|  | ||||
|                                     let created_at = created_at_str_opt | ||||
|                                         .and_then(|s| chrono::DateTime::parse_from_rfc3339(&s).ok()) | ||||
|                                         .map(|dt| dt.with_timezone(&Utc)) | ||||
|                                         .unwrap_or_else(Utc::now); // Fallback, though createdAt should exist | ||||
|  | ||||
|                                     let reply_details = RhaiTaskDetails { | ||||
|                                         task_id: task_id.to_string(), // Add the task_id | ||||
|                                         script: script_content.clone(), // Include script for context in reply | ||||
|                                         status: final_status, // The final status | ||||
|                                         output: final_output, // The final output | ||||
|                                         error: final_error_msg, // The final error | ||||
|                                         created_at, // Original creation time | ||||
|                                         updated_at: Utc::now(), // Time of this final update/reply | ||||
|                                         caller_id: caller_id.clone(), | ||||
|                                         context_id: context_id.clone(), | ||||
|                                         worker_id: worker_id.clone(), | ||||
|                                     }; | ||||
|                                     let reply_queue_key = format!("{}:reply:{}", NAMESPACE_PREFIX, task_id); | ||||
|                                     match serde_json::to_string(&reply_details) { | ||||
|                                         Ok(reply_json) => { | ||||
|                                             let lpush_result: redis::RedisResult<i64> = redis_conn.lpush(&reply_queue_key, &reply_json).await; | ||||
|                                             match lpush_result { | ||||
|                                                 Ok(_) => debug!("Worker for Context ID '{}', Task {}: Successfully sent result to reply queue {}", context_id, task_id, reply_queue_key), | ||||
|                                                 Err(e_lpush) => error!("Worker for Context ID '{}', Task {}: Failed to LPUSH result to reply queue {}: {}", context_id, task_id, reply_queue_key, e_lpush), | ||||
|                                             } | ||||
|                                         } | ||||
|                                         Err(e_json) => { | ||||
|                                             error!("Worker for Context ID '{}', Task {}: Failed to serialize reply details for queue {}: {}", context_id, task_id, reply_queue_key, e_json); | ||||
|                                         } | ||||
|                                     } | ||||
|                                     // Clean up task details based on preserve_tasks flag | ||||
|                                     if !preserve_tasks { | ||||
|                                         // The worker is responsible for cleaning up the task details hash. | ||||
|                                         if let Err(e) = redis_conn.del::<_, ()>(&task_details_key).await { | ||||
|                                             error!("Worker for Context ID '{}', Task {}: Failed to delete task details key '{}': {}", context_id, task_id, task_details_key, e); | ||||
|                                         } else { | ||||
|                                             debug!("Worker for Context ID '{}', Task {}: Cleaned up task details key '{}'.", context_id, task_id, task_details_key); | ||||
|                                         } | ||||
|                                     } else { | ||||
|                                         debug!("Worker for Context ID '{}', Task {}: Preserving task details (preserve_tasks=true)", context_id, task_id); | ||||
|                                     } | ||||
|                                 } else { // Script content not found in hash | ||||
|                                     error!( | ||||
|                                         "Worker for Context ID '{}', Task {}: Script content not found in Redis hash. Details map: {:?}", | ||||
|                                         context_id, task_id, details_map | ||||
|                                     ); | ||||
|                                     // Clean up invalid task details based on preserve_tasks flag | ||||
|                                     if !preserve_tasks { | ||||
|                                         // Even if the script is not found, the worker should clean up the invalid task hash. | ||||
|                                         if let Err(e) = redis_conn.del::<_, ()>(&task_details_key).await { | ||||
|                                             error!("Worker for Context ID '{}', Task {}: Failed to delete invalid task details key '{}': {}", context_id, task_id, task_details_key, e); | ||||
|                                         } | ||||
|                                     } else { | ||||
|                                         debug!("Worker for Context ID '{}', Task {}: Preserving invalid task details (preserve_tasks=true)", context_id, task_id); | ||||
|                                     } | ||||
|                                 } | ||||
|                             } | ||||
|                             Err(e) => { | ||||
|                                 error!( | ||||
|                                     "Worker '{}', Task {}: Failed to fetch details (HGETALL) from Redis for key {}. Error: {:?}", | ||||
|                                     worker_id, task_id, task_details_key, e | ||||
|                                 ); | ||||
|                             } | ||||
|                         } | ||||
|                     } else { | ||||
|                         debug!("Worker '{}': BLPOP timed out on queue {}. No new tasks. Checking for shutdown signal again.", &worker_id, &queue_key); | ||||
|                     } | ||||
|                 } // End of BLPOP select arm | ||||
|             } // End of tokio::select! | ||||
|         } // End of loop | ||||
|         info!("Worker '{}' has shut down.", worker_id); | ||||
|         Ok(()) | ||||
|     }) | ||||
| } | ||||