move repos into monorepo

This commit is contained in:
Timur Gordon
2025-11-13 20:44:00 +01:00
commit 4b23e5eb7f
204 changed files with 33737 additions and 0 deletions

View File

@@ -0,0 +1,32 @@
[package]
name = "runner-osiris"
version.workspace = true
edition.workspace = true
description = "Osiris Runner - Database-backed runner"
license = "MIT OR Apache-2.0"
[[bin]]
name = "runner_osiris"
path = "src/main.rs"
[dependencies]
# Runner library
hero-runner = { path = "../../../lib/runner" }
hero-job = { path = "../../../lib/models/job" }
# Core dependencies
anyhow.workspace = true
tokio.workspace = true
log.workspace = true
env_logger.workspace = true
clap.workspace = true
# Rhai scripting
rhai = { version = "1.21.0", features = ["std", "sync", "serde"] }
# Osiris dependencies
osiris = { package = "osiris-core", path = "../../../lib/osiris/core" }
heromodels = { git = "https://git.ourworld.tf/herocode/db.git" }
heromodels_core = { git = "https://git.ourworld.tf/herocode/db.git" }
heromodels-derive = { git = "https://git.ourworld.tf/herocode/db.git" }
rhailib_dsl = { git = "https://git.ourworld.tf/herocode/rhailib.git" }

View File

@@ -0,0 +1,294 @@
//! OSIRIS Rhai Engine
//!
//! Creates a Rhai engine configured with OSIRIS contexts and methods.
use osiris::context::OsirisContext;
use osiris::objects::note::rhai::register_note_functions;
use osiris::objects::event::rhai::register_event_functions;
use osiris::objects::heroledger::rhai::register_heroledger_modules;
use osiris::objects::kyc::rhai::register_kyc_modules;
use osiris::objects::flow::rhai::register_flow_modules;
use osiris::objects::communication::rhai::register_communication_modules;
use osiris::objects::money::rhai::register_money_modules;
use osiris::objects::legal::rhai::register_legal_modules;
use osiris::objects::supervisor::rhai::register_supervisor_modules;
use rhai::{Engine, def_package, FuncRegistration};
use rhai::packages::{Package, StandardPackage};
/// Register get_context function in a Rhai engine with signatory-based access control
///
/// Simple logic:
/// - Context is a list of public keys (participants)
/// - To get_context, at least one participant must be a signatory
/// - No state tracking, no caching - creates fresh context each time
pub fn register_context_api(engine: &mut rhai::Engine) {
// Register get_context function with signatory-based access control
// Usage: get_context(['pk1', 'pk2', 'pk3'])
engine.register_fn("get_context", move |context: rhai::NativeCallContext, participants: rhai::Array| -> Result<OsirisContext, Box<rhai::EvalAltResult>> {
// Extract SIGNATORIES from context tag
let tag_map = context
.tag()
.and_then(|tag| tag.read_lock::<rhai::Map>())
.ok_or_else(|| Box::new(rhai::EvalAltResult::ErrorRuntime("Context tag must be a Map.".into(), context.position())))?;
let signatories_dynamic = tag_map.get("SIGNATORIES")
.ok_or_else(|| Box::new(rhai::EvalAltResult::ErrorRuntime("'SIGNATORIES' not found in context tag Map.".into(), context.position())))?;
// Convert SIGNATORIES array to Vec<String>
let signatories_array = signatories_dynamic.clone().into_array()
.map_err(|e| Box::new(rhai::EvalAltResult::ErrorRuntime(format!("SIGNATORIES must be an array: {}", e).into(), context.position())))?;
let signatories: Vec<String> = signatories_array.into_iter()
.map(|s| s.into_string())
.collect::<Result<Vec<_>, _>>()
.map_err(|e| Box::new(rhai::EvalAltResult::ErrorRuntime(format!("SIGNATORIES must contain strings: {}", e).into(), context.position())))?;
// Convert participants array to Vec<String>
let participant_keys: Vec<String> = participants.into_iter()
.map(|p| p.into_string())
.collect::<Result<Vec<_>, _>>()
.map_err(|e| Box::new(rhai::EvalAltResult::ErrorRuntime(format!("Participants must be strings: {}", e).into(), context.position())))?;
// Verify at least one participant is a signatory
let has_signatory = participant_keys.iter().any(|p| signatories.contains(p));
if !has_signatory {
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("Access denied: none of the participants are signatories. Signatories: {}", signatories.join(", ")).into(),
context.position()
)));
}
// Create context directly with participants
OsirisContext::builder()
.participants(participant_keys)
.build()
.map_err(|e| format!("Failed to create context: {}", e).into())
});
}
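For orientation, here is a minimal host-side sketch of how the `SIGNATORIES` list reaches `get_context`: the host places it in the engine's default tag before evaluating any script. This mirrors the unit tests at the bottom of this file; `host_example` and the `pk*` keys are illustrative placeholders, not part of this crate.

```rust
fn host_example() -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = rhai::Engine::new();
    register_context_api(&mut engine);

    // get_context reads SIGNATORIES from the engine's default tag and checks
    // the script-supplied participants against it.
    let mut tag = rhai::Map::new();
    let signatories: rhai::Array = vec![
        rhai::Dynamic::from("pk1".to_string()),
        rhai::Dynamic::from("pk2".to_string()),
    ];
    tag.insert("SIGNATORIES".into(), rhai::Dynamic::from(signatories));
    engine.set_default_tag(rhai::Dynamic::from(tag));

    // Accepted because "pk1" is a signatory; a call whose participants
    // include no signatory fails with the "Access denied" runtime error.
    engine.run(r#"let ctx = get_context(["pk1"]);"#)
}
```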
// Define the OSIRIS package
def_package! {
/// OSIRIS package with all OSIRIS types and functions
pub OsirisPackage(module) : StandardPackage {
// Register OsirisContext type with all its methods
module.set_custom_type::<OsirisContext>("OsirisContext");
// Register OsirisContext methods
FuncRegistration::new("participants")
.set_into_module(module, |ctx: &mut OsirisContext| ctx.participants());
FuncRegistration::new("context_id")
.set_into_module(module, |ctx: &mut OsirisContext| ctx.context_id());
// Typed save methods: every overload is registered under the name "save" so Rhai dispatches on the argument type; each one delegates to the generic save_object
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, note: osiris::objects::Note| ctx.save_object(note));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, event: osiris::objects::Event| ctx.save_object(event));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, user: osiris::objects::heroledger::user::User| ctx.save_object(user));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, group: osiris::objects::heroledger::group::Group| ctx.save_object(group));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, account: osiris::objects::heroledger::money::Account| ctx.save_object(account));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, zone: osiris::objects::heroledger::dnsrecord::DNSZone| ctx.save_object(zone));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, kyc_info: osiris::objects::KycInfo| ctx.save_object(kyc_info));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, kyc_session: osiris::objects::KycSession| ctx.save_object(kyc_session));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, flow_template: osiris::objects::FlowTemplate| ctx.save_object(flow_template));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, flow_instance: osiris::objects::FlowInstance| ctx.save_object(flow_instance));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, verification: osiris::objects::Verification| ctx.save_object(verification));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, email_client: osiris::objects::communication::email::EmailClient| ctx.save_object(email_client));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, mail_template: osiris::objects::communication::email::MailTemplate| ctx.save_object(mail_template));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, account: osiris::objects::Account| ctx.save_object(account));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, asset: osiris::objects::Asset| ctx.save_object(asset));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, transaction: osiris::objects::Transaction| ctx.save_object(transaction));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, contract: osiris::objects::Contract| ctx.save_object(contract));
// Supervisor objects
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, api_key: osiris::objects::supervisor::ApiKey| ctx.save_object(api_key));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, runner: osiris::objects::supervisor::Runner| ctx.save_object(runner));
FuncRegistration::new("save")
.set_into_module(module, |ctx: &mut OsirisContext, job_metadata: osiris::objects::supervisor::JobMetadata| ctx.save_object(job_metadata));
FuncRegistration::new("list")
.set_into_module(module, |ctx: &mut OsirisContext, collection: String| ctx.list(collection));
FuncRegistration::new("get")
.set_into_module(module, |ctx: &mut OsirisContext, collection: String, id: String| ctx.get(collection, id));
FuncRegistration::new("delete")
.set_into_module(module, |ctx: &mut OsirisContext, collection: String, id: String| ctx.delete(collection, id));
// Register Note functions
register_note_functions(module);
// Register Event functions
register_event_functions(module);
// Register HeroLedger modules (User, Group, Account, DNSZone)
register_heroledger_modules(module);
// Register KYC modules (KycClient, KycSession)
register_kyc_modules(module);
// Register Flow modules (FlowTemplate, FlowInstance)
register_flow_modules(module);
// Register Communication modules (Verification, EmailClient)
register_communication_modules(module);
// Register Money modules (Account, Asset, Transaction, PaymentClient)
register_money_modules(module);
// Register Legal modules (Contract)
register_legal_modules(module);
// Register Supervisor modules (ApiKey, Runner, JobMetadata)
register_supervisor_modules(module);
// Register get_context function with signatory-based access control
FuncRegistration::new("get_context")
.set_into_module(module, |context: rhai::NativeCallContext, participants: rhai::Array| -> Result<OsirisContext, Box<rhai::EvalAltResult>> {
// Extract SIGNATORIES from context tag
let tag_map = context
.tag()
.and_then(|tag| tag.read_lock::<rhai::Map>())
.ok_or_else(|| Box::new(rhai::EvalAltResult::ErrorRuntime("Context tag must be a Map.".into(), context.position())))?;
let signatories_dynamic = tag_map.get("SIGNATORIES")
.ok_or_else(|| Box::new(rhai::EvalAltResult::ErrorRuntime("'SIGNATORIES' not found in context tag Map.".into(), context.position())))?;
// Convert SIGNATORIES array to Vec<String>
let signatories_array = signatories_dynamic.clone().into_array()
.map_err(|e| Box::new(rhai::EvalAltResult::ErrorRuntime(format!("SIGNATORIES must be an array: {}", e).into(), context.position())))?;
let signatories: Vec<String> = signatories_array.into_iter()
.map(|s| s.into_string())
.collect::<Result<Vec<_>, _>>()
.map_err(|e| Box::new(rhai::EvalAltResult::ErrorRuntime(format!("SIGNATORIES must contain strings: {}", e).into(), context.position())))?;
// Convert participants array to Vec<String>
let participant_keys: Vec<String> = participants.into_iter()
.map(|p| p.into_string())
.collect::<Result<Vec<_>, _>>()
.map_err(|e| Box::new(rhai::EvalAltResult::ErrorRuntime(format!("Participants must be strings: {}", e).into(), context.position())))?;
// Verify at least one participant is a signatory
let has_signatory = participant_keys.iter().any(|p| signatories.contains(p));
if !has_signatory {
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("Access denied: none of the participants are signatories. Signatories: {}", signatories.join(", ")).into(),
context.position()
)));
}
// Create context directly with participants
OsirisContext::builder()
.participants(participant_keys)
.build()
.map_err(|e| format!("Failed to create context: {}", e).into())
});
}
}
/// Register all OSIRIS components into an engine
/// This is a convenience function that registers the complete OsirisPackage
pub fn register_osiris_full(engine: &mut Engine) {
let package = OsirisPackage::new();
package.register_into_engine(engine);
}
/// Create a single OSIRIS engine (for backward compatibility)
pub fn create_osiris_engine() -> Result<Engine, Box<dyn std::error::Error>> {
let mut engine = Engine::new_raw();
register_osiris_full(&mut engine);
Ok(engine)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_create_osiris_engine() {
let result = create_osiris_engine();
assert!(result.is_ok());
let mut engine = result.unwrap();
// Set up context tags with SIGNATORIES (like in runner_rust example)
let mut tag_map = rhai::Map::new();
// Create a proper Rhai array
let signatories: rhai::Array = vec![
rhai::Dynamic::from("pk1".to_string()),
rhai::Dynamic::from("pk2".to_string()),
rhai::Dynamic::from("pk3".to_string()),
];
tag_map.insert("SIGNATORIES".into(), rhai::Dynamic::from(signatories));
tag_map.insert("DB_PATH".into(), "/tmp/test_db".to_string().into());
tag_map.insert("CONTEXT_ID".into(), "test_context".to_string().into());
engine.set_default_tag(rhai::Dynamic::from(tag_map));
// Test get_context with valid signatories
let mut scope = rhai::Scope::new();
let test_result = engine.eval_with_scope::<rhai::Dynamic>(
&mut scope,
r#"
// At least one participant must be a signatory (here both are)
let ctx = get_context(["pk1", "pk2"]);
ctx.context_id()
"#
);
if let Err(ref e) = test_result {
eprintln!("Test error: {}", e);
}
assert!(test_result.is_ok(), "Failed to get context: {:?}", test_result.err());
assert_eq!(test_result.unwrap().to_string(), "pk1,pk2");
}
#[test]
fn test_engine_with_manager_access_denied() {
let result = create_osiris_engine();
assert!(result.is_ok());
let mut engine = result.unwrap();
// Set up context tags with SIGNATORIES
let mut tag_map = rhai::Map::new();
// Create a proper Rhai array
let signatories: rhai::Array = vec![
rhai::Dynamic::from("pk1".to_string()),
rhai::Dynamic::from("pk2".to_string()),
];
tag_map.insert("SIGNATORIES".into(), rhai::Dynamic::from(signatories));
tag_map.insert("DB_PATH".into(), "/tmp/test_db".to_string().into());
tag_map.insert("CONTEXT_ID".into(), "test_context".to_string().into());
engine.set_default_tag(rhai::Dynamic::from(tag_map));
// Test get_context with participants that include no signatory
let mut scope = rhai::Scope::new();
let test_result = engine.eval_with_scope::<rhai::Dynamic>(
&mut scope,
r#"
// Neither pk3 nor pk4 is a signatory, so this should fail
let ctx = get_context(["pk3", "pk4"]);
ctx.context_id()
"#
);
// Should fail because none of the participants are in SIGNATORIES
assert!(test_result.is_err());
let err_msg = test_result.unwrap_err().to_string();
assert!(err_msg.contains("Access denied") || err_msg.contains("not a signatory"));
}
}

View File

@@ -0,0 +1,117 @@
use hero_runner::{spawn_sync_runner, script_mode::execute_script_mode};
use clap::Parser;
use log::{error, info};
use tokio::sync::mpsc;
mod engine;
use engine::create_osiris_engine;
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// Runner ID
runner_id: String,
/// Redis URL (also used as HeroDB URL)
#[arg(short = 'r', long, default_value = "redis://localhost:6379")]
redis_url: String,
/// Base database ID for OSIRIS contexts
#[arg(long, default_value_t = 1)]
base_db_id: u16,
/// Script to execute in single-job mode (optional)
#[arg(short, long)]
script: Option<String>,
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
// Initialize logging
env_logger::init();
let args = Args::parse();
// Check if we're in script mode
if let Some(script_content) = args.script {
info!("Running in script mode with runner ID: {}", args.runner_id);
let result = execute_script_mode(
&script_content,
&args.runner_id,
args.redis_url,
std::time::Duration::from_secs(300), // Default timeout for OSIS
move || create_osiris_engine()
.expect("Failed to create OSIRIS engine"),
).await;
match result {
Ok(output) => {
println!("Script execution result:\n{}", output);
return Ok(());
}
Err(e) => {
error!("Script execution failed: {}", e);
return Err(e);
}
}
}
info!("Starting OSIS Sync Runner with ID: {}", args.runner_id);
info!("Redis URL: {}", args.redis_url);
// Create shutdown channel
let (shutdown_tx, shutdown_rx) = mpsc::channel::<()>(1);
// Setup signal handling for graceful shutdown
let shutdown_tx_clone = shutdown_tx.clone();
tokio::spawn(async move {
tokio::signal::ctrl_c().await.expect("Failed to listen for ctrl+c");
info!("Received Ctrl+C, initiating shutdown...");
let _ = shutdown_tx_clone.send(()).await;
});
// Spawn the sync runner with engine factory
let runner_handle = spawn_sync_runner(
args.runner_id.clone(),
args.redis_url,
shutdown_rx,
move || create_osiris_engine()
.expect("Failed to create OSIRIS engine"),
);
info!("OSIS Sync Runner '{}' started successfully", args.runner_id);
// Wait for the runner to complete
match runner_handle.await {
Ok(Ok(())) => {
info!("OSIS Sync Runner '{}' shut down successfully", args.runner_id);
}
Ok(Err(e)) => {
error!("OSIS Sync Runner '{}' encountered an error: {}", args.runner_id, e);
return Err(e);
}
Err(e) => {
error!("Failed to join OSIS Sync Runner '{}' task: {}", args.runner_id, e);
return Err(Box::new(e));
}
}
Ok(())
}
/// Example: Run a Rhai script with OSIRIS support
pub fn run_osiris_script(
script: &str,
) -> Result<(), Box<dyn std::error::Error>> {
let engine = create_osiris_engine()?;
engine.run(script)?;
Ok(())
}

View File

@@ -0,0 +1,41 @@
[package]
name = "runner-sal"
version.workspace = true
edition.workspace = true
description = "SAL Runner - System Abstraction Layer runner"
license = "MIT OR Apache-2.0"
[[bin]]
name = "runner_sal"
path = "src/main.rs"
[dependencies]
# Runner library
hero-runner = { path = "../../../lib/runner" }
hero-job = { path = "../../../lib/models/job" }
# Core dependencies
anyhow.workspace = true
tokio.workspace = true
log.workspace = true
env_logger.workspace = true
clap.workspace = true
# Rhai and logging
rhai = { version = "1.21.0", features = ["std", "sync", "decimal", "internals", "serde"] }
hero_logger = { git = "https://git.ourworld.tf/herocode/baobab.git", branch = "logger" }
# SAL modules
sal-os = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-redisclient = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-postgresclient = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-process = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-virt = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-git = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-zinit-client = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-mycelium = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-text = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-net = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-kubernetes = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-vault = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }
sal-hetzner = { git = "https://git.ourworld.tf/herocode/herolib_rust.git" }

View File

@@ -0,0 +1,87 @@
# SAL Runner
The SAL (System Abstraction Layer) Runner is an asynchronous job processing engine that executes Rhai scripts with access to system-level operations and infrastructure management capabilities.
## Features
- **Asynchronous Processing**: Handles multiple jobs concurrently with configurable timeouts
- **Redis Integration**: Uses Redis for job queue management and coordination
- **System Operations**: Full access to SAL modules including OS, networking, containers, and cloud services
- **Graceful Shutdown**: Responds to SIGINT (Ctrl+C) for clean termination
- **Comprehensive Logging**: Detailed logging for monitoring and debugging
## Usage
```bash
cargo run --bin runner_sal -- <RUNNER_ID> [OPTIONS]
```
### Arguments
- `<RUNNER_ID>`: Unique identifier for this runner instance (required, positional)
### Options
- `-d, --db-path <PATH>`: Database file path (default: `/tmp/sal.db`)
- `-r, --redis-url <URL>`: Redis connection URL (default: `redis://localhost:6379`)
- `-t, --timeout <SECONDS>`: Default job timeout in seconds (default: `300`)
- `-s, --script <SCRIPT>`: Execute the given script once and exit (script mode) instead of starting the job-queue loop
### Examples
```bash
# Basic usage with default settings
cargo run --bin runner_sal -- myrunner
# Custom Redis URL and database path
cargo run --bin runner_sal -- production-runner -r redis://prod-redis:6379 -d /var/lib/sal.db
# Custom timeout for long-running jobs
cargo run --bin runner_sal -- batch-runner -t 3600
```
## Available SAL Modules
The SAL runner provides access to the following system modules through Rhai scripts:
- **OS Operations**: File system, process management, system information
- **Redis Client**: Redis database operations and caching
- **PostgreSQL Client**: Database connectivity and queries
- **Process Management**: System process control and monitoring
- **Virtualization**: Container and VM management
- **Git Operations**: Version control system integration
- **Zinit Client**: Service management and initialization
- **Mycelium**: Networking and mesh connectivity
- **Text Processing**: String manipulation and text utilities
- **Network Operations**: HTTP requests, network utilities
- **Kubernetes**: Container orchestration and cluster management
- **Hetzner Cloud**: Cloud infrastructure management
## Architecture
The SAL runner uses an asynchronous architecture that:
1. Connects to Redis for job queue management
2. Creates a Rhai engine with all SAL modules registered
3. Processes jobs concurrently with configurable timeouts
4. Handles graceful shutdown on SIGINT
5. Provides comprehensive error handling and logging
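Reduced to code, the per-job flow looks roughly like the sketch below. It is illustrative only: `run_one_job` is not a function in this crate, `create_sal_engine` lives in `src/engine.rs`, and the Redis coordination, concurrency, timeouts, and shutdown handling from the steps above are provided by `hero-runner` around this core.

```rust
// Hedged sketch of what happens for a single job.
fn run_one_job(script: &str) -> Result<(), Box<rhai::EvalAltResult>> {
    // Step 2: a Rhai engine with all SAL modules registered.
    let engine = crate::engine::create_sal_engine();
    // Step 3 (core): execute the job's Rhai script. Real job scripts call
    // the SAL functions exposed by the modules listed earlier in this README.
    engine.run(script)
}
```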
## Error Handling
The runner provides detailed error messages for common issues:
- Redis connection failures
- Database access problems
- Script execution errors
- Timeout handling
- Resource cleanup on shutdown
## Logging
Set the `RUST_LOG` environment variable to control logging levels:
```bash
RUST_LOG=debug cargo run --bin runner_sal -- myrunner
```
Available log levels: `error`, `warn`, `info`, `debug`, `trace`

View File

@@ -0,0 +1,73 @@
use std::sync::{Arc, OnceLock};
// Re-export common Rhai types for convenience
pub use rhai::Engine;
// SAL module registration is done via the sal-* crates' `rhai` modules in register_sal_modules below
/// Engine factory for creating and sharing Rhai engines with SAL modules.
pub struct EngineFactory {
engine: Arc<Engine>,
}
impl EngineFactory {
/// Create a new engine factory with a configured Rhai engine.
pub fn new() -> Self {
let mut engine = Engine::new();
register_sal_modules(&mut engine);
// Logger
hero_logger::rhai_integration::configure_rhai_logging(&mut engine, "sal_runner");
Self {
engine: Arc::new(engine),
}
}
/// Get a shared reference to the engine.
pub fn get_engine(&self) -> Arc<Engine> {
Arc::clone(&self.engine)
}
/// Get the global singleton engine factory.
pub fn global() -> &'static EngineFactory {
static FACTORY: OnceLock<EngineFactory> = OnceLock::new();
FACTORY.get_or_init(|| EngineFactory::new())
}
}
pub fn register_sal_modules(engine: &mut Engine) {
let _ = sal_os::rhai::register_os_module(engine);
let _ = sal_redisclient::rhai::register_redisclient_module(engine);
let _ = sal_postgresclient::rhai::register_postgresclient_module(engine);
let _ = sal_process::rhai::register_process_module(engine);
let _ = sal_virt::rhai::register_virt_module(engine);
let _ = sal_git::rhai::register_git_module(engine);
let _ = sal_zinit_client::rhai::register_zinit_module(engine);
let _ = sal_mycelium::rhai::register_mycelium_module(engine);
let _ = sal_text::rhai::register_text_module(engine);
let _ = sal_net::rhai::register_net_module(engine);
let _ = sal_kubernetes::rhai::register_kubernetes_module(engine);
let _ = sal_hetzner::rhai::register_hetzner_module(engine);
println!("SAL modules registered successfully.");
}
/// Create a new SAL engine instance.
pub fn create_sal_engine() -> Engine {
let mut engine = Engine::new();
register_sal_modules(&mut engine);
hero_logger::rhai_integration::configure_rhai_logging(&mut engine, "sal_runner");
engine
}
/// Create a shared system engine using the factory.
pub fn create_shared_sal_engine() -> Arc<Engine> {
EngineFactory::global().get_engine()
}
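A brief, hedged usage sketch for the helpers above (`engine_usage_example` is illustrative, not part of the crate): `create_sal_engine` builds a fresh engine on every call, while `create_shared_sal_engine` and `EngineFactory::global()` hand out `Arc` clones of a single lazily built instance.

```rust
fn engine_usage_example() {
    // A fresh, independently owned engine: one full registration pass per call.
    let own_engine = create_sal_engine();

    // The process-wide shared engine: built once inside EngineFactory::global(),
    // then handed out as Arc clones.
    let shared_a = create_shared_sal_engine();
    let shared_b = EngineFactory::global().get_engine();
    assert!(std::sync::Arc::ptr_eq(&shared_a, &shared_b));

    // Both run scripts the same way; Engine::run only needs a shared reference.
    let _ = own_engine.run("let x = 2 + 2;");
    let _ = shared_a.run("let y = 40 + 2;");
}
```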

bin/runners/sal/src/main.rs
View File

@@ -0,0 +1,108 @@
use hero_runner::{spawn_async_runner, script_mode::execute_script_mode};
use clap::Parser;
use log::{error, info};
use std::time::Duration;
use tokio::sync::mpsc;
mod engine;
use engine::create_sal_engine;
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// Runner ID
runner_id: String,
/// Database path
#[arg(short, long, default_value = "/tmp/sal.db")]
db_path: String,
/// Redis URL
#[arg(short = 'r', long, default_value = "redis://localhost:6379")]
redis_url: String,
/// Default timeout for jobs in seconds
#[arg(short, long, default_value_t = 300)]
timeout: u64,
/// Script to execute in single-job mode (optional)
#[arg(short, long)]
script: Option<String>,
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
// Initialize logging
env_logger::init();
let args = Args::parse();
// Check if we're in script mode
if let Some(script_content) = args.script {
info!("Running in script mode with runner ID: {}", args.runner_id);
let result = execute_script_mode(
&script_content,
&args.runner_id,
args.redis_url,
Duration::from_secs(args.timeout),
create_sal_engine,
).await;
match result {
Ok(output) => {
println!("Script execution result:\n{}", output);
return Ok(());
}
Err(e) => {
error!("Script execution failed: {}", e);
return Err(e);
}
}
}
info!("Starting SAL Async Runner with ID: {}", args.runner_id);
info!("Database path: {}", args.db_path);
info!("Redis URL: {}", args.redis_url);
info!("Default timeout: {} seconds", args.timeout);
// Create shutdown channel
let (shutdown_tx, shutdown_rx) = mpsc::channel::<()>(1);
// Setup signal handling for graceful shutdown
let shutdown_tx_clone = shutdown_tx.clone();
tokio::spawn(async move {
tokio::signal::ctrl_c().await.expect("Failed to listen for ctrl+c");
info!("Received Ctrl+C, initiating shutdown...");
let _ = shutdown_tx_clone.send(()).await;
});
// Spawn the async runner with engine factory
let runner_handle = spawn_async_runner(
args.runner_id.clone(),
args.db_path,
args.redis_url,
shutdown_rx,
Duration::from_secs(args.timeout),
create_sal_engine,
);
info!("SAL Async Runner '{}' started successfully", args.runner_id);
// Wait for the runner to complete
match runner_handle.await {
Ok(Ok(())) => {
info!("SAL Async Runner '{}' shut down successfully", args.runner_id);
}
Ok(Err(e)) => {
error!("SAL Async Runner '{}' encountered an error: {}", args.runner_id, e);
return Err(e);
}
Err(e) => {
error!("Failed to join SAL Async Runner '{}' task: {}", args.runner_id, e);
return Err(Box::new(e));
}
}
Ok(())
}