Format codebase

Signed-off-by: Lee Smet <lee.smet@hotmail.com>
Lee Smet
2025-08-29 11:22:42 +02:00
parent c1c1ae3bd1
commit 4d1cd3d910
11 changed files with 125 additions and 74 deletions

View File

@@ -1,13 +1,7 @@
-pub mod supervisor_client;
 pub mod mycelium_client;
+pub mod supervisor_client;
 pub mod types;
 
+pub use mycelium_client::{MyceliumClient, MyceliumClientError};
+pub use supervisor_client::{SupervisorClient, SupervisorClientError};
 pub use types::Destination;
-pub use supervisor_client::{
-    SupervisorClient,
-    SupervisorClientError,
-};
-pub use mycelium_client::{
-    MyceliumClient,
-    MyceliumClientError,
-};

View File

@@ -6,13 +6,13 @@ use reqwest::Client as HttpClient;
 use serde_json::{Value, json};
 use thiserror::Error;
 
-use crate::models::TransportStatus;
 use crate::clients::Destination;
+use crate::models::TransportStatus;
 
 /// Lightweight client for Mycelium JSON-RPC (send + query status)
 #[derive(Clone)]
 pub struct MyceliumClient {
     base_url: String, // e.g. http://127.0.0.1:8990
     http: HttpClient,
     id_counter: Arc<AtomicU64>,
 }
@@ -58,20 +58,30 @@ impl MyceliumClient {
         let body: Value = resp.json().await?;
         if let Some(err) = body.get("error") {
             let code = err.get("code").and_then(|v| v.as_i64()).unwrap_or(0);
-            let msg = err.get("message").and_then(|v| v.as_str()).unwrap_or("unknown error");
+            let msg = err
+                .get("message")
+                .and_then(|v| v.as_str())
+                .unwrap_or("unknown error");
             if code == 408 {
                 return Err(MyceliumClientError::TransportTimeout);
             }
-            return Err(MyceliumClientError::RpcError(format!("code={code} msg={msg}")));
+            return Err(MyceliumClientError::RpcError(format!(
+                "code={code} msg={msg}"
+            )));
         }
         if !status.is_success() {
-            return Err(MyceliumClientError::RpcError(format!("HTTP {status}, body {body}")));
+            return Err(MyceliumClientError::RpcError(format!(
+                "HTTP {status}, body {body}"
+            )));
         }
         Ok(body)
     }

     /// Call messageStatus with an outbound message id (hex string)
-    pub async fn message_status(&self, id_hex: &str) -> Result<TransportStatus, MyceliumClientError> {
+    pub async fn message_status(
+        &self,
+        id_hex: &str,
+    ) -> Result<TransportStatus, MyceliumClientError> {
         let params = json!({ "id": id_hex });
         let body = self.jsonrpc("messageStatus", params).await?;
         let result = body.get("result").ok_or_else(|| {
@@ -83,7 +93,9 @@ impl MyceliumClient {
         } else if let Some(s) = result.as_str() {
             s.to_string()
         } else {
-            return Err(MyceliumClientError::InvalidResponse(format!("unexpected result shape: {result}")));
+            return Err(MyceliumClientError::InvalidResponse(format!(
+                "unexpected result shape: {result}"
+            )));
         };
         Self::map_status(&status_str).ok_or_else(|| {
             MyceliumClientError::InvalidResponse(format!("unknown status: {status_str}"))
@@ -143,7 +155,10 @@ impl MyceliumClient {

     /// Helper to extract outbound message id from pushMessage result (InboundMessage or PushMessageResponseId)
     pub fn extract_message_id_from_result(result: &Value) -> Option<String> {
-        result.get("id").and_then(|v| v.as_str()).map(|s| s.to_string())
+        result
+            .get("id")
+            .and_then(|v| v.as_str())
+            .map(|s| s.to_string())
     }
 }
@@ -162,24 +177,39 @@ mod tests {
             Some(10),
         );
         let msg1 = p1.get("message").unwrap();
-        assert_eq!(msg1.get("topic").unwrap().as_str().unwrap(), "supervisor.rpc");
+        assert_eq!(
+            msg1.get("topic").unwrap().as_str().unwrap(),
+            "supervisor.rpc"
+        );
         assert_eq!(msg1.get("payload").unwrap().as_str().unwrap(), "Zm9vYmFy");
         assert_eq!(
-            msg1.get("dst").unwrap().get("ip").unwrap().as_str().unwrap(),
+            msg1.get("dst")
+                .unwrap()
+                .get("ip")
+                .unwrap()
+                .as_str()
+                .unwrap(),
             "2001:db8::1"
         );
         assert_eq!(p1.get("reply_timeout").unwrap().as_u64().unwrap(), 10);

         // PK destination without timeout
         let p2 = MyceliumClient::build_push_params(
-            &Destination::Pk("bb39b4a3a4efd70f3e05e37887677e02efbda14681d0acd3882bc0f754792c32".into()),
+            &Destination::Pk(
+                "bb39b4a3a4efd70f3e05e37887677e02efbda14681d0acd3882bc0f754792c32".into(),
+            ),
             "supervisor.rpc",
             "YmF6", // "baz"
             None,
         );
         let msg2 = p2.get("message").unwrap();
         assert_eq!(
-            msg2.get("dst").unwrap().get("pk").unwrap().as_str().unwrap(),
+            msg2.get("dst")
+                .unwrap()
+                .get("pk")
+                .unwrap()
+                .as_str()
+                .unwrap(),
             "bb39b4a3a4efd70f3e05e37887677e02efbda14681d0acd3882bc0f754792c32"
         );
         assert!(p2.get("reply_timeout").is_none());
@@ -205,4 +235,4 @@ mod tests {
"fedcba9876543210" "fedcba9876543210"
); );
} }
} }

View File

@@ -151,7 +151,12 @@ impl SupervisorClient {
         let result = self
             .mycelium
-            .push_message(&self.destination, &self.topic, &payload_b64, Some(reply_timeout_secs))
+            .push_message(
+                &self.destination,
+                &self.topic,
+                &payload_b64,
+                Some(reply_timeout_secs),
+            )
             .await?;

         // Expect an InboundMessage-like with a base64 payload containing the supervisor JSON-RPC response
@@ -163,7 +168,11 @@ impl SupervisorClient {
one.get("payload") one.get("payload")
.and_then(|v| v.as_str()) .and_then(|v| v.as_str())
.map(|s| s.to_string()) .map(|s| s.to_string())
.ok_or_else(|| SupervisorClientError::InvalidResponse(format!("missing payload in result: {result}")))? .ok_or_else(|| {
SupervisorClientError::InvalidResponse(format!(
"missing payload in result: {result}"
))
})?
} else { } else {
return Err(SupervisorClientError::TransportTimeout); return Err(SupervisorClientError::TransportTimeout);
} }
@@ -174,15 +183,19 @@ impl SupervisorClient {
         let raw = BASE64_STANDARD
             .decode(payload_field.as_bytes())
-            .map_err(|e| SupervisorClientError::InvalidResponse(format!("invalid base64 payload: {e}")))?;
+            .map_err(|e| {
+                SupervisorClientError::InvalidResponse(format!("invalid base64 payload: {e}"))
+            })?;

         let rpc_resp: Value = serde_json::from_slice(&raw)?;
         if let Some(err) = rpc_resp.get("error") {
             return Err(SupervisorClientError::RpcError(err.to_string()));
         }
-        let res = rpc_resp
-            .get("result")
-            .ok_or_else(|| SupervisorClientError::InvalidResponse(format!("missing result in supervisor reply: {rpc_resp}")))?;
+        let res = rpc_resp.get("result").ok_or_else(|| {
+            SupervisorClientError::InvalidResponse(format!(
+                "missing result in supervisor reply: {rpc_resp}"
+            ))
+        })?;
         Ok(res.clone())
     }

View File

@@ -6,4 +6,4 @@ pub enum Destination {
     Ip(IpAddr),
     /// 64-hex public key of the receiver node
     Pk(String),
 }

View File

@@ -1,8 +1,8 @@
-pub mod models;
-pub mod storage;
-pub mod service;
-mod time;
-pub mod dag;
-pub mod rpc;
 pub mod clients;
+pub mod dag;
+pub mod models;
 pub mod router;
+pub mod rpc;
+pub mod service;
+pub mod storage;
+mod time;

View File

@@ -2,8 +2,8 @@ use clap::Parser;
 use std::net::{IpAddr, SocketAddr};
 use std::sync::Arc;
-use tracing::{info, warn, error};
-use tracing_subscriber::{fmt, EnvFilter};
+use tracing::{error, info, warn};
+use tracing_subscriber::{EnvFilter, fmt};

 #[derive(Debug, Clone, Parser)]
 #[command(
     name = "herocoordinator",
@@ -75,14 +75,14 @@ struct Cli {
 #[tokio::main]
 async fn main() {
     let cli = Cli::parse();
     // Initialize tracing subscriber (pretty formatter; controlled by RUST_LOG)
     let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
     tracing_subscriber::fmt()
         .with_env_filter(filter)
         .pretty()
         .with_target(true)
         .with_level(true)
         .init();

     let http_addr = SocketAddr::new(cli.api_http_ip, cli.api_http_port);
     let ws_addr = SocketAddr::new(cli.api_ws_ip, cli.api_ws_port);

View File

@@ -3,12 +3,12 @@ use std::{collections::HashSet, sync::Arc};
 use serde_json::{Value, json};
 use tokio::sync::Semaphore;
-use tracing::{info, warn, error};

 use crate::{
-    clients::{Destination, SupervisorClient, MyceliumClient},
+    clients::{Destination, MyceliumClient, SupervisorClient},
     models::{Job, JobStatus, Message, MessageStatus, ScriptType, TransportStatus},
     service::AppService,
 };
+use tracing::{error, info, warn};

 #[derive(Clone, Debug)]
 pub struct RouterConfig {
@@ -71,7 +71,8 @@ pub fn start_router(service: AppService, cfg: RouterConfig) -> Vec<tokio::task::
                 // Ensure permit is dropped at end of task
                 let _permit = permit;
                 if let Err(e) =
-                    deliver_one(&service_task, &cfg_task, ctx_id, &key, mycelium).await
+                    deliver_one(&service_task, &cfg_task, ctx_id, &key, mycelium)
+                        .await
                 {
                     error!(context_id=ctx_id, key=%key, error=%e, "Delivery error");
                 }
@@ -256,7 +257,13 @@ async fn deliver_one(
                         sup_topic.clone(),
                         None,
                     );
-                    match sup.job_result_sync(job_id.to_string(), job_result_reply_timeout).await {
+                    match sup
+                        .job_result_sync(
+                            job_id.to_string(),
+                            job_result_reply_timeout,
+                        )
+                        .await
+                    {
                         Ok(result_map) => {
                             // Persist the result into the Job.result map (merge)
                             let _ = service_poll
@@ -268,7 +275,13 @@ async fn deliver_one(
                                 )
                                 .await;
                             // Log which key was stored (success or error)
-                            let key = result_map.keys().next().cloned().unwrap_or_else(|| "unknown".to_string());
+                            let key = result_map
+                                .keys()
+                                .next()
+                                .cloned()
+                                .unwrap_or_else(|| {
+                                    "unknown".to_string()
+                                });
                             let _ = service_poll
                                 .append_message_logs(
                                     context_id,
@@ -337,10 +350,7 @@ async fn deliver_one(
                                     context_id,
                                     caller_id,
                                     id,
-                                    vec![format!(
-                                        "job.status sync error: {}",
-                                        e
-                                    )],
+                                    vec![format!("job.status sync error: {}", e)],
                                 )
                                 .await;
                         }
@@ -434,7 +444,6 @@ fn map_supervisor_job_status(s: &str) -> Option<(JobStatus, bool)> {
     }
 }
 
-
 /// Auto-discover contexts periodically and ensure a router loop exists for each.
 /// Returns a JoinHandle of the discovery task (router loops are detached).
 pub fn start_router_auto(service: AppService, cfg: RouterConfig) -> tokio::task::JoinHandle<()> {
@@ -452,7 +461,7 @@ pub fn start_router_auto(service: AppService, cfg: RouterConfig) -> tokio::task:
                     };
                     let _ = start_router(service.clone(), cfg_ctx);
                     active.insert(ctx_id);
-                    info!(context_id=ctx_id, "Started loop for context");
+                    info!(context_id = ctx_id, "Started loop for context");
                 }
             }
         }

View File

@@ -410,11 +410,7 @@ pub fn build_module(state: Arc<AppState>) -> RpcModule<()> {
             let state = state.clone();
             async move {
                 let p: ActorLoadParams = params.parse().map_err(invalid_params_err)?;
-                let actor = state
-                    .service
-                    .load_actor(p.id)
-                    .await
-                    .map_err(storage_err)?;
+                let actor = state.service.load_actor(p.id).await.map_err(storage_err)?;
                 Ok::<_, ErrorObjectOwned>(actor)
             }
         })

View File

@@ -694,7 +694,7 @@ impl AppService {
         Ok(())
     }

     /// Bypass-permission variant to update a job status with transition validation.
     /// This skips the executor permission check but enforces the same state transition rules.
     pub async fn update_job_status_unchecked(
         &self,

View File

@@ -1,4 +1,3 @@
 pub mod redis;
 
 pub use redis::RedisDriver;
-

View File

@@ -6,10 +6,11 @@ use serde::de::DeserializeOwned;
 use serde_json::{Map as JsonMap, Value};
 use tokio::sync::Mutex;
-use tracing::{error, warn, info, debug, trace};

 use crate::models::{
-    Actor, Context, Flow, FlowStatus, Job, JobStatus, Message, MessageStatus, Runner, TransportStatus,
+    Actor, Context, Flow, FlowStatus, Job, JobStatus, Message, MessageStatus, Runner,
+    TransportStatus,
 };
+use tracing::{debug, error, info, trace, warn};

 type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;
@@ -219,7 +220,7 @@ impl RedisDriver {
         let key = Self::actor_key(id);
         self.hget_model(db, &key).await
     }

     /// Save an Actor globally in DB 0 (Actor is context-independent)
     pub async fn save_actor_global(&self, actor: &Actor) -> Result<()> {
         let json = serde_json::to_value(actor)?;
         let id = json
@@ -717,10 +718,15 @@ impl RedisDriver {

     /// Register a context id in the global set "contexts" stored in DB 0.
     pub async fn register_context_id(&self, id: u32) -> Result<()> {
         let mut cm = self.manager_for_db(0).await?;
-        let _: i64 = redis::cmd("SADD").arg("contexts").arg(id).query_async(&mut cm).await.map_err(|e| {
-            error!(db=0, context_id=%id, error=%e, "SADD contexts failed");
-            e
-        })?;
+        let _: i64 = redis::cmd("SADD")
+            .arg("contexts")
+            .arg(id)
+            .query_async(&mut cm)
+            .await
+            .map_err(|e| {
+                error!(db=0, context_id=%id, error=%e, "SADD contexts failed");
+                e
+            })?;
         Ok(())
     }
@@ -728,10 +734,14 @@ impl RedisDriver {
     pub async fn list_context_ids(&self) -> Result<Vec<u32>> {
         let mut cm = self.manager_for_db(0).await?;
         // Using SMEMBERS and parsing into u32
-        let vals: Vec<String> = redis::cmd("SMEMBERS").arg("contexts").query_async(&mut cm).await.map_err(|e| {
-            error!(db=0, error=%e, "SMEMBERS contexts failed");
-            e
-        })?;
+        let vals: Vec<String> = redis::cmd("SMEMBERS")
+            .arg("contexts")
+            .query_async(&mut cm)
+            .await
+            .map_err(|e| {
+                error!(db=0, error=%e, "SMEMBERS contexts failed");
+                e
+            })?;
         let mut out = Vec::with_capacity(vals.len());
         for v in vals {
             if let Ok(n) = v.parse::<u32>() {