rhai rpc queue worker and client
This commit is contained in:
1
rhai_client/.gitignore
vendored
Normal file
1
rhai_client/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/target
|
17
rhai_client/Cargo.toml
Normal file
17
rhai_client/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
|
||||
[package]
name = "rhai_client"
version = "0.1.0"
edition = "2021"

# Runtime dependencies: Redis queue transport, JSON (de)serialization,
# task ids, timestamps, logging, and the async runtime.
[dependencies]
redis = { version = "0.25.0", features = ["tokio-comp"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
uuid = { version = "1.6", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
log = "0.4"
tokio = { version = "1", features = ["macros", "rt-multi-thread"] } # For async main in examples, and general async

[dev-dependencies] # For examples later
env_logger = "0.10"
rhai = "1.18.0" # For examples that might need to show engine setup
|
212
rhai_client/src/lib.rs
Normal file
212
rhai_client/src/lib.rs
Normal file
@@ -0,0 +1,212 @@
|
||||
use chrono::Utc;
|
||||
use log::{debug, info, warn, error}; // Added error
|
||||
use redis::AsyncCommands;
|
||||
use tokio::time::{sleep, Instant}; // For polling with timeout
|
||||
use std::time::Duration;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value; // For client_rpc_id, though not directly used by this client's submit method
|
||||
use uuid::Uuid;
|
||||
|
||||
// Redis key prefixes; the worker/server side is expected to use the same
// prefixes (TODO confirm against the worker crate).
const REDIS_TASK_DETAILS_PREFIX: &str = "rhai_task_details:"; // hash of per-task fields
const REDIS_QUEUE_PREFIX: &str = "rhai_tasks:"; // list used as a per-circle pending queue
|
||||
|
||||
/// Full record of a submitted Rhai task as stored in its Redis hash.
///
/// Field renames (`clientRpcId`, `createdAt`, `updatedAt`) keep the JSON /
/// Redis field names camelCase while the struct stays snake_case.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct RhaiTaskDetails {
    /// Rhai script source submitted for execution.
    pub script: String,
    /// Lifecycle state of the task.
    pub status: String, // "pending", "processing", "completed", "error"
    #[serde(rename = "clientRpcId")]
    pub client_rpc_id: Option<Value>, // Kept for compatibility with worker/server, but optional for client
    /// Script output; `None` at submission time (filled in later, presumably
    /// by the worker — not visible from this crate).
    pub output: Option<String>,
    /// Error description; `None` at submission time.
    pub error: Option<String>, // Renamed from error_message for consistency
    /// When the client created the task.
    #[serde(rename = "createdAt")]
    pub created_at: chrono::DateTime<chrono::Utc>,
    /// Last time the record was touched.
    #[serde(rename = "updatedAt")]
    pub updated_at: chrono::DateTime<chrono::Utc>,
}
|
||||
|
||||
/// Errors produced by [`RhaiClient`] operations.
#[derive(Debug)]
pub enum RhaiClientError {
    /// Underlying Redis connection or command failure.
    RedisError(redis::RedisError),
    /// JSON (de)serialization failure (e.g. for the client RPC id).
    SerializationError(serde_json::Error),
    Timeout(String), // task_id that timed out
    TaskNotFound(String), // task_id not found after submission (should be rare)
}
|
||||
|
||||
impl From<redis::RedisError> for RhaiClientError {
    /// Wraps a raw Redis error so `?` works on redis calls.
    fn from(source: redis::RedisError) -> Self {
        Self::RedisError(source)
    }
}
|
||||
|
||||
impl From<serde_json::Error> for RhaiClientError {
|
||||
fn from(err: serde_json::Error) -> Self {
|
||||
RhaiClientError::SerializationError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for RhaiClientError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
RhaiClientError::RedisError(e) => write!(f, "Redis error: {}", e),
|
||||
RhaiClientError::SerializationError(e) => write!(f, "Serialization error: {}", e),
|
||||
RhaiClientError::Timeout(task_id) => write!(f, "Timeout waiting for task {} to complete", task_id),
|
||||
RhaiClientError::TaskNotFound(task_id) => write!(f, "Task {} not found after submission", task_id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Marker impl; the Debug derive and Display impl above satisfy the contract.
impl std::error::Error for RhaiClientError {}
|
||||
|
||||
/// Client for submitting Rhai scripts to Redis-backed task queues and
/// polling for their results.
pub struct RhaiClient {
    // Connection factory; each operation opens its own multiplexed
    // async connection from this client.
    redis_client: redis::Client,
}
|
||||
|
||||
impl RhaiClient {
|
||||
pub fn new(redis_url: &str) -> Result<Self, RhaiClientError> {
|
||||
let client = redis::Client::open(redis_url)?;
|
||||
Ok(Self { redis_client: client })
|
||||
}
|
||||
|
||||
pub async fn submit_script(
|
||||
&self,
|
||||
circle_name: &str,
|
||||
script: String,
|
||||
client_rpc_id: Option<Value>, // Optional: if the caller has an RPC ID to associate
|
||||
) -> Result<String, RhaiClientError> {
|
||||
let mut conn = self.redis_client.get_multiplexed_async_connection().await?;
|
||||
|
||||
let task_id = Uuid::new_v4().to_string();
|
||||
let now = Utc::now();
|
||||
|
||||
let task_details = RhaiTaskDetails {
|
||||
script,
|
||||
status: "pending".to_string(),
|
||||
client_rpc_id,
|
||||
output: None,
|
||||
error: None,
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
};
|
||||
|
||||
let task_key = format!("{}{}", REDIS_TASK_DETAILS_PREFIX, task_id);
|
||||
let queue_key = format!("{}{}", REDIS_QUEUE_PREFIX, circle_name.replace(" ", "_").to_lowercase());
|
||||
|
||||
debug!(
|
||||
"Submitting task_id: {} for circle: {} to queue: {}. Details: {:?}",
|
||||
task_id, circle_name, queue_key, task_details
|
||||
);
|
||||
|
||||
// Using HSET_MULTIPLE for efficiency if redis-rs supports it directly for struct fields.
|
||||
// Otherwise, individual HSETs are fine.
|
||||
// For simplicity and directness with redis-rs async, individual HSETs are used here.
|
||||
conn.hset::<_, _, _, ()>(&task_key, "script", &task_details.script).await?;
|
||||
conn.hset::<_, _, _, ()>(&task_key, "status", &task_details.status).await?;
|
||||
if let Some(rpc_id_val) = &task_details.client_rpc_id {
|
||||
conn.hset::<_, _, _, ()>(&task_key, "clientRpcId", serde_json::to_string(rpc_id_val)?).await?;
|
||||
} else {
|
||||
// Ensure the field exists even if null, or decide if it should be omitted
|
||||
conn.hset::<_, _, _, ()>(&task_key, "clientRpcId", Value::Null.to_string()).await?;
|
||||
}
|
||||
conn.hset::<_, _, _, ()>(&task_key, "createdAt", task_details.created_at.to_rfc3339()).await?;
|
||||
conn.hset::<_, _, _, ()>(&task_key, "updatedAt", task_details.updated_at.to_rfc3339()).await?;
|
||||
// output and error fields are initially None, so they might not be set here or set as empty strings/null
|
||||
|
||||
conn.lpush::<_, _, ()>(&queue_key, &task_id).await?;
|
||||
|
||||
Ok(task_id)
|
||||
}
|
||||
|
||||
// Optional: A method to check task status, similar to what circle_server_ws polling does.
|
||||
// This could be useful for a client that wants to poll for results itself.
|
||||
pub async fn get_task_status(&self, task_id: &str) -> Result<Option<RhaiTaskDetails>, RhaiClientError> {
|
||||
let mut conn = self.redis_client.get_multiplexed_async_connection().await?;
|
||||
let task_key = format!("{}{}", REDIS_TASK_DETAILS_PREFIX, task_id);
|
||||
|
||||
let result_map: Option<std::collections::HashMap<String, String>> = conn.hgetall(&task_key).await?;
|
||||
|
||||
match result_map {
|
||||
Some(map) => {
|
||||
// Reconstruct RhaiTaskDetails from HashMap
|
||||
// This is a simplified reconstruction; ensure all fields are handled robustly
|
||||
let details = RhaiTaskDetails {
|
||||
script: map.get("script").cloned().unwrap_or_default(),
|
||||
status: map.get("status").cloned().unwrap_or_default(),
|
||||
client_rpc_id: map.get("clientRpcId")
|
||||
.and_then(|s| serde_json::from_str(s).ok())
|
||||
.or(Some(Value::Null)), // Default to Value::Null if missing or parse error
|
||||
output: map.get("output").cloned(),
|
||||
error: map.get("error").cloned(),
|
||||
created_at: map.get("createdAt")
|
||||
.and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok())
|
||||
.map(|dt| dt.with_timezone(&Utc))
|
||||
.unwrap_or_else(Utc::now), // Provide a default
|
||||
updated_at: map.get("updatedAt")
|
||||
.and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok())
|
||||
.map(|dt| dt.with_timezone(&Utc))
|
||||
.unwrap_or_else(Utc::now), // Provide a default
|
||||
};
|
||||
Ok(Some(details))
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn submit_script_and_await_result(
|
||||
&self,
|
||||
circle_name: &str,
|
||||
script: String,
|
||||
client_rpc_id: Option<Value>,
|
||||
timeout: Duration,
|
||||
poll_interval: Duration,
|
||||
) -> Result<RhaiTaskDetails, RhaiClientError> {
|
||||
let task_id = self.submit_script(circle_name, script, client_rpc_id).await?;
|
||||
info!("Task {} submitted. Polling for result with timeout {:?}...", task_id, timeout);
|
||||
|
||||
let start_time = Instant::now();
|
||||
loop {
|
||||
if start_time.elapsed() > timeout {
|
||||
warn!("Timeout waiting for task {}", task_id);
|
||||
return Err(RhaiClientError::Timeout(task_id.clone()));
|
||||
}
|
||||
|
||||
match self.get_task_status(&task_id).await {
|
||||
Ok(Some(details)) => {
|
||||
debug!("Polled task {}: status = {}", task_id, details.status);
|
||||
if details.status == "completed" || details.status == "error" {
|
||||
info!("Task {} finished with status: {}", task_id, details.status);
|
||||
return Ok(details);
|
||||
}
|
||||
// else status is "pending" or "processing", continue polling
|
||||
}
|
||||
Ok(None) => {
|
||||
// This case should ideally not happen if submit_script succeeded and worker is running,
|
||||
// unless the task details were manually deleted from Redis.
|
||||
warn!("Task {} not found during polling. This might indicate an issue.", task_id);
|
||||
// Depending on desired robustness, could retry a few times or return an error immediately.
|
||||
// For now, let it continue polling up to timeout, or return a specific error.
|
||||
// If it persists, it's effectively a timeout or a lost task.
|
||||
// Let's consider it a lost task if it's not found after a short while post-submission.
|
||||
if start_time.elapsed() > Duration::from_secs(5) { // Arbitrary short duration
|
||||
return Err(RhaiClientError::TaskNotFound(task_id.clone()));
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
// Log error but continue polling unless it's a critical Redis error
|
||||
error!("Error polling task {}: {}. Will retry.", task_id, e);
|
||||
}
|
||||
}
|
||||
sleep(poll_interval).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // use super::*;
    // Placeholder suite: real coverage will land alongside the examples.
    // For the moment the priority is simply that the crate compiles.
    #[test]
    fn it_compiles() {
        let sum = 2 + 2;
        assert_eq!(sum, 4);
    }
}
|
Reference in New Issue
Block a user