fix: refactor rhai functions to use Map parameters

Author: Sameh Abouel-saad
Date: 2025-08-28 03:43:00 +03:00
parent 536779f521
commit e114404ca7
3 changed files with 105 additions and 40 deletions
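The diff below converts several Rhai-facing functions from optional positional parameters to a single params map. As a minimal sketch of the new calling convention (illustrative only; the import of register_rfs_module is elided and the script values are placeholders):

use rhai::Engine;

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = Engine::new();
    // register_rfs_module is the registration function shown in this diff;
    // its import path depends on the crate layout and is omitted here.
    register_rfs_module(&mut engine)?;

    // Options are now passed as one Rhai map literal; omitted keys reach the
    // underlying implementation as None.
    let blocks: String = engine.eval(r#"rfs_list_blocks(#{ page: 1, per_page: 50 })"#)?;
    println!("{blocks}");
    Ok(())
}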


@@ -47,6 +47,7 @@ pub fn register_rfs_module(engine: &mut Engine) -> Result<(), Box<EvalAltResult>
// Register block management functions
engine.register_fn("rfs_list_blocks", rfs_list_blocks);
engine.register_fn("rfs_list_blocks", rfs_list_blocks);
engine.register_fn("rfs_upload_block", rfs_upload_block);
engine.register_fn("rfs_check_block", rfs_check_block);
engine.register_fn("rfs_get_block_downloads", rfs_get_block_downloads);
@@ -344,7 +345,7 @@ fn rfs_health_check() -> Result<String, Box<EvalAltResult>> {
///
/// # Returns
/// JSON string containing block information
fn rfs_list_blocks(
fn rfs_list_blocks_impl(
page: Option<rhai::INT>,
per_page: Option<rhai::INT>,
) -> Result<String, Box<EvalAltResult>> {
@@ -643,7 +644,7 @@ fn rfs_get_blocks_by_hash(hash: &str) -> Result<String, Box<EvalAltResult>> {
///
/// # Returns
/// JSON string containing user's blocks information
fn rfs_get_user_blocks(
fn rfs_get_user_blocks_impl(
page: Option<rhai::INT>,
per_page: Option<rhai::INT>,
) -> Result<String, Box<EvalAltResult>> {
@@ -734,13 +735,31 @@ fn rfs_upload_block(file_hash: &str, index: rhai::INT, data: rhai::Blob) -> Resu
})
}
/// Upload a file to the RFS server
/// * `index` - The index of the block in the file
/// * `data` - The block data as a byte array
///
/// # Returns
/// The hash of the uploaded block
/// Rhai-facing adapter: accepts a params map with optional keys `page` and `per_page`
fn rfs_get_user_blocks(params: Map) -> Result<String, Box<EvalAltResult>> {
let page = params
.get("page")
.and_then(|d| d.clone().try_cast::<rhai::INT>());
let per_page = params
.get("per_page")
.and_then(|d| d.clone().try_cast::<rhai::INT>());
rfs_get_user_blocks_impl(page, per_page)
}
/// Rhai-facing adapter: accepts a params map with optional keys `page` and `per_page`
fn rfs_list_blocks(params: Map) -> Result<String, Box<EvalAltResult>> {
// Extract optional page and per_page from the map
let page = params
.get("page")
.and_then(|d| d.clone().try_cast::<rhai::INT>());
let per_page = params
.get("per_page")
.and_then(|d| d.clone().try_cast::<rhai::INT>());
rfs_list_blocks_impl(page, per_page)
}
// =============================================================================
// File Operations
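Both new adapters repeat the same optional-integer extraction from the params map. A possible follow-up, not part of this commit, would be a small shared helper; the sketch below uses only the rhai types already present in this file, and the helper name opt_int is hypothetical:

use rhai::{Map, INT};

// Hypothetical helper: read an optional integer key from a Rhai params map,
// returning None when the key is missing or is not an INT.
fn opt_int(params: &Map, key: &str) -> Option<INT> {
    params.get(key).and_then(|v| v.clone().try_cast::<INT>())
}

// The adapters could then read:
//   let page = opt_int(&params, "page");
//   let per_page = opt_int(&params, "per_page");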
@@ -1116,7 +1135,7 @@ fn rfs_download_flist(flist_path: &str, output_path: &str) -> Result<String, Box
///
/// # Returns
/// JSON string containing the final FList state
fn rfs_wait_for_flist_creation(
fn rfs_wait_for_flist_creation_impl(
job_id: &str,
timeout_seconds: Option<rhai::INT>,
poll_interval_ms: Option<rhai::INT>,
@@ -1152,15 +1171,31 @@ fn rfs_wait_for_flist_creation(
Ok(state) => {
// Convert state to JSON string for Rhai
serde_json::to_string(&state).map_err(|e| {
eprintln!("[rfs_wait_for_flist_creation] serialize error: {}", e);
Box::new(EvalAltResult::ErrorRuntime(
format!("Failed to serialize FList state: {}", e).into(),
rhai::Position::NONE,
))
})
}
Err(e) => Err(Box::new(EvalAltResult::ErrorRuntime(
format!("Failed to wait for FList creation: {}", e).into(),
rhai::Position::NONE,
))),
Err(e) => {
eprintln!("[rfs_wait_for_flist_creation] error: {}", e);
Err(Box::new(EvalAltResult::ErrorRuntime(
format!("Failed to wait for FList creation: {}", e).into(),
rhai::Position::NONE,
)))
}
}
}
/// Rhai-facing adapter: accepts a params map with optional keys `timeout_seconds` and `poll_interval_ms`
fn rfs_wait_for_flist_creation(job_id: &str, params: Map) -> Result<String, Box<EvalAltResult>> {
let timeout_seconds = params
.get("timeout_seconds")
.and_then(|d| d.clone().try_cast::<rhai::INT>());
let poll_interval_ms = params
.get("poll_interval_ms")
.and_then(|d| d.clone().try_cast::<rhai::INT>());
rfs_wait_for_flist_creation_impl(job_id, timeout_seconds, poll_interval_ms)
}
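As a usage sketch for the new rfs_wait_for_flist_creation signature (illustrative only: the job id and timeout values are placeholders, and the engine is assumed to have been set up with register_rfs_module):

use rhai::{Engine, EvalAltResult};

// Calls the Map-based adapter from a Rhai script. Any key left out of the
// map reaches the impl function as None.
fn wait_example(engine: &Engine) -> Result<String, Box<EvalAltResult>> {
    engine.eval(
        r#"rfs_wait_for_flist_creation("example-job-id", #{ timeout_seconds: 600, poll_interval_ms: 2000 })"#,
    )
}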