move rhailib to herolib
This commit is contained in:
388
rhailib/research/rhai_engine_ui/src/app.rs
Normal file
388
rhailib/research/rhai_engine_ui/src/app.rs
Normal file
@@ -0,0 +1,388 @@
|
||||
use gloo_net::http::Request;
|
||||
use gloo_timers::callback::Interval;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use wasm_bindgen_futures::spawn_local;
|
||||
use web_sys::HtmlInputElement;
|
||||
use yew::prelude::*;
|
||||
use yew::{html, Component, Context, Html, TargetCast};
|
||||
|
||||
// --- Data Structures (placeholders, to be refined based on backend API) ---
|
||||
|
||||
/// Snapshot of a worker's queue, as reported by the backend API.
#[derive(Clone, PartialEq, Serialize, Deserialize, Debug)]
pub struct QueueStats {
    /// Number of tasks currently waiting in the worker's queue.
    pub current_size: u32,
    /// Traffic-light severity hint for the UI.
    pub color_code: String, // e.g., "green", "yellow", "red"
}
|
||||
|
||||
/// One row of the task list: just enough data to render the table.
#[derive(Clone, PartialEq, Serialize, Deserialize, Debug)]
pub struct TaskSummary {
    /// Unique task identifier; also used to fetch full details on click.
    pub hash: String,
    /// Creation time as a Unix timestamp in seconds.
    pub created_at: i64,
    /// Free-form status string supplied by the backend.
    pub status: String,
}
|
||||
|
||||
/// Full task record shown in the details panel.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct TaskDetails {
    /// Unique task identifier.
    pub hash: String,
    /// Creation time as a Unix timestamp in seconds.
    pub created_at: i64,
    /// Free-form status string supplied by the backend.
    pub status: String,
    /// The Rhai script submitted for this task.
    pub script_content: String,
    /// Output produced by the task, if any yet.
    pub result: Option<String>,
    /// Error message, if the task failed.
    pub error: Option<String>,
}
|
||||
|
||||
// Combined structure for initial fetch
/// Payload of the combined `tasks_and_stats` endpoint: queue stats plus
/// the full task list, fetched together when monitoring starts.
#[derive(Clone, PartialEq, Serialize, Deserialize, Debug)]
pub struct WorkerDataResponse {
    /// Queue stats at fetch time; `None` if unavailable.
    pub queue_stats: Option<QueueStats>,
    /// Summaries of the worker's queued tasks.
    pub tasks: Vec<TaskSummary>,
}
|
||||
|
||||
// --- Component ---
|
||||
|
||||
/// Messages driving the `App` component's state machine.
pub enum Msg {
    /// Text typed into the worker-name input field.
    UpdateWorkerName(String),
    /// Validate the entered name, fetch initial data, and start polling.
    FetchData,
    /// Result of the combined tasks-and-stats fetch.
    SetWorkerData(Result<WorkerDataResponse, String>),
    /// Result of a background queue-stats poll.
    SetQueueStats(Result<QueueStats, String>),
    ViewTaskDetails(String), // Task hash
    /// Result of a task-details fetch.
    SetTaskDetails(Result<TaskDetails, String>),
    /// Close the task-details panel.
    ClearTaskDetails,
    IntervalTick, // For interval timer, to trigger queue stats fetch
}
|
||||
|
||||
/// Root Yew component: monitors a single worker's queue and task list.
pub struct App {
    // Current contents of the worker-name text input.
    worker_name_input: String,
    // Worker currently being monitored; set when FetchData passes validation.
    worker_name_to_monitor: Option<String>,
    // Task summaries for the monitored worker.
    tasks_list: Vec<TaskSummary>,
    // Latest queue stats, refreshed by the poll timer.
    current_queue_stats: Option<QueueStats>,
    // Details of the task the user clicked, if any.
    selected_task_details: Option<TaskDetails>,
    // General error banner text, shown above the content.
    error_message: Option<String>,
    // True while the combined tasks-and-stats fetch is in flight.
    is_loading_initial_data: bool,
    // True while a task-details fetch is in flight.
    is_loading_task_details: bool,
    // Handle for the periodic queue-stats poll; cancelled on refetch or fetch error.
    queue_poll_timer: Option<Interval>,
}
|
||||
|
||||
impl Component for App {
    type Message = Msg;
    type Properties = ();

    /// Starts in an empty, idle state; no network requests are made here.
    fn create(_ctx: &Context<Self>) -> Self {
        Self {
            worker_name_input: "".to_string(),
            worker_name_to_monitor: None,
            tasks_list: Vec::new(),
            current_queue_stats: None,
            selected_task_details: None,
            error_message: None,
            is_loading_initial_data: false,
            is_loading_task_details: false,
            queue_poll_timer: None,
        }
    }

    /// Handles every state transition. Returns `true` when the view must re-render.
    fn update(&mut self, ctx: &Context<Self>, msg: Self::Message) -> bool {
        match msg {
            Msg::UpdateWorkerName(name) => {
                self.worker_name_input = name;
                true
            }
            Msg::FetchData => {
                // Reject empty names up front instead of issuing a doomed request.
                if self.worker_name_input.trim().is_empty() {
                    self.error_message = Some("Please enter a worker name.".to_string());
                    return true;
                }
                let worker_name = self.worker_name_input.trim().to_string();
                self.worker_name_to_monitor = Some(worker_name.clone());
                // Reset all previously loaded state before the new fetch.
                self.error_message = None;
                self.tasks_list.clear();
                self.current_queue_stats = None;
                self.selected_task_details = None;
                self.is_loading_initial_data = true;

                // Fetch the combined task list + queue stats asynchronously;
                // the outcome comes back as Msg::SetWorkerData.
                let link = ctx.link().clone();
                let tasks_url = format!("/api/worker/{}/tasks_and_stats", worker_name);
                spawn_local(async move {
                    match Request::get(&tasks_url).send().await {
                        Ok(response) => {
                            if response.ok() {
                                match response.json::<WorkerDataResponse>().await {
                                    Ok(data) => link.send_message(Msg::SetWorkerData(Ok(data))),
                                    Err(e) => link.send_message(Msg::SetWorkerData(Err(format!(
                                        "Failed to parse worker data: {}",
                                        e
                                    )))),
                                }
                            } else {
                                link.send_message(Msg::SetWorkerData(Err(format!(
                                    "API error: {} {}",
                                    response.status(),
                                    response.status_text()
                                ))));
                            }
                        }
                        Err(e) => link.send_message(Msg::SetWorkerData(Err(format!(
                            "Network error fetching worker data: {}",
                            e
                        )))),
                    }
                });

                // Set up polling for queue stats
                let link_for_timer = ctx.link().clone();
                let timer = Interval::new(5000, move || {
                    // Poll every 5 seconds
                    link_for_timer.send_message(Msg::IntervalTick);
                });
                if let Some(old_timer) = self.queue_poll_timer.take() {
                    old_timer.cancel(); // Cancel previous timer if any
                }
                self.queue_poll_timer = Some(timer);
                true
            }
            Msg::IntervalTick => {
                // Periodic poll: refreshes queue stats only; the task list
                // is not re-fetched here.
                if let Some(worker_name) = &self.worker_name_to_monitor {
                    let queue_stats_url = format!("/api/worker/{}/queue_stats", worker_name);
                    let link = ctx.link().clone();
                    spawn_local(async move {
                        match Request::get(&queue_stats_url).send().await {
                            Ok(response) => {
                                if response.ok() {
                                    match response.json::<QueueStats>().await {
                                        Ok(stats) => {
                                            link.send_message(Msg::SetQueueStats(Ok(stats)))
                                        }
                                        Err(e) => link.send_message(Msg::SetQueueStats(Err(
                                            format!("Failed to parse queue stats: {}", e),
                                        ))),
                                    }
                                } else {
                                    link.send_message(Msg::SetQueueStats(Err(format!(
                                        "API error (queue_stats): {} {}",
                                        response.status(),
                                        response.status_text()
                                    ))));
                                }
                            }
                            Err(e) => link.send_message(Msg::SetQueueStats(Err(format!(
                                "Network error fetching queue stats: {}",
                                e
                            )))),
                        }
                    });
                }
                false // No direct re-render, SetQueueStats will trigger it
            }
            Msg::SetWorkerData(Ok(data)) => {
                self.tasks_list = data.tasks;
                self.current_queue_stats = data.queue_stats;
                self.error_message = None;
                self.is_loading_initial_data = false;
                true
            }
            Msg::SetWorkerData(Err(err_msg)) => {
                self.error_message = Some(err_msg);
                self.is_loading_initial_data = false;
                // The initial fetch failed, so stop polling queue stats too.
                if let Some(timer) = self.queue_poll_timer.take() {
                    timer.cancel();
                }
                true
            }
            Msg::SetQueueStats(Ok(stats)) => {
                self.current_queue_stats = Some(stats);
                // Don't clear main error message here, as this is a background update
                true
            }
            Msg::SetQueueStats(Err(err_msg)) => {
                log::error!("Failed to update queue stats: {}", err_msg);
                // Optionally show a non-blocking error for queue stats
                // NOTE(review): clearing the stats here makes the UI fall back to
                // "Loading queue stats..." on a transient poll failure.
                self.current_queue_stats = None;
                true
            }
            Msg::ViewTaskDetails(hash) => {
                self.is_loading_task_details = true;
                self.selected_task_details = None; // Clear previous details
                let task_details_url = format!("/api/task/{}", hash);
                let link = ctx.link().clone();
                spawn_local(async move {
                    match Request::get(&task_details_url).send().await {
                        Ok(response) => {
                            if response.ok() {
                                match response.json::<TaskDetails>().await {
                                    Ok(details) => {
                                        link.send_message(Msg::SetTaskDetails(Ok(details)))
                                    }
                                    Err(e) => link.send_message(Msg::SetTaskDetails(Err(format!(
                                        "Failed to parse task details: {}",
                                        e
                                    )))),
                                }
                            } else {
                                link.send_message(Msg::SetTaskDetails(Err(format!(
                                    "API error (task_details): {} {}",
                                    response.status(),
                                    response.status_text()
                                ))));
                            }
                        }
                        Err(e) => link.send_message(Msg::SetTaskDetails(Err(format!(
                            "Network error fetching task details: {}",
                            e
                        )))),
                    }
                });
                true
            }
            Msg::SetTaskDetails(Ok(details)) => {
                self.selected_task_details = Some(details);
                self.error_message = None; // Clear general error if task details load
                self.is_loading_task_details = false;
                true
            }
            Msg::SetTaskDetails(Err(err_msg)) => {
                self.error_message = Some(format!("Error loading task details: {}", err_msg));
                self.selected_task_details = None;
                self.is_loading_task_details = false;
                true
            }
            Msg::ClearTaskDetails => {
                self.selected_task_details = None;
                true
            }
        }
    }

    /// Renders the name input, error banner, queue status and task views.
    fn view(&self, ctx: &Context<Self>) -> Html {
        let link = ctx.link();
        let on_worker_name_input = link.callback(|e: InputEvent| {
            let input: HtmlInputElement = e.target_unchecked_into();
            Msg::UpdateWorkerName(input.value())
        });

        html! {
            <div class="container">
                <h1>{ "Rhai Worker Monitor" }</h1>

                <div class="input-group">
                    <input type="text"
                        placeholder="Enter Worker Name (e.g., worker_default)"
                        value={self.worker_name_input.clone()}
                        oninput={on_worker_name_input.clone()}
                        disabled={self.is_loading_initial_data}
                        // Enter submits; any other key just mirrors the input value.
                        onkeypress={link.callback(move |e: KeyboardEvent| {
                            if e.key() == "Enter" { Msg::FetchData } else { Msg::UpdateWorkerName(e.target_unchecked_into::<HtmlInputElement>().value()) }
                        })}
                    />
                    <button onclick={link.callback(|_| Msg::FetchData)} disabled={self.is_loading_initial_data || self.worker_name_input.trim().is_empty()}>
                        { if self.is_loading_initial_data { "Loading..." } else { "Load Worker Data" } }
                    </button>
                </div>

                if let Some(err) = &self.error_message {
                    <p class="error">{ err }</p>
                }

                if self.worker_name_to_monitor.is_some() && !self.is_loading_initial_data && self.error_message.is_none() {
                    <h2>{ format!("Monitoring: {}", self.worker_name_to_monitor.as_ref().unwrap()) }</h2>

                    <h3>{ "Queue Status" }</h3>
                    <div class="queue-visualization">
                        {
                            if let Some(stats) = &self.current_queue_stats {
                                // TODO: Implement actual color coding and bar visualization
                                html! { <p>{format!("Tasks in queue: {} ({})", stats.current_size, stats.color_code)}</p> }
                            } else {
                                html! { <p>{ "Loading queue stats..." }</p> }
                            }
                        }
                    </div>

                    <h3>{ "Tasks" }</h3>
                    { self.view_tasks_table(ctx) }
                    { self.view_selected_task_details(ctx) }

                } else if self.is_loading_initial_data {
                    <p>{ "Loading worker data..." }</p>
                }
            </div>
        }
    }
}
|
||||
|
||||
impl App {
    /// Renders the task table, a "no tasks" notice, or nothing at all
    /// (before a worker has been selected).
    fn view_tasks_table(&self, ctx: &Context<Self>) -> Html {
        if self.tasks_list.is_empty()
            && self.worker_name_to_monitor.is_some()
            && !self.is_loading_initial_data
        {
            return html! { <p>{ "No tasks found for this worker, or worker not found." }</p> };
        }
        if !self.tasks_list.is_empty() {
            html! {
                <table class="task-table">
                    <thead>
                        <tr>
                            <th>{ "Hash (click to view)" }</th>
                            <th>{ "Created At (UTC)" }</th>
                            <th>{ "Status" }</th>
                        </tr>
                    </thead>
                    <tbody>
                        { for self.tasks_list.iter().map(|task| self.view_task_row(ctx, task)) }
                    </tbody>
                </table>
            }
        } else {
            html! {}
        }
    }

    /// Renders one clickable table row; clicking requests that task's details.
    fn view_task_row(&self, ctx: &Context<Self>, task: &TaskSummary) -> Html {
        let task_hash_clone = task.hash.clone();
        // Format the Unix timestamp for display; out-of-range values fall back
        // to a placeholder instead of panicking.
        let created_at_str = chrono::DateTime::from_timestamp(task.created_at, 0).map_or_else(
            || "Invalid date".to_string(),
            |dt| dt.format("%Y-%m-%d %H:%M:%S").to_string(),
        );
        html! {
            <tr onclick={ctx.link().callback(move |_| Msg::ViewTaskDetails(task_hash_clone.clone()))}
                style="cursor: pointer;">
                // Truncate the hash to 12 chars so the table stays compact.
                <td>{ task.hash.chars().take(12).collect::<String>() }{ "..." }</td>
                <td>{ created_at_str }</td>
                <td>{ &task.status }</td>
            </tr>
        }
    }

    /// Renders the details panel for the selected task, a loading notice,
    /// or nothing when no task is selected.
    fn view_selected_task_details(&self, ctx: &Context<Self>) -> Html {
        if self.is_loading_task_details {
            return html! { <p>{ "Loading task details..." }</p> };
        }
        if let Some(details) = &self.selected_task_details {
            let created_at_str = chrono::DateTime::from_timestamp(details.created_at, 0)
                .map_or_else(
                    || "Invalid date".to_string(),
                    |dt| dt.format("%Y-%m-%d %H:%M:%S UTC").to_string(),
                );
            html! {
                <div class="task-details-modal">
                    <h4>{ format!("Task Details: {}", details.hash) }</h4>
                    <p><strong>{ "Created At: " }</strong>{ created_at_str }</p>
                    <p><strong>{ "Status: " }</strong>{ &details.status }</p>
                    <p><strong>{ "Script Content:" }</strong></p>
                    <pre>{ &details.script_content }</pre>
                    if let Some(result) = &details.result {
                        <p><strong>{ "Result:" }</strong></p>
                        <pre>{ result }</pre>
                    }
                    if let Some(error) = &details.error {
                        <p><strong>{ "Error:" }</strong></p>
                        <pre style="color: red;">{ error }</pre>
                    }
                    <button onclick={ctx.link().callback(|_| Msg::ClearTaskDetails)}>{ "Close Details" }</button>
                </div>
            }
        } else {
            html! {}
        }
    }
}
|
184
rhailib/research/rhai_engine_ui/src/main.rs
Normal file
184
rhailib/research/rhai_engine_ui/src/main.rs
Normal file
@@ -0,0 +1,184 @@
|
||||
// The 'app' module is shared between the server and the client.
|
||||
mod app;
|
||||
|
||||
// --- SERVER-SIDE CODE --- //
|
||||
|
||||
#[cfg(feature = "server")]
mod server {
    use axum::{
        extract::{Path, State},
        http::{Method, StatusCode},
        routing::get,
        Json, Router,
    };
    use deadpool_redis::{Config, Pool, Runtime};
    use redis::{from_redis_value, AsyncCommands, FromRedisValue, Value};
    use std::collections::HashMap;
    use std::env;
    use std::net::SocketAddr;
    use tower_http::cors::{Any, CorsLayer};
    use tower_http::services::ServeDir;

    // Import the shared application state and data structures
    use crate::app::{QueueStats, TaskDetails, TaskSummary, WorkerDataResponse};

    // Redis key prefixes: one hash per task, one list per worker queue.
    // NOTE(review): these must match the prefixes the Rhai worker uses — verify.
    const REDIS_TASK_DETAILS_PREFIX: &str = "rhai_task_details:";
    const REDIS_QUEUE_PREFIX: &str = "rhai_tasks:";

    // The main function to run the server
    /// Builds the Redis pool and the router, then serves the API plus the
    /// static `dist/` bundle on 127.0.0.1:3000 until the process ends.
    pub async fn run() {
        let redis_url = env::var("REDIS_URL").unwrap_or_else(|_| "redis://127.0.0.1/".to_string());
        let cfg = Config::from_url(redis_url);
        let pool = cfg
            .create_pool(Some(Runtime::Tokio1))
            .expect("Failed to create Redis pool");

        // GET-only CORS, open to any origin (read-only monitoring API).
        let cors = CorsLayer::new()
            .allow_methods([Method::GET])
            .allow_origin(Any);

        let app = Router::new()
            .route(
                "/api/worker/:worker_name/tasks_and_stats",
                get(get_worker_data),
            )
            .route("/api/worker/:worker_name/queue_stats", get(get_queue_stats))
            .route("/api/task/:hash", get(get_task_details))
            .nest_service("/", ServeDir::new("dist"))
            .with_state(pool)
            .layer(cors);

        let addr = SocketAddr::from(([127, 0, 0, 1], 3000));
        println!("Backend server listening on http://{}", addr);
        println!("Serving static files from './dist' directory.");

        let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
        axum::serve(listener, app).await.unwrap();
    }

    // --- API Handlers (Live Redis Data) ---

    /// GET `/api/worker/:worker_name/tasks_and_stats` — summaries for every
    /// task id in the worker's queue, plus current queue stats.
    async fn get_worker_data(
        State(pool): State<Pool>,
        Path(worker_name): Path<String>,
    ) -> Result<Json<WorkerDataResponse>, (StatusCode, String)> {
        let mut conn = pool.get().await.map_err(internal_error)?;
        let queue_key = format!("{}{}", REDIS_QUEUE_PREFIX, worker_name);

        // All task ids currently queued for this worker (full LRANGE).
        let task_ids: Vec<String> = conn
            .lrange(&queue_key, 0, -1)
            .await
            .map_err(internal_error)?;
        let mut tasks = Vec::new();

        // One HGETALL per task id, sequentially on the same connection.
        for task_id in task_ids {
            let task_key = format!("{}{}", REDIS_TASK_DETAILS_PREFIX, task_id);
            let task_details: redis::Value =
                conn.hgetall(&task_key).await.map_err(internal_error)?;
            // Tasks whose details hash is missing/malformed are skipped silently.
            if let Ok(summary) = task_summary_from_redis_value(&task_details) {
                tasks.push(summary);
            }
        }

        let queue_stats = get_queue_stats_internal(&mut conn, &worker_name).await?;

        Ok(Json(WorkerDataResponse {
            tasks,
            queue_stats: Some(queue_stats),
        }))
    }

    /// GET `/api/worker/:worker_name/queue_stats` — queue size + color code.
    async fn get_queue_stats(
        State(pool): State<Pool>,
        Path(worker_name): Path<String>,
    ) -> Result<Json<QueueStats>, (StatusCode, String)> {
        let mut conn = pool.get().await.map_err(internal_error)?;
        let stats = get_queue_stats_internal(&mut conn, &worker_name).await?;
        Ok(Json(stats))
    }

    /// GET `/api/task/:hash` — full details for a single task.
    async fn get_task_details(
        State(pool): State<Pool>,
        Path(hash): Path<String>,
    ) -> Result<Json<TaskDetails>, (StatusCode, String)> {
        let mut conn = pool.get().await.map_err(internal_error)?;
        let task_key = format!("{}{}", REDIS_TASK_DETAILS_PREFIX, hash);
        let task_details: redis::Value = conn.hgetall(&task_key).await.map_err(internal_error)?;
        let details = task_details_from_redis_value(&task_details).map_err(internal_error)?;
        Ok(Json(details))
    }

    // --- Internal Helper Functions ---

    /// Reads the queue length (LLEN) and maps it to a traffic-light color:
    /// 0-10 green, 11-50 yellow, above 50 red.
    async fn get_queue_stats_internal(
        conn: &mut deadpool_redis::Connection,
        worker_name: &str,
    ) -> Result<QueueStats, (StatusCode, String)> {
        let queue_key = format!("{}{}", REDIS_QUEUE_PREFIX, worker_name);
        let size: u32 = conn.llen(&queue_key).await.map_err(internal_error)?;
        let color_code = match size {
            0..=10 => "green",
            11..=50 => "yellow",
            _ => "red",
        }
        .to_string();
        Ok(QueueStats {
            current_size: size,
            color_code,
        })
    }

    /// Maps any error to a 500 response carrying the error's display text.
    fn internal_error<E: std::error::Error>(err: E) -> (StatusCode, String) {
        (StatusCode::INTERNAL_SERVER_ERROR, err.to_string())
    }

    /// Converts a Redis HGETALL reply into a `TaskSummary`.
    /// Missing fields default: empty hash, 0 timestamp, "Unknown" status.
    fn task_summary_from_redis_value(v: &Value) -> redis::RedisResult<TaskSummary> {
        let map: HashMap<String, String> = from_redis_value(v)?;
        Ok(TaskSummary {
            hash: map.get("hash").cloned().unwrap_or_default(),
            created_at: map
                .get("createdAt")
                .and_then(|s| s.parse().ok())
                .unwrap_or_default(),
            status: map
                .get("status")
                .cloned()
                .unwrap_or_else(|| "Unknown".to_string()),
        })
    }

    /// Converts a Redis HGETALL reply into full `TaskDetails`.
    /// NOTE(review): hash field names ("hash", "createdAt", "status", "script",
    /// "output", "error") must match what the Rhai worker writes — verify
    /// against the worker implementation.
    fn task_details_from_redis_value(v: &Value) -> redis::RedisResult<TaskDetails> {
        let map: HashMap<String, String> = from_redis_value(v)?;
        Ok(TaskDetails {
            hash: map.get("hash").cloned().unwrap_or_default(),
            created_at: map
                .get("createdAt")
                .and_then(|s| s.parse().ok())
                .unwrap_or_default(),
            status: map
                .get("status")
                .cloned()
                .unwrap_or_else(|| "Unknown".to_string()),
            script_content: map.get("script").cloned().unwrap_or_default(),
            result: map.get("output").cloned(),
            error: map.get("error").cloned(),
        })
    }
}
|
||||
|
||||
// --- MAIN ENTRY POINTS --- //
|
||||
|
||||
// Main function for the server binary
/// Entry point when built with the "server" feature: starts the Tokio
/// runtime and runs the Axum backend until terminated.
#[cfg(feature = "server")]
#[tokio::main]
async fn main() {
    server::run().await;
}
|
||||
|
||||
// Main function for the WASM client (compiles when 'server' feature is not enabled)
/// Entry point for the WASM build: initializes console logging and mounts
/// the Yew `App` component onto the document body.
#[cfg(not(feature = "server"))]
fn main() {
    wasm_logger::init(wasm_logger::Config::default());
    log::info!("Rhai Worker UI starting...");
    yew::Renderer::<app::App>::new().render();
}
|
Reference in New Issue
Block a user