32 Commits

Author SHA1 Message Date
Maxime Van Hees
f8436a726e cleanup 2 2025-09-09 14:25:09 +02:00
Maxime Van Hees
182b0edeb7 cleanup 2025-09-09 10:31:07 +02:00
Maxime Van Hees
f5670f20be rewrite builder pattern + clean script as template 2025-09-03 15:38:52 +02:00
Maxime Van Hees
0f4ed1d64d working VM setup 2025-09-02 15:17:52 +02:00
Maxime Van Hees
f4512b66cf wip 2025-09-01 16:12:50 +02:00
Maxime Van Hees
da3da0ae30 working ipv6 ip assignment + ssh with login/passwd 2025-08-28 15:19:37 +02:00
Maxime Van Hees
784f87db97 WIP2 2025-08-27 16:03:32 +02:00
Maxime Van Hees
773db2238d working version 1 2025-08-26 17:46:42 +02:00
Maxime Van Hees
e8a369e3a2 WIP2 2025-08-26 17:43:20 +02:00
Maxime Van Hees
4b4f3371b0 WIP: automating VM deployment 2025-08-26 16:50:59 +02:00
Maxime Van Hees
1bb731711b (unstable) pushing WIP 2025-08-25 15:25:00 +02:00
Maxime Van Hees
af89ef0149 networking VMs (WIP) 2025-08-21 18:57:20 +02:00
Maxime Van Hees
768e3e176d fixed overlapping workspace roots 2025-08-21 16:20:15 +02:00
Timur Gordon
aa0248ef17 move rhailib to herolib 2025-08-21 14:32:24 +02:00
Maxime Van Hees
aab2b6f128 fixed cloud hypervisor issues + updated test script (working now) 2025-08-21 13:32:03 +02:00
Maxime Van Hees
d735316b7f cloud-hypervisor SAL + rhai test script for it 2025-08-20 18:01:21 +02:00
Maxime Van Hees
d1c80863b8 fixed test script errors 2025-08-20 15:42:12 +02:00
Maxime Van Hees
169c62da47 Merge branch 'development' of https://git.ourworld.tf/herocode/herolib_rust into development 2025-08-20 14:45:57 +02:00
Maxime Van Hees
33a5f24981 qcow2 SAL + rhai script to test functionality 2025-08-20 14:44:29 +02:00
Timur Gordon
d7562ce466 add data packages and remove empty submodule 2025-08-07 12:13:37 +02:00
ca736d62f3 /// 2025-08-06 03:27:49 +02:00
Maxime Van Hees
078c6f723b merging changes 2025-08-05 20:28:20 +02:00
Maxime Van Hees
9fdb8d8845 integrated hetzner client in repo + showcase of using scope for 'cleaner' scripts 2025-08-05 20:27:14 +02:00
8203a3b1ff Merge branch 'development' of git.ourworld.tf:herocode/herolib_rust into development 2025-08-05 16:39:01 +02:00
1770ac561e ... 2025-08-05 16:39:00 +02:00
Maxime Van Hees
eed6dbf8dc added robot hetzner code to research for later importing it into codebase 2025-08-05 16:32:29 +02:00
4cd4e04028 ... 2025-08-05 16:22:25 +02:00
8cc828fc0e ...... 2025-08-05 16:21:33 +02:00
56af312aad ... 2025-08-05 16:04:55 +02:00
dfd6931c5b ... 2025-08-05 16:00:24 +02:00
6e01f99958 ... 2025-08-05 15:43:13 +02:00
0c02d0e99f ... 2025-08-05 15:33:03 +02:00
537 changed files with 35025 additions and 891 deletions

View File

@@ -12,23 +12,25 @@ readme = "README.md"
[workspace]
members = [
".",
"vault",
"git",
"redisclient",
"mycelium",
"text",
"os",
"net",
"zinit_client",
"process",
"virt",
"zos",
"postgresclient",
"kubernetes",
"packages/clients/myceliumclient",
"packages/clients/postgresclient",
"packages/clients/redisclient",
"packages/clients/zinitclient",
"packages/core/net",
"packages/core/text",
"packages/crypt/vault",
"packages/data/ourdb",
"packages/data/radixtree",
"packages/data/tst",
"packages/system/git",
"packages/system/kubernetes",
"packages/system/os",
"packages/system/process",
"packages/system/virt",
"rhai",
"rhailib",
"herodo",
"service_manager",
"packages/clients/hetznerclient",
]
resolver = "2"
@@ -50,7 +52,7 @@ log = "0.4"
once_cell = "1.18.0"
rand = "0.8.5"
regex = "1.8.1"
reqwest = { version = "0.12.15", features = ["json"] }
reqwest = { version = "0.12.15", features = ["json", "blocking"] }
rhai = { version = "1.12.0", features = ["sync"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@@ -71,6 +73,10 @@ chacha20poly1305 = "0.10.1"
k256 = { version = "0.13.4", features = ["ecdsa", "ecdh"] }
sha2 = "0.10.7"
hex = "0.4"
bincode = { version = "2.0.1", features = ["serde"] }
pbkdf2 = "0.12.2"
getrandom = { version = "0.3.3", features = ["wasm_js"] }
tera = "1.19.0"
# Ethereum dependencies
ethers = { version = "2.0.7", features = ["legacy"] }
@@ -87,27 +93,52 @@ windows = { version = "0.61.1", features = [
zinit-client = "0.4.0"
urlencoding = "2.1.3"
tokio-test = "0.4.4"
kube = { version = "0.95.0", features = ["client", "config", "derive"] }
k8s-openapi = { version = "0.23.0", features = ["latest"] }
tokio-retry = "0.3.0"
governor = "0.6.3"
tower = { version = "0.5.2", features = ["timeout", "limit"] }
serde_yaml = "0.9"
postgres-types = "0.2.5"
r2d2 = "0.8.10"
# SAL dependencies
sal-git = { path = "packages/system/git" }
sal-kubernetes = { path = "packages/system/kubernetes" }
sal-redisclient = { path = "packages/clients/redisclient" }
sal-mycelium = { path = "packages/clients/myceliumclient" }
sal-hetzner = { path = "packages/clients/hetznerclient" }
sal-text = { path = "packages/core/text" }
sal-os = { path = "packages/system/os" }
sal-net = { path = "packages/core/net" }
sal-zinit-client = { path = "packages/clients/zinitclient" }
sal-process = { path = "packages/system/process" }
sal-virt = { path = "packages/system/virt" }
sal-postgresclient = { path = "packages/clients/postgresclient" }
sal-vault = { path = "packages/crypt/vault" }
sal-rhai = { path = "rhai" }
sal-service-manager = { path = "_archive/service_manager" }
[dependencies]
thiserror = "2.0.12" # For error handling in the main Error enum
tokio = { workspace = true } # For async examples
thiserror = { workspace = true }
tokio = { workspace = true }
# Optional dependencies - users can choose which modules to include
sal-git = { path = "git", optional = true }
sal-kubernetes = { path = "kubernetes", optional = true }
sal-redisclient = { path = "redisclient", optional = true }
sal-mycelium = { path = "mycelium", optional = true }
sal-text = { path = "text", optional = true }
sal-os = { path = "os", optional = true }
sal-net = { path = "net", optional = true }
sal-zinit-client = { path = "zinit_client", optional = true }
sal-process = { path = "process", optional = true }
sal-virt = { path = "virt", optional = true }
sal-postgresclient = { path = "postgresclient", optional = true }
sal-vault = { path = "vault", optional = true }
sal-rhai = { path = "rhai", optional = true }
sal-service-manager = { path = "service_manager", optional = true }
zinit-client.workspace = true
sal-git = { workspace = true, optional = true }
sal-kubernetes = { workspace = true, optional = true }
sal-redisclient = { workspace = true, optional = true }
sal-mycelium = { workspace = true, optional = true }
sal-hetzner = { workspace = true, optional = true }
sal-text = { workspace = true, optional = true }
sal-os = { workspace = true, optional = true }
sal-net = { workspace = true, optional = true }
sal-zinit-client = { workspace = true, optional = true }
sal-process = { workspace = true, optional = true }
sal-virt = { workspace = true, optional = true }
sal-postgresclient = { workspace = true, optional = true }
sal-vault = { workspace = true, optional = true }
sal-rhai = { workspace = true, optional = true }
sal-service-manager = { workspace = true, optional = true }
[features]
default = []
@@ -117,6 +148,7 @@ git = ["dep:sal-git"]
kubernetes = ["dep:sal-kubernetes"]
redisclient = ["dep:sal-redisclient"]
mycelium = ["dep:sal-mycelium"]
hetzner = ["dep:sal-hetzner"]
text = ["dep:sal-text"]
os = ["dep:sal-os"]
net = ["dep:sal-net"]
@@ -126,18 +158,19 @@ virt = ["dep:sal-virt"]
postgresclient = ["dep:sal-postgresclient"]
vault = ["dep:sal-vault"]
rhai = ["dep:sal-rhai"]
service_manager = ["dep:sal-service-manager"]
# service_manager is removed as it's not a direct member anymore
# Convenience feature groups
core = ["os", "process", "text", "net"]
clients = ["redisclient", "postgresclient", "zinit_client", "mycelium"]
infrastructure = ["git", "vault", "kubernetes", "virt", "service_manager"]
clients = ["redisclient", "postgresclient", "zinit_client", "mycelium", "hetzner"]
infrastructure = ["git", "vault", "kubernetes", "virt"]
scripting = ["rhai"]
all = [
"git",
"kubernetes",
"redisclient",
"mycelium",
"hetzner",
"text",
"os",
"net",
@@ -147,7 +180,6 @@ all = [
"postgresclient",
"vault",
"rhai",
"service_manager",
]
# Examples

README.md (228 changed lines)
View File

@@ -1,148 +1,136 @@
# SAL (System Abstraction Layer)
# Herocode Herolib Rust Repository
**Version 0.1.0** - A modular Rust library for cross-platform system operations and automation.
## Overview
SAL provides a unified interface for system operations with Rhai scripting support through the `herodo` tool.
This repository contains the **Herocode Herolib** Rust library and a collection of scripts, examples, and utilities for building, testing, and publishing the SAL (System Abstraction Layer) crates. The repository includes:
## Installation
- **Rust crates** for various system components (e.g., `os`, `process`, `text`, `git`, `vault`, `kubernetes`, etc.).
- **Rhai scripts** and test suites for each crate.
- **Utility scripts** to automate common development tasks.
### Individual Packages (Recommended)
## Scripts
The repository provides three primary helper scripts located in the repository root:
| Script | Description | Typical Usage |
|--------|-------------|--------------|
| `scripts/publish-all.sh` | Publishes all SAL crates to **crates.io** in the correct dependency order. Handles version bumping, dependency updates, dry-run mode, and rate-limiting. | `./scripts/publish-all.sh [--dry-run] [--wait <seconds>] [--version <ver>]` |
| `build_herodo.sh` | Builds the `herodo` binary from the `herodo` package and optionally runs a specified Rhai script. | `./build_herodo.sh [script_name]` |
| `run_rhai_tests.sh` | Executes all Rhai test suites across the repository, logging results and providing a summary. | `./run_rhai_tests.sh` |
Below are detailed usage instructions for each script.
---
## 1. `scripts/publish-all.sh`
### Purpose
- Publishes each SAL crate in the correct dependency order.
- Updates crate versions (if `--version` is supplied).
- Updates path dependencies to version dependencies before publishing.
- Supports **dry-run** mode to preview actions without publishing.
- Handles rate-limiting between crate publishes.
### Options
| Option | Description |
|--------|-------------|
| `--dry-run` | Shows what would be published without actually publishing. |
| `--wait <seconds>` | Wait time between publishes (default: 15s). |
| `--version <ver>` | Set a new version for all crates (updates `Cargo.toml` files). |
| `-h, --help` | Show help message. |
### Example Usage
```bash
# Core functionality
cargo add sal-os sal-process sal-text sal-net
# Dry run: no crates will be published
./scripts/publish-all.sh --dry-run
# Infrastructure
cargo add sal-git sal-vault sal-kubernetes sal-virt
# Publish with a custom wait time and version bump
./scripts/publish-all.sh --wait 30 --version 1.2.3
# Database clients
cargo add sal-redisclient sal-postgresclient sal-zinit-client
# Scripting
cargo add sal-rhai
# Normal publish (no dry-run)
./scripts/publish-all.sh
```
### Meta-package with Features
### Notes
- Must be run from the repository root (where `Cargo.toml` lives).
- Requires `cargo` and a logged-in `cargo` session (`cargo login`).
- The script automatically updates dependencies in each crate's `Cargo.toml` to use the new version before publishing.
---
## 2. `build_herodo.sh`
### Purpose
- Builds the `herodo` binary from the `herodo` package.
- Copies the binary to a system-wide location (`/usr/local/bin`) if run as root, otherwise to `~/hero/bin`.
- Optionally runs a specified Rhai script after building.
### Usage
```bash
cargo add sal --features core # os, process, text, net
cargo add sal --features infrastructure # git, vault, kubernetes, virt
cargo add sal --features all # everything
# Build only
./build_herodo.sh
# Build and run a specific Rhai script (e.g., `example`):
./build_herodo.sh example
```
### Herodo Script Runner
### Details
- The script changes to its own directory, builds the `herodo` crate (`cargo build`), and copies the binary.
- If a script name is provided, it looks for the script in:
- `src/rhaiexamples/<name>.rhai`
- `src/herodo/scripts/<name>.rhai`
- If the script is not found, `build_herodo.sh` exits with an error.
---
## 3. `run_rhai_tests.sh`
### Purpose
- Runs **all** Rhai test suites across the repository.
- Supports both the legacy `rhai_tests` directory and the newer `*/tests/rhai` layout.
- Logs output to `run_rhai_tests.log` and prints a summary.
### Usage
```bash
cargo install herodo
```
## Quick Start
### Rust Library Usage
```rust
use sal_os::fs;
use sal_process::run;
fn main() -> Result<(), Box<dyn std::error::Error>> {
let files = fs::list_files(".")?;
println!("Found {} files", files.len());
let result = run::command("echo hello")?;
println!("Output: {}", result.stdout);
Ok(())
}
```
### Herodo Scripting
```bash
# Create script
cat > example.rhai << 'EOF'
let files = find_files(".", "*.rs");
print("Found " + files.len() + " Rust files");
let result = run("echo 'Hello from SAL!'");
print("Output: " + result.stdout);
EOF
# Run script
herodo example.rhai
```
## Available Packages
| Package | Description |
|---------|-------------|
| [`sal-os`](https://crates.io/crates/sal-os) | Operating system operations |
| [`sal-process`](https://crates.io/crates/sal-process) | Process management |
| [`sal-text`](https://crates.io/crates/sal-text) | Text processing |
| [`sal-net`](https://crates.io/crates/sal-net) | Network operations |
| [`sal-git`](https://crates.io/crates/sal-git) | Git repository management |
| [`sal-vault`](https://crates.io/crates/sal-vault) | Cryptographic operations |
| [`sal-kubernetes`](https://crates.io/crates/sal-kubernetes) | Kubernetes management |
| [`sal-virt`](https://crates.io/crates/sal-virt) | Virtualization tools |
| [`sal-redisclient`](https://crates.io/crates/sal-redisclient) | Redis client |
| [`sal-postgresclient`](https://crates.io/crates/sal-postgresclient) | PostgreSQL client |
| [`sal-zinit-client`](https://crates.io/crates/sal-zinit-client) | Zinit process supervisor |
| [`sal-mycelium`](https://crates.io/crates/sal-mycelium) | Mycelium network client |
| [`sal-service-manager`](https://crates.io/crates/sal-service-manager) | Service management |
| [`sal-rhai`](https://crates.io/crates/sal-rhai) | Rhai scripting integration |
| [`sal`](https://crates.io/crates/sal) | Meta-crate with features |
| [`herodo`](https://crates.io/crates/herodo) | Script executor binary |
## Building & Testing
```bash
# Build all packages
cargo build --workspace
# Run tests
cargo test --workspace
# Run Rhai integration tests
# Run all tests
./run_rhai_tests.sh
```
## Core Features
### Output
- **System Operations**: File/directory management, environment access, OS commands
- **Process Management**: Create, monitor, and control system processes
- **Containerization**: Buildah and nerdctl integration
- **Version Control**: Git repository operations
- **Database Clients**: Redis and PostgreSQL support
- **Networking**: HTTP, TCP, SSH connectivity utilities
- **Cryptography**: Key management, encryption, digital signatures
- **Text Processing**: String manipulation and templating
- **Scripting**: Rhai script execution via `herodo`
- Colored console output for readability.
- Log file (`run_rhai_tests.log`) contains full output for later review.
- Summary includes total modules, passed, and failed counts.
- Exit code `0` if all tests pass, `1` otherwise.
## Herodo Scripting
---
`herodo` executes Rhai scripts with access to all SAL modules:
## General Development Workflow
```bash
herodo script.rhai # Run single script
herodo script.rhai arg1 arg2 # With arguments
herodo /path/to/scripts/ # Run all .rhai files in directory
```
1. **Build**: Use `build_herodo.sh` to compile the `herodo` binary.
2. **Test**: Run `run_rhai_tests.sh` to ensure all Rhai scripts pass.
3. **Publish**: When ready to release, use `scripts/publish-all.sh` (with `--dry-run` first to verify).
### Example Script
## Prerequisites
```rhai
// File operations
let files = find_files(".", "*.rs");
print("Found " + files.len() + " Rust files");
// Process execution
let result = run("echo 'Hello SAL!'");
print("Output: " + result.stdout);
// Redis operations
redis_set("status", "running");
let status = redis_get("status");
print("Status: " + status);
```
- **Rust toolchain** (`cargo`, `rustc`) installed.
- **Rhai** interpreter (`herodo`) built and available.
- **Git** for version control.
- **Cargo login** for publishing to crates.io.
## License
Licensed under the Apache License 2.0. See [LICENSE](LICENSE) for details.
See `LICENSE` for details.
---
**Happy coding!**

cargo_instructions.md (new file, empty)
View File

View File

@@ -1,6 +1,7 @@
// Example of using the network modules in SAL through Rhai
// Shows TCP port checking, HTTP URL validation, and SSH command execution
// Function to print section header
fn section(title) {
print("\n");
@@ -19,14 +20,14 @@ let host = "localhost";
let port = 22;
print(`Checking if port ${port} is open on ${host}...`);
let is_open = tcp.check_port(host, port);
print(`Port ${port} is ${is_open ? "open" : "closed"}`);
print(`Port ${port} is ${if is_open { "open" } else { "closed" }}`);
// Check multiple ports
let ports = [22, 80, 443];
print(`Checking multiple ports on ${host}...`);
let port_results = tcp.check_ports(host, ports);
for result in port_results {
print(`Port ${result.port} is ${result.is_open ? "open" : "closed"}`);
print(`Port ${result.port} is ${if result.is_open { "open" } else { "closed" }}`);
}
// HTTP connectivity checks
@@ -39,7 +40,7 @@ let http = net::new_http_connector();
let url = "https://www.example.com";
print(`Checking if ${url} is reachable...`);
let is_reachable = http.check_url(url);
print(`${url} is ${is_reachable ? "reachable" : "unreachable"}`);
print(`${url} is ${if is_reachable { "reachable" } else { "unreachable" }}`);
// Check the status code of a URL
print(`Checking status code of ${url}...`);
@@ -68,7 +69,7 @@ if is_open {
let ssh = net::new_ssh_builder()
.host("localhost")
.port(22)
.user(os::get_env("USER") || "root")
.user(if os::get_env("USER") != () { os::get_env("USER") } else { "root" })
.timeout(10)
.build();

View File

@@ -1,7 +1,7 @@
print("Running a basic command using run().do()...");
print("Running a basic command using run().execute()...");
// Execute a simple command
let result = run("echo Hello from run_basic!").do();
let result = run("echo Hello from run_basic!").execute();
// Print the command result
print(`Command: echo Hello from run_basic!`);
@@ -13,6 +13,6 @@ print(`Stderr:\n${result.stderr}`);
// Example of a command that might fail (if 'nonexistent_command' doesn't exist)
// This will halt execution by default because ignore_error() is not used.
// print("Running a command that will fail (and should halt)...");
// let fail_result = run("nonexistent_command").do(); // This line will cause the script to halt if the command doesn't exist
// let fail_result = run("nonexistent_command").execute(); // This line will cause the script to halt if the command doesn't exist
print("Basic run() example finished.");

View File

@@ -2,7 +2,7 @@ print("Running a command that will fail, but ignoring the error...");
// Run a command that exits with a non-zero code (will fail)
// Using .ignore_error() prevents the script from halting
let result = run("exit 1").ignore_error().do();
let result = run("exit 1").ignore_error().execute();
print(`Command finished.`);
print(`Success: ${result.success}`); // This should be false
@@ -22,7 +22,7 @@ print("\nScript continued execution after the potentially failing command.");
// Example of a command that might fail due to OS error (e.g., command not found)
// This *might* still halt depending on how the underlying Rust function handles it,
// as ignore_error() primarily prevents halting on *command* non-zero exit codes.
// let os_error_result = run("nonexistent_command_123").ignore_error().do();
// let os_error_result = run("nonexistent_command_123").ignore_error().execute();
// print(`OS Error Command Success: ${os_error_result.success}`);
// print(`OS Error Command Exit Code: ${os_error_result.code}`);

View File

@@ -1,4 +1,4 @@
print("Running a command using run().log().do()...");
print("Running a command using run().log().execute()...");
// The .log() method will print the command string to the console before execution.
// This is useful for debugging or tracing which commands are being run.

View File

@@ -1,8 +1,8 @@
print("Running a command using run().silent().do()...\n");
print("Running a command using run().silent().execute()...\n");
// This command will print to standard output and standard error
// However, because .silent() is used, the output will not appear in the console directly
let result = run("echo 'This should be silent stdout.'; echo 'This should be silent stderr.' >&2; exit 0").silent().do();
let result = run("echo 'This should be silent stdout.'; echo 'This should be silent stderr.' >&2; exit 0").silent().execute();
// The output is still captured in the CommandResult
print(`Command finished.`);
@@ -12,7 +12,7 @@ print(`Captured Stdout:\\n${result.stdout}`);
print(`Captured Stderr:\\n${result.stderr}`);
// Example of a silent command that fails (but won't halt because we only suppress output)
// let fail_result = run("echo 'This is silent failure stderr.' >&2; exit 1").silent().do();
// let fail_result = run("echo 'This is silent failure stderr.' >&2; exit 1").silent().execute();
// print(`Failed command finished (silent):`);
// print(`Success: ${fail_result.success}`);
// print(`Exit Code: ${fail_result.code}`);

View File

@@ -3,7 +3,7 @@
//! This library loads the Rhai engine, registers all SAL modules,
//! and executes Rhai scripts from a specified directory in sorted order.
use rhai::Engine;
use rhai::{Engine, Scope};
use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};
@@ -30,6 +30,19 @@ pub fn run(script_path: &str) -> Result<(), Box<dyn Error>> {
// Create a new Rhai engine
let mut engine = Engine::new();
// TODO: if we create a scope here we could clean up all the different functions and types registered with the engine
// We should generalize the way we add things to the scope for each module separately
let mut scope = Scope::new();
// Conditionally add Hetzner client only when env config is present
if let Ok(cfg) = sal::hetzner::config::Config::from_env() {
let hetzner_client = sal::hetzner::api::Client::new(cfg);
scope.push("hetzner", hetzner_client);
}
// This makes it easy to call e.g. `hetzner.get_server()` or `mycelium.get_connected_peers()`
// --> without the need to manually create a client for each one first
// --> could be conditionally compiled to only use the ones we need (we only push the things to the scope that we actually need to run the script)
// Register println function for output
engine.register_fn("println", |s: &str| println!("{}", s));
@@ -78,19 +91,20 @@ pub fn run(script_path: &str) -> Result<(), Box<dyn Error>> {
let script = fs::read_to_string(&script_file)?;
// Execute the script
match engine.eval::<rhai::Dynamic>(&script) {
Ok(result) => {
println!("Script executed successfully");
if !result.is_unit() {
println!("Result: {}", result);
}
}
Err(err) => {
eprintln!("Error executing script: {}", err);
// Exit with error code when a script fails
process::exit(1);
}
}
// match engine.eval::<rhai::Dynamic>(&script) {
// Ok(result) => {
// println!("Script executed successfully");
// if !result.is_unit() {
// println!("Result: {}", result);
// }
// }
// Err(err) => {
// eprintln!("Error executing script: {}", err);
// // Exit with error code when a script fails
// process::exit(1);
// }
// }
engine.run_with_scope(&mut scope, &script)?;
}
println!("\nAll scripts executed successfully!");

View File

@@ -0,0 +1,12 @@
[package]
name = "sal-hetzner"
version = "0.1.0"
edition = "2024"
[dependencies]
prettytable = "0.10.0"
reqwest.workspace = true
rhai = { workspace = true, features = ["serde"] }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
thiserror.workspace = true

View File

@@ -0,0 +1,54 @@
use std::fmt;
use serde::Deserialize;
use thiserror::Error;
#[derive(Debug, Error)]
pub enum AppError {
#[error("Request failed: {0}")]
RequestError(#[from] reqwest::Error),
#[error("API error: {0}")]
ApiError(ApiError),
#[error("Deserialization Error: {0:?}")]
SerdeJsonError(#[from] serde_json::Error),
}
#[derive(Debug, Deserialize)]
pub struct ApiError {
pub status: u16,
pub message: String,
}
impl From<reqwest::blocking::Response> for ApiError {
fn from(value: reqwest::blocking::Response) -> Self {
ApiError {
status: value.status().into(),
message: value.text().unwrap_or("The API call returned an error.".to_string()),
}
}
}
impl fmt::Display for ApiError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
#[derive(Deserialize)]
struct HetznerApiError {
code: String,
message: String,
}
#[derive(Deserialize)]
struct HetznerApiErrorWrapper {
error: HetznerApiError,
}
if let Ok(wrapper) = serde_json::from_str::<HetznerApiErrorWrapper>(&self.message) {
write!(
f,
"Status: {}, Code: {}, Message: {}",
self.status, wrapper.error.code, wrapper.error.message
)
} else {
write!(f, "Status: {}: {}", self.status, self.message)
}
}
}

View File

@@ -0,0 +1,513 @@
pub mod error;
pub mod models;
use self::models::{
Boot, Rescue, Server, SshKey, ServerAddonProduct, ServerAddonProductWrapper,
AuctionServerProduct, AuctionServerProductWrapper, AuctionTransaction,
AuctionTransactionWrapper, BootWrapper, Cancellation, CancellationWrapper,
OrderServerBuilder, OrderServerProduct, OrderServerProductWrapper, RescueWrapped,
ServerWrapper, SshKeyWrapper, Transaction, TransactionWrapper,
ServerAddonTransaction, ServerAddonTransactionWrapper,
OrderServerAddonBuilder,
};
use crate::api::error::ApiError;
use crate::config::Config;
use error::AppError;
use reqwest::blocking::Client as HttpClient;
use serde_json::json;
#[derive(Clone)]
pub struct Client {
http_client: HttpClient,
config: Config,
}
impl Client {
pub fn new(config: Config) -> Self {
Self {
http_client: HttpClient::new(),
config,
}
}
fn handle_response<T>(&self, response: reqwest::blocking::Response) -> Result<T, AppError>
where
T: serde::de::DeserializeOwned,
{
let status = response.status();
let body = response.text()?;
if status.is_success() {
serde_json::from_str::<T>(&body).map_err(Into::into)
} else {
Err(AppError::ApiError(ApiError {
status: status.as_u16(),
message: body,
}))
}
}
pub fn get_server(&self, server_number: i32) -> Result<Server, AppError> {
let response = self
.http_client
.get(format!("{}/server/{}", self.config.api_url, server_number))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: ServerWrapper = self.handle_response(response)?;
Ok(wrapped.server)
}
pub fn get_servers(&self) -> Result<Vec<Server>, AppError> {
let response = self
.http_client
.get(format!("{}/server", self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<ServerWrapper> = self.handle_response(response)?;
let servers = wrapped.into_iter().map(|sw| sw.server).collect();
Ok(servers)
}
pub fn update_server_name(&self, server_number: i32, name: &str) -> Result<Server, AppError> {
let params = [("server_name", name)];
let response = self
.http_client
.post(format!("{}/server/{}", self.config.api_url, server_number))
.basic_auth(&self.config.username, Some(&self.config.password))
.form(&params)
.send()?;
let wrapped: ServerWrapper = self.handle_response(response)?;
Ok(wrapped.server)
}
pub fn get_cancellation_data(&self, server_number: i32) -> Result<Cancellation, AppError> {
let response = self
.http_client
.get(format!(
"{}/server/{}/cancellation",
self.config.api_url, server_number
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: CancellationWrapper = self.handle_response(response)?;
Ok(wrapped.cancellation)
}
pub fn cancel_server(
&self,
server_number: i32,
cancellation_date: &str,
) -> Result<Cancellation, AppError> {
let params = [("cancellation_date", cancellation_date)];
let response = self
.http_client
.post(format!(
"{}/server/{}/cancellation",
self.config.api_url, server_number
))
.basic_auth(&self.config.username, Some(&self.config.password))
.form(&params)
.send()?;
let wrapped: CancellationWrapper = self.handle_response(response)?;
Ok(wrapped.cancellation)
}
pub fn withdraw_cancellation(&self, server_number: i32) -> Result<(), AppError> {
self.http_client
.delete(format!(
"{}/server/{}/cancellation",
self.config.api_url, server_number
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
Ok(())
}
pub fn get_ssh_keys(&self) -> Result<Vec<SshKey>, AppError> {
let response = self
.http_client
.get(format!("{}/key", self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<SshKeyWrapper> = self.handle_response(response)?;
let keys = wrapped.into_iter().map(|sk| sk.key).collect();
Ok(keys)
}
pub fn get_ssh_key(&self, fingerprint: &str) -> Result<SshKey, AppError> {
let response = self
.http_client
.get(format!("{}/key/{}", self.config.api_url, fingerprint))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: SshKeyWrapper = self.handle_response(response)?;
Ok(wrapped.key)
}
pub fn add_ssh_key(&self, name: &str, data: &str) -> Result<SshKey, AppError> {
let params = [("name", name), ("data", data)];
let response = self
.http_client
.post(format!("{}/key", self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.form(&params)
.send()?;
let wrapped: SshKeyWrapper = self.handle_response(response)?;
Ok(wrapped.key)
}
pub fn update_ssh_key_name(&self, fingerprint: &str, name: &str) -> Result<SshKey, AppError> {
let params = [("name", name)];
let response = self
.http_client
.post(format!("{}/key/{}", self.config.api_url, fingerprint))
.basic_auth(&self.config.username, Some(&self.config.password))
.form(&params)
.send()?;
let wrapped: SshKeyWrapper = self.handle_response(response)?;
Ok(wrapped.key)
}
pub fn delete_ssh_key(&self, fingerprint: &str) -> Result<(), AppError> {
self.http_client
.delete(format!("{}/key/{}", self.config.api_url, fingerprint))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
Ok(())
}
pub fn get_boot_configuration(&self, server_number: i32) -> Result<Boot, AppError> {
let response = self
.http_client
.get(format!("{}/boot/{}", self.config.api_url, server_number))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: BootWrapper = self.handle_response(response)?;
Ok(wrapped.boot)
}
pub fn get_rescue_boot_configuration(&self, server_number: i32) -> Result<Rescue, AppError> {
let response = self
.http_client
.get(format!(
"{}/boot/{}/rescue",
self.config.api_url, server_number
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: RescueWrapped = self.handle_response(response)?;
Ok(wrapped.rescue)
}
pub fn enable_rescue_mode(
&self,
server_number: i32,
os: &str,
authorized_keys: Option<&[String]>,
) -> Result<Rescue, AppError> {
let mut params = vec![("os", os)];
if let Some(keys) = authorized_keys {
for key in keys {
params.push(("authorized_key[]", key));
}
}
let response = self
.http_client
.post(format!(
"{}/boot/{}/rescue",
self.config.api_url, server_number
))
.basic_auth(&self.config.username, Some(&self.config.password))
.form(&params)
.send()?;
let wrapped: RescueWrapped = self.handle_response(response)?;
Ok(wrapped.rescue)
}
pub fn disable_rescue_mode(&self, server_number: i32) -> Result<Rescue, AppError> {
let response = self
.http_client
.delete(format!(
"{}/boot/{}/rescue",
self.config.api_url, server_number
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: RescueWrapped = self.handle_response(response)?;
Ok(wrapped.rescue)
}
pub fn get_server_products(
&self,
) -> Result<Vec<OrderServerProduct>, AppError> {
let response = self
.http_client
.get(format!("{}/order/server/product", &self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<OrderServerProductWrapper> = self.handle_response(response)?;
let products = wrapped.into_iter().map(|sop| sop.product).collect();
Ok(products)
}
pub fn get_server_product_by_id(
&self,
product_id: &str,
) -> Result<OrderServerProduct, AppError> {
let response = self
.http_client
.get(format!(
"{}/order/server/product/{}",
&self.config.api_url, product_id
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: OrderServerProductWrapper = self.handle_response(response)?;
Ok(wrapped.product)
}
pub fn order_server(&self, order: OrderServerBuilder) -> Result<Transaction, AppError> {
let mut params = json!({
"product_id": order.product_id,
"dist": order.dist,
"location": order.location,
"authorized_key": order.authorized_keys.unwrap_or_default(),
});
if let Some(addons) = order.addons {
params["addon"] = json!(addons);
}
if let Some(test) = order.test {
if test {
params["test"] = json!(test);
}
}
let response = self
.http_client
.post(format!("{}/order/server/transaction", &self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.json(&params)
.send()?;
let wrapped: TransactionWrapper = self.handle_response(response)?;
Ok(wrapped.transaction)
}
pub fn get_transaction_by_id(&self, transaction_id: &str) -> Result<Transaction, AppError> {
let response = self
.http_client
.get(format!(
"{}/order/server/transaction/{}",
&self.config.api_url, transaction_id
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: TransactionWrapper = self.handle_response(response)?;
Ok(wrapped.transaction)
}
pub fn get_transactions(&self) -> Result<Vec<Transaction>, AppError> {
let response = self
.http_client
.get(format!("{}/order/server/transaction", &self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<TransactionWrapper> = self.handle_response(response)?;
let transactions = wrapped.into_iter().map(|t| t.transaction).collect();
Ok(transactions)
}
pub fn get_auction_server_products(&self) -> Result<Vec<AuctionServerProduct>, AppError> {
let response = self
.http_client
.get(format!(
"{}/order/server_market/product",
&self.config.api_url
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<AuctionServerProductWrapper> = self.handle_response(response)?;
let products = wrapped.into_iter().map(|asp| asp.product).collect();
Ok(products)
}
pub fn get_auction_server_product_by_id(&self, product_id: &str) -> Result<AuctionServerProduct, AppError> {
let response = self
.http_client
.get(format!("{}/order/server_market/product/{}", &self.config.api_url, product_id))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: AuctionServerProductWrapper = self.handle_response(response)?;
Ok(wrapped.product)
}
pub fn get_auction_transactions(&self) -> Result<Vec<AuctionTransaction>, AppError> {
let response = self
.http_client
.get(format!("{}/order/server_market/transaction", &self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<AuctionTransactionWrapper> = self.handle_response(response)?;
let transactions = wrapped.into_iter().map(|t| t.transaction).collect();
Ok(transactions)
}
pub fn get_auction_transaction_by_id(&self, transaction_id: &str) -> Result<AuctionTransaction, AppError> {
let response = self
.http_client
.get(format!("{}/order/server_market/transaction/{}", &self.config.api_url, transaction_id))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: AuctionTransactionWrapper = self.handle_response(response)?;
Ok(wrapped.transaction)
}
pub fn get_server_addon_products(
&self,
server_number: i64,
) -> Result<Vec<ServerAddonProduct>, AppError> {
let response = self
.http_client
.get(format!(
"{}/order/server_addon/{}/product",
&self.config.api_url, server_number
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<ServerAddonProductWrapper> = self.handle_response(response)?;
let products = wrapped.into_iter().map(|sap| sap.product).collect();
Ok(products)
}
pub fn order_auction_server(
&self,
product_id: i64,
authorized_keys: Vec<String>,
dist: Option<String>,
arch: Option<String>,
lang: Option<String>,
comment: Option<String>,
addons: Option<Vec<String>>,
test: Option<bool>,
) -> Result<AuctionTransaction, AppError> {
let mut params: Vec<(&str, String)> = Vec::new();
params.push(("product_id", product_id.to_string()));
for key in &authorized_keys {
params.push(("authorized_key[]", key.clone()));
}
if let Some(dist) = dist {
params.push(("dist", dist));
}
if let Some(arch) = arch {
params.push(("@deprecated arch", arch));
}
if let Some(lang) = lang {
params.push(("lang", lang));
}
if let Some(comment) = comment {
params.push(("comment", comment));
}
if let Some(addons) = addons {
for addon in addons {
params.push(("addon[]", addon));
}
}
if let Some(test) = test {
params.push(("test", test.to_string()));
}
let response = self
.http_client
.post(format!("{}/order/server_market/transaction", &self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.form(&params)
.send()?;
let wrapped: AuctionTransactionWrapper = self.handle_response(response)?;
Ok(wrapped.transaction)
}
pub fn get_server_addon_transactions(&self) -> Result<Vec<ServerAddonTransaction>, AppError> {
let response = self
.http_client
.get(format!("{}/order/server_addon/transaction", &self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: Vec<ServerAddonTransactionWrapper> = self.handle_response(response)?;
let transactions = wrapped.into_iter().map(|satw| satw.transaction).collect();
Ok(transactions)
}
pub fn get_server_addon_transaction_by_id(
&self,
transaction_id: &str,
) -> Result<ServerAddonTransaction, AppError> {
let response = self
.http_client
.get(format!(
"{}/order/server_addon/transaction/{}",
&self.config.api_url, transaction_id
))
.basic_auth(&self.config.username, Some(&self.config.password))
.send()?;
let wrapped: ServerAddonTransactionWrapper = self.handle_response(response)?;
Ok(wrapped.transaction)
}
pub fn order_server_addon(
&self,
order: OrderServerAddonBuilder,
) -> Result<ServerAddonTransaction, AppError> {
let mut params = json!({
"server_number": order.server_number,
"product_id": order.product_id,
});
if let Some(reason) = order.reason {
params["reason"] = json!(reason);
}
if let Some(gateway) = order.gateway {
params["gateway"] = json!(gateway);
}
if let Some(test) = order.test {
if test {
params["test"] = json!(test);
}
}
let response = self
.http_client
.post(format!("{}/order/server_addon/transaction", &self.config.api_url))
.basic_auth(&self.config.username, Some(&self.config.password))
.form(&params)
.send()?;
let wrapped: ServerAddonTransactionWrapper = self.handle_response(response)?;
Ok(wrapped.transaction)
}
}
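Every request in this client goes through `handle_response`, which deserializes the JSON body on success and otherwise wraps the raw status and body in `AppError::ApiError`. A minimal usage sketch, assuming the `HETZNER_USERNAME`/`HETZNER_PASSWORD` variables are set; the server number `321` is a placeholder:

```rust
use sal_hetzner::api::{error::AppError, Client};
use sal_hetzner::config::Config;

fn main() -> Result<(), AppError> {
    // Credentials come from HETZNER_USERNAME / HETZNER_PASSWORD;
    // HETZNER_API_URL falls back to the default Robot endpoint.
    let config = Config::from_env().expect("HETZNER_* environment variables not set");
    let client = Client::new(config);

    // Blocking calls, each returning Result<_, AppError>.
    for server in client.get_servers()? {
        println!("{} ({})", server.server_name, server.server_number);
    }

    match client.get_server(321) {
        Ok(server) => println!("status: {}", server.status),
        // ApiError carries the HTTP status plus the raw body returned by the Robot API.
        Err(AppError::ApiError(e)) => eprintln!("API error: {}", e),
        Err(other) => eprintln!("request failed: {}", other),
    }
    Ok(())
}
```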

File diff suppressed because it is too large.

View File

@@ -0,0 +1,25 @@
use std::env;
#[derive(Clone)]
pub struct Config {
pub username: String,
pub password: String,
pub api_url: String,
}
impl Config {
pub fn from_env() -> Result<Self, String> {
let username = env::var("HETZNER_USERNAME")
.map_err(|_| "HETZNER_USERNAME environment variable not set".to_string())?;
let password = env::var("HETZNER_PASSWORD")
.map_err(|_| "HETZNER_PASSWORD environment variable not set".to_string())?;
let api_url = env::var("HETZNER_API_URL")
.unwrap_or_else(|_| "https://robot-ws.your-server.de".to_string());
Ok(Config {
username,
password,
api_url,
})
}
}

View File

@@ -0,0 +1,3 @@
pub mod api;
pub mod config;
pub mod rhai;

View File

@@ -0,0 +1,63 @@
use crate::api::{
models::{Boot, Rescue},
Client,
};
use rhai::{plugin::*, Engine};
pub fn register(engine: &mut Engine) {
let boot_module = exported_module!(boot_api);
engine.register_global_module(boot_module.into());
}
#[export_module]
pub mod boot_api {
use super::*;
use rhai::EvalAltResult;
#[rhai_fn(name = "get_boot_configuration", return_raw)]
pub fn get_boot_configuration(
client: &mut Client,
server_number: i64,
) -> Result<Boot, Box<EvalAltResult>> {
client
.get_boot_configuration(server_number as i32)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "get_rescue_boot_configuration", return_raw)]
pub fn get_rescue_boot_configuration(
client: &mut Client,
server_number: i64,
) -> Result<Rescue, Box<EvalAltResult>> {
client
.get_rescue_boot_configuration(server_number as i32)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "enable_rescue_mode", return_raw)]
pub fn enable_rescue_mode(
client: &mut Client,
server_number: i64,
os: &str,
authorized_keys: rhai::Array,
) -> Result<Rescue, Box<EvalAltResult>> {
let keys: Vec<String> = authorized_keys
.into_iter()
.map(|k| k.into_string().unwrap())
.collect();
client
.enable_rescue_mode(server_number as i32, os, Some(&keys))
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "disable_rescue_mode", return_raw)]
pub fn disable_rescue_mode(
client: &mut Client,
server_number: i64,
) -> Result<Rescue, Box<EvalAltResult>> {
client
.disable_rescue_mode(server_number as i32)
.map_err(|e| e.to_string().into())
}
}

View File

@@ -0,0 +1,54 @@
use rhai::{Engine, EvalAltResult};
use crate::api::models::{
AuctionServerProduct, AuctionTransaction, AuctionTransactionProduct, AuthorizedKey, Boot,
Cancellation, Cpanel, HostKey, Linux, OrderAuctionServerBuilder, OrderServerAddonBuilder,
OrderServerBuilder, OrderServerProduct, Plesk, Rescue, Server, ServerAddonProduct,
ServerAddonResource, ServerAddonTransaction, SshKey, Transaction, TransactionProduct, Vnc,
Windows,
};
pub mod boot;
pub mod printing;
pub mod server;
pub mod server_ordering;
pub mod ssh_keys;
// here just register the hetzner module
pub fn register_hetzner_module(engine: &mut Engine) -> Result<(), Box<EvalAltResult>> {
// TODO:register types
engine.build_type::<Server>();
engine.build_type::<SshKey>();
engine.build_type::<Boot>();
engine.build_type::<Rescue>();
engine.build_type::<Linux>();
engine.build_type::<Vnc>();
engine.build_type::<Windows>();
engine.build_type::<Plesk>();
engine.build_type::<Cpanel>();
engine.build_type::<Cancellation>();
engine.build_type::<OrderServerProduct>();
engine.build_type::<Transaction>();
engine.build_type::<AuthorizedKey>();
engine.build_type::<TransactionProduct>();
engine.build_type::<HostKey>();
engine.build_type::<AuctionServerProduct>();
engine.build_type::<AuctionTransaction>();
engine.build_type::<AuctionTransactionProduct>();
engine.build_type::<OrderAuctionServerBuilder>();
engine.build_type::<OrderServerBuilder>();
engine.build_type::<ServerAddonProduct>();
engine.build_type::<ServerAddonTransaction>();
engine.build_type::<ServerAddonResource>();
engine.build_type::<OrderServerAddonBuilder>();
server::register(engine);
ssh_keys::register(engine);
boot::register(engine);
server_ordering::register(engine);
// TODO: push hetzner to scope as value client:
// scope.push("hetzner", client);
Ok(())
}

View File

@@ -0,0 +1,43 @@
use rhai::{Array, Engine};
use crate::{api::models::{OrderServerProduct, AuctionServerProduct, AuctionTransaction, ServerAddonProduct, ServerAddonTransaction, Server, SshKey}};
mod servers_table;
mod ssh_keys_table;
mod server_ordering_table;
// This will be called when we print(...) or pretty_print() an Array (with Dynamic values)
pub fn pretty_print_dispatch(array: Array) {
if array.is_empty() {
println!("<empty table>");
return;
}
let first = &array[0];
if first.is::<Server>() {
println!("Yeah first is server!");
servers_table::pretty_print_servers(array);
} else if first.is::<SshKey>() {
ssh_keys_table::pretty_print_ssh_keys(array);
}
else if first.is::<OrderServerProduct>() {
server_ordering_table::pretty_print_server_products(array);
} else if first.is::<AuctionServerProduct>() {
server_ordering_table::pretty_print_auction_server_products(array);
} else if first.is::<AuctionTransaction>() {
server_ordering_table::pretty_print_auction_transactions(array);
} else if first.is::<ServerAddonProduct>() {
server_ordering_table::pretty_print_server_addon_products(array);
} else if first.is::<ServerAddonTransaction>() {
server_ordering_table::pretty_print_server_addon_transactions(array);
} else {
// Generic fallback for other types
for item in array {
println!("{}", item.to_string());
}
}
}
pub fn register(engine: &mut Engine) {
engine.register_fn("pretty_print", pretty_print_dispatch);
}
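`pretty_print_dispatch` keys off the concrete type of the first array element and falls back to plain `to_string()` printing for anything it does not recognize. A small sketch of wiring it onto a bare engine (note that `register_hetzner_module` in this diff does not call `printing::register` itself; the script literal here is arbitrary):

```rust
use rhai::Engine;

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = Engine::new();
    // Makes `pretty_print(...)` available to scripts.
    sal_hetzner::rhai::printing::register(&mut engine);

    // No Hetzner model types involved, so the generic fallback prints one item per line.
    engine.run(r#"pretty_print(["first", "second", "third"]);"#)
}
```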

View File

@@ -0,0 +1,293 @@
use prettytable::{row, Table};
use crate::api::models::{OrderServerProduct, ServerAddonProduct, ServerAddonTransaction, ServerAddonResource};
pub fn pretty_print_server_products(products: rhai::Array) {
let mut table = Table::new();
table.add_row(row![b =>
"ID",
"Name",
"Description",
"Traffic",
"Location",
"Price (Net)",
"Price (Gross)",
]);
for product_dyn in products {
if let Some(product) = product_dyn.try_cast::<OrderServerProduct>() {
let mut price_net = "N/A".to_string();
let mut price_gross = "N/A".to_string();
if let Some(first_price) = product.prices.first() {
price_net = first_price.price.net.clone();
price_gross = first_price.price.gross.clone();
}
table.add_row(row![
product.id,
product.name,
product.description.join(", "),
product.traffic,
product.location.join(", "),
price_net,
price_gross,
]);
}
}
table.printstd();
}
pub fn pretty_print_auction_server_products(products: rhai::Array) {
let mut table = Table::new();
table.add_row(row![b =>
"ID",
"Name",
"Description",
"Traffic",
"Distributions",
"Architectures",
"Languages",
"CPU",
"CPU Benchmark",
"Memory Size (GB)",
"HDD Size (GB)",
"HDD Text",
"HDD Count",
"Datacenter",
"Network Speed",
"Price (Net)",
"Price (Hourly Net)",
"Price (Setup Net)",
"Price (VAT)",
"Price (Hourly VAT)",
"Price (Setup VAT)",
"Fixed Price",
"Next Reduce (seconds)",
"Next Reduce Date",
"Orderable Addons",
]);
for product_dyn in products {
if let Some(product) = product_dyn.try_cast::<crate::api::models::AuctionServerProduct>() {
let mut addons_table = Table::new();
addons_table.add_row(row![b => "ID", "Name", "Min", "Max", "Prices"]);
for addon in &product.orderable_addons {
let mut addon_prices_table = Table::new();
addon_prices_table.add_row(row![b => "Location", "Net", "Gross", "Hourly Net", "Hourly Gross", "Setup Net", "Setup Gross"]);
for price in &addon.prices {
addon_prices_table.add_row(row![
price.location,
price.price.net,
price.price.gross,
price.price.hourly_net,
price.price.hourly_gross,
price.price_setup.net,
price.price_setup.gross
]);
}
addons_table.add_row(row![
addon.id,
addon.name,
addon.min,
addon.max,
addon_prices_table
]);
}
table.add_row(row![
product.id,
product.name,
product.description.join(", "),
product.traffic,
product.dist.join(", "),
product.arch.as_deref().unwrap_or_default().join(", "),
product.lang.join(", "),
product.cpu,
product.cpu_benchmark,
product.memory_size,
product.hdd_size,
product.hdd_text,
product.hdd_count,
product.datacenter,
product.network_speed,
product.price,
product.price_hourly.as_deref().unwrap_or("N/A"),
product.price_setup,
product.price_with_vat,
product.price_hourly_with_vat.as_deref().unwrap_or("N/A"),
product.price_setup_with_vat,
product.fixed_price,
product.next_reduce,
product.next_reduce_date,
addons_table,
]);
}
}
table.printstd();
}
pub fn pretty_print_server_addon_products(products: rhai::Array) {
let mut table = Table::new();
table.add_row(row![b =>
"ID",
"Name",
"Type",
"Location",
"Price (Net)",
"Price (Gross)",
"Hourly Net",
"Hourly Gross",
"Setup Net",
"Setup Gross",
]);
for product_dyn in products {
if let Some(product) = product_dyn.try_cast::<ServerAddonProduct>() {
table.add_row(row![
product.id,
product.name,
product.product_type,
product.price.location,
product.price.price.net,
product.price.price.gross,
product.price.price.hourly_net,
product.price.price.hourly_gross,
product.price.price_setup.net,
product.price.price_setup.gross,
]);
}
}
table.printstd();
}
pub fn pretty_print_auction_transactions(transactions: rhai::Array) {
let mut table = Table::new();
table.add_row(row![b =>
"ID",
"Date",
"Status",
"Server Number",
"Server IP",
"Comment",
"Product ID",
"Product Name",
"Product Traffic",
"Product Distributions",
"Product Architectures",
"Product Languages",
"Product CPU",
"Product CPU Benchmark",
"Product Memory Size (GB)",
"Product HDD Size (GB)",
"Product HDD Text",
"Product HDD Count",
"Product Datacenter",
"Product Network Speed",
"Product Fixed Price",
"Product Next Reduce (seconds)",
"Product Next Reduce Date",
"Addons",
]);
for transaction_dyn in transactions {
if let Some(transaction) = transaction_dyn.try_cast::<crate::api::models::AuctionTransaction>() {
let _authorized_keys_table = {
let mut table = Table::new();
table.add_row(row![b => "Name", "Fingerprint", "Type", "Size"]);
for key in &transaction.authorized_key {
table.add_row(row![
key.key.name.as_deref().unwrap_or("N/A"),
key.key.fingerprint.as_deref().unwrap_or("N/A"),
key.key.key_type.as_deref().unwrap_or("N/A"),
key.key.size.map_or("N/A".to_string(), |s| s.to_string())
]);
}
table
};
let _host_keys_table = {
let mut table = Table::new();
table.add_row(row![b => "Fingerprint", "Type", "Size"]);
for key in &transaction.host_key {
table.add_row(row![
key.key.fingerprint.as_deref().unwrap_or("N/A"),
key.key.key_type.as_deref().unwrap_or("N/A"),
key.key.size.map_or("N/A".to_string(), |s| s.to_string())
]);
}
table
};
table.add_row(row![
transaction.id,
transaction.date,
transaction.status,
transaction.server_number.map_or("N/A".to_string(), |id| id.to_string()),
transaction.server_ip.as_deref().unwrap_or("N/A"),
transaction.comment.as_deref().unwrap_or("N/A"),
transaction.product.id,
transaction.product.name,
transaction.product.traffic,
transaction.product.dist,
transaction.product.arch.as_deref().unwrap_or("N/A"),
transaction.product.lang,
transaction.product.cpu,
transaction.product.cpu_benchmark,
transaction.product.memory_size,
transaction.product.hdd_size,
transaction.product.hdd_text,
transaction.product.hdd_count,
transaction.product.datacenter,
transaction.product.network_speed,
transaction.product.fixed_price.unwrap_or_default().to_string(),
transaction
.product
.next_reduce
.map_or("N/A".to_string(), |r| r.to_string()),
transaction
.product
.next_reduce_date
.as_deref()
.unwrap_or("N/A"),
transaction.addons.join(", "),
]);
}
}
table.printstd();
}
pub fn pretty_print_server_addon_transactions(transactions: rhai::Array) {
let mut table = Table::new();
table.add_row(row![b =>
"ID",
"Date",
"Status",
"Server Number",
"Product ID",
"Product Name",
"Product Price",
"Resources",
]);
for transaction_dyn in transactions {
if let Some(transaction) = transaction_dyn.try_cast::<ServerAddonTransaction>() {
let mut resources_table = Table::new();
resources_table.add_row(row![b => "Type", "ID"]);
for resource in &transaction.resources {
resources_table.add_row(row![resource.resource_type, resource.id]);
}
table.add_row(row![
transaction.id,
transaction.date,
transaction.status,
transaction.server_number,
transaction.product.id,
transaction.product.name,
transaction.product.price.to_string(),
resources_table,
]);
}
}
table.printstd();
}

View File

@@ -0,0 +1,30 @@
use prettytable::{row, Table};
use rhai::Array;
use super::Server;
pub fn pretty_print_servers(servers: Array) {
let mut table = Table::new();
table.add_row(row![b =>
"Number",
"Name",
"IP",
"Product",
"DC",
"Status"
]);
for server_dyn in servers {
if let Some(server) = server_dyn.try_cast::<Server>() {
table.add_row(row![
server.server_number.to_string(),
server.server_name,
server.server_ip.unwrap_or("N/A".to_string()),
server.product,
server.dc,
server.status
]);
}
}
table.printstd();
}

View File

@@ -0,0 +1,26 @@
use prettytable::{row, Table};
use super::SshKey;
pub fn pretty_print_ssh_keys(keys: rhai::Array) {
let mut table = Table::new();
table.add_row(row![b =>
"Name",
"Fingerprint",
"Type",
"Size",
"Created At"
]);
for key_dyn in keys {
if let Some(key) = key_dyn.try_cast::<SshKey>() {
table.add_row(row![
key.name,
key.fingerprint,
key.key_type,
key.size.to_string(),
key.created_at
]);
}
}
table.printstd();
}

View File

@@ -0,0 +1,76 @@
use crate::api::{Client, models::Server};
use rhai::{Array, Dynamic, plugin::*};
pub fn register(engine: &mut Engine) {
let server_module = exported_module!(server_api);
engine.register_global_module(server_module.into());
}
#[export_module]
pub mod server_api {
use crate::api::models::Cancellation;
use super::*;
use rhai::EvalAltResult;
#[rhai_fn(name = "get_server", return_raw)]
pub fn get_server(
client: &mut Client,
server_number: i64,
) -> Result<Server, Box<EvalAltResult>> {
client
.get_server(server_number as i32)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "get_servers", return_raw)]
pub fn get_servers(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
let servers = client
.get_servers()
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
println!("number of SERVERS we got: {:#?}", servers.len());
Ok(servers.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "update_server_name", return_raw)]
pub fn update_server_name(
client: &mut Client,
server_number: i64,
name: &str,
) -> Result<Server, Box<EvalAltResult>> {
client
.update_server_name(server_number as i32, name)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "get_cancellation_data", return_raw)]
pub fn get_cancellation_data(
client: &mut Client,
server_number: i64,
) -> Result<Cancellation, Box<EvalAltResult>> {
client
.get_cancellation_data(server_number as i32)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "cancel_server", return_raw)]
pub fn cancel_server(
client: &mut Client,
server_number: i64,
cancellation_date: &str,
) -> Result<Cancellation, Box<EvalAltResult>> {
client
.cancel_server(server_number as i32, cancellation_date)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "withdraw_cancellation", return_raw)]
pub fn withdraw_cancellation(
client: &mut Client,
server_number: i64,
) -> Result<(), Box<EvalAltResult>> {
client
.withdraw_cancellation(server_number as i32)
.map_err(|e| e.to_string().into())
}
}

View File

@@ -0,0 +1,170 @@
use crate::api::{
Client,
models::{
AuctionServerProduct, AuctionTransaction, OrderAuctionServerBuilder, OrderServerBuilder,
OrderServerProduct, ServerAddonProduct, ServerAddonTransaction, Transaction,
},
};
use rhai::{Array, Dynamic, plugin::*};
pub fn register(engine: &mut Engine) {
let server_order_module = exported_module!(server_order_api);
engine.register_global_module(server_order_module.into());
}
#[export_module]
pub mod server_order_api {
use crate::api::models::OrderServerAddonBuilder;
#[rhai_fn(name = "get_server_products", return_raw)]
pub fn get_server_ordering_product_overview(
client: &mut Client,
) -> Result<Array, Box<EvalAltResult>> {
let overview_servers = client
.get_server_products()
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(overview_servers.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "get_server_product_by_id", return_raw)]
pub fn get_server_ordering_product_by_id(
client: &mut Client,
product_id: &str,
) -> Result<OrderServerProduct, Box<EvalAltResult>> {
let product = client
.get_server_product_by_id(product_id)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(product)
}
#[rhai_fn(name = "order_server", return_raw)]
pub fn order_server(
client: &mut Client,
order: OrderServerBuilder,
) -> Result<Transaction, Box<EvalAltResult>> {
let transaction = client
.order_server(order)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transaction)
}
#[rhai_fn(name = "get_transaction_by_id", return_raw)]
pub fn get_transaction_by_id(
client: &mut Client,
transaction_id: &str,
) -> Result<Transaction, Box<EvalAltResult>> {
let transaction = client
.get_transaction_by_id(transaction_id)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transaction)
}
#[rhai_fn(name = "get_transactions", return_raw)]
pub fn get_transactions(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
let transactions = client
.get_transactions()
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transactions.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "get_auction_server_products", return_raw)]
pub fn get_auction_server_products(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
let products = client
.get_auction_server_products()
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(products.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "get_auction_server_product_by_id", return_raw)]
pub fn get_auction_server_product_by_id(
client: &mut Client,
product_id: &str,
) -> Result<AuctionServerProduct, Box<EvalAltResult>> {
let product = client
.get_auction_server_product_by_id(product_id)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(product)
}
#[rhai_fn(name = "get_auction_transactions", return_raw)]
pub fn get_auction_transactions(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
let transactions = client
.get_auction_transactions()
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transactions.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "get_auction_transaction_by_id", return_raw)]
pub fn get_auction_transaction_by_id(
client: &mut Client,
transaction_id: &str,
) -> Result<AuctionTransaction, Box<EvalAltResult>> {
let transaction = client
.get_auction_transaction_by_id(transaction_id)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transaction)
}
#[rhai_fn(name = "get_server_addon_products", return_raw)]
pub fn get_server_addon_products(
client: &mut Client,
server_number: i64,
) -> Result<Array, Box<EvalAltResult>> {
let products = client
.get_server_addon_products(server_number)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(products.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "get_server_addon_transactions", return_raw)]
pub fn get_server_addon_transactions(
client: &mut Client,
) -> Result<Array, Box<EvalAltResult>> {
let transactions = client
.get_server_addon_transactions()
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transactions.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "get_server_addon_transaction_by_id", return_raw)]
pub fn get_server_addon_transaction_by_id(
client: &mut Client,
transaction_id: &str,
) -> Result<ServerAddonTransaction, Box<EvalAltResult>> {
let transaction = client
.get_server_addon_transaction_by_id(transaction_id)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transaction)
}
#[rhai_fn(name = "order_auction_server", return_raw)]
pub fn order_auction_server(
client: &mut Client,
order: OrderAuctionServerBuilder,
) -> Result<AuctionTransaction, Box<EvalAltResult>> {
println!("Builder struct being used to order server: {:#?}", order);
let transaction = client.order_auction_server(
order.product_id,
order.authorized_keys.unwrap_or(vec![]),
order.dist,
None,
order.lang,
order.comment,
order.addon,
order.test,
).map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transaction)
}
#[rhai_fn(name = "order_server_addon", return_raw)]
pub fn order_server_addon(
client: &mut Client,
order: OrderServerAddonBuilder,
) -> Result<ServerAddonTransaction, Box<EvalAltResult>> {
println!("Builder struct being used to order server addon: {:#?}", order);
let transaction = client
.order_server_addon(order)
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(transaction)
}
}

View File

@@ -0,0 +1,89 @@
use crate::api::{Client, models::SshKey};
use prettytable::{Table, row};
use rhai::{Array, Dynamic, Engine, plugin::*};
pub fn register(engine: &mut Engine) {
let ssh_keys_module = exported_module!(ssh_keys_api);
engine.register_global_module(ssh_keys_module.into());
}
#[export_module]
pub mod ssh_keys_api {
use super::*;
use rhai::EvalAltResult;
#[rhai_fn(name = "get_ssh_keys", return_raw)]
pub fn get_ssh_keys(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
let ssh_keys = client
.get_ssh_keys()
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
Ok(ssh_keys.into_iter().map(Dynamic::from).collect())
}
#[rhai_fn(name = "get_ssh_key", return_raw)]
pub fn get_ssh_key(
client: &mut Client,
fingerprint: &str,
) -> Result<SshKey, Box<EvalAltResult>> {
client
.get_ssh_key(fingerprint)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "add_ssh_key", return_raw)]
pub fn add_ssh_key(
client: &mut Client,
name: &str,
data: &str,
) -> Result<SshKey, Box<EvalAltResult>> {
client
.add_ssh_key(name, data)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "update_ssh_key_name", return_raw)]
pub fn update_ssh_key_name(
client: &mut Client,
fingerprint: &str,
name: &str,
) -> Result<SshKey, Box<EvalAltResult>> {
client
.update_ssh_key_name(fingerprint, name)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "delete_ssh_key", return_raw)]
pub fn delete_ssh_key(
client: &mut Client,
fingerprint: &str,
) -> Result<(), Box<EvalAltResult>> {
client
.delete_ssh_key(fingerprint)
.map_err(|e| e.to_string().into())
}
#[rhai_fn(name = "pretty_print")]
pub fn pretty_print_ssh_keys(keys: Array) {
let mut table = Table::new();
table.add_row(row![b =>
"Name",
"Fingerprint",
"Type",
"Size",
"Created At"
]);
for key_dyn in keys {
if let Some(key) = key_dyn.try_cast::<SshKey>() {
table.add_row(row![
key.name,
key.fingerprint,
key.key_type,
key.size.to_string(),
key.created_at
]);
}
}
table.printstd();
}
}
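
These bindings only become callable once `register` is run against a Rhai engine and a `Client` value is made available to the script. A minimal wiring sketch, assuming the two binding files above are exposed as `server_order` and `ssh_keys` submodules and that `Client` is `Clone` and constructed elsewhere (none of that is shown in this diff):

```rust
use rhai::{Engine, Scope};

// Assumed module paths; adjust to wherever the crate actually mounts these files.
use crate::rhai::{server_order, ssh_keys};

fn run_script(client: crate::api::Client) -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = Engine::new();

    // Each binding file exports a `register(&mut Engine)` entry point.
    server_order::register(&mut engine);
    ssh_keys::register(&mut engine);

    // The bound functions take `&mut Client` as their first parameter, so a
    // Client pushed into the scope can be used with method-call syntax.
    // (Assumes Client is Clone, and Send + Sync under rhai's "sync" feature.)
    let mut scope = Scope::new();
    scope.push("client", client);

    engine.eval_with_scope::<()>(
        &mut scope,
        r#"
            let keys = client.get_ssh_keys();
            pretty_print(keys);
        "#,
    )
}
```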

View File

@@ -9,22 +9,22 @@ license = "Apache-2.0"
[dependencies]
# HTTP client for async requests
reqwest = { version = "0.12.15", features = ["json"] }
reqwest = { workspace = true }
# JSON handling
serde_json = "1.0"
serde_json = { workspace = true }
# Base64 encoding/decoding for message payloads
base64 = "0.22.1"
base64 = { workspace = true }
# Async runtime
tokio = { version = "1.45.0", features = ["full"] }
tokio = { workspace = true }
# Rhai scripting support
rhai = { version = "1.12.0", features = ["sync"] }
rhai = { workspace = true }
# Logging
log = "0.4"
log = { workspace = true }
# URL encoding for API parameters
urlencoding = "2.1.3"
urlencoding = { workspace = true }
[dev-dependencies]
# For async testing
tokio-test = "0.4.4"
tokio-test = { workspace = true }
# For temporary files in tests
tempfile = "3.5"
tempfile = { workspace = true }

View File

@@ -11,24 +11,24 @@ categories = ["database", "api-bindings"]
[dependencies]
# PostgreSQL client dependencies
postgres = "0.19.4"
postgres-types = "0.2.5"
tokio-postgres = "0.7.8"
postgres = { workspace = true }
postgres-types = { workspace = true }
tokio-postgres = { workspace = true }
# Connection pooling
r2d2 = "0.8.10"
r2d2_postgres = "0.18.2"
r2d2 = { workspace = true }
r2d2_postgres = { workspace = true }
# Utility dependencies
lazy_static = "1.4.0"
thiserror = "2.0.12"
lazy_static = { workspace = true }
thiserror = { workspace = true }
# Rhai scripting support
rhai = { version = "1.12.0", features = ["sync"] }
rhai = { workspace = true }
# SAL dependencies
sal-virt = { path = "../virt" }
sal-virt = { workspace = true }
[dev-dependencies]
tempfile = "3.5"
tokio-test = "0.4.4"
tempfile = { workspace = true }
tokio-test = { workspace = true }

View File

@@ -11,11 +11,11 @@ categories = ["database", "caching", "api-bindings"]
[dependencies]
# Core Redis functionality
redis = "0.31.0"
lazy_static = "1.4.0"
redis = { workspace = true }
lazy_static = { workspace = true }
# Rhai integration (optional)
rhai = { version = "1.12.0", features = ["sync"], optional = true }
rhai = { workspace = true, optional = true }
[features]
default = ["rhai"]
@@ -23,4 +23,4 @@ rhai = ["dep:rhai"]
[dev-dependencies]
# For testing
tempfile = "3.5"
tempfile = { workspace = true }

View File

@@ -9,20 +9,20 @@ license = "Apache-2.0"
[dependencies]
# Core dependencies
anyhow = "1.0.98"
futures = "0.3.30"
lazy_static = "1.4.0"
log = "0.4"
serde_json = "1.0"
thiserror = "2.0.12"
tokio = { version = "1.45.0", features = ["full"] }
anyhow = { workspace = true }
futures = { workspace = true }
lazy_static = { workspace = true }
log = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
# Zinit client
zinit-client = "0.4.0"
zinit-client = { workspace = true }
# Rhai integration
rhai = { version = "1.12.0", features = ["sync"] }
rhai = { workspace = true }
[dev-dependencies]
tokio-test = "0.4.4"
tempfile = "3.5"
tokio-test = { workspace = true }
tempfile = { workspace = true }

View File

@@ -0,0 +1,825 @@
<file_map>
/Users/despiegk/code/github/freeflowuniverse/herolib
├── aiprompts
│ └── herolib_core
│ ├── core_ourtime.md
│ ├── core_paths.md
│ └── core_text.md
└── lib
└── core
└── logger
├── factory.v
├── log_test.v
├── log.v
├── model.v
├── readme.md
└── search.v
</file_map>
<file_contents>
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/factory.v
```v
module logger
import freeflowuniverse.herolib.core.pathlib
pub fn new(path string) !Logger {
mut p := pathlib.get_dir(path: path, create: true)!
return Logger{
path: p
lastlog_time: 0
}
}
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/log_test.v
```v
module logger
import os
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.pathlib
fn testsuite_begin() {
if os.exists('/tmp/testlogs') {
os.rmdir_all('/tmp/testlogs')!
}
}
fn test_logger() {
mut logger := new('/tmp/testlogs')!
// Test stdout logging
logger.log(LogItemArgs{
cat: 'test-app'
log: 'This is a test message\nWith a second line\nAnd a third line'
logtype: .stdout
timestamp: ourtime.new('2022-12-05 20:14:35')!
})!
// Test error logging
logger.log(LogItemArgs{
cat: 'error-test'
log: 'This is an error\nWith details'
logtype: .error
timestamp: ourtime.new('2022-12-05 20:14:35')!
})!
logger.log(LogItemArgs{
cat: 'test-app'
log: 'This is a test message\nWith a second line\nAnd a third line'
logtype: .stdout
timestamp: ourtime.new('2022-12-05 20:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
This is an error
With details
'
logtype: .error
timestamp: ourtime.new('2022-12-05 20:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
aaa
bbb
'
logtype: .error
timestamp: ourtime.new('2022-12-05 22:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
aaa2
bbb2
'
logtype: .error
timestamp: ourtime.new('2022-12-05 22:14:36')!
})!
// Verify log directory exists
assert os.exists('/tmp/testlogs'), 'Log directory should exist'
// Get log file
files := os.ls('/tmp/testlogs')!
assert files.len == 2
mut file := pathlib.get_file(
path: '/tmp/testlogs/${files[0]}'
create: false
)!
content := file.read()!.trim_space()
items_stdout := logger.search(
timestamp_from: ourtime.new('2022-11-1 20:14:35')!
timestamp_to: ourtime.new('2025-11-1 20:14:35')!
logtype: .stdout
)!
assert items_stdout.len == 2
items_error := logger.search(
timestamp_from: ourtime.new('2022-11-1 20:14:35')!
timestamp_to: ourtime.new('2025-11-1 20:14:35')!
logtype: .error
)!
assert items_error.len == 4
}
fn testsuite_end() {
// if os.exists('/tmp/testlogs') {
// os.rmdir_all('/tmp/testlogs')!
// }
}
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/log.v
```v
module logger
import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime
@[params]
pub struct LogItemArgs {
pub mut:
timestamp ?ourtime.OurTime
cat string
log string
logtype LogType
}
pub fn (mut l Logger) log(args_ LogItemArgs) ! {
mut args := args_
t := args.timestamp or {
t2 := ourtime.now()
t2
}
// Format category (max 10 chars, ascii only)
args.cat = texttools.name_fix(args.cat)
if args.cat.len > 10 {
return error('category cannot be longer than 10 chars')
}
args.cat = texttools.expand(args.cat, 10, ' ')
args.log = texttools.dedent(args.log).trim_space()
mut logfile_path := '${l.path.path}/${t.dayhour()}.log'
// Create log file if it doesn't exist
if !os.exists(logfile_path) {
os.write_file(logfile_path, '')!
l.lastlog_time = 0 // make sure we put time again
}
mut f := os.open_append(logfile_path)!
mut content := ''
// Add timestamp if we're in a new second
if t.unix() > l.lastlog_time {
content += '\n${t.time().format_ss()}\n'
l.lastlog_time = t.unix()
}
// Format log lines
error_prefix := if args.logtype == .error { 'E' } else { ' ' }
lines := args.log.split('\n')
for i, line in lines {
if i == 0 {
content += '${error_prefix} ${args.cat} - ${line}\n'
} else {
content += '${error_prefix} ${line}\n'
}
}
f.writeln(content.trim_space_right())!
f.close()
}
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/model.v
```v
module logger
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.pathlib
@[heap]
pub struct Logger {
pub mut:
path pathlib.Path
lastlog_time i64 // to see in log format, every second we put a time down, we need to know if we are in a new second (logs can come in much faster)
}
pub struct LogItem {
pub mut:
timestamp ourtime.OurTime
cat string
log string
logtype LogType
}
pub enum LogType {
stdout
error
}
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/readme.md
```md
# Logger Module
A simple logging system that provides structured logging with search capabilities.
Logs are stored in hourly files with a consistent format that makes them both human-readable and machine-parseable.
## Features
- Structured logging with categories and error types
- Automatic timestamp management
- Multi-line message support
- Search functionality with filtering options
- Human-readable log format
## Usage
```v
import freeflowuniverse.herolib.core.logger
import freeflowuniverse.herolib.data.ourtime
// Create a new logger
mut l := logger.new('/var/logs')!
// Log a message
l.log(
cat: 'system',
log: 'System started successfully',
logtype: .stdout
)!
// Log an error
l.log(
cat: 'system',
log: 'Failed to connect\nRetrying in 5 seconds...',
logtype: .error
)!
// Search logs
results := l.search(
timestamp_from: ourtime.now().warp("-24h"), // Last 24 hours
cat: 'system', // Filter by category
log: 'failed', // Search in message content
logtype: .error, // Only error messages
maxitems: 100 // Limit results
)!
```
## Log Format
Each log file is named using the format `YYYY-MM-DD-HH.log` and contains entries in the following format:
```
21:23:42
system - This is a normal log message
system - This is a multi-line message
second line with proper indentation
third line maintaining alignment
E error_cat - This is an error message
E second line of error
E third line of error
```
### Format Rules
- Time stamps (HH:MM:SS) are written once per second when the log time changes
- Categories are:
- Limited to 10 characters maximum
- Padded with spaces to exactly 10 characters
  - Hyphens (`-`) in category names are converted to `_`
- Each line starts with either:
- ` ` (space) for normal logs (LogType.stdout)
- `E` for error logs (LogType.error)
- Multi-line messages maintain consistent indentation (14 spaces after the prefix)
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/search.v
```v
module logger
import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime
@[params]
pub struct SearchArgs {
pub mut:
timestamp_from ?ourtime.OurTime
timestamp_to ?ourtime.OurTime
cat string // can be empty
log string // any content in here will be looked for
logtype LogType
maxitems int = 10000
}
pub fn (mut l Logger) search(args_ SearchArgs) ![]LogItem {
mut args := args_
// Format category (max 10 chars, ascii only)
args.cat = texttools.name_fix(args.cat)
if args.cat.len > 10 {
return error('category cannot be longer than 10 chars')
}
mut timestamp_from := args.timestamp_from or { ourtime.OurTime{} }
mut timestamp_to := args.timestamp_to or { ourtime.OurTime{} }
// Get time range
from_time := timestamp_from.unix()
to_time := timestamp_to.unix()
if from_time > to_time {
		return error('from_time cannot be after to_time: ${from_time} > ${to_time}')
}
mut result := []LogItem{}
// Find log files in time range
mut files := os.ls(l.path.path)!
files.sort()
for file in files {
if !file.ends_with('.log') {
continue
}
// Parse dayhour from filename
dayhour := file[..file.len - 4] // remove .log
file_time := ourtime.new(dayhour)!
mut current_time := ourtime.OurTime{}
mut current_item := LogItem{}
mut collecting := false
// Skip if file is outside time range
if file_time.unix() < from_time || file_time.unix() > to_time {
continue
}
// Read and parse log file
content := os.read_file('${l.path.path}/${file}')!
lines := content.split('\n')
for line in lines {
if result.len >= args.maxitems {
return result
}
line_trim := line.trim_space()
if line_trim == '' {
continue
}
// Check if this is a timestamp line
if !(line.starts_with(' ') || line.starts_with('E')) {
current_time = ourtime.new(line_trim)!
if collecting {
process(mut result, current_item, current_time, args, from_time, to_time)!
}
collecting = false
continue
}
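			// Data lines are written by log() as "<E or space> <cat padded to 10> - <msg>",
			// so the '-' separator sits at byte 13 and the message starts at byte 15.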
if collecting && line.len > 14 && line[13] == `-` {
process(mut result, current_item, current_time, args, from_time, to_time)!
collecting = false
}
// Parse log line
is_error := line.starts_with('E')
if !collecting {
// Start new item
current_item = LogItem{
timestamp: current_time
cat: line[2..12].trim_space()
log: line[15..].trim_space()
logtype: if is_error { .error } else { .stdout }
}
// println('new current item: ${current_item}')
collecting = true
} else {
// Continuation line
if line_trim.len < 16 {
current_item.log += '\n'
} else {
current_item.log += '\n' + line[15..]
}
}
}
// Add last item if collecting
if collecting {
process(mut result, current_item, current_time, args, from_time, to_time)!
}
}
return result
}
fn process(mut result []LogItem, current_item LogItem, current_time ourtime.OurTime, args SearchArgs, from_time i64, to_time i64) ! {
// Add previous item if it matches filters
log_epoch := current_item.timestamp.unix()
if log_epoch < from_time || log_epoch > to_time {
return
}
if (args.cat == '' || current_item.cat.trim_space() == args.cat)
&& (args.log == '' || current_item.log.contains(args.log))
&& args.logtype == current_item.logtype {
result << current_item
}
}
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/aiprompts/herolib_core/core_ourtime.md
```md
# OurTime Module
The `OurTime` module in V provides flexible time handling, supporting relative and absolute time formats, Unix timestamps, and formatting utilities.
## Key Features
- Create time objects from strings or current time
- Relative time expressions (e.g., `+1h`, `-2d`)
- Absolute time formats (e.g., `YYYY-MM-DD HH:mm:ss`)
- Unix timestamp conversion
- Time formatting and warping
## Basic Usage
```v
import freeflowuniverse.herolib.data.ourtime
// Current time
mut t := ourtime.now()
// From string
t2 := ourtime.new('2022-12-05 20:14:35')!
// Get formatted string
println(t2.str()) // e.g., 2022-12-05 20:14
// Get Unix timestamp
println(t2.unix()) // e.g., 1670271275
```
## Time Formats
### Relative Time
Use `s` (seconds), `h` (hours), `d` (days), `w` (weeks), `M` (months), `Q` (quarters), `Y` (years).
```v
// Create with relative time
mut t := ourtime.new('+1w +2d -4h')!
// Warp existing time
mut t2 := ourtime.now()
t2.warp('+1h')!
```
### Absolute Time
Supports `YYYY-MM-DD HH:mm:ss`, `YYYY-MM-DD HH:mm`, `YYYY-MM-DD HH`, `YYYY-MM-DD`, `DD-MM-YYYY`.
```v
t1 := ourtime.new('2022-12-05 20:14:35')!
t2 := ourtime.new('2022-12-05')! // Time defaults to 00:00:00
```
## Methods Overview
### Creation
```v
now_time := ourtime.now()
from_string := ourtime.new('2023-01-15')!
from_epoch := ourtime.new_from_epoch(1673788800)
```
### Formatting
```v
mut t := ourtime.now()
println(t.str()) // YYYY-MM-DD HH:mm
println(t.day()) // YYYY-MM-DD
println(t.key()) // YYYY_MM_DD_HH_mm_ss
println(t.md()) // Markdown format
```
### Operations
```v
mut t := ourtime.now()
t.warp('+1h')! // Move 1 hour forward
unix_ts := t.unix()
is_empty := t.empty()
```
## Error Handling
Time parsing methods return a `Result` type and should be handled with `!` or `or` blocks.
```v
t_valid := ourtime.new('2023-01-01')!
t_invalid := ourtime.new('bad-date') or {
println('Error: ${err}')
ourtime.now() // Fallback
}
```
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/aiprompts/herolib_core/core_paths.md
```md
# Pathlib Usage Guide
## Overview
The pathlib module provides a comprehensive interface for handling file system operations. Key features include:
- Robust path handling for files, directories, and symlinks
- Support for both absolute and relative paths
- Automatic home directory expansion (~)
- Recursive directory operations
- Path filtering and listing
- File and directory metadata access
## Basic Usage
### Importing pathlib
```v
import freeflowuniverse.herolib.core.pathlib
```
### Creating Path Objects
```v
// Create a Path object for a file
mut file_path := pathlib.get("path/to/file.txt")
// Create a Path object for a directory
mut dir_path := pathlib.get("path/to/directory")
```
### Basic Path Operations
```v
// Get absolute path
abs_path := file_path.absolute()
// Get real path (resolves symlinks)
real_path := file_path.realpath()
// Check if path exists
if file_path.exists() {
// Path exists
}
```
## Path Properties and Methods
### Path Types
```v
// Check if path is a file
if file_path.is_file() {
// Handle as file
}
// Check if path is a directory
if dir_path.is_dir() {
// Handle as directory
}
// Check if path is a symlink
if file_path.is_link() {
// Handle as symlink
}
```
### Path Normalization
```v
// Normalize path (remove extra slashes, resolve . and ..)
normalized_path := file_path.path_normalize()
// Get path directory
dir_path := file_path.path_dir()
// Get path name without extension
name_no_ext := file_path.name_no_ext()
```
## File and Directory Operations
### File Operations
```v
// Write to file
file_path.write("Content to write")!
// Read from file
content := file_path.read()!
// Delete file
file_path.delete()!
```
### Directory Operations
```v
// Create directory
mut dir := pathlib.get_dir(
path: "path/to/new/dir"
create: true
)!
// List directory contents
mut dir_list := dir.list()!
// Delete directory
dir.delete()!
```
### Symlink Operations
```v
// Create symlink
file_path.link("path/to/symlink", delete_exists: true)!
// Resolve symlink
real_path := file_path.realpath()
```
## Advanced Operations
### Path Copying
```v
// Copy file to destination
file_path.copy(dest: "path/to/destination")!
```
### Recursive Operations
```v
// List directory recursively
mut recursive_list := dir.list(recursive: true)!
// Delete directory recursively
dir.delete()!
```
### Path Filtering
```v
// List files matching pattern
mut filtered_list := dir.list(
regex: [r".*\.txt$"],
recursive: true
)!
```
## Best Practices
### Error Handling
```v
if file_path.exists() {
// Safe to operate
} else {
// Handle missing file
}
```
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/aiprompts/herolib_core/core_text.md
```md
# TextTools Module
The `texttools` module provides a comprehensive set of utilities for text manipulation and processing.
## Functions and Examples:
```v
import freeflowuniverse.herolib.core.texttools
assert texttools.name_fix("Hello World!") == 'hello_world'
```
### Name/Path Processing
* `name_fix(name string) string`: Normalizes filenames and paths.
* `name_fix_keepspace(name string) !string`: Like name_fix but preserves spaces.
* `name_fix_no_ext(name_ string) string`: Removes file extension.
* `name_fix_snake_to_pascal(name string) string`: Converts snake_case to PascalCase.
```v
name := texttools.name_fix_snake_to_pascal("hello_world") // Result: "HelloWorld"
```
* `snake_case(name string) string`: Converts PascalCase to snake_case.
```v
name := texttools.snake_case("HelloWorld") // Result: "hello_world"
```
* `name_split(name string) !(string, string)`: Splits name into site and page components.
### Text Cleaning
* `name_clean(r string) string`: Normalizes names by removing special characters.
```v
name := texttools.name_clean("Hello@World!") // Result: "HelloWorld"
```
* `ascii_clean(r string) string`: Removes all non-ASCII characters.
* `remove_empty_lines(text string) string`: Removes empty lines from text.
```v
text := texttools.remove_empty_lines("line1\n\nline2\n\n\nline3") // Result: "line1\nline2\nline3"
```
* `remove_double_lines(text string) string`: Removes consecutive empty lines.
* `remove_empty_js_blocks(text string) string`: Removes empty code blocks (```...```).
### Command Line Parsing
* `cmd_line_args_parser(text string) ![]string`: Parses command line arguments with support for quotes and escaping.
```v
args := texttools.cmd_line_args_parser("'arg with spaces' --flag=value") // Result: ['arg with spaces', '--flag=value']
```
* `text_remove_quotes(text string) string`: Removes quoted sections from text.
* `check_exists_outside_quotes(text string, items []string) bool`: Checks if items exist in text outside of quotes.
### Text Expansion
* `expand(txt_ string, l int, expand_with string) string`: Expands text to a specified length with a given character.
### Indentation
* `indent(text string, prefix string) string`: Adds indentation prefix to each line.
```v
text := texttools.indent("line1\nline2", " ") // Result: " line1\n line2\n"
```
* `dedent(text string) string`: Removes common leading whitespace from every line.
```v
text := texttools.dedent(" line1\n line2") // Result: "line1\nline2"
```
### String Validation
* `is_int(text string) bool`: Checks if text contains only digits.
* `is_upper_text(text string) bool`: Checks if text contains only uppercase letters.
### Multiline Processing
* `multiline_to_single(text string) !string`: Converts multiline text to a single line with proper escaping.
### Text Splitting
* `split_smart(t string, delimiter_ string) []string`: Intelligent string splitting that respects quotes.
### Tokenization
* `tokenize(text_ string) TokenizerResult`: Tokenizes text into meaningful parts.
* `text_token_replace(text string, tofind string, replacewith string) !string`: Replaces tokens in text.
### Version Parsing
* `version(text_ string) int`: Converts version strings to comparable integers.
```v
mut ver := texttools.version("v0.4.36") // Result: 4036
ver = texttools.version("v1.4.36") // Result: 1004036
```
### Formatting
* `format_rfc1123(t time.Time) string`: Formats a time.Time object into RFC 1123 format.
### Array Operations
* `to_array(r string) []string`: Converts a comma or newline separated list to an array of strings.
```v
text := "item1,item2,item3"
array := texttools.to_array(text) // Result: ['item1', 'item2', 'item3']
```
* `to_array_int(r string) []int`: Converts a text list to an array of integers.
* `to_map(mapstring string, line string, delimiter_ string) map[string]string`: Intelligent mapping of a line to a map based on a template.
```v
r := texttools.to_map("name,-,-,-,-,pid,-,-,-,-,path",
"root 304 0.0 0.0 408185328 1360 ?? S 16Dec23 0:34.06 /usr/sbin/distnoted")
// Result: {'name': 'root', 'pid': '1360', 'path': '/usr/sbin/distnoted'}
```
```
</file_contents>
<user_instructions>
create a module in rust in location packages/core/logger
which reimplements herolib/lib/core/logger
all features need to be reimplemented
write me an implementation plan for my coding agent
</user_instructions>
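
The instruction above asks for an implementation plan; as a rough starting point, here is a minimal sketch of the public surface such a Rust port might expose, mirroring model.v and log.v. All names, the unix-second timestamps, and the std-only I/O are editorial assumptions, not part of the original instructions.

```rust
// Sketch only: mirrors logger/model.v and log.v above. All names, the choice
// of unix-second timestamps, and the std-only I/O are assumptions.
use std::{fs, io::Write, path::PathBuf};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LogType {
    Stdout,
    Error,
}

#[derive(Debug, Clone)]
pub struct LogItem {
    pub timestamp: i64, // unix seconds (stand-in for ourtime.OurTime)
    pub cat: String,    // normalized, max 10 chars, space-padded
    pub log: String,
    pub logtype: LogType,
}

pub struct Logger {
    path: PathBuf,
    lastlog_time: i64, // last second for which an HH:MM:SS header was written
}

impl Logger {
    /// Create (or reuse) the log directory, like logger.new(path)! in V.
    pub fn new(path: impl Into<PathBuf>) -> std::io::Result<Self> {
        let path = path.into();
        fs::create_dir_all(&path)?;
        Ok(Self { path, lastlog_time: 0 })
    }

    /// Append one entry to the hourly YYYY-MM-DD-HH.log file.
    /// The real port must also pad the category, dedent the message,
    /// prefix lines with ' ' or 'E', and emit a time header once per second.
    pub fn log(&mut self, item: &LogItem) -> std::io::Result<()> {
        let file = self.path.join(format!("{}.log", day_hour(item.timestamp)));
        let mut f = fs::OpenOptions::new().create(true).append(true).open(file)?;
        let prefix = if item.logtype == LogType::Error { 'E' } else { ' ' };
        writeln!(f, "{prefix} {:<10} - {}", item.cat, item.log)
    }

    /// Search is left as a signature only; it would re-parse the hourly files
    /// exactly as search.v does (time range, category, substring, log type).
    pub fn search(&mut self, _args: SearchArgs) -> std::io::Result<Vec<LogItem>> {
        todo!("port of search.v")
    }
}

#[derive(Debug, Default)]
pub struct SearchArgs {
    pub timestamp_from: Option<i64>,
    pub timestamp_to: Option<i64>,
    pub cat: String,
    pub log: String,
    pub logtype: Option<LogType>,
    pub maxitems: usize, // V default is 10000
}

// Hypothetical helper: format a unix timestamp as YYYY-MM-DD-HH (port of
// ourtime.dayhour()); a real implementation would likely use chrono or time.
fn day_hour(_ts: i64) -> String {
    todo!("port of ourtime dayhour()")
}
```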

View File

@@ -10,7 +10,7 @@ keywords = ["network", "tcp", "http", "ssh", "connectivity"]
categories = ["network-programming", "api-bindings"]
[dependencies]
anyhow = "1.0.98"
tokio = { version = "1.0", features = ["full"] }
reqwest = { version = "0.12", features = ["json", "blocking"] }
rhai = "1.19.0"
anyhow = { workspace = true }
tokio = { workspace = true }
reqwest = { workspace = true, features = ["json", "blocking"] }
rhai = { workspace = true }

Some files were not shown because too many files have changed in this diff