Compare commits: 61f5331804...network_se

31 commits (SHA1 only; author and date columns did not survive the capture):

da3da0ae30, 784f87db97, 773db2238d, e8a369e3a2, 4b4f3371b0, 1bb731711b,
af89ef0149, 768e3e176d, aa0248ef17, aab2b6f128, d735316b7f, d1c80863b8,
169c62da47, 33a5f24981, d7562ce466, ca736d62f3, 078c6f723b, 9fdb8d8845,
8203a3b1ff, 1770ac561e, eed6dbf8dc, 4cd4e04028, 8cc828fc0e, 56af312aad,
dfd6931c5b, 6e01f99958, 0c02d0e99f, 7856fc0a4e, 758e59e921, f1806eb788,
6e5d9b35e8
Cargo.toml (105 changes)

@@ -12,22 +12,25 @@ readme = "README.md"
 [workspace]
 members = [
-    ".",
-    "vault",
-    "git",
-    "redisclient",
-    "mycelium",
-    "text",
-    "os",
-    "net",
-    "zinit_client",
-    "process",
-    "virt",
-    "postgresclient",
-    "kubernetes",
+    "packages/clients/myceliumclient",
+    "packages/clients/postgresclient",
+    "packages/clients/redisclient",
+    "packages/clients/zinitclient",
+    "packages/core/net",
+    "packages/core/text",
+    "packages/crypt/vault",
+    "packages/data/ourdb",
+    "packages/data/radixtree",
+    "packages/data/tst",
+    "packages/system/git",
+    "packages/system/kubernetes",
+    "packages/system/os",
+    "packages/system/process",
+    "packages/system/virt",
     "rhai",
+    "rhailib",
     "herodo",
-    "service_manager",
+    "packages/clients/hetznerclient",
 ]
 resolver = "2"

@@ -49,7 +52,7 @@ log = "0.4"
 once_cell = "1.18.0"
 rand = "0.8.5"
 regex = "1.8.1"
-reqwest = { version = "0.12.15", features = ["json"] }
+reqwest = { version = "0.12.15", features = ["json", "blocking"] }
 rhai = { version = "1.12.0", features = ["sync"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"

@@ -70,6 +73,10 @@ chacha20poly1305 = "0.10.1"
 k256 = { version = "0.13.4", features = ["ecdsa", "ecdh"] }
 sha2 = "0.10.7"
 hex = "0.4"
+bincode = { version = "2.0.1", features = ["serde"] }
+pbkdf2 = "0.12.2"
+getrandom = { version = "0.3.3", features = ["wasm_js"] }
+tera = "1.19.0"

 # Ethereum dependencies
 ethers = { version = "2.0.7", features = ["legacy"] }

@@ -86,27 +93,52 @@ windows = { version = "0.61.1", features = [
 zinit-client = "0.4.0"
 urlencoding = "2.1.3"
 tokio-test = "0.4.4"
+kube = { version = "0.95.0", features = ["client", "config", "derive"] }
+k8s-openapi = { version = "0.23.0", features = ["latest"] }
+tokio-retry = "0.3.0"
+governor = "0.6.3"
+tower = { version = "0.5.2", features = ["timeout", "limit"] }
+serde_yaml = "0.9"
+postgres-types = "0.2.5"
+r2d2 = "0.8.10"
+
+# SAL dependencies
+sal-git = { path = "packages/system/git" }
+sal-kubernetes = { path = "packages/system/kubernetes" }
+sal-redisclient = { path = "packages/clients/redisclient" }
+sal-mycelium = { path = "packages/clients/myceliumclient" }
+sal-hetzner = { path = "packages/clients/hetznerclient" }
+sal-text = { path = "packages/core/text" }
+sal-os = { path = "packages/system/os" }
+sal-net = { path = "packages/core/net" }
+sal-zinit-client = { path = "packages/clients/zinitclient" }
+sal-process = { path = "packages/system/process" }
+sal-virt = { path = "packages/system/virt" }
+sal-postgresclient = { path = "packages/clients/postgresclient" }
+sal-vault = { path = "packages/crypt/vault" }
+sal-rhai = { path = "rhai" }
+sal-service-manager = { path = "_archive/service_manager" }

 [dependencies]
-thiserror = "2.0.12" # For error handling in the main Error enum
-tokio = { workspace = true } # For async examples
+thiserror = { workspace = true }
+tokio = { workspace = true }

 # Optional dependencies - users can choose which modules to include
-sal-git = { path = "git", optional = true }
-sal-kubernetes = { path = "kubernetes", optional = true }
-sal-redisclient = { path = "redisclient", optional = true }
-sal-mycelium = { path = "mycelium", optional = true }
-sal-text = { path = "text", optional = true }
-sal-os = { path = "os", optional = true }
-sal-net = { path = "net", optional = true }
-sal-zinit-client = { path = "zinit_client", optional = true }
-sal-process = { path = "process", optional = true }
-sal-virt = { path = "virt", optional = true }
-sal-postgresclient = { path = "postgresclient", optional = true }
-sal-vault = { path = "vault", optional = true }
-sal-rhai = { path = "rhai", optional = true }
-sal-service-manager = { path = "service_manager", optional = true }
-zinit-client.workspace = true
+sal-git = { workspace = true, optional = true }
+sal-kubernetes = { workspace = true, optional = true }
+sal-redisclient = { workspace = true, optional = true }
+sal-mycelium = { workspace = true, optional = true }
+sal-hetzner = { workspace = true, optional = true }
+sal-text = { workspace = true, optional = true }
+sal-os = { workspace = true, optional = true }
+sal-net = { workspace = true, optional = true }
+sal-zinit-client = { workspace = true, optional = true }
+sal-process = { workspace = true, optional = true }
+sal-virt = { workspace = true, optional = true }
+sal-postgresclient = { workspace = true, optional = true }
+sal-vault = { workspace = true, optional = true }
+sal-rhai = { workspace = true, optional = true }
+sal-service-manager = { workspace = true, optional = true }

 [features]
 default = []

@@ -116,6 +148,7 @@ git = ["dep:sal-git"]
 kubernetes = ["dep:sal-kubernetes"]
 redisclient = ["dep:sal-redisclient"]
 mycelium = ["dep:sal-mycelium"]
+hetzner = ["dep:sal-hetzner"]
 text = ["dep:sal-text"]
 os = ["dep:sal-os"]
 net = ["dep:sal-net"]

@@ -125,18 +158,19 @@ virt = ["dep:sal-virt"]
 postgresclient = ["dep:sal-postgresclient"]
 vault = ["dep:sal-vault"]
 rhai = ["dep:sal-rhai"]
-service_manager = ["dep:sal-service-manager"]
+# service_manager is removed as it's not a direct member anymore

 # Convenience feature groups
 core = ["os", "process", "text", "net"]
-clients = ["redisclient", "postgresclient", "zinit_client", "mycelium"]
+clients = ["redisclient", "postgresclient", "zinit_client", "mycelium", "hetzner"]
-infrastructure = ["git", "vault", "kubernetes", "virt", "service_manager"]
+infrastructure = ["git", "vault", "kubernetes", "virt"]
 scripting = ["rhai"]
 all = [
     "git",
     "kubernetes",
     "redisclient",
     "mycelium",
+    "hetzner",
     "text",
     "os",
     "net",

@@ -146,7 +180,6 @@ all = [
     "postgresclient",
     "vault",
     "rhai",
-    "service_manager",
 ]

 # Examples
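The new optional `sal-hetzner` dependency and `hetzner` feature propagate to consumers of the `sal` meta-crate. As a hedged illustration only (the `sal::hetzner::api::Client` and `sal::hetzner::config::Config::from_env` paths are taken from the `herodo` library change further down; `robot_client` is a hypothetical helper name), downstream code can gate Hetzner-specific paths on the feature:

```rust
// Sketch: gate code on the new `hetzner` cargo feature of the `sal` meta-crate.
#[cfg(feature = "hetzner")]
fn robot_client() -> sal::hetzner::api::Client {
    // Assumes the Hetzner Robot credentials are supplied via environment variables.
    let cfg = sal::hetzner::config::Config::from_env().expect("hetzner env config missing");
    sal::hetzner::api::Client::new(cfg)
}

#[cfg(not(feature = "hetzner"))]
fn robot_client() {
    // Compiled when the feature is disabled; the Hetzner client is simply unavailable.
}
```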
README.md (456 changes)

@@ -1,404 +1,136 @@
-# SAL (System Abstraction Layer)
+# Herocode Herolib Rust Repository

-**Version: 0.1.0**
+## Overview

-SAL is a comprehensive Rust library designed to provide a unified and simplified interface for a wide array of system-level operations and interactions. It abstracts platform-specific details, enabling developers to write robust, cross-platform code with greater ease. SAL also includes `herodo`, a powerful command-line tool for executing Rhai scripts that leverage SAL's capabilities for automation and system management tasks.
+This repository contains the **Herocode Herolib** Rust library and a collection of scripts, examples, and utilities for building, testing, and publishing the SAL (System Abstraction Layer) crates. The repository includes:

-## 🏗️ **Cargo Workspace Structure**
+- **Rust crates** for various system components (e.g., `os`, `process`, `text`, `git`, `vault`, `kubernetes`, etc.).
+- **Rhai scripts** and test suites for each crate.
+- **Utility scripts** to automate common development tasks.

-SAL is organized as a **Cargo workspace** with 15 specialized crates:
+## Scripts

-- **Root Package**: `sal` - Umbrella crate that re-exports all modules
-- **12 Library Crates**: Core SAL modules (os, process, text, net, git, vault, kubernetes, virt, redisclient, postgresclient, zinit_client, mycelium)
-- **1 Binary Crate**: `herodo` - Rhai script execution engine
-- **1 Integration Crate**: `rhai` - Rhai scripting integration layer
+The repository provides three primary helper scripts located in the repository root:

-This workspace structure provides excellent build performance, dependency management, and maintainability.
+| Script | Description | Typical Usage |
+|--------|-------------|--------------|
+| `scripts/publish-all.sh` | Publishes all SAL crates to **crates.io** in the correct dependency order. Handles version bumping, dependency updates, dry‑run mode, and rate‑limiting. | `./scripts/publish-all.sh [--dry-run] [--wait <seconds>] [--version <ver>]` |
+| `build_herodo.sh` | Builds the `herodo` binary from the `herodo` package and optionally runs a specified Rhai script. | `./build_herodo.sh [script_name]` |
+| `run_rhai_tests.sh` | Executes all Rhai test suites across the repository, logging results and providing a summary. | `./run_rhai_tests.sh` |

-### **🚀 Workspace Benefits**
-- **Unified Dependency Management**: Shared dependencies across all crates with consistent versions
-- **Optimized Build Performance**: Parallel compilation and shared build artifacts
-- **Simplified Testing**: Run tests across all modules with a single command
-- **Modular Architecture**: Each module is independently maintainable while sharing common infrastructure
-- **Production Ready**: 100% test coverage with comprehensive Rhai integration tests
+Below are detailed usage instructions for each script.

-## 📦 Installation
+---

-SAL is designed to be modular - install only the components you need!
+## 1. `scripts/publish-all.sh`

-### Option 1: Individual Crates (Recommended)
+### Purpose

-Install only the modules you need:
+- Publishes each SAL crate in the correct dependency order.
+- Updates crate versions (if `--version` is supplied).
+- Updates path dependencies to version dependencies before publishing.
+- Supports **dry‑run** mode to preview actions without publishing.
+- Handles rate‑limiting between crate publishes.

+### Options

+| Option | Description |
+|--------|-------------|
+| `--dry-run` | Shows what would be published without actually publishing. |
+| `--wait <seconds>` | Wait time between publishes (default: 15 s). |
+| `--version <ver>` | Set a new version for all crates (updates `Cargo.toml` files). |
+| `-h, --help` | Show help message. |

+### Example Usage

 ```bash
-# Currently available packages
-cargo add sal-os sal-process sal-text sal-net sal-git sal-vault sal-kubernetes sal-virt
+# Dry run – no crates will be published
+./scripts/publish-all.sh --dry-run

-# Coming soon (rate limited)
-# cargo add sal-redisclient sal-postgresclient sal-zinit-client sal-mycelium sal-rhai
+# Publish with a custom wait time and version bump
+./scripts/publish-all.sh --wait 30 --version 1.2.3

+# Normal publish (no dry‑run)
+./scripts/publish-all.sh
 ```

-### Option 2: Meta-crate with Features
+### Notes

-Use the main `sal` crate with specific features:
+- Must be run from the repository root (where `Cargo.toml` lives).
+- Requires `cargo` and a logged‑in `cargo` session (`cargo login`).
+- The script automatically updates dependencies in each crate’s `Cargo.toml` to use the new version before publishing.

-```bash
-# Coming soon - meta-crate with features (rate limited)
-# cargo add sal --features os,process,text
-# cargo add sal --features core          # os, process, text, net
-# cargo add sal --features infrastructure # git, vault, kubernetes, virt
-# cargo add sal --features all
-
-# For now, use individual crates (see Option 1 above)
-```
+---

-### Quick Start Examples
+## 2. `build_herodo.sh`

-#### Using Individual Crates (Recommended)
+### Purpose

-```rust
-use sal_os::fs;
-use sal_process::run;
-
-fn main() -> Result<(), Box<dyn std::error::Error>> {
-    // File system operations
-    let files = fs::list_files(".")?;
-    println!("Found {} files", files.len());
-
-    // Process execution
-    let result = run::command("echo hello")?;
-    println!("Output: {}", result.stdout);
-
-    Ok(())
-}
-```
+- Builds the `herodo` binary from the `herodo` package.
+- Copies the binary to a system‑wide location (`/usr/local/bin`) if run as root, otherwise to `~/hero/bin`.
+- Optionally runs a specified Rhai script after building.

-#### Using Meta-crate with Features
-
-```rust
-// In Cargo.toml: sal = { version = "0.1.0", features = ["os", "process"] }
-use sal::os::fs;
-use sal::process::run;
-
-fn main() -> Result<(), Box<dyn std::error::Error>> {
-    // File system operations
-    let files = fs::list_files(".")?;
-    println!("Found {} files", files.len());
-
-    // Process execution
-    let result = run::command("echo hello")?;
-    println!("Output: {}", result.stdout);
-
-    Ok(())
-}
-```
-
-#### Using Herodo for Scripting
-
-```bash
-# Build and install herodo
-git clone https://github.com/PlanetFirst/sal.git
-cd sal
-./build_herodo.sh
-
-# Create a script file
-cat > example.rhai << 'EOF'
-// File operations
-let files = find_files(".", "*.rs");
-print("Found " + files.len() + " Rust files");
-
-// Process execution
-let result = run("echo 'Hello from SAL!'");
-print("Output: " + result.stdout);
-
-// Network operations
-let reachable = http_check("https://github.com");
-print("GitHub reachable: " + reachable);
-EOF
-
-# Execute the script
-herodo example.rhai
-```
-
-## 📦 Available Packages
-
-SAL is published as individual crates, allowing you to install only what you need:
-
-| Package | Description | Install Command |
-|---------|-------------|-----------------|
-| [`sal-os`](https://crates.io/crates/sal-os) | Operating system operations | `cargo add sal-os` |
-| [`sal-process`](https://crates.io/crates/sal-process) | Process management | `cargo add sal-process` |
-| [`sal-text`](https://crates.io/crates/sal-text) | Text processing utilities | `cargo add sal-text` |
-| [`sal-net`](https://crates.io/crates/sal-net) | Network operations | `cargo add sal-net` |
-| [`sal-git`](https://crates.io/crates/sal-git) | Git repository management | `cargo add sal-git` |
-| [`sal-vault`](https://crates.io/crates/sal-vault) | Cryptographic operations | `cargo add sal-vault` |
-| [`sal-kubernetes`](https://crates.io/crates/sal-kubernetes) | Kubernetes management | `cargo add sal-kubernetes` |
-| [`sal-virt`](https://crates.io/crates/sal-virt) | Virtualization tools | `cargo add sal-virt` |
-| `sal-redisclient` | Redis database client | `cargo add sal-redisclient` ⏳ |
-| `sal-postgresclient` | PostgreSQL client | `cargo add sal-postgresclient` ⏳ |
-| `sal-zinit-client` | Zinit process supervisor | `cargo add sal-zinit-client` ⏳ |
-| `sal-mycelium` | Mycelium network client | `cargo add sal-mycelium` ⏳ |
-| `sal-rhai` | Rhai scripting integration | `cargo add sal-rhai` ⏳ |
-| `sal` | Meta-crate with features | `cargo add sal --features all` ⏳ |
-| `herodo` | Script executor binary | Build from source ⏳ |
-
-**Legend**: ✅ Published | ⏳ Publishing soon (rate limited)
-
-### 📢 **Publishing Status**
-
-**Currently Available on crates.io:**
-- ✅ [`sal-os`](https://crates.io/crates/sal-os) - Operating system operations
-- ✅ [`sal-process`](https://crates.io/crates/sal-process) - Process management
-- ✅ [`sal-text`](https://crates.io/crates/sal-text) - Text processing utilities
-- ✅ [`sal-net`](https://crates.io/crates/sal-net) - Network operations
-- ✅ [`sal-git`](https://crates.io/crates/sal-git) - Git repository management
-- ✅ [`sal-vault`](https://crates.io/crates/sal-vault) - Cryptographic operations
-- ✅ [`sal-kubernetes`](https://crates.io/crates/sal-kubernetes) - Kubernetes management
-- ✅ [`sal-virt`](https://crates.io/crates/sal-virt) - Virtualization tools
-
-**Publishing Soon** (hit crates.io rate limit):
-- ⏳ `sal-redisclient`, `sal-postgresclient`, `sal-zinit-client`, `sal-mycelium`
-- ⏳ `sal-rhai`
-- ⏳ `sal` (meta-crate), `herodo` (binary)
-
-**Estimated Timeline**: Remaining packages will be published within 24 hours once the rate limit resets.
-
-## Core Features
-
-SAL offers a broad spectrum of functionalities, including:
-
-- **System Operations**: File and directory management, environment variable access, system information retrieval, and OS-specific commands.
-- **Process Management**: Create, monitor, control, and interact with system processes.
-- **Containerization Tools**:
-  - Integration with **Buildah** for building OCI/Docker-compatible container images.
-  - Integration with **nerdctl** for managing containers (run, stop, list, build, etc.).
-- **Version Control**: Programmatic interaction with Git repositories (clone, commit, push, pull, status, etc.).
-- **Database Clients**:
-  - **Redis**: Robust client for interacting with Redis servers.
-  - **PostgreSQL**: Client for executing queries and managing PostgreSQL databases.
-- **Scripting Engine**: In-built support for the **Rhai** scripting language, allowing SAL functionalities to be scripted and automated, primarily through the `herodo` tool.
-- **Networking & Services**:
-  - **Mycelium**: Tools for Mycelium network peer management and message passing.
-  - **Zinit**: Client for interacting with the Zinit process supervision system.
-  - **RFS (Remote/Virtual Filesystem)**: Mount, manage, pack, and unpack various types of filesystems (local, SSH, S3, WebDAV).
-- **Text Processing**: A suite of utilities for text manipulation, formatting, and regular expressions.
-- **Cryptography (`vault`)**: Functions for common cryptographic operations.
-
-## `herodo`: The SAL Scripting Tool
-
-`herodo` is a command-line utility bundled with SAL that executes Rhai scripts. It empowers users to automate tasks and orchestrate complex workflows by leveraging SAL's diverse modules directly from scripts.
-
 ### Usage

 ```bash
-# Execute a single Rhai script
-herodo script.rhai
+# Build only
+./build_herodo.sh

-# Execute a script with arguments
-herodo script.rhai arg1 arg2
+# Build and run a specific Rhai script (e.g., `example`):
+./build_herodo.sh example

-# Execute all .rhai scripts in a directory
-herodo /path/to/scripts/
 ```

-If a directory is provided, `herodo` will execute all `.rhai` scripts within that directory (and its subdirectories) in alphabetical order.
+### Details

-### Scriptable SAL Modules via `herodo`
+- The script changes to its own directory, builds the `herodo` crate (`cargo build`), and copies the binary.
+- If a script name is provided, it looks for the script in:
+  - `src/rhaiexamples/<name>.rhai`
+  - `src/herodo/scripts/<name>.rhai`
+- If the script is not found, the script exits with an error.

-The following SAL modules and functionalities are exposed to the Rhai scripting environment through `herodo`:
+---

-- **OS (`os`)**: Comprehensive file system operations, file downloading & installation, and system package management. [Documentation](os/README.md)
-- **Process (`process`)**: Robust command and script execution, plus process management (listing, finding, killing, checking command existence). [Documentation](process/README.md)
-- **Text (`text`)**: String manipulation, prefixing, path/name fixing, text replacement, and templating. [Documentation](text/README.md)
-- **Net (`net`)**: Network operations, HTTP requests, and connectivity utilities. [Documentation](net/README.md)
-- **Git (`git`)**: High-level repository management and generic Git command execution with Redis-backed authentication (clone, pull, push, commit, etc.). [Documentation](git/README.md)
-- **Vault (`vault`)**: Cryptographic operations, keypair management, encryption, decryption, hashing, etc. [Documentation](vault/README.md)
-- **Redis Client (`redisclient`)**: Execute Redis commands (`redis_get`, `redis_set`, `redis_execute`, etc.). [Documentation](redisclient/README.md)
-- **PostgreSQL Client (`postgresclient`)**: Execute SQL queries against PostgreSQL databases. [Documentation](postgresclient/README.md)
-- **Zinit (`zinit_client`)**: Client for Zinit process supervisor (service management, logs). [Documentation](zinit_client/README.md)
-- **Mycelium (`mycelium`)**: Client for Mycelium decentralized networking API (node info, peer management, messaging). [Documentation](mycelium/README.md)
-- **Virtualization (`virt`)**:
-  - **Buildah**: OCI/Docker image building functions. [Documentation](virt/README.md)
-  - **nerdctl**: Container lifecycle management (`nerdctl_run`, `nerdctl_stop`, `nerdctl_images`, `nerdctl_image_build`, etc.)
-  - **RFS**: Mount various filesystems (local, SSH, S3, etc.), pack/unpack filesystem layers.
+## 3. `run_rhai_tests.sh`

-### Example `herodo` Rhai Script
+### Purpose

-```rhai
-// file: /opt/scripts/example_task.rhai
-
-// OS operations
-println("Checking for /tmp/my_app_data...");
-if !exist("/tmp/my_app_data") {
-    mkdir("/tmp/my_app_data");
-    println("Created directory /tmp/my_app_data");
-}
-
-// Redis operations
-println("Setting Redis key 'app_status' to 'running'");
-redis_set("app_status", "running");
-let status = redis_get("app_status");
-println("Current app_status from Redis: " + status);
-
-// Process execution
-println("Listing files in /tmp:");
-let output = run("ls -la /tmp");
-println(output.stdout);
-
-println("Script finished.");
-```
-
-Run with: `herodo /opt/scripts/example_task.rhai`
-
-For more examples, check the individual module test directories (e.g., `text/tests/rhai/`, `os/tests/rhai/`, etc.) in this repository.
-
-## Using SAL as a Rust Library
-
-### Option 1: Individual Crates (Recommended)
-
-Add only the SAL modules you need:
-
-```toml
-[dependencies]
-sal-os = "0.1.0"
-sal-process = "0.1.0"
-sal-text = "0.1.0"
-```
-
-```rust
-use sal_os::fs;
-use sal_process::run;
-use sal_text::template;
-
-fn main() -> Result<(), Box<dyn std::error::Error>> {
-    // File operations
-    let files = fs::list_files(".")?;
-    println!("Found {} files", files.len());
-
-    // Process execution
-    let result = run::command("echo 'Hello SAL!'")?;
-    println!("Output: {}", result.stdout);
-
-    // Text templating
-    let template_str = "Hello {{name}}!";
-    let mut vars = std::collections::HashMap::new();
-    vars.insert("name".to_string(), "World".to_string());
-    let rendered = template::render(template_str, &vars)?;
-    println!("Rendered: {}", rendered);
-
-    Ok(())
-}
-```
-
-### Option 2: Meta-crate with Features (Coming Soon)
-
-```toml
-[dependencies]
-sal = { version = "0.1.0", features = ["os", "process", "text"] }
-```
-
-```rust
-use sal::os::fs;
-use sal::process::run;
-use sal::text::template;
-
-// Same code as above, but using the meta-crate
-```
-
-*(Note: The meta-crate `sal` will be available once all individual packages are published.)*
-
-## 🎯 **Why Choose SAL?**
-
-### **Modular Architecture**
-- **Install Only What You Need**: Each package is independent - no bloated dependencies
-- **Faster Compilation**: Smaller dependency trees mean faster build times
-- **Smaller Binaries**: Only include the functionality you actually use
-- **Clear Dependencies**: Explicit about what functionality your project uses
-
-### **Developer Experience**
-- **Consistent APIs**: All packages follow the same design patterns and conventions
-- **Comprehensive Documentation**: Each package has detailed documentation and examples
-- **Real-World Tested**: All functionality is production-tested, no placeholder code
-- **Type Safety**: Leverages Rust's type system for safe, reliable operations
-
-### **Scripting Power**
-- **Herodo Integration**: Execute Rhai scripts with full access to SAL functionality
-- **Cross-Platform**: Works consistently across Windows, macOS, and Linux
-- **Automation Ready**: Perfect for DevOps, CI/CD, and system administration tasks
-
-## 📦 **Workspace Modules Overview**
-
-SAL is organized as a Cargo workspace with the following crates:
-
-### **Core Library Modules**
-- **`sal-os`**: Core OS interactions, file system operations, environment access
-- **`sal-process`**: Process creation, management, and control
-- **`sal-text`**: Utilities for text processing and manipulation
-- **`sal-net`**: Network operations, HTTP requests, and connectivity utilities
-
-### **Integration Modules**
-- **`sal-git`**: Git repository management and operations
-- **`sal-vault`**: Cryptographic functions and keypair management
-- **`sal-rhai`**: Integration layer for the Rhai scripting engine, used by `herodo`
-
-### **Client Modules**
-- **`sal-redisclient`**: Client for Redis database interactions
-- **`sal-postgresclient`**: Client for PostgreSQL database interactions
-- **`sal-zinit-client`**: Client for Zinit process supervisor
-- **`sal-mycelium`**: Client for Mycelium network operations
-
-### **Specialized Modules**
-- **`sal-virt`**: Virtualization-related utilities (buildah, nerdctl, rfs)
-
-### **Root Package & Binary**
-- **`sal`**: Root umbrella crate that re-exports all modules
-- **`herodo`**: Command-line binary for executing Rhai scripts
-
-## 🔨 **Building SAL**
-
-Build the entire workspace (all crates) using Cargo:
-
 ```bash
-# Build all workspace members
-cargo build --workspace
+# Run all tests

-# Build for release
-cargo build --workspace --release
-
-# Build specific crate
-cargo build -p sal-text
-cargo build -p herodo
-```
-
-The `herodo` executable will be located at `target/debug/herodo` or `target/release/herodo`.
-
-## 🧪 **Running Tests**
-
-### **Rust Unit Tests**
-```bash
-# Run all workspace tests
-cargo test --workspace
-
-# Run tests for specific crate
-cargo test -p sal-text
-cargo test -p sal-os
-
-# Run only library tests (faster)
-cargo test --workspace --lib
-```
-
-### **Rhai Integration Tests**
-Run comprehensive Rhai script tests that exercise `herodo` and SAL's scripted functionalities:
-
-```bash
-# Run all Rhai integration tests (16 modules)
 ./run_rhai_tests.sh
-
-# Results: 16/16 modules pass with 100% success rate
 ```

-The Rhai tests validate real-world functionality across all SAL modules and provide comprehensive integration testing.
+### Output

+- Runs **all** Rhai test suites across the repository.
+- Supports both the legacy `rhai_tests` directory and the newer `*/tests/rhai` layout.
+- Logs output to `run_rhai_tests.log` and prints a summary.
+- Colored console output for readability.
+- Log file (`run_rhai_tests.log`) contains full output for later review.
+- Summary includes total modules, passed, and failed counts.
+- Exit code `0` if all tests pass, `1` otherwise.

+---

+## General Development Workflow

+1. **Build**: Use `build_herodo.sh` to compile the `herodo` binary.
+2. **Test**: Run `run_rhai_tests.sh` to ensure all Rhai scripts pass.
+3. **Publish**: When ready to release, use `scripts/publish-all.sh` (with `--dry-run` first to verify).

+## Prerequisites

+- **Rust toolchain** (`cargo`, `rustc`) installed.
+- **Rhai** interpreter (`herodo`) built and available.
+- **Git** for version control.
+- **Cargo login** for publishing to crates.io.

 ## License

-SAL is licensed under the Apache License 2.0. See the [LICENSE](LICENSE) file for details.
+See `LICENSE` for details.

+---

+**Happy coding!**
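The rewritten README drops the Rust quick-start that the old version carried. For readers who consume the crates directly, a minimal sketch along the lines of the removed example (same `sal_os::fs` and `sal_process::run` calls shown above):

```rust
use sal_os::fs;
use sal_process::run;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // File system operations
    let files = fs::list_files(".")?;
    println!("Found {} files", files.len());

    // Process execution
    let result = run::command("echo hello")?;
    println!("Output: {}", result.stdout);

    Ok(())
}
```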
cargo_instructions.md (new empty file, 0 lines)

(file name for the next hunk not captured)
@@ -1,64 +1,76 @@
-# Hero Vault Cryptography Examples
+# SAL Vault Examples

-This directory contains examples demonstrating the Hero Vault cryptography functionality integrated into the SAL project.
+This directory contains examples demonstrating the SAL Vault functionality.

 ## Overview

-Hero Vault provides cryptographic operations including:
+SAL Vault provides secure key management and cryptographic operations including:

-- Key space management (creation, loading, encryption, decryption)
-- Keypair management (creation, selection, listing)
-- Digital signatures (signing and verification)
-- Symmetric encryption (key generation, encryption, decryption)
-- Ethereum wallet functionality
-- Smart contract interactions
-- Key-value store with encryption
+- Vault creation and management
+- KeySpace operations (encrypted key-value stores)
+- Symmetric key generation and operations
+- Asymmetric key operations (signing and verification)
+- Secure key derivation from passwords

-## Example Files
+## Current Status

-- `example.rhai` - Basic example demonstrating key management, signing, and encryption
-- `advanced_example.rhai` - Advanced example with error handling, conditional logic, and more complex operations
-- `key_persistence_example.rhai` - Demonstrates creating and saving a key space to disk
-- `load_existing_space.rhai` - Shows how to load a previously created key space and use its keypairs
-- `contract_example.rhai` - Demonstrates loading a contract ABI and interacting with smart contracts
-- `agung_send_transaction.rhai` - Demonstrates sending native tokens on the Agung network
-- `agung_contract_with_args.rhai` - Shows how to interact with contracts with arguments on Agung
+⚠️ **Note**: The vault module is currently being updated to use Lee's implementation.
+The Rhai scripting integration is temporarily disabled while we adapt the examples
+to work with the new vault API.

-## Running the Examples
+## Available Operations

-You can run the examples using the `herodo` tool that comes with the SAL project:
+- **Vault Management**: Create and manage vault instances
+- **KeySpace Operations**: Open encrypted key-value stores within vaults
+- **Symmetric Encryption**: Generate keys and encrypt/decrypt data
+- **Asymmetric Operations**: Create keypairs, sign messages, verify signatures

-```bash
-# Run a single example
-herodo --path example.rhai
-
-# Run all examples using the provided script
-./run_examples.sh
+## Example Files (Legacy - Sameh's Implementation)

+⚠️ **These examples are currently archived and use the previous vault implementation**:

+- `_archive/example.rhai` - Basic example demonstrating key management, signing, and encryption
+- `_archive/advanced_example.rhai` - Advanced example with error handling and complex operations
+- `_archive/key_persistence_example.rhai` - Demonstrates creating and saving a key space to disk
+- `_archive/load_existing_space.rhai` - Shows how to load a previously created key space
+- `_archive/contract_example.rhai` - Demonstrates smart contract interactions (Ethereum)
+- `_archive/agung_send_transaction.rhai` - Demonstrates Ethereum transactions on Agung network
+- `_archive/agung_contract_with_args.rhai` - Shows contract interactions with arguments

+## Current Implementation (Lee's Vault)

+The current vault implementation provides:

+```rust
+// Create a new vault
+let vault = Vault::new(&path).await?;
+
+// Open an encrypted keyspace
+let keyspace = vault.open_keyspace("my_space", "password").await?;
+
+// Perform cryptographic operations
+// (API documentation coming soon)
 ```

-## Key Space Storage
+## Migration Status

-Key spaces are stored in the `~/.hero-vault/key-spaces/` directory by default. Each key space is stored in a separate JSON file named after the key space (e.g., `my_space.json`).
-
-## Ethereum Functionality
-
-The Hero Vault module provides comprehensive Ethereum wallet functionality:
-
-- Creating and managing wallets for different networks
-- Sending ETH transactions
-- Checking balances
-- Interacting with smart contracts (read and write functions)
-- Support for multiple networks (Ethereum, Gnosis, Peaq, Agung, etc.)
+- ✅ **Vault Core**: Lee's implementation is active
+- ✅ **Archive**: Sameh's implementation preserved in `vault/_archive/`
+- ⏳ **Rhai Integration**: Being developed for Lee's implementation
+- ⏳ **Examples**: Will be updated to use Lee's API
+- ❌ **Ethereum Features**: Not available in Lee's implementation

 ## Security

-Key spaces are encrypted with ChaCha20Poly1305 using a key derived from the provided password. The encryption ensures that the key material is secure at rest.
-
-## Best Practices
-
-1. **Use Strong Passwords**: Since the security of your key spaces depends on the strength of your passwords, use strong, unique passwords.
-2. **Backup Key Spaces**: Regularly backup your key spaces directory to prevent data loss.
-3. **Script Organization**: Split your scripts into logical units, with separate scripts for key creation and key usage.
-4. **Error Handling**: Always check the return values of functions to ensure operations succeeded before proceeding.
-5. **Network Selection**: When working with Ethereum functionality, be explicit about which network you're targeting to avoid confusion.
-6. **Gas Management**: For Ethereum transactions, consider gas costs and set appropriate gas limits.
+The vault uses:
+
+- **ChaCha20Poly1305** for symmetric encryption
+- **Password-based key derivation** for keyspace encryption
+- **Secure key storage** with proper isolation
+
+## Next Steps
+
+1. **Rhai Integration**: Implement Rhai bindings for Lee's vault
+2. **New Examples**: Create examples using Lee's simpler API
+3. **Documentation**: Complete API documentation for Lee's implementation
+4. **Migration Guide**: Provide guidance for users migrating from Sameh's implementation
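The Security section above names ChaCha20Poly1305 and password-based key derivation, and the workspace `Cargo.toml` change earlier adds `pbkdf2` next to the existing `chacha20poly1305` and `sha2` crates. A minimal sketch of those primitives follows; it is not the vault API itself, and the salt and iteration count are illustrative placeholders:

```rust
use chacha20poly1305::aead::{Aead, AeadCore, KeyInit, OsRng};
use chacha20poly1305::{ChaCha20Poly1305, Key};
use pbkdf2::pbkdf2_hmac;
use sha2::Sha256;

fn main() {
    // Derive a 32-byte key from a password (illustrative salt and iteration count).
    let mut key_bytes = [0u8; 32];
    pbkdf2_hmac::<Sha256>(b"password", b"example-salt", 100_000, &mut key_bytes);

    // Encrypt and decrypt with ChaCha20Poly1305 using the derived key.
    let cipher = ChaCha20Poly1305::new(Key::from_slice(&key_bytes));
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);
    let ciphertext = cipher.encrypt(&nonce, b"secret".as_ref()).expect("encrypt");
    let plaintext = cipher.decrypt(&nonce, ciphertext.as_ref()).expect("decrypt");
    assert_eq!(plaintext, b"secret");
}
```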
(file name not captured)
@@ -1,6 +1,7 @@
 // Example of using the network modules in SAL through Rhai
 // Shows TCP port checking, HTTP URL validation, and SSH command execution

 // Function to print section header
 fn section(title) {
     print("\n");

@@ -19,14 +20,14 @@ let host = "localhost";
 let port = 22;
 print(`Checking if port ${port} is open on ${host}...`);
 let is_open = tcp.check_port(host, port);
-print(`Port ${port} is ${is_open ? "open" : "closed"}`);
+print(`Port ${port} is ${if is_open { "open" } else { "closed" }}`);

 // Check multiple ports
 let ports = [22, 80, 443];
 print(`Checking multiple ports on ${host}...`);
 let port_results = tcp.check_ports(host, ports);
 for result in port_results {
-    print(`Port ${result.port} is ${result.is_open ? "open" : "closed"}`);
+    print(`Port ${result.port} is ${if result.is_open { "open" } else { "closed" }}`);
 }

 // HTTP connectivity checks

@@ -39,7 +40,7 @@ let http = net::new_http_connector();
 let url = "https://www.example.com";
 print(`Checking if ${url} is reachable...`);
 let is_reachable = http.check_url(url);
-print(`${url} is ${is_reachable ? "reachable" : "unreachable"}`);
+print(`${url} is ${if is_reachable { "reachable" } else { "unreachable" }}`);

 // Check the status code of a URL
 print(`Checking status code of ${url}...`);

@@ -68,7 +69,7 @@ if is_open {
 let ssh = net::new_ssh_builder()
     .host("localhost")
     .port(22)
-    .user(os::get_env("USER") || "root")
+    .user(if os::get_env("USER") != () { os::get_env("USER") } else { "root" })
     .timeout(10)
     .build();
(file name not captured)
@@ -1,7 +1,7 @@
-print("Running a basic command using run().do()...");
+print("Running a basic command using run().execute()...");

 // Execute a simple command
-let result = run("echo Hello from run_basic!").do();
+let result = run("echo Hello from run_basic!").execute();

 // Print the command result
 print(`Command: echo Hello from run_basic!`);

@@ -13,6 +13,6 @@ print(`Stderr:\n${result.stderr}`);
 // Example of a command that might fail (if 'nonexistent_command' doesn't exist)
 // This will halt execution by default because ignore_error() is not used.
 // print("Running a command that will fail (and should halt)...");
-// let fail_result = run("nonexistent_command").do(); // This line will cause the script to halt if the command doesn't exist
+// let fail_result = run("nonexistent_command").execute(); // This line will cause the script to halt if the command doesn't exist

 print("Basic run() example finished.");

(file name not captured)
@@ -2,7 +2,7 @@ print("Running a command that will fail, but ignoring the error...");

 // Run a command that exits with a non-zero code (will fail)
 // Using .ignore_error() prevents the script from halting
-let result = run("exit 1").ignore_error().do();
+let result = run("exit 1").ignore_error().execute();

 print(`Command finished.`);
 print(`Success: ${result.success}`); // This should be false

@@ -22,7 +22,7 @@ print("\nScript continued execution after the potentially failing command.");
 // Example of a command that might fail due to OS error (e.g., command not found)
 // This *might* still halt depending on how the underlying Rust function handles it,
 // as ignore_error() primarily prevents halting on *command* non-zero exit codes.
-// let os_error_result = run("nonexistent_command_123").ignore_error().do();
+// let os_error_result = run("nonexistent_command_123").ignore_error().execute();
 // print(`OS Error Command Success: ${os_error_result.success}`);
 // print(`OS Error Command Exit Code: ${os_error_result.code}`);

(file name not captured)
@@ -1,4 +1,4 @@
-print("Running a command using run().log().do()...");
+print("Running a command using run().log().execute()...");

 // The .log() method will print the command string to the console before execution.
 // This is useful for debugging or tracing which commands are being run.

(file name not captured)
@@ -1,8 +1,8 @@
-print("Running a command using run().silent().do()...\n");
+print("Running a command using run().silent().execute()...\n");

 // This command will print to standard output and standard error
 // However, because .silent() is used, the output will not appear in the console directly
-let result = run("echo 'This should be silent stdout.'; echo 'This should be silent stderr.' >&2; exit 0").silent().do();
+let result = run("echo 'This should be silent stdout.'; echo 'This should be silent stderr.' >&2; exit 0").silent().execute();

 // The output is still captured in the CommandResult
 print(`Command finished.`);

@@ -12,7 +12,7 @@ print(`Captured Stdout:\\n${result.stdout}`);
 print(`Captured Stderr:\\n${result.stderr}`);

 // Example of a silent command that fails (but won't halt because we only suppress output)
-// let fail_result = run("echo 'This is silent failure stderr.' >&2; exit 1").silent().do();
+// let fail_result = run("echo 'This is silent failure stderr.' >&2; exit 1").silent().execute();
 // print(`Failed command finished (silent):`);
 // print(`Success: ${fail_result.success}`);
 // print(`Exit Code: ${fail_result.code}`);
(file name not captured)
@@ -3,7 +3,7 @@
 //! This library loads the Rhai engine, registers all SAL modules,
 //! and executes Rhai scripts from a specified directory in sorted order.

-use rhai::Engine;
+use rhai::{Engine, Scope};
 use std::error::Error;
 use std::fs;
 use std::path::{Path, PathBuf};

@@ -30,6 +30,19 @@ pub fn run(script_path: &str) -> Result<(), Box<dyn Error>> {
     // Create a new Rhai engine
     let mut engine = Engine::new();

+    // TODO: if we create a scope here we could clean up all the different functions and types registered with the engine.
+    // We should generalize the way we add things to the scope for each module separately.
+    let mut scope = Scope::new();
+    // Conditionally add the Hetzner client only when env config is present
+    if let Ok(cfg) = sal::hetzner::config::Config::from_env() {
+        let hetzner_client = sal::hetzner::api::Client::new(cfg);
+        scope.push("hetzner", hetzner_client);
+    }
+    // This makes it easy to call e.g. `hetzner.get_server()` or `mycelium.get_connected_peers()`
+    // --> without the need to manually create a client for each one first
+    // --> could be conditionally compiled to only use those we need (we only push the things to the scope that we actually need to run the script)

     // Register println function for output
     engine.register_fn("println", |s: &str| println!("{}", s));

@@ -78,19 +91,20 @@ pub fn run(script_path: &str) -> Result<(), Box<dyn Error>> {
         let script = fs::read_to_string(&script_file)?;

         // Execute the script
-        match engine.eval::<rhai::Dynamic>(&script) {
-            Ok(result) => {
-                println!("Script executed successfully");
-                if !result.is_unit() {
-                    println!("Result: {}", result);
-                }
-            }
-            Err(err) => {
-                eprintln!("Error executing script: {}", err);
-                // Exit with error code when a script fails
-                process::exit(1);
-            }
-        }
+        // match engine.eval::<rhai::Dynamic>(&script) {
+        //     Ok(result) => {
+        //         println!("Script executed successfully");
+        //         if !result.is_unit() {
+        //             println!("Result: {}", result);
+        //         }
+        //     }
+        //     Err(err) => {
+        //         eprintln!("Error executing script: {}", err);
+        //         // Exit with error code when a script fails
+        //         process::exit(1);
+        //     }
+        // }
+        engine.run_with_scope(&mut scope, &script)?;
     }

     println!("\nAll scripts executed successfully!");
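The switch from `engine.eval` to `engine.run_with_scope` is what lets pre-built objects such as the `hetzner` client be visible to every script by name. A minimal, self-contained sketch of that pattern, using a plain string instead of an API client:

```rust
use rhai::{Engine, Scope};

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let engine = Engine::new();
    let mut scope = Scope::new();

    // Anything pushed into the scope is addressable by name from the script,
    // which is how `hetzner` becomes callable without constructing a client in Rhai.
    scope.push("greeting", String::from("hello from the host"));

    engine.run_with_scope(&mut scope, r#"print(greeting);"#)?;
    Ok(())
}
```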
packages/clients/hetznerclient/Cargo.toml (new file, 12 lines)

@@ -0,0 +1,12 @@
+[package]
+name = "sal-hetzner"
+version = "0.1.0"
+edition = "2024"
+
+[dependencies]
+prettytable = "0.10.0"
+reqwest.workspace = true
+rhai = { workspace = true, features = ["serde"] }
+serde = { workspace = true, features = ["derive"] }
+serde_json.workspace = true
+thiserror.workspace = true
packages/clients/hetznerclient/src/api/error.rs (new file, 54 lines)

@@ -0,0 +1,54 @@
+use std::fmt;
+
+use serde::Deserialize;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum AppError {
+    #[error("Request failed: {0}")]
+    RequestError(#[from] reqwest::Error),
+    #[error("API error: {0}")]
+    ApiError(ApiError),
+    #[error("Deserialization Error: {0:?}")]
+    SerdeJsonError(#[from] serde_json::Error),
+}
+
+#[derive(Debug, Deserialize)]
+pub struct ApiError {
+    pub status: u16,
+    pub message: String,
+}
+
+impl From<reqwest::blocking::Response> for ApiError {
+    fn from(value: reqwest::blocking::Response) -> Self {
+        ApiError {
+            status: value.status().into(),
+            message: value.text().unwrap_or("The API call returned an error.".to_string()),
+        }
+    }
+}
+
+impl fmt::Display for ApiError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        #[derive(Deserialize)]
+        struct HetznerApiError {
+            code: String,
+            message: String,
+        }
+
+        #[derive(Deserialize)]
+        struct HetznerApiErrorWrapper {
+            error: HetznerApiError,
+        }
+
+        if let Ok(wrapper) = serde_json::from_str::<HetznerApiErrorWrapper>(&self.message) {
+            write!(
+                f,
+                "Status: {}, Code: {}, Message: {}",
+                self.status, wrapper.error.code, wrapper.error.message
+            )
+        } else {
+            write!(f, "Status: {}: {}", self.status, self.message)
+        }
+    }
+}
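To make the two `Display` branches above concrete, here is a hedged, test-style sketch. The error code and body are made-up examples of the wrapped JSON shape that `HetznerApiErrorWrapper` deserializes, not actual Hetzner Robot responses, and the snippet assumes it lives in a `#[cfg(test)]` module next to `ApiError`:

```rust
// Hypothetical body matching the wrapped {"error": {"code", "message"}} shape.
let wrapped = ApiError {
    status: 404,
    message: r#"{"error":{"code":"SERVER_NOT_FOUND","message":"server not found"}}"#.to_string(),
};
assert_eq!(
    wrapped.to_string(),
    "Status: 404, Code: SERVER_NOT_FOUND, Message: server not found"
);

// A body that is not the wrapped JSON falls back to the plain form.
let plain = ApiError { status: 500, message: "boom".to_string() };
assert_eq!(plain.to_string(), "Status: 500: boom");
```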
513 packages/clients/hetznerclient/src/api/mod.rs Normal file
@@ -0,0 +1,513 @@
pub mod error;
pub mod models;

use self::models::{
    Boot, Rescue, Server, SshKey, ServerAddonProduct, ServerAddonProductWrapper,
    AuctionServerProduct, AuctionServerProductWrapper, AuctionTransaction,
    AuctionTransactionWrapper, BootWrapper, Cancellation, CancellationWrapper,
    OrderServerBuilder, OrderServerProduct, OrderServerProductWrapper, RescueWrapped,
    ServerWrapper, SshKeyWrapper, Transaction, TransactionWrapper,
    ServerAddonTransaction, ServerAddonTransactionWrapper,
    OrderServerAddonBuilder,
};
use crate::api::error::ApiError;
use crate::config::Config;
use error::AppError;
use reqwest::blocking::Client as HttpClient;
use serde_json::json;

#[derive(Clone)]
pub struct Client {
    http_client: HttpClient,
    config: Config,
}

impl Client {
    pub fn new(config: Config) -> Self {
        Self {
            http_client: HttpClient::new(),
            config,
        }
    }

    fn handle_response<T>(&self, response: reqwest::blocking::Response) -> Result<T, AppError>
    where
        T: serde::de::DeserializeOwned,
    {
        let status = response.status();
        let body = response.text()?;

        if status.is_success() {
            serde_json::from_str::<T>(&body).map_err(Into::into)
        } else {
            Err(AppError::ApiError(ApiError {
                status: status.as_u16(),
                message: body,
            }))
        }
    }

    pub fn get_server(&self, server_number: i32) -> Result<Server, AppError> {
        let response = self
            .http_client
            .get(format!("{}/server/{}", self.config.api_url, server_number))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: ServerWrapper = self.handle_response(response)?;
        Ok(wrapped.server)
    }

    pub fn get_servers(&self) -> Result<Vec<Server>, AppError> {
        let response = self
            .http_client
            .get(format!("{}/server", self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<ServerWrapper> = self.handle_response(response)?;
        let servers = wrapped.into_iter().map(|sw| sw.server).collect();
        Ok(servers)
    }

    pub fn update_server_name(&self, server_number: i32, name: &str) -> Result<Server, AppError> {
        let params = [("server_name", name)];
        let response = self
            .http_client
            .post(format!("{}/server/{}", self.config.api_url, server_number))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .form(&params)
            .send()?;

        let wrapped: ServerWrapper = self.handle_response(response)?;
        Ok(wrapped.server)
    }

    pub fn get_cancellation_data(&self, server_number: i32) -> Result<Cancellation, AppError> {
        let response = self
            .http_client
            .get(format!(
                "{}/server/{}/cancellation",
                self.config.api_url, server_number
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: CancellationWrapper = self.handle_response(response)?;
        Ok(wrapped.cancellation)
    }

    pub fn cancel_server(
        &self,
        server_number: i32,
        cancellation_date: &str,
    ) -> Result<Cancellation, AppError> {
        let params = [("cancellation_date", cancellation_date)];
        let response = self
            .http_client
            .post(format!(
                "{}/server/{}/cancellation",
                self.config.api_url, server_number
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .form(&params)
            .send()?;

        let wrapped: CancellationWrapper = self.handle_response(response)?;
        Ok(wrapped.cancellation)
    }

    pub fn withdraw_cancellation(&self, server_number: i32) -> Result<(), AppError> {
        self.http_client
            .delete(format!(
                "{}/server/{}/cancellation",
                self.config.api_url, server_number
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        Ok(())
    }

    pub fn get_ssh_keys(&self) -> Result<Vec<SshKey>, AppError> {
        let response = self
            .http_client
            .get(format!("{}/key", self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<SshKeyWrapper> = self.handle_response(response)?;
        let keys = wrapped.into_iter().map(|sk| sk.key).collect();
        Ok(keys)
    }

    pub fn get_ssh_key(&self, fingerprint: &str) -> Result<SshKey, AppError> {
        let response = self
            .http_client
            .get(format!("{}/key/{}", self.config.api_url, fingerprint))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: SshKeyWrapper = self.handle_response(response)?;
        Ok(wrapped.key)
    }

    pub fn add_ssh_key(&self, name: &str, data: &str) -> Result<SshKey, AppError> {
        let params = [("name", name), ("data", data)];
        let response = self
            .http_client
            .post(format!("{}/key", self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .form(&params)
            .send()?;

        let wrapped: SshKeyWrapper = self.handle_response(response)?;
        Ok(wrapped.key)
    }

    pub fn update_ssh_key_name(&self, fingerprint: &str, name: &str) -> Result<SshKey, AppError> {
        let params = [("name", name)];
        let response = self
            .http_client
            .post(format!("{}/key/{}", self.config.api_url, fingerprint))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .form(&params)
            .send()?;

        let wrapped: SshKeyWrapper = self.handle_response(response)?;
        Ok(wrapped.key)
    }

    pub fn delete_ssh_key(&self, fingerprint: &str) -> Result<(), AppError> {
        self.http_client
            .delete(format!("{}/key/{}", self.config.api_url, fingerprint))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        Ok(())
    }

    pub fn get_boot_configuration(&self, server_number: i32) -> Result<Boot, AppError> {
        let response = self
            .http_client
            .get(format!("{}/boot/{}", self.config.api_url, server_number))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: BootWrapper = self.handle_response(response)?;
        Ok(wrapped.boot)
    }

    pub fn get_rescue_boot_configuration(&self, server_number: i32) -> Result<Rescue, AppError> {
        let response = self
            .http_client
            .get(format!(
                "{}/boot/{}/rescue",
                self.config.api_url, server_number
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: RescueWrapped = self.handle_response(response)?;
        Ok(wrapped.rescue)
    }

    pub fn enable_rescue_mode(
        &self,
        server_number: i32,
        os: &str,
        authorized_keys: Option<&[String]>,
    ) -> Result<Rescue, AppError> {
        let mut params = vec![("os", os)];
        if let Some(keys) = authorized_keys {
            for key in keys {
                params.push(("authorized_key[]", key));
            }
        }
        let response = self
            .http_client
            .post(format!(
                "{}/boot/{}/rescue",
                self.config.api_url, server_number
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .form(&params)
            .send()?;

        let wrapped: RescueWrapped = self.handle_response(response)?;
        Ok(wrapped.rescue)
    }

    pub fn disable_rescue_mode(&self, server_number: i32) -> Result<Rescue, AppError> {
        let response = self
            .http_client
            .delete(format!(
                "{}/boot/{}/rescue",
                self.config.api_url, server_number
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: RescueWrapped = self.handle_response(response)?;
        Ok(wrapped.rescue)
    }

    pub fn get_server_products(
        &self,
    ) -> Result<Vec<OrderServerProduct>, AppError> {
        let response = self
            .http_client
            .get(format!("{}/order/server/product", &self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<OrderServerProductWrapper> = self.handle_response(response)?;
        let products = wrapped.into_iter().map(|sop| sop.product).collect();
        Ok(products)
    }

    pub fn get_server_product_by_id(
        &self,
        product_id: &str,
    ) -> Result<OrderServerProduct, AppError> {
        let response = self
            .http_client
            .get(format!(
                "{}/order/server/product/{}",
                &self.config.api_url, product_id
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: OrderServerProductWrapper = self.handle_response(response)?;
        Ok(wrapped.product)
    }

    pub fn order_server(&self, order: OrderServerBuilder) -> Result<Transaction, AppError> {
        let mut params = json!({
            "product_id": order.product_id,
            "dist": order.dist,
            "location": order.location,
            "authorized_key": order.authorized_keys.unwrap_or_default(),
        });

        if let Some(addons) = order.addons {
            params["addon"] = json!(addons);
        }

        if let Some(test) = order.test {
            if test {
                params["test"] = json!(test);
            }
        }

        let response = self
            .http_client
            .post(format!("{}/order/server/transaction", &self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .json(&params)
            .send()?;

        let wrapped: TransactionWrapper = self.handle_response(response)?;
        Ok(wrapped.transaction)
    }

    pub fn get_transaction_by_id(&self, transaction_id: &str) -> Result<Transaction, AppError> {
        let response = self
            .http_client
            .get(format!(
                "{}/order/server/transaction/{}",
                &self.config.api_url, transaction_id
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: TransactionWrapper = self.handle_response(response)?;
        Ok(wrapped.transaction)
    }

    pub fn get_transactions(&self) -> Result<Vec<Transaction>, AppError> {
        let response = self
            .http_client
            .get(format!("{}/order/server/transaction", &self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<TransactionWrapper> = self.handle_response(response)?;
        let transactions = wrapped.into_iter().map(|t| t.transaction).collect();
        Ok(transactions)
    }

    pub fn get_auction_server_products(&self) -> Result<Vec<AuctionServerProduct>, AppError> {
        let response = self
            .http_client
            .get(format!(
                "{}/order/server_market/product",
                &self.config.api_url
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<AuctionServerProductWrapper> = self.handle_response(response)?;
        let products = wrapped.into_iter().map(|asp| asp.product).collect();
        Ok(products)
    }

    pub fn get_auction_server_product_by_id(&self, product_id: &str) -> Result<AuctionServerProduct, AppError> {
        let response = self
            .http_client
            .get(format!("{}/order/server_market/product/{}", &self.config.api_url, product_id))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: AuctionServerProductWrapper = self.handle_response(response)?;
        Ok(wrapped.product)
    }

    pub fn get_auction_transactions(&self) -> Result<Vec<AuctionTransaction>, AppError> {
        let response = self
            .http_client
            .get(format!("{}/order/server_market/transaction", &self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<AuctionTransactionWrapper> = self.handle_response(response)?;
        let transactions = wrapped.into_iter().map(|t| t.transaction).collect();
        Ok(transactions)
    }

    pub fn get_auction_transaction_by_id(&self, transaction_id: &str) -> Result<AuctionTransaction, AppError> {
        let response = self
            .http_client
            .get(format!("{}/order/server_market/transaction/{}", &self.config.api_url, transaction_id))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: AuctionTransactionWrapper = self.handle_response(response)?;
        Ok(wrapped.transaction)
    }

    pub fn get_server_addon_products(
        &self,
        server_number: i64,
    ) -> Result<Vec<ServerAddonProduct>, AppError> {
        let response = self
            .http_client
            .get(format!(
                "{}/order/server_addon/{}/product",
                &self.config.api_url, server_number
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<ServerAddonProductWrapper> = self.handle_response(response)?;
        let products = wrapped.into_iter().map(|sap| sap.product).collect();
        Ok(products)
    }

    pub fn order_auction_server(
        &self,
        product_id: i64,
        authorized_keys: Vec<String>,
        dist: Option<String>,
        arch: Option<String>,
        lang: Option<String>,
        comment: Option<String>,
        addons: Option<Vec<String>>,
        test: Option<bool>,
    ) -> Result<AuctionTransaction, AppError> {
        let mut params: Vec<(&str, String)> = Vec::new();

        params.push(("product_id", product_id.to_string()));

        for key in &authorized_keys {
            params.push(("authorized_key[]", key.clone()));
        }

        if let Some(dist) = dist {
            params.push(("dist", dist));
        }
        if let Some(arch) = arch {
            params.push(("@deprecated arch", arch));
        }
        if let Some(lang) = lang {
            params.push(("lang", lang));
        }
        if let Some(comment) = comment {
            params.push(("comment", comment));
        }
        if let Some(addons) = addons {
            for addon in addons {
                params.push(("addon[]", addon));
            }
        }
        if let Some(test) = test {
            params.push(("test", test.to_string()));
        }

        let response = self
            .http_client
            .post(format!("{}/order/server_market/transaction", &self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .form(&params)
            .send()?;

        let wrapped: AuctionTransactionWrapper = self.handle_response(response)?;
        Ok(wrapped.transaction)
    }

    pub fn get_server_addon_transactions(&self) -> Result<Vec<ServerAddonTransaction>, AppError> {
        let response = self
            .http_client
            .get(format!("{}/order/server_addon/transaction", &self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: Vec<ServerAddonTransactionWrapper> = self.handle_response(response)?;
        let transactions = wrapped.into_iter().map(|satw| satw.transaction).collect();
        Ok(transactions)
    }

    pub fn get_server_addon_transaction_by_id(
        &self,
        transaction_id: &str,
    ) -> Result<ServerAddonTransaction, AppError> {
        let response = self
            .http_client
            .get(format!(
                "{}/order/server_addon/transaction/{}",
                &self.config.api_url, transaction_id
            ))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .send()?;

        let wrapped: ServerAddonTransactionWrapper = self.handle_response(response)?;
        Ok(wrapped.transaction)
    }

    pub fn order_server_addon(
        &self,
        order: OrderServerAddonBuilder,
    ) -> Result<ServerAddonTransaction, AppError> {
        let mut params = json!({
            "server_number": order.server_number,
            "product_id": order.product_id,
        });

        if let Some(reason) = order.reason {
            params["reason"] = json!(reason);
        }
        if let Some(gateway) = order.gateway {
            params["gateway"] = json!(gateway);
        }
        if let Some(test) = order.test {
            if test {
                params["test"] = json!(test);
            }
        }

        let response = self
            .http_client
            .post(format!("{}/order/server_addon/transaction", &self.config.api_url))
            .basic_auth(&self.config.username, Some(&self.config.password))
            .form(&params)
            .send()?;

        let wrapped: ServerAddonTransactionWrapper = self.handle_response(response)?;
        Ok(wrapped.transaction)
    }
}
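A minimal usage sketch of the blocking client above (not part of the changeset). The crate name `hetznerclient` is assumed from the package path, the server number is a placeholder, and errors are stringified rather than relying on `AppError`'s exact trait impls, which are not visible in this diff.

```rust
use hetznerclient::{api::Client, config::Config};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Reads HETZNER_USERNAME, HETZNER_PASSWORD and optionally HETZNER_API_URL (see config.rs below).
    let config = Config::from_env()?;
    let client = Client::new(config);

    // List every dedicated server on the account.
    for server in client.get_servers().map_err(|e| e.to_string())? {
        println!("{} (#{})", server.server_name, server.server_number);
    }

    // Rename one server by number (placeholder id and name).
    let renamed = client
        .update_server_name(321, "edge-node-01")
        .map_err(|e| e.to_string())?;
    println!("renamed to {}", renamed.server_name);
    Ok(())
}
```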
1894 packages/clients/hetznerclient/src/api/models.rs Normal file
File diff suppressed because it is too large.
25 packages/clients/hetznerclient/src/config.rs Normal file
@@ -0,0 +1,25 @@
use std::env;

#[derive(Clone)]
pub struct Config {
    pub username: String,
    pub password: String,
    pub api_url: String,
}

impl Config {
    pub fn from_env() -> Result<Self, String> {
        let username = env::var("HETZNER_USERNAME")
            .map_err(|_| "HETZNER_USERNAME environment variable not set".to_string())?;
        let password = env::var("HETZNER_PASSWORD")
            .map_err(|_| "HETZNER_PASSWORD environment variable not set".to_string())?;
        let api_url = env::var("HETZNER_API_URL")
            .unwrap_or_else(|_| "https://robot-ws.your-server.de".to_string());

        Ok(Config {
            username,
            password,
            api_url,
        })
    }
}
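A short sketch (not from the diff) of the environment contract `Config::from_env` enforces; the crate name `hetznerclient` is assumed from the package path.

```rust
// Sketch only: requires HETZNER_USERNAME and HETZNER_PASSWORD in the environment;
// HETZNER_API_URL is optional and falls back to https://robot-ws.your-server.de.
use hetznerclient::config::Config;

fn main() {
    match Config::from_env() {
        Ok(cfg) => println!("Robot API endpoint: {}", cfg.api_url),
        Err(msg) => eprintln!("configuration error: {msg}"),
    }
}
```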
3 packages/clients/hetznerclient/src/lib.rs Normal file
@@ -0,0 +1,3 @@
pub mod api;
pub mod config;
pub mod rhai;
63 packages/clients/hetznerclient/src/rhai/boot.rs Normal file
@@ -0,0 +1,63 @@
use crate::api::{
    models::{Boot, Rescue},
    Client,
};
use rhai::{plugin::*, Engine};

pub fn register(engine: &mut Engine) {
    let boot_module = exported_module!(boot_api);
    engine.register_global_module(boot_module.into());
}

#[export_module]
pub mod boot_api {
    use super::*;
    use rhai::EvalAltResult;

    #[rhai_fn(name = "get_boot_configuration", return_raw)]
    pub fn get_boot_configuration(
        client: &mut Client,
        server_number: i64,
    ) -> Result<Boot, Box<EvalAltResult>> {
        client
            .get_boot_configuration(server_number as i32)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "get_rescue_boot_configuration", return_raw)]
    pub fn get_rescue_boot_configuration(
        client: &mut Client,
        server_number: i64,
    ) -> Result<Rescue, Box<EvalAltResult>> {
        client
            .get_rescue_boot_configuration(server_number as i32)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "enable_rescue_mode", return_raw)]
    pub fn enable_rescue_mode(
        client: &mut Client,
        server_number: i64,
        os: &str,
        authorized_keys: rhai::Array,
    ) -> Result<Rescue, Box<EvalAltResult>> {
        let keys: Vec<String> = authorized_keys
            .into_iter()
            .map(|k| k.into_string().unwrap())
            .collect();

        client
            .enable_rescue_mode(server_number as i32, os, Some(&keys))
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "disable_rescue_mode", return_raw)]
    pub fn disable_rescue_mode(
        client: &mut Client,
        server_number: i64,
    ) -> Result<Rescue, Box<EvalAltResult>> {
        client
            .disable_rescue_mode(server_number as i32)
            .map_err(|e| e.to_string().into())
    }
}
54 packages/clients/hetznerclient/src/rhai/mod.rs Normal file
@@ -0,0 +1,54 @@
use rhai::{Engine, EvalAltResult};

use crate::api::models::{
    AuctionServerProduct, AuctionTransaction, AuctionTransactionProduct, AuthorizedKey, Boot,
    Cancellation, Cpanel, HostKey, Linux, OrderAuctionServerBuilder, OrderServerAddonBuilder,
    OrderServerBuilder, OrderServerProduct, Plesk, Rescue, Server, ServerAddonProduct,
    ServerAddonResource, ServerAddonTransaction, SshKey, Transaction, TransactionProduct, Vnc,
    Windows,
};

pub mod boot;
pub mod printing;
pub mod server;
pub mod server_ordering;
pub mod ssh_keys;

// here just register the hetzner module
pub fn register_hetzner_module(engine: &mut Engine) -> Result<(), Box<EvalAltResult>> {
    // TODO: register types
    engine.build_type::<Server>();
    engine.build_type::<SshKey>();
    engine.build_type::<Boot>();
    engine.build_type::<Rescue>();
    engine.build_type::<Linux>();
    engine.build_type::<Vnc>();
    engine.build_type::<Windows>();
    engine.build_type::<Plesk>();
    engine.build_type::<Cpanel>();
    engine.build_type::<Cancellation>();
    engine.build_type::<OrderServerProduct>();
    engine.build_type::<Transaction>();
    engine.build_type::<AuthorizedKey>();
    engine.build_type::<TransactionProduct>();
    engine.build_type::<HostKey>();
    engine.build_type::<AuctionServerProduct>();
    engine.build_type::<AuctionTransaction>();
    engine.build_type::<AuctionTransactionProduct>();
    engine.build_type::<OrderAuctionServerBuilder>();
    engine.build_type::<OrderServerBuilder>();
    engine.build_type::<ServerAddonProduct>();
    engine.build_type::<ServerAddonTransaction>();
    engine.build_type::<ServerAddonResource>();
    engine.build_type::<OrderServerAddonBuilder>();

    server::register(engine);
    ssh_keys::register(engine);
    boot::register(engine);
    server_ordering::register(engine);

    // TODO: push hetzner to scope as value client:
    // scope.push("hetzner", client);

    Ok(())
}
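A hedged end-to-end sketch (not part of the diff) of wiring the registration function above into a Rhai engine. The crate name `hetznerclient` and the `hetzner` scope variable are assumptions; pushing the client into the scope mirrors the TODO left in the module, and `printing::register` is called explicitly because `register_hetzner_module` does not register the pretty-printer itself.

```rust
use hetznerclient::{api::Client, config::Config, rhai::{printing, register_hetzner_module}};
use rhai::{Engine, Scope};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new(Config::from_env()?);

    let mut engine = Engine::new();
    register_hetzner_module(&mut engine).map_err(|e| e.to_string())?;
    printing::register(&mut engine); // enables pretty_print() table output

    // The exported functions take &mut Client as their first parameter, so a Client
    // pushed into the scope can be used with method-call syntax from scripts.
    let mut scope = Scope::new();
    scope.push("hetzner", client);

    engine
        .run_with_scope(
            &mut scope,
            r#"
                let servers = hetzner.get_servers();
                pretty_print(servers);
            "#,
        )
        .map_err(|e| e.to_string())?;
    Ok(())
}
```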
43 packages/clients/hetznerclient/src/rhai/printing/mod.rs Normal file
@@ -0,0 +1,43 @@
use rhai::{Array, Engine};
use crate::{api::models::{OrderServerProduct, AuctionServerProduct, AuctionTransaction, ServerAddonProduct, ServerAddonTransaction, Server, SshKey}};

mod servers_table;
mod ssh_keys_table;
mod server_ordering_table;

// This will be called when we print(...) or pretty_print() an Array (with Dynamic values)
pub fn pretty_print_dispatch(array: Array) {
    if array.is_empty() {
        println!("<empty table>");
        return;
    }

    let first = &array[0];

    if first.is::<Server>() {
        println!("Yeah first is server!");
        servers_table::pretty_print_servers(array);
    } else if first.is::<SshKey>() {
        ssh_keys_table::pretty_print_ssh_keys(array);
    } else if first.is::<OrderServerProduct>() {
        server_ordering_table::pretty_print_server_products(array);
    } else if first.is::<AuctionServerProduct>() {
        server_ordering_table::pretty_print_auction_server_products(array);
    } else if first.is::<AuctionTransaction>() {
        server_ordering_table::pretty_print_auction_transactions(array);
    } else if first.is::<ServerAddonProduct>() {
        server_ordering_table::pretty_print_server_addon_products(array);
    } else if first.is::<ServerAddonTransaction>() {
        server_ordering_table::pretty_print_server_addon_transactions(array);
    } else {
        // Generic fallback for other types
        for item in array {
            println!("{}", item.to_string());
        }
    }
}

pub fn register(engine: &mut Engine) {
    engine.register_fn("pretty_print", pretty_print_dispatch);
}
@@ -0,0 +1,293 @@
use prettytable::{row, Table};
use crate::api::models::{OrderServerProduct, ServerAddonProduct, ServerAddonTransaction, ServerAddonResource};

pub fn pretty_print_server_products(products: rhai::Array) {
    let mut table = Table::new();
    table.add_row(row![b =>
        "ID",
        "Name",
        "Description",
        "Traffic",
        "Location",
        "Price (Net)",
        "Price (Gross)",
    ]);

    for product_dyn in products {
        if let Some(product) = product_dyn.try_cast::<OrderServerProduct>() {
            let mut price_net = "N/A".to_string();
            let mut price_gross = "N/A".to_string();

            if let Some(first_price) = product.prices.first() {
                price_net = first_price.price.net.clone();
                price_gross = first_price.price.gross.clone();
            }

            table.add_row(row![
                product.id,
                product.name,
                product.description.join(", "),
                product.traffic,
                product.location.join(", "),
                price_net,
                price_gross,
            ]);
        }
    }
    table.printstd();
}

pub fn pretty_print_auction_server_products(products: rhai::Array) {
    let mut table = Table::new();
    table.add_row(row![b =>
        "ID",
        "Name",
        "Description",
        "Traffic",
        "Distributions",
        "Architectures",
        "Languages",
        "CPU",
        "CPU Benchmark",
        "Memory Size (GB)",
        "HDD Size (GB)",
        "HDD Text",
        "HDD Count",
        "Datacenter",
        "Network Speed",
        "Price (Net)",
        "Price (Hourly Net)",
        "Price (Setup Net)",
        "Price (VAT)",
        "Price (Hourly VAT)",
        "Price (Setup VAT)",
        "Fixed Price",
        "Next Reduce (seconds)",
        "Next Reduce Date",
        "Orderable Addons",
    ]);

    for product_dyn in products {
        if let Some(product) = product_dyn.try_cast::<crate::api::models::AuctionServerProduct>() {
            let mut addons_table = Table::new();
            addons_table.add_row(row![b => "ID", "Name", "Min", "Max", "Prices"]);
            for addon in &product.orderable_addons {
                let mut addon_prices_table = Table::new();
                addon_prices_table.add_row(row![b => "Location", "Net", "Gross", "Hourly Net", "Hourly Gross", "Setup Net", "Setup Gross"]);
                for price in &addon.prices {
                    addon_prices_table.add_row(row![
                        price.location,
                        price.price.net,
                        price.price.gross,
                        price.price.hourly_net,
                        price.price.hourly_gross,
                        price.price_setup.net,
                        price.price_setup.gross
                    ]);
                }
                addons_table.add_row(row![
                    addon.id,
                    addon.name,
                    addon.min,
                    addon.max,
                    addon_prices_table
                ]);
            }

            table.add_row(row![
                product.id,
                product.name,
                product.description.join(", "),
                product.traffic,
                product.dist.join(", "),
                product.arch.as_deref().unwrap_or_default().join(", "),
                product.lang.join(", "),
                product.cpu,
                product.cpu_benchmark,
                product.memory_size,
                product.hdd_size,
                product.hdd_text,
                product.hdd_count,
                product.datacenter,
                product.network_speed,
                product.price,
                product.price_hourly.as_deref().unwrap_or("N/A"),
                product.price_setup,
                product.price_with_vat,
                product.price_hourly_with_vat.as_deref().unwrap_or("N/A"),
                product.price_setup_with_vat,
                product.fixed_price,
                product.next_reduce,
                product.next_reduce_date,
                addons_table,
            ]);
        }
    }
    table.printstd();
}

pub fn pretty_print_server_addon_products(products: rhai::Array) {
    let mut table = Table::new();
    table.add_row(row![b =>
        "ID",
        "Name",
        "Type",
        "Location",
        "Price (Net)",
        "Price (Gross)",
        "Hourly Net",
        "Hourly Gross",
        "Setup Net",
        "Setup Gross",
    ]);

    for product_dyn in products {
        if let Some(product) = product_dyn.try_cast::<ServerAddonProduct>() {
            table.add_row(row![
                product.id,
                product.name,
                product.product_type,
                product.price.location,
                product.price.price.net,
                product.price.price.gross,
                product.price.price.hourly_net,
                product.price.price.hourly_gross,
                product.price.price_setup.net,
                product.price.price_setup.gross,
            ]);
        }
    }
    table.printstd();
}

pub fn pretty_print_auction_transactions(transactions: rhai::Array) {
    let mut table = Table::new();
    table.add_row(row![b =>
        "ID",
        "Date",
        "Status",
        "Server Number",
        "Server IP",
        "Comment",
        "Product ID",
        "Product Name",
        "Product Traffic",
        "Product Distributions",
        "Product Architectures",
        "Product Languages",
        "Product CPU",
        "Product CPU Benchmark",
        "Product Memory Size (GB)",
        "Product HDD Size (GB)",
        "Product HDD Text",
        "Product HDD Count",
        "Product Datacenter",
        "Product Network Speed",
        "Product Fixed Price",
        "Product Next Reduce (seconds)",
        "Product Next Reduce Date",
        "Addons",
    ]);

    for transaction_dyn in transactions {
        if let Some(transaction) = transaction_dyn.try_cast::<crate::api::models::AuctionTransaction>() {
            let _authorized_keys_table = {
                let mut table = Table::new();
                table.add_row(row![b => "Name", "Fingerprint", "Type", "Size"]);
                for key in &transaction.authorized_key {
                    table.add_row(row![
                        key.key.name.as_deref().unwrap_or("N/A"),
                        key.key.fingerprint.as_deref().unwrap_or("N/A"),
                        key.key.key_type.as_deref().unwrap_or("N/A"),
                        key.key.size.map_or("N/A".to_string(), |s| s.to_string())
                    ]);
                }
                table
            };

            let _host_keys_table = {
                let mut table = Table::new();
                table.add_row(row![b => "Fingerprint", "Type", "Size"]);
                for key in &transaction.host_key {
                    table.add_row(row![
                        key.key.fingerprint.as_deref().unwrap_or("N/A"),
                        key.key.key_type.as_deref().unwrap_or("N/A"),
                        key.key.size.map_or("N/A".to_string(), |s| s.to_string())
                    ]);
                }
                table
            };

            table.add_row(row![
                transaction.id,
                transaction.date,
                transaction.status,
                transaction.server_number.map_or("N/A".to_string(), |id| id.to_string()),
                transaction.server_ip.as_deref().unwrap_or("N/A"),
                transaction.comment.as_deref().unwrap_or("N/A"),
                transaction.product.id,
                transaction.product.name,
                transaction.product.traffic,
                transaction.product.dist,
                transaction.product.arch.as_deref().unwrap_or("N/A"),
                transaction.product.lang,
                transaction.product.cpu,
                transaction.product.cpu_benchmark,
                transaction.product.memory_size,
                transaction.product.hdd_size,
                transaction.product.hdd_text,
                transaction.product.hdd_count,
                transaction.product.datacenter,
                transaction.product.network_speed,
                transaction.product.fixed_price.unwrap_or_default().to_string(),
                transaction
                    .product
                    .next_reduce
                    .map_or("N/A".to_string(), |r| r.to_string()),
                transaction
                    .product
                    .next_reduce_date
                    .as_deref()
                    .unwrap_or("N/A"),
                transaction.addons.join(", "),
            ]);
        }
    }
    table.printstd();
}

pub fn pretty_print_server_addon_transactions(transactions: rhai::Array) {
    let mut table = Table::new();
    table.add_row(row![b =>
        "ID",
        "Date",
        "Status",
        "Server Number",
        "Product ID",
        "Product Name",
        "Product Price",
        "Resources",
    ]);

    for transaction_dyn in transactions {
        if let Some(transaction) = transaction_dyn.try_cast::<ServerAddonTransaction>() {
            let mut resources_table = Table::new();
            resources_table.add_row(row![b => "Type", "ID"]);
            for resource in &transaction.resources {
                resources_table.add_row(row![resource.resource_type, resource.id]);
            }

            table.add_row(row![
                transaction.id,
                transaction.date,
                transaction.status,
                transaction.server_number,
                transaction.product.id,
                transaction.product.name,
                transaction.product.price.to_string(),
                resources_table,
            ]);
        }
    }
    table.printstd();
}
@@ -0,0 +1,30 @@
use prettytable::{row, Table};
use rhai::Array;

use super::Server;

pub fn pretty_print_servers(servers: Array) {
    let mut table = Table::new();
    table.add_row(row![b =>
        "Number",
        "Name",
        "IP",
        "Product",
        "DC",
        "Status"
    ]);

    for server_dyn in servers {
        if let Some(server) = server_dyn.try_cast::<Server>() {
            table.add_row(row![
                server.server_number.to_string(),
                server.server_name,
                server.server_ip.unwrap_or("N/A".to_string()),
                server.product,
                server.dc,
                server.status
            ]);
        }
    }
    table.printstd();
}
@@ -0,0 +1,26 @@
use prettytable::{row, Table};
use super::SshKey;

pub fn pretty_print_ssh_keys(keys: rhai::Array) {
    let mut table = Table::new();
    table.add_row(row![b =>
        "Name",
        "Fingerprint",
        "Type",
        "Size",
        "Created At"
    ]);

    for key_dyn in keys {
        if let Some(key) = key_dyn.try_cast::<SshKey>() {
            table.add_row(row![
                key.name,
                key.fingerprint,
                key.key_type,
                key.size.to_string(),
                key.created_at
            ]);
        }
    }
    table.printstd();
}
76 packages/clients/hetznerclient/src/rhai/server.rs Normal file
@@ -0,0 +1,76 @@
use crate::api::{Client, models::Server};
use rhai::{Array, Dynamic, plugin::*};

pub fn register(engine: &mut Engine) {
    let server_module = exported_module!(server_api);
    engine.register_global_module(server_module.into());
}

#[export_module]
pub mod server_api {
    use crate::api::models::Cancellation;

    use super::*;
    use rhai::EvalAltResult;

    #[rhai_fn(name = "get_server", return_raw)]
    pub fn get_server(
        client: &mut Client,
        server_number: i64,
    ) -> Result<Server, Box<EvalAltResult>> {
        client
            .get_server(server_number as i32)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "get_servers", return_raw)]
    pub fn get_servers(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
        let servers = client
            .get_servers()
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        println!("number of SERVERS we got: {:#?}", servers.len());
        Ok(servers.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "update_server_name", return_raw)]
    pub fn update_server_name(
        client: &mut Client,
        server_number: i64,
        name: &str,
    ) -> Result<Server, Box<EvalAltResult>> {
        client
            .update_server_name(server_number as i32, name)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "get_cancellation_data", return_raw)]
    pub fn get_cancellation_data(
        client: &mut Client,
        server_number: i64,
    ) -> Result<Cancellation, Box<EvalAltResult>> {
        client
            .get_cancellation_data(server_number as i32)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "cancel_server", return_raw)]
    pub fn cancel_server(
        client: &mut Client,
        server_number: i64,
        cancellation_date: &str,
    ) -> Result<Cancellation, Box<EvalAltResult>> {
        client
            .cancel_server(server_number as i32, cancellation_date)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "withdraw_cancellation", return_raw)]
    pub fn withdraw_cancellation(
        client: &mut Client,
        server_number: i64,
    ) -> Result<(), Box<EvalAltResult>> {
        client
            .withdraw_cancellation(server_number as i32)
            .map_err(|e| e.to_string().into())
    }
}
170 packages/clients/hetznerclient/src/rhai/server_ordering.rs Normal file
@@ -0,0 +1,170 @@
use crate::api::{
    Client,
    models::{
        AuctionServerProduct, AuctionTransaction, OrderAuctionServerBuilder, OrderServerBuilder,
        OrderServerProduct, ServerAddonProduct, ServerAddonTransaction, Transaction,
    },
};
use rhai::{Array, Dynamic, plugin::*};

pub fn register(engine: &mut Engine) {
    let server_order_module = exported_module!(server_order_api);
    engine.register_global_module(server_order_module.into());
}

#[export_module]
pub mod server_order_api {
    use crate::api::models::OrderServerAddonBuilder;

    #[rhai_fn(name = "get_server_products", return_raw)]
    pub fn get_server_ordering_product_overview(
        client: &mut Client,
    ) -> Result<Array, Box<EvalAltResult>> {
        let overview_servers = client
            .get_server_products()
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(overview_servers.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "get_server_product_by_id", return_raw)]
    pub fn get_server_ordering_product_by_id(
        client: &mut Client,
        product_id: &str,
    ) -> Result<OrderServerProduct, Box<EvalAltResult>> {
        let product = client
            .get_server_product_by_id(product_id)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(product)
    }

    #[rhai_fn(name = "order_server", return_raw)]
    pub fn order_server(
        client: &mut Client,
        order: OrderServerBuilder,
    ) -> Result<Transaction, Box<EvalAltResult>> {
        let transaction = client
            .order_server(order)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transaction)
    }

    #[rhai_fn(name = "get_transaction_by_id", return_raw)]
    pub fn get_transaction_by_id(
        client: &mut Client,
        transaction_id: &str,
    ) -> Result<Transaction, Box<EvalAltResult>> {
        let transaction = client
            .get_transaction_by_id(transaction_id)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transaction)
    }

    #[rhai_fn(name = "get_transactions", return_raw)]
    pub fn get_transactions(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
        let transactions = client
            .get_transactions()
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transactions.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "get_auction_server_products", return_raw)]
    pub fn get_auction_server_products(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
        let products = client
            .get_auction_server_products()
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(products.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "get_auction_server_product_by_id", return_raw)]
    pub fn get_auction_server_product_by_id(
        client: &mut Client,
        product_id: &str,
    ) -> Result<AuctionServerProduct, Box<EvalAltResult>> {
        let product = client
            .get_auction_server_product_by_id(product_id)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(product)
    }

    #[rhai_fn(name = "get_auction_transactions", return_raw)]
    pub fn get_auction_transactions(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
        let transactions = client
            .get_auction_transactions()
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transactions.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "get_auction_transaction_by_id", return_raw)]
    pub fn get_auction_transaction_by_id(
        client: &mut Client,
        transaction_id: &str,
    ) -> Result<AuctionTransaction, Box<EvalAltResult>> {
        let transaction = client
            .get_auction_transaction_by_id(transaction_id)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transaction)
    }

    #[rhai_fn(name = "get_server_addon_products", return_raw)]
    pub fn get_server_addon_products(
        client: &mut Client,
        server_number: i64,
    ) -> Result<Array, Box<EvalAltResult>> {
        let products = client
            .get_server_addon_products(server_number)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(products.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "get_server_addon_transactions", return_raw)]
    pub fn get_server_addon_transactions(
        client: &mut Client,
    ) -> Result<Array, Box<EvalAltResult>> {
        let transactions = client
            .get_server_addon_transactions()
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transactions.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "get_server_addon_transaction_by_id", return_raw)]
    pub fn get_server_addon_transaction_by_id(
        client: &mut Client,
        transaction_id: &str,
    ) -> Result<ServerAddonTransaction, Box<EvalAltResult>> {
        let transaction = client
            .get_server_addon_transaction_by_id(transaction_id)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transaction)
    }

    #[rhai_fn(name = "order_auction_server", return_raw)]
    pub fn order_auction_server(
        client: &mut Client,
        order: OrderAuctionServerBuilder,
    ) -> Result<AuctionTransaction, Box<EvalAltResult>> {
        println!("Builder struct being used to order server: {:#?}", order);
        let transaction = client.order_auction_server(
            order.product_id,
            order.authorized_keys.unwrap_or(vec![]),
            order.dist,
            None,
            order.lang,
            order.comment,
            order.addon,
            order.test,
        ).map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transaction)
    }

    #[rhai_fn(name = "order_server_addon", return_raw)]
    pub fn order_server_addon(
        client: &mut Client,
        order: OrderServerAddonBuilder,
    ) -> Result<ServerAddonTransaction, Box<EvalAltResult>> {
        println!("Builder struct being used to order server addon: {:#?}", order);
        let transaction = client
            .order_server_addon(order)
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(transaction)
    }
}
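For orientation, a sketch (not part of the changeset) of the positional `Client::order_auction_server` call that the `order_auction_server` wrapper above forwards to, as defined earlier in `api/mod.rs`. The crate name is assumed, the product id, key fingerprint, and option values are placeholders, and the `test` flag is simply passed through to the Robot endpoint exactly as the code above does.

```rust
use hetznerclient::{api::Client, config::Config};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new(Config::from_env()?);

    // Positional arguments mirror the signature in api/mod.rs:
    // (product_id, authorized_keys, dist, arch, lang, comment, addons, test)
    let tx = client
        .order_auction_server(
            12345,                            // placeholder market product id
            vec!["ab:cd:ef".to_string()],     // placeholder key fingerprint
            None,                             // dist
            None,                             // arch (marked deprecated in the code above)
            None,                             // lang
            None,                             // comment
            None,                             // addons
            Some(true),                       // test flag, passed through as-is
        )
        .map_err(|e| e.to_string())?;

    println!("transaction {} is {}", tx.id, tx.status);
    Ok(())
}
```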
89 packages/clients/hetznerclient/src/rhai/ssh_keys.rs Normal file
@@ -0,0 +1,89 @@
use crate::api::{Client, models::SshKey};
use prettytable::{Table, row};
use rhai::{Array, Dynamic, Engine, plugin::*};

pub fn register(engine: &mut Engine) {
    let ssh_keys_module = exported_module!(ssh_keys_api);
    engine.register_global_module(ssh_keys_module.into());
}

#[export_module]
pub mod ssh_keys_api {
    use super::*;
    use rhai::EvalAltResult;

    #[rhai_fn(name = "get_ssh_keys", return_raw)]
    pub fn get_ssh_keys(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
        let ssh_keys = client
            .get_ssh_keys()
            .map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
        Ok(ssh_keys.into_iter().map(Dynamic::from).collect())
    }

    #[rhai_fn(name = "get_ssh_key", return_raw)]
    pub fn get_ssh_key(
        client: &mut Client,
        fingerprint: &str,
    ) -> Result<SshKey, Box<EvalAltResult>> {
        client
            .get_ssh_key(fingerprint)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "add_ssh_key", return_raw)]
    pub fn add_ssh_key(
        client: &mut Client,
        name: &str,
        data: &str,
    ) -> Result<SshKey, Box<EvalAltResult>> {
        client
            .add_ssh_key(name, data)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "update_ssh_key_name", return_raw)]
    pub fn update_ssh_key_name(
        client: &mut Client,
        fingerprint: &str,
        name: &str,
    ) -> Result<SshKey, Box<EvalAltResult>> {
        client
            .update_ssh_key_name(fingerprint, name)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "delete_ssh_key", return_raw)]
    pub fn delete_ssh_key(
        client: &mut Client,
        fingerprint: &str,
    ) -> Result<(), Box<EvalAltResult>> {
        client
            .delete_ssh_key(fingerprint)
            .map_err(|e| e.to_string().into())
    }

    #[rhai_fn(name = "pretty_print")]
    pub fn pretty_print_ssh_keys(keys: Array) {
        let mut table = Table::new();
        table.add_row(row![b =>
            "Name",
            "Fingerprint",
            "Type",
            "Size",
            "Created At"
        ]);

        for key_dyn in keys {
            if let Some(key) = key_dyn.try_cast::<SshKey>() {
                table.add_row(row![
                    key.name,
                    key.fingerprint,
                    key.key_type,
                    key.size.to_string(),
                    key.created_at
                ]);
            }
        }
        table.printstd();
    }
}
@@ -9,22 +9,22 @@ license = "Apache-2.0"
 
 [dependencies]
 # HTTP client for async requests
-reqwest = { version = "0.12.15", features = ["json"] }
+reqwest = { workspace = true }
 # JSON handling
-serde_json = "1.0"
+serde_json = { workspace = true }
 # Base64 encoding/decoding for message payloads
-base64 = "0.22.1"
+base64 = { workspace = true }
 # Async runtime
-tokio = { version = "1.45.0", features = ["full"] }
+tokio = { workspace = true }
 # Rhai scripting support
-rhai = { version = "1.12.0", features = ["sync"] }
+rhai = { workspace = true }
 # Logging
-log = "0.4"
+log = { workspace = true }
 # URL encoding for API parameters
-urlencoding = "2.1.3"
+urlencoding = { workspace = true }
 
 [dev-dependencies]
 # For async testing
-tokio-test = "0.4.4"
+tokio-test = { workspace = true }
 # For temporary files in tests
-tempfile = "3.5"
+tempfile = { workspace = true }
@@ -11,24 +11,24 @@ categories = ["database", "api-bindings"]
 
 [dependencies]
 # PostgreSQL client dependencies
-postgres = "0.19.4"
+postgres = { workspace = true }
-postgres-types = "0.2.5"
+postgres-types = { workspace = true }
-tokio-postgres = "0.7.8"
+tokio-postgres = { workspace = true }
 
 # Connection pooling
-r2d2 = "0.8.10"
+r2d2 = { workspace = true }
-r2d2_postgres = "0.18.2"
+r2d2_postgres = { workspace = true }
 
 # Utility dependencies
-lazy_static = "1.4.0"
+lazy_static = { workspace = true }
-thiserror = "2.0.12"
+thiserror = { workspace = true }
 
 # Rhai scripting support
-rhai = { version = "1.12.0", features = ["sync"] }
+rhai = { workspace = true }
 
 # SAL dependencies
-sal-virt = { path = "../virt" }
+sal-virt = { workspace = true }
 
 [dev-dependencies]
-tempfile = "3.5"
+tempfile = { workspace = true }
-tokio-test = "0.4.4"
+tokio-test = { workspace = true }
@@ -11,11 +11,11 @@ categories = ["database", "caching", "api-bindings"]
 
 [dependencies]
 # Core Redis functionality
-redis = "0.31.0"
+redis = { workspace = true }
-lazy_static = "1.4.0"
+lazy_static = { workspace = true }
 
 # Rhai integration (optional)
-rhai = { version = "1.12.0", features = ["sync"], optional = true }
+rhai = { workspace = true, optional = true }
 
 [features]
 default = ["rhai"]
@@ -23,4 +23,4 @@ rhai = ["dep:rhai"]
 
 [dev-dependencies]
 # For testing
-tempfile = "3.5"
+tempfile = { workspace = true }
@@ -9,20 +9,20 @@ license = "Apache-2.0"
 
 [dependencies]
 # Core dependencies
-anyhow = "1.0.98"
+anyhow = { workspace = true }
-futures = "0.3.30"
+futures = { workspace = true }
-lazy_static = "1.4.0"
+lazy_static = { workspace = true }
-log = "0.4"
+log = { workspace = true }
-serde_json = "1.0"
+serde_json = { workspace = true }
-thiserror = "2.0.12"
+thiserror = { workspace = true }
-tokio = { version = "1.45.0", features = ["full"] }
+tokio = { workspace = true }
 
 # Zinit client
-zinit-client = "0.4.0"
+zinit-client = { workspace = true }
 
 # Rhai integration
-rhai = { version = "1.12.0", features = ["sync"] }
+rhai = { workspace = true }
 
 [dev-dependencies]
-tokio-test = "0.4.4"
+tokio-test = { workspace = true }
-tempfile = "3.5"
+tempfile = { workspace = true }
825
packages/core/logger/instructions.md
Normal file
825
packages/core/logger/instructions.md
Normal file
@@ -0,0 +1,825 @@
|
|||||||
|
<file_map>
|
||||||
|
/Users/despiegk/code/github/freeflowuniverse/herolib
|
||||||
|
├── aiprompts
|
||||||
|
│ └── herolib_core
|
||||||
|
│ ├── core_ourtime.md
|
||||||
|
│ ├── core_paths.md
|
||||||
|
│ └── core_text.md
|
||||||
|
└── lib
|
||||||
|
└── core
|
||||||
|
└── logger
|
||||||
|
├── factory.v
|
||||||
|
├── log_test.v
|
||||||
|
├── log.v
|
||||||
|
├── model.v
|
||||||
|
├── readme.md
|
||||||
|
└── search.v
|
||||||
|
|
||||||
|
</file_map>
|
||||||
|
|
||||||
|
<file_contents>
|
||||||
|
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/factory.v
|
||||||
|
```v
|
||||||
|
module logger
|
||||||
|
|
||||||
|
import freeflowuniverse.herolib.core.pathlib
|
||||||
|
|
||||||
|
pub fn new(path string) !Logger {
|
||||||
|
mut p := pathlib.get_dir(path: path, create: true)!
|
||||||
|
return Logger{
|
||||||
|
path: p
|
||||||
|
lastlog_time: 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/log_test.v
```v
module logger

import os
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.pathlib

fn testsuite_begin() {
    if os.exists('/tmp/testlogs') {
        os.rmdir_all('/tmp/testlogs')!
    }
}

fn test_logger() {
    mut logger := new('/tmp/testlogs')!

    // Test stdout logging
    logger.log(LogItemArgs{
        cat: 'test-app'
        log: 'This is a test message\nWith a second line\nAnd a third line'
        logtype: .stdout
        timestamp: ourtime.new('2022-12-05 20:14:35')!
    })!

    // Test error logging
    logger.log(LogItemArgs{
        cat: 'error-test'
        log: 'This is an error\nWith details'
        logtype: .error
        timestamp: ourtime.new('2022-12-05 20:14:35')!
    })!

    logger.log(LogItemArgs{
        cat: 'test-app'
        log: 'This is a test message\nWith a second line\nAnd a third line'
        logtype: .stdout
        timestamp: ourtime.new('2022-12-05 20:14:36')!
    })!

    logger.log(LogItemArgs{
        cat: 'error-test'
        log: '
This is an error

With details
'
        logtype: .error
        timestamp: ourtime.new('2022-12-05 20:14:36')!
    })!

    logger.log(LogItemArgs{
        cat: 'error-test'
        log: '
aaa

bbb
'
        logtype: .error
        timestamp: ourtime.new('2022-12-05 22:14:36')!
    })!

    logger.log(LogItemArgs{
        cat: 'error-test'
        log: '
aaa2

bbb2
'
        logtype: .error
        timestamp: ourtime.new('2022-12-05 22:14:36')!
    })!

    // Verify log directory exists
    assert os.exists('/tmp/testlogs'), 'Log directory should exist'

    // Get log file
    files := os.ls('/tmp/testlogs')!
    assert files.len == 2

    mut file := pathlib.get_file(
        path: '/tmp/testlogs/${files[0]}'
        create: false
    )!

    content := file.read()!.trim_space()

    items_stdout := logger.search(
        timestamp_from: ourtime.new('2022-11-1 20:14:35')!
        timestamp_to: ourtime.new('2025-11-1 20:14:35')!
        logtype: .stdout
    )!
    assert items_stdout.len == 2

    items_error := logger.search(
        timestamp_from: ourtime.new('2022-11-1 20:14:35')!
        timestamp_to: ourtime.new('2025-11-1 20:14:35')!
        logtype: .error
    )!
    assert items_error.len == 4
}

fn testsuite_end() {
    // if os.exists('/tmp/testlogs') {
    //     os.rmdir_all('/tmp/testlogs')!
    // }
}
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/log.v
```v
module logger

import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime

@[params]
pub struct LogItemArgs {
pub mut:
    timestamp ?ourtime.OurTime
    cat       string
    log       string
    logtype   LogType
}

pub fn (mut l Logger) log(args_ LogItemArgs) ! {
    mut args := args_

    t := args.timestamp or {
        t2 := ourtime.now()
        t2
    }

    // Format category (max 10 chars, ascii only)
    args.cat = texttools.name_fix(args.cat)
    if args.cat.len > 10 {
        return error('category cannot be longer than 10 chars')
    }
    args.cat = texttools.expand(args.cat, 10, ' ')

    args.log = texttools.dedent(args.log).trim_space()

    mut logfile_path := '${l.path.path}/${t.dayhour()}.log'

    // Create log file if it doesn't exist
    if !os.exists(logfile_path) {
        os.write_file(logfile_path, '')!
        l.lastlog_time = 0 // make sure we put time again
    }

    mut f := os.open_append(logfile_path)!

    mut content := ''

    // Add timestamp if we're in a new second
    if t.unix() > l.lastlog_time {
        content += '\n${t.time().format_ss()}\n'
        l.lastlog_time = t.unix()
    }

    // Format log lines
    error_prefix := if args.logtype == .error { 'E' } else { ' ' }
    lines := args.log.split('\n')

    for i, line in lines {
        if i == 0 {
            content += '${error_prefix} ${args.cat} - ${line}\n'
        } else {
            content += '${error_prefix}              ${line}\n'
        }
    }
    f.writeln(content.trim_space_right())!
    f.close()
}
```
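
For the planned Rust port, a minimal sketch of the same formatting rules (per-second timestamp header, 10-character category padding, `E`/space prefix, aligned continuation lines) could look like the snippet below. The names `LogType` and `format_entry` and the normalization rules are placeholders for this sketch, not herolib or SAL APIs.

```rust
// Hypothetical sketch of log.v's line formatting for the Rust port.
#[derive(Clone, Copy, PartialEq)]
enum LogType {
    Stdout,
    Error,
}

/// Format one entry: optional "HH:MM:SS" line when the second changes, then
/// "<prefix> <cat padded to 10> - <first line>" plus aligned continuation lines.
fn format_entry(
    cat: &str,
    msg: &str,
    logtype: LogType,
    time_hhmmss: &str,
    new_second: bool,
) -> Result<String, String> {
    // Rough stand-in for texttools.name_fix: lowercase and '-' -> '_'.
    let cat = cat.trim().to_lowercase().replace('-', "_");
    if cat.len() > 10 {
        return Err("category cannot be longer than 10 chars".into());
    }
    let prefix = if logtype == LogType::Error { 'E' } else { ' ' };
    let mut out = String::new();
    if new_second {
        out.push_str(&format!("\n{}\n", time_hhmmss));
    }
    for (i, line) in msg.trim().lines().enumerate() {
        if i == 0 {
            out.push_str(&format!("{} {:<10} - {}\n", prefix, cat, line));
        } else {
            // 14 spaces after the prefix keep continuation text in the message column.
            out.push_str(&format!("{}{:14}{}\n", prefix, "", line));
        }
    }
    Ok(out)
}

fn main() {
    let s = format_entry("test-app", "line one\nline two", LogType::Stdout, "20:14:35", true).unwrap();
    print!("{}", s);
}
```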
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/model.v
```v
module logger

import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.pathlib

@[heap]
pub struct Logger {
pub mut:
    path         pathlib.Path
    lastlog_time i64 // to see in log format, every second we put a time down, we need to know if we are in a new second (logs can come in much faster)
}

pub struct LogItem {
pub mut:
    timestamp ourtime.OurTime
    cat       string
    log       string
    logtype   LogType
}

pub enum LogType {
    stdout
    error
}
```
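
The three types in model.v map naturally onto Rust. The sketch below shows one possible mapping for the new `packages/core/logger` crate; all names are placeholders chosen for illustration, and the timestamp is simplified to Unix seconds rather than a port of `OurTime`.

```rust
// Hypothetical Rust equivalents of model.v, not an existing crate API.
use std::path::PathBuf;

/// Kind of log entry, mirroring V's `LogType` (stdout | error).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LogType {
    Stdout,
    Error,
}

/// One parsed log entry, mirroring V's `LogItem`.
#[derive(Debug, Clone)]
pub struct LogItem {
    /// Unix timestamp of the entry (the V code stores an OurTime value here).
    pub timestamp: i64,
    pub cat: String,
    pub log: String,
    pub logtype: LogType,
}

/// The logger itself, mirroring V's `Logger`: a target directory plus the
/// last second for which a "HH:MM:SS" header line was written.
#[derive(Debug)]
pub struct Logger {
    pub path: PathBuf,
    pub lastlog_time: i64,
}

fn main() {
    let l = Logger { path: PathBuf::from("/tmp/testlogs"), lastlog_time: 0 };
    println!("{:?}", l);
}
```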
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/readme.md
```md
# Logger Module

A simple logging system that provides structured logging with search capabilities.

Logs are stored in hourly files with a consistent format that makes them both human-readable and machine-parseable.

## Features

- Structured logging with categories and error types
- Automatic timestamp management
- Multi-line message support
- Search functionality with filtering options
- Human-readable log format

## Usage

```v
import freeflowuniverse.herolib.core.logger
import freeflowuniverse.herolib.data.ourtime

// Create a new logger
mut l := logger.new(path: '/var/logs')!

// Log a message
l.log(
    cat: 'system',
    log: 'System started successfully',
    logtype: .stdout
)!

// Log an error
l.log(
    cat: 'system',
    log: 'Failed to connect\nRetrying in 5 seconds...',
    logtype: .error
)!

// Search logs
results := l.search(
    timestamp_from: ourtime.now().warp("-24h"), // Last 24 hours
    cat: 'system', // Filter by category
    log: 'failed', // Search in message content
    logtype: .error, // Only error messages
    maxitems: 100 // Limit results
)!
```

## Log Format

Each log file is named using the format `YYYY-MM-DD-HH.log` and contains entries in the following format:

```
21:23:42
  system     - This is a normal log message
  system     - This is a multi-line message
               second line with proper indentation
               third line maintaining alignment
E error_cat  - This is an error message
E              second line of error
E              third line of error
```

### Format Rules

- Time stamps (HH:MM:SS) are written once per second when the log time changes
- Categories are:
  - Limited to 10 characters maximum
  - Padded with spaces to exactly 10 characters
  - Any `-` in category names are converted to `_`
- Each line starts with either:
  - ` ` (space) for normal logs (LogType.stdout)
  - `E` for error logs (LogType.error)
- Multi-line messages maintain consistent indentation (14 spaces after the prefix)
```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/logger/search.v
```v
module logger

import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime

@[params]
pub struct SearchArgs {
pub mut:
    timestamp_from ?ourtime.OurTime
    timestamp_to   ?ourtime.OurTime
    cat            string // can be empty
    log            string // any content in here will be looked for
    logtype        LogType
    maxitems       int = 10000
}

pub fn (mut l Logger) search(args_ SearchArgs) ![]LogItem {
    mut args := args_

    // Format category (max 10 chars, ascii only)
    args.cat = texttools.name_fix(args.cat)
    if args.cat.len > 10 {
        return error('category cannot be longer than 10 chars')
    }

    mut timestamp_from := args.timestamp_from or { ourtime.OurTime{} }
    mut timestamp_to := args.timestamp_to or { ourtime.OurTime{} }

    // Get time range
    from_time := timestamp_from.unix()
    to_time := timestamp_to.unix()
    if from_time > to_time {
        return error('from_time cannot be after to_time: ${from_time} < ${to_time}')
    }

    mut result := []LogItem{}

    // Find log files in time range
    mut files := os.ls(l.path.path)!
    files.sort()

    for file in files {
        if !file.ends_with('.log') {
            continue
        }

        // Parse dayhour from filename
        dayhour := file[..file.len - 4] // remove .log
        file_time := ourtime.new(dayhour)!
        mut current_time := ourtime.OurTime{}
        mut current_item := LogItem{}
        mut collecting := false

        // Skip if file is outside time range
        if file_time.unix() < from_time || file_time.unix() > to_time {
            continue
        }

        // Read and parse log file
        content := os.read_file('${l.path.path}/${file}')!
        lines := content.split('\n')

        for line in lines {
            if result.len >= args.maxitems {
                return result
            }

            line_trim := line.trim_space()
            if line_trim == '' {
                continue
            }

            // Check if this is a timestamp line
            if !(line.starts_with(' ') || line.starts_with('E')) {
                current_time = ourtime.new(line_trim)!
                if collecting {
                    process(mut result, current_item, current_time, args, from_time, to_time)!
                }
                collecting = false
                continue
            }

            if collecting && line.len > 14 && line[13] == `-` {
                process(mut result, current_item, current_time, args, from_time, to_time)!
                collecting = false
            }

            // Parse log line
            is_error := line.starts_with('E')
            if !collecting {
                // Start new item
                current_item = LogItem{
                    timestamp: current_time
                    cat: line[2..12].trim_space()
                    log: line[15..].trim_space()
                    logtype: if is_error { .error } else { .stdout }
                }
                // println('new current item: ${current_item}')
                collecting = true
            } else {
                // Continuation line
                if line_trim.len < 16 {
                    current_item.log += '\n'
                } else {
                    current_item.log += '\n' + line[15..]
                }
            }
        }

        // Add last item if collecting
        if collecting {
            process(mut result, current_item, current_time, args, from_time, to_time)!
        }
    }

    return result
}

fn process(mut result []LogItem, current_item LogItem, current_time ourtime.OurTime, args SearchArgs, from_time i64, to_time i64) ! {
    // Add previous item if it matches filters
    log_epoch := current_item.timestamp.unix()
    if log_epoch < from_time || log_epoch > to_time {
        return
    }
    if (args.cat == '' || current_item.cat.trim_space() == args.cat)
        && (args.log == '' || current_item.log.contains(args.log))
        && args.logtype == current_item.logtype {
        result << current_item
    }
}
```
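
The core of search.v is classifying each line of an hourly file as a timestamp header, the first line of an entry, or a continuation line. A standalone sketch of that classification step in Rust is shown below; `Line` and `classify` are names invented for this sketch, and byte-index slicing mirrors the column layout described in the readme rather than any existing API.

```rust
// Sketch of the per-line classification performed by search.v.
#[derive(Debug, PartialEq)]
enum Line<'a> {
    /// A bare "HH:MM:SS" line that starts a new second.
    Timestamp(&'a str),
    /// First line of an entry: error flag, category, and message.
    Entry { is_error: bool, cat: &'a str, msg: &'a str },
    /// Continuation of the previous entry's message.
    Continuation(&'a str),
}

fn classify(line: &str) -> Option<Line<'_>> {
    let trimmed = line.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Timestamp lines carry no ' '/'E' prefix (same test as search.v).
    if !(line.starts_with(' ') || line.starts_with('E')) {
        return Some(Line::Timestamp(trimmed));
    }
    let bytes = line.as_bytes();
    // Entry lines have the '-' separator at byte 13: "<p> <cat padded to 10> - <msg>".
    if bytes.len() > 15 && bytes[13] == b'-' {
        return Some(Line::Entry {
            is_error: line.starts_with('E'),
            cat: line[2..12].trim(),
            msg: line[15..].trim(),
        });
    }
    Some(Line::Continuation(if line.len() > 15 { &line[15..] } else { "" }))
}

fn main() {
    println!("{:?}", classify("20:14:35"));
    println!("{:?}", classify("  test_app   - hello"));
}
```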
File: /Users/despiegk/code/github/freeflowuniverse/herolib/aiprompts/herolib_core/core_ourtime.md
```md
# OurTime Module

The `OurTime` module in V provides flexible time handling, supporting relative and absolute time formats, Unix timestamps, and formatting utilities.

## Key Features

- Create time objects from strings or current time
- Relative time expressions (e.g., `+1h`, `-2d`)
- Absolute time formats (e.g., `YYYY-MM-DD HH:mm:ss`)
- Unix timestamp conversion
- Time formatting and warping

## Basic Usage

```v
import freeflowuniverse.herolib.data.ourtime

// Current time
mut t := ourtime.now()

// From string
t2 := ourtime.new('2022-12-05 20:14:35')!

// Get formatted string
println(t2.str()) // e.g., 2022-12-05 20:14

// Get Unix timestamp
println(t2.unix()) // e.g., 1670271275
```

## Time Formats

### Relative Time

Use `s` (seconds), `h` (hours), `d` (days), `w` (weeks), `M` (months), `Q` (quarters), `Y` (years).

```v
// Create with relative time
mut t := ourtime.new('+1w +2d -4h')!

// Warp existing time
mut t2 := ourtime.now()
t2.warp('+1h')!
```

### Absolute Time

Supports `YYYY-MM-DD HH:mm:ss`, `YYYY-MM-DD HH:mm`, `YYYY-MM-DD HH`, `YYYY-MM-DD`, `DD-MM-YYYY`.

```v
t1 := ourtime.new('2022-12-05 20:14:35')!
t2 := ourtime.new('2022-12-05')! // Time defaults to 00:00:00
```

## Methods Overview

### Creation

```v
now_time := ourtime.now()
from_string := ourtime.new('2023-01-15')!
from_epoch := ourtime.new_from_epoch(1673788800)
```

### Formatting

```v
mut t := ourtime.now()
println(t.str()) // YYYY-MM-DD HH:mm
println(t.day()) // YYYY-MM-DD
println(t.key()) // YYYY_MM_DD_HH_mm_ss
println(t.md()) // Markdown format
```

### Operations

```v
mut t := ourtime.now()
t.warp('+1h')! // Move 1 hour forward
unix_ts := t.unix()
is_empty := t.empty()
```

## Error Handling

Time parsing methods return a `Result` type and should be handled with `!` or `or` blocks.

```v
t_valid := ourtime.new('2023-01-01')!
t_invalid := ourtime.new('bad-date') or {
    println('Error: ${err}')
    ourtime.now() // Fallback
}
```

```
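
The Rust port will need an equivalent of OurTime's relative expressions such as `+1w +2d -4h`. The sketch below parses such an expression into a signed offset in seconds; `relative_offset_secs` is a hypothetical helper for this sketch, and months/quarters/years are approximated with fixed lengths, which the real module may handle calendar-aware.

```rust
// Minimal sketch (not herolib code) of parsing OurTime-style relative expressions.
fn relative_offset_secs(expr: &str) -> Result<i64, String> {
    let mut total: i64 = 0;
    for token in expr.split_whitespace() {
        // Optional leading sign, default positive.
        let (sign, rest) = if let Some(stripped) = token.strip_prefix('+') {
            (1, stripped)
        } else if let Some(stripped) = token.strip_prefix('-') {
            (-1, stripped)
        } else {
            (1, token)
        };
        if rest.len() < 2 {
            return Err(format!("bad token: {token}"));
        }
        // Split "<number><unit>" at the last character.
        let (num, unit) = rest.split_at(rest.len() - 1);
        let n: i64 = num.parse().map_err(|_| format!("bad number in {token}"))?;
        let secs = match unit {
            "s" => 1,
            "h" => 3600,
            "d" => 86_400,
            "w" => 7 * 86_400,
            "M" => 30 * 86_400,   // approximation
            "Q" => 90 * 86_400,   // approximation
            "Y" => 365 * 86_400,  // approximation
            _ => return Err(format!("unknown unit in {token}")),
        };
        total += sign * n * secs;
    }
    Ok(total)
}

fn main() {
    // +1w +2d -4h  ->  9 days minus 4 hours
    assert_eq!(relative_offset_secs("+1w +2d -4h").unwrap(), (9 * 24 - 4) * 3600);
    println!("ok");
}
```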
File: /Users/despiegk/code/github/freeflowuniverse/herolib/aiprompts/herolib_core/core_paths.md
```md
# Pathlib Usage Guide

## Overview

The pathlib module provides a comprehensive interface for handling file system operations. Key features include:

- Robust path handling for files, directories, and symlinks
- Support for both absolute and relative paths
- Automatic home directory expansion (~)
- Recursive directory operations
- Path filtering and listing
- File and directory metadata access

## Basic Usage

### Importing pathlib

```v
import freeflowuniverse.herolib.core.pathlib
```

### Creating Path Objects

```v
// Create a Path object for a file
mut file_path := pathlib.get("path/to/file.txt")

// Create a Path object for a directory
mut dir_path := pathlib.get("path/to/directory")
```

### Basic Path Operations

```v
// Get absolute path
abs_path := file_path.absolute()

// Get real path (resolves symlinks)
real_path := file_path.realpath()

// Check if path exists
if file_path.exists() {
    // Path exists
}
```

## Path Properties and Methods

### Path Types

```v
// Check if path is a file
if file_path.is_file() {
    // Handle as file
}

// Check if path is a directory
if dir_path.is_dir() {
    // Handle as directory
}

// Check if path is a symlink
if file_path.is_link() {
    // Handle as symlink
}
```

### Path Normalization

```v
// Normalize path (remove extra slashes, resolve . and ..)
normalized_path := file_path.path_normalize()

// Get path directory
dir_path := file_path.path_dir()

// Get path name without extension
name_no_ext := file_path.name_no_ext()
```

## File and Directory Operations

### File Operations

```v
// Write to file
file_path.write("Content to write")!

// Read from file
content := file_path.read()!

// Delete file
file_path.delete()!
```

### Directory Operations

```v
// Create directory
mut dir := pathlib.get_dir(
    path: "path/to/new/dir"
    create: true
)!

// List directory contents
mut dir_list := dir.list()!

// Delete directory
dir.delete()!
```

### Symlink Operations

```v
// Create symlink
file_path.link("path/to/symlink", delete_exists: true)!

// Resolve symlink
real_path := file_path.realpath()
```

## Advanced Operations

### Path Copying

```v
// Copy file to destination
file_path.copy(dest: "path/to/destination")!
```

### Recursive Operations

```v
// List directory recursively
mut recursive_list := dir.list(recursive: true)!

// Delete directory recursively
dir.delete()!
```

### Path Filtering

```v
// List files matching pattern
mut filtered_list := dir.list(
    regex: [r".*\.txt$"],
    recursive: true
)!
```

## Best Practices

### Error Handling

```v
if file_path.exists() {
    // Safe to operate
} else {
    // Handle missing file
}
```

```
File: /Users/despiegk/code/github/freeflowuniverse/herolib/aiprompts/herolib_core/core_text.md
```md
# TextTools Module

The `texttools` module provides a comprehensive set of utilities for text manipulation and processing.

## Functions and Examples:

```v
import freeflowuniverse.herolib.core.texttools

assert hello_world == texttools.name_fix("Hello World!")
```

### Name/Path Processing

* `name_fix(name string) string`: Normalizes filenames and paths.
* `name_fix_keepspace(name string) !string`: Like name_fix but preserves spaces.
* `name_fix_no_ext(name_ string) string`: Removes file extension.
* `name_fix_snake_to_pascal(name string) string`: Converts snake_case to PascalCase.

```v
name := texttools.name_fix_snake_to_pascal("hello_world") // Result: "HelloWorld"
```

* `snake_case(name string) string`: Converts PascalCase to snake_case.

```v
name := texttools.snake_case("HelloWorld") // Result: "hello_world"
```

* `name_split(name string) !(string, string)`: Splits name into site and page components.

### Text Cleaning

* `name_clean(r string) string`: Normalizes names by removing special characters.

```v
name := texttools.name_clean("Hello@World!") // Result: "HelloWorld"
```

* `ascii_clean(r string) string`: Removes all non-ASCII characters.
* `remove_empty_lines(text string) string`: Removes empty lines from text.

```v
text := texttools.remove_empty_lines("line1\n\nline2\n\n\nline3") // Result: "line1\nline2\nline3"
```

* `remove_double_lines(text string) string`: Removes consecutive empty lines.
* `remove_empty_js_blocks(text string) string`: Removes empty code blocks (```...```).

### Command Line Parsing

* `cmd_line_args_parser(text string) ![]string`: Parses command line arguments with support for quotes and escaping.

```v
args := texttools.cmd_line_args_parser("'arg with spaces' --flag=value") // Result: ['arg with spaces', '--flag=value']
```

* `text_remove_quotes(text string) string`: Removes quoted sections from text.
* `check_exists_outside_quotes(text string, items []string) bool`: Checks if items exist in text outside of quotes.

### Text Expansion

* `expand(txt_ string, l int, expand_with string) string`: Expands text to a specified length with a given character.

### Indentation

* `indent(text string, prefix string) string`: Adds indentation prefix to each line.

```v
text := texttools.indent("line1\nline2", " ") // Result: " line1\n line2\n"
```

* `dedent(text string) string`: Removes common leading whitespace from every line.

```v
text := texttools.dedent(" line1\n line2") // Result: "line1\nline2"
```

### String Validation

* `is_int(text string) bool`: Checks if text contains only digits.
* `is_upper_text(text string) bool`: Checks if text contains only uppercase letters.

### Multiline Processing

* `multiline_to_single(text string) !string`: Converts multiline text to a single line with proper escaping.

### Text Splitting

* `split_smart(t string, delimiter_ string) []string`: Intelligent string splitting that respects quotes.

### Tokenization

* `tokenize(text_ string) TokenizerResult`: Tokenizes text into meaningful parts.
* `text_token_replace(text string, tofind string, replacewith string) !string`: Replaces tokens in text.

### Version Parsing

* `version(text_ string) int`: Converts version strings to comparable integers.

```v
ver := texttools.version("v0.4.36") // Result: 4036
ver = texttools.version("v1.4.36") // Result: 1004036
```

### Formatting

* `format_rfc1123(t time.Time) string`: Formats a time.Time object into RFC 1123 format.

### Array Operations

* `to_array(r string) []string`: Converts a comma or newline separated list to an array of strings.

```v
text := "item1,item2,item3"
array := texttools.to_array(text) // Result: ['item1', 'item2', 'item3']
```

* `to_array_int(r string) []int`: Converts a text list to an array of integers.
* `to_map(mapstring string, line string, delimiter_ string) map[string]string`: Intelligent mapping of a line to a map based on a template.

```v
r := texttools.to_map("name,-,-,-,-,pid,-,-,-,-,path",
    "root 304 0.0 0.0 408185328 1360 ?? S 16Dec23 0:34.06 /usr/sbin/distnoted")
// Result: {'name': 'root', 'pid': '1360', 'path': '/usr/sbin/distnoted'}
```

```
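
Of the texttools functions above, the logger only needs a few (`name_fix`, `expand`, `dedent`). The sketch below shows plausible Rust stand-ins under assumed names; the exact normalization rules of the herolib originals are not reproduced here, so treat this as an approximation to refine during the port.

```rust
// Hypothetical stand-ins for the texttools helpers the logger uses.

/// Roughly name_fix: lowercase, keep [a-z0-9_], map '-'/' ' to '_', drop the rest.
fn name_fix(name: &str) -> String {
    name.trim()
        .to_lowercase()
        .chars()
        .filter_map(|c| match c {
            'a'..='z' | '0'..='9' | '_' => Some(c),
            '-' | ' ' => Some('_'),
            _ => None,
        })
        .collect()
}

/// Roughly expand: pad `txt` on the right with `pad` until it is `width` chars.
fn expand(txt: &str, width: usize, pad: char) -> String {
    let mut s = txt.to_string();
    while s.chars().count() < width {
        s.push(pad);
    }
    s
}

/// Roughly dedent: strip the common leading-space count from every line.
fn dedent(text: &str) -> String {
    let min_indent = text
        .lines()
        .filter(|l| !l.trim().is_empty())
        .map(|l| l.len() - l.trim_start().len())
        .min()
        .unwrap_or(0);
    text.lines()
        .map(|l| if l.len() >= min_indent { &l[min_indent..] } else { l })
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    assert_eq!(name_fix("Hello World!"), "hello_world");
    assert_eq!(expand("sys", 10, ' '), "sys       ");
    assert_eq!(dedent("  a\n  b"), "a\nb");
}
```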
</file_contents>

<user_instructions>
create a module in rust in location packages/core/logger

which reimplements herolib/lib/core/logger
all features need to be reimplemented

write me an implementation plan for my coding agent
</user_instructions>
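
As a starting point for that implementation plan, one plausible public API skeleton for the new `packages/core/logger` crate is sketched below. It mirrors the V module's `new` / `log` / `search` surface; every name, signature, and the simplification of timestamps to Unix seconds is a proposal for the coding agent to refine, not existing code.

```rust
// Hypothetical API skeleton for packages/core/logger.
use std::io;
use std::path::{Path, PathBuf};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LogType {
    Stdout,
    Error,
}

#[derive(Debug, Clone)]
pub struct LogItem {
    pub timestamp: i64, // unix seconds
    pub cat: String,
    pub log: String,
    pub logtype: LogType,
}

#[derive(Debug, Default)]
pub struct SearchArgs {
    pub timestamp_from: Option<i64>,
    pub timestamp_to: Option<i64>,
    pub cat: String,
    pub log: String,
    pub logtype: Option<LogType>,
    pub maxitems: usize,
}

pub struct Logger {
    path: PathBuf,
    lastlog_time: i64,
}

impl Logger {
    /// Create the log directory if needed (factory.v `new`).
    pub fn new(path: impl AsRef<Path>) -> io::Result<Self> {
        std::fs::create_dir_all(path.as_ref())?;
        Ok(Self { path: path.as_ref().to_path_buf(), lastlog_time: 0 })
    }

    /// Append one entry to the current hourly file (log.v `log`).
    pub fn log(&mut self, _cat: &str, _msg: &str, _logtype: LogType) -> io::Result<()> {
        todo!("format the entry and append to <path>/<YYYY-MM-DD-HH>.log")
    }

    /// Scan hourly files and return matching entries (search.v `search`).
    pub fn search(&self, _args: SearchArgs) -> io::Result<Vec<LogItem>> {
        todo!("walk *.log files in range, parse lines, apply filters")
    }
}

fn main() -> io::Result<()> {
    let mut logger = Logger::new("/tmp/testlogs")?;
    let _ = &mut logger; // the agent fills in log()/search() per the plan
    Ok(())
}
```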
@@ -10,7 +10,7 @@ keywords = ["network", "tcp", "http", "ssh", "connectivity"]
categories = ["network-programming", "api-bindings"]

[dependencies]
-anyhow = "1.0.98"
+anyhow = { workspace = true }
-tokio = { version = "1.0", features = ["full"] }
+tokio = { workspace = true }
-reqwest = { version = "0.12", features = ["json", "blocking"] }
+reqwest = { workspace = true, features = ["json", "blocking"] }
-rhai = "1.19.0"
+rhai = { workspace = true }
Some files were not shown because too many files have changed in this diff.