Merge branch 'development' of https://git.ourworld.tf/herocode/sal into development
.gitignore (vendored): 3 changes
@@ -64,3 +64,6 @@ sidebars.ts
tsconfig.json
Cargo.toml.bak
for_augment

myenv.sh
Cargo.toml: 113 changes
@@ -12,23 +12,27 @@ readme = "README.md"
[workspace]
members = [
    ".",
    "vault",
    "git",
    "redisclient",
    "mycelium",
    "text",
    "os",
    "net",
    "zinit_client",
    "process",
    "virt",
    "zos",
    "postgresclient",
    "kubernetes",
    "packages/clients/myceliumclient",
    "packages/clients/postgresclient",
    "packages/clients/redisclient",
    "packages/clients/zinitclient",
    "packages/clients/rfsclient",
    "packages/core/net",
    "packages/core/text",
    "packages/crypt/vault",
    "packages/data/ourdb",
    "packages/data/radixtree",
    "packages/data/tst",
    "packages/system/git",
    "packages/system/kubernetes",
    "packages/system/os",
    "packages/system/process",
    "packages/system/virt",
    "rhai",
    "rhailib",
    "herodo",
    "service_manager",
    "packages/clients/hetznerclient",
    "packages/ai/codemonkey",
]
resolver = "2"

@@ -40,6 +44,7 @@ rust-version = "1.70.0"
# Core shared dependencies with consistent versions
anyhow = "1.0.98"
base64 = "0.22.1"
bytes = "1.7.1"
dirs = "6.0.0"
env_logger = "0.11.8"
futures = "0.3.30"

@@ -50,7 +55,7 @@ log = "0.4"
once_cell = "1.18.0"
rand = "0.8.5"
regex = "1.8.1"
reqwest = { version = "0.12.15", features = ["json"] }
reqwest = { version = "0.12.15", features = ["json", "blocking"] }
rhai = { version = "1.12.0", features = ["sync"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

@@ -71,6 +76,10 @@ chacha20poly1305 = "0.10.1"
k256 = { version = "0.13.4", features = ["ecdsa", "ecdh"] }
sha2 = "0.10.7"
hex = "0.4"
bincode = { version = "2.0.1", features = ["serde"] }
pbkdf2 = "0.12.2"
getrandom = { version = "0.3.3", features = ["wasm_js"] }
tera = "1.19.0"

# Ethereum dependencies
ethers = { version = "2.0.7", features = ["legacy"] }

@@ -87,27 +96,54 @@ windows = { version = "0.61.1", features = [
zinit-client = "0.4.0"
urlencoding = "2.1.3"
tokio-test = "0.4.4"
kube = { version = "0.95.0", features = ["client", "config", "derive"] }
k8s-openapi = { version = "0.23.0", features = ["latest"] }
tokio-retry = "0.3.0"
governor = "0.6.3"
tower = { version = "0.5.2", features = ["timeout", "limit"] }
serde_yaml = "0.9"
postgres-types = "0.2.5"
r2d2 = "0.8.10"

# SAL dependencies
sal-git = { path = "packages/system/git" }
sal-kubernetes = { path = "packages/system/kubernetes" }
sal-redisclient = { path = "packages/clients/redisclient" }
sal-mycelium = { path = "packages/clients/myceliumclient" }
sal-hetzner = { path = "packages/clients/hetznerclient" }
sal-rfs-client = { path = "packages/clients/rfsclient" }
sal-text = { path = "packages/core/text" }
sal-os = { path = "packages/system/os" }
sal-net = { path = "packages/core/net" }
sal-zinit-client = { path = "packages/clients/zinitclient" }
sal-process = { path = "packages/system/process" }
sal-virt = { path = "packages/system/virt" }
sal-postgresclient = { path = "packages/clients/postgresclient" }
sal-vault = { path = "packages/crypt/vault" }
sal-rhai = { path = "rhai" }
sal-service-manager = { path = "_archive/service_manager" }

[dependencies]
thiserror = "2.0.12" # For error handling in the main Error enum
tokio = { workspace = true } # For async examples
thiserror = { workspace = true }
tokio = { workspace = true }

# Optional dependencies - users can choose which modules to include
sal-git = { path = "git", optional = true }
sal-kubernetes = { path = "kubernetes", optional = true }
sal-redisclient = { path = "redisclient", optional = true }
sal-mycelium = { path = "mycelium", optional = true }
sal-text = { path = "text", optional = true }
sal-os = { path = "os", optional = true }
sal-net = { path = "net", optional = true }
sal-zinit-client = { path = "zinit_client", optional = true }
sal-process = { path = "process", optional = true }
sal-virt = { path = "virt", optional = true }
sal-postgresclient = { path = "postgresclient", optional = true }
sal-vault = { path = "vault", optional = true }
sal-rhai = { path = "rhai", optional = true }
sal-service-manager = { path = "service_manager", optional = true }
zinit-client.workspace = true
sal-git = { workspace = true, optional = true }
sal-kubernetes = { workspace = true, optional = true }
sal-redisclient = { workspace = true, optional = true }
sal-mycelium = { workspace = true, optional = true }
sal-hetzner = { workspace = true, optional = true }
sal-rfs-client = { workspace = true, optional = true }
sal-text = { workspace = true, optional = true }
sal-os = { workspace = true, optional = true }
sal-net = { workspace = true, optional = true }
sal-zinit-client = { workspace = true, optional = true }
sal-process = { workspace = true, optional = true }
sal-virt = { workspace = true, optional = true }
sal-postgresclient = { workspace = true, optional = true }
sal-vault = { workspace = true, optional = true }
sal-rhai = { workspace = true, optional = true }
sal-service-manager = { workspace = true, optional = true }

[features]
default = []

@@ -117,6 +153,8 @@ git = ["dep:sal-git"]
kubernetes = ["dep:sal-kubernetes"]
redisclient = ["dep:sal-redisclient"]
mycelium = ["dep:sal-mycelium"]
hetzner = ["dep:sal-hetzner"]
rfsclient = ["dep:sal-rfs-client"]
text = ["dep:sal-text"]
os = ["dep:sal-os"]
net = ["dep:sal-net"]

@@ -126,18 +164,20 @@ virt = ["dep:sal-virt"]
postgresclient = ["dep:sal-postgresclient"]
vault = ["dep:sal-vault"]
rhai = ["dep:sal-rhai"]
service_manager = ["dep:sal-service-manager"]
# service_manager is removed as it's not a direct member anymore

# Convenience feature groups
core = ["os", "process", "text", "net"]
clients = ["redisclient", "postgresclient", "zinit_client", "mycelium"]
infrastructure = ["git", "vault", "kubernetes", "virt", "service_manager"]
clients = ["redisclient", "postgresclient", "zinit_client", "mycelium", "hetzner", "rfsclient"]
infrastructure = ["git", "vault", "kubernetes", "virt"]
scripting = ["rhai"]
all = [
    "git",
    "kubernetes",
    "redisclient",
    "mycelium",
    "hetzner",
    "rfsclient",
    "text",
    "os",
    "net",

@@ -147,7 +187,6 @@ all = [
    "postgresclient",
    "vault",
    "rhai",
    "service_manager",
]

# Examples
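The `[features]` block above maps every optional module to a `dep:` entry, so consumers of the `sal` meta-crate pull in only what they enable (for example `features = ["core"]` or `features = ["all"]`). Below is a minimal sketch of how downstream Rust code typically guards its use of those optional modules; the `sal::os` re-export path is an assumption here (only `sal::hetzner` is confirmed elsewhere in this diff).

```rust
// Sketch only: feature-gated use of the `sal` meta-crate. Assumes the crate
// re-exports each optional module under its short name, as it does for
// `sal::hetzner` in the herodo changes later in this diff.
#[cfg(feature = "os")]
use sal::os;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    #[cfg(feature = "os")]
    {
        // Mirrors the README quick-start call; treat the exact path as illustrative.
        let files = os::fs::list_files(".")?;
        println!("Found {} files", files.len());
    }
    Ok(())
}
```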
README.md: 228 changes
@@ -1,148 +1,136 @@
# SAL (System Abstraction Layer)
# Herocode Herolib Rust Repository

**Version 0.1.0** - A modular Rust library for cross-platform system operations and automation.
## Overview

SAL provides a unified interface for system operations with Rhai scripting support through the `herodo` tool.
This repository contains the **Herocode Herolib** Rust library and a collection of scripts, examples, and utilities for building, testing, and publishing the SAL (System Abstraction Layer) crates. The repository includes:

## Installation
- **Rust crates** for various system components (e.g., `os`, `process`, `text`, `git`, `vault`, `kubernetes`, etc.).
- **Rhai scripts** and test suites for each crate.
- **Utility scripts** to automate common development tasks.

### Individual Packages (Recommended)
## Scripts

The repository provides three primary helper scripts located in the repository root:

| Script | Description | Typical Usage |
|--------|-------------|--------------|
| `scripts/publish-all.sh` | Publishes all SAL crates to **crates.io** in the correct dependency order. Handles version bumping, dependency updates, dry‑run mode, and rate‑limiting. | `./scripts/publish-all.sh [--dry-run] [--wait <seconds>] [--version <ver>]` |
| `build_herodo.sh` | Builds the `herodo` binary from the `herodo` package and optionally runs a specified Rhai script. | `./build_herodo.sh [script_name]` |
| `run_rhai_tests.sh` | Executes all Rhai test suites across the repository, logging results and providing a summary. | `./run_rhai_tests.sh` |

Below are detailed usage instructions for each script.

---

## 1. `scripts/publish-all.sh`

### Purpose

- Publishes each SAL crate in the correct dependency order.
- Updates crate versions (if `--version` is supplied).
- Updates path dependencies to version dependencies before publishing.
- Supports **dry‑run** mode to preview actions without publishing.
- Handles rate‑limiting between crate publishes.

### Options

| Option | Description |
|--------|-------------|
| `--dry-run` | Shows what would be published without actually publishing. |
| `--wait <seconds>` | Wait time between publishes (default: 15 s). |
| `--version <ver>` | Set a new version for all crates (updates `Cargo.toml` files). |
| `-h, --help` | Show help message. |

### Example Usage

```bash
# Core functionality
cargo add sal-os sal-process sal-text sal-net
# Dry run – no crates will be published
./scripts/publish-all.sh --dry-run

# Infrastructure
cargo add sal-git sal-vault sal-kubernetes sal-virt
# Publish with a custom wait time and version bump
./scripts/publish-all.sh --wait 30 --version 1.2.3

# Database clients
cargo add sal-redisclient sal-postgresclient sal-zinit-client

# Scripting
cargo add sal-rhai
# Normal publish (no dry‑run)
./scripts/publish-all.sh
```

### Meta-package with Features
### Notes

- Must be run from the repository root (where `Cargo.toml` lives).
- Requires `cargo` and a logged‑in `cargo` session (`cargo login`).
- The script automatically updates dependencies in each crate's `Cargo.toml` to use the new version before publishing.

---

## 2. `build_herodo.sh`

### Purpose

- Builds the `herodo` binary from the `herodo` package.
- Copies the binary to a system‑wide location (`/usr/local/bin`) if run as root, otherwise to `~/hero/bin`.
- Optionally runs a specified Rhai script after building.

### Usage

```bash
cargo add sal --features core # os, process, text, net
cargo add sal --features infrastructure # git, vault, kubernetes, virt
cargo add sal --features all # everything
# Build only
./build_herodo.sh

# Build and run a specific Rhai script (e.g., `example`):
./build_herodo.sh example
```

### Herodo Script Runner
### Details

- The script changes to its own directory, builds the `herodo` crate (`cargo build`), and copies the binary.
- If a script name is provided, it looks for the script in:
  - `src/rhaiexamples/<name>.rhai`
  - `src/herodo/scripts/<name>.rhai`
- If the script is not found, the script exits with an error.

---

## 3. `run_rhai_tests.sh`

### Purpose

- Runs **all** Rhai test suites across the repository.
- Supports both the legacy `rhai_tests` directory and the newer `*/tests/rhai` layout.
- Logs output to `run_rhai_tests.log` and prints a summary.

### Usage

```bash
cargo install herodo
```

## Quick Start

### Rust Library Usage

```rust
use sal_os::fs;
use sal_process::run;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let files = fs::list_files(".")?;
    println!("Found {} files", files.len());

    let result = run::command("echo hello")?;
    println!("Output: {}", result.stdout);
    Ok(())
}
```

### Herodo Scripting

```bash
# Create script
cat > example.rhai << 'EOF'
let files = find_files(".", "*.rs");
print("Found " + files.len() + " Rust files");

let result = run("echo 'Hello from SAL!'");
print("Output: " + result.stdout);
EOF

# Run script
herodo example.rhai
```

## Available Packages

| Package | Description |
|---------|-------------|
| [`sal-os`](https://crates.io/crates/sal-os) | Operating system operations |
| [`sal-process`](https://crates.io/crates/sal-process) | Process management |
| [`sal-text`](https://crates.io/crates/sal-text) | Text processing |
| [`sal-net`](https://crates.io/crates/sal-net) | Network operations |
| [`sal-git`](https://crates.io/crates/sal-git) | Git repository management |
| [`sal-vault`](https://crates.io/crates/sal-vault) | Cryptographic operations |
| [`sal-kubernetes`](https://crates.io/crates/sal-kubernetes) | Kubernetes management |
| [`sal-virt`](https://crates.io/crates/sal-virt) | Virtualization tools |
| [`sal-redisclient`](https://crates.io/crates/sal-redisclient) | Redis client |
| [`sal-postgresclient`](https://crates.io/crates/sal-postgresclient) | PostgreSQL client |
| [`sal-zinit-client`](https://crates.io/crates/sal-zinit-client) | Zinit process supervisor |
| [`sal-mycelium`](https://crates.io/crates/sal-mycelium) | Mycelium network client |
| [`sal-service-manager`](https://crates.io/crates/sal-service-manager) | Service management |
| [`sal-rhai`](https://crates.io/crates/sal-rhai) | Rhai scripting integration |
| [`sal`](https://crates.io/crates/sal) | Meta-crate with features |
| [`herodo`](https://crates.io/crates/herodo) | Script executor binary |

## Building & Testing

```bash
# Build all packages
cargo build --workspace

# Run tests
cargo test --workspace

# Run Rhai integration tests
# Run all tests
./run_rhai_tests.sh
```

## Core Features
### Output

- **System Operations**: File/directory management, environment access, OS commands
- **Process Management**: Create, monitor, and control system processes
- **Containerization**: Buildah and nerdctl integration
- **Version Control**: Git repository operations
- **Database Clients**: Redis and PostgreSQL support
- **Networking**: HTTP, TCP, SSH connectivity utilities
- **Cryptography**: Key management, encryption, digital signatures
- **Text Processing**: String manipulation and templating
- **Scripting**: Rhai script execution via `herodo`
- Colored console output for readability.
- Log file (`run_rhai_tests.log`) contains full output for later review.
- Summary includes total modules, passed, and failed counts.
- Exit code `0` if all tests pass, `1` otherwise.

## Herodo Scripting
---

`herodo` executes Rhai scripts with access to all SAL modules:
## General Development Workflow

```bash
herodo script.rhai # Run single script
herodo script.rhai arg1 arg2 # With arguments
herodo /path/to/scripts/ # Run all .rhai files in directory
```
1. **Build**: Use `build_herodo.sh` to compile the `herodo` binary.
2. **Test**: Run `run_rhai_tests.sh` to ensure all Rhai scripts pass.
3. **Publish**: When ready to release, use `scripts/publish-all.sh` (with `--dry-run` first to verify).

### Example Script
## Prerequisites

```rhai
// File operations
let files = find_files(".", "*.rs");
print("Found " + files.len() + " Rust files");

// Process execution
let result = run("echo 'Hello SAL!'");
print("Output: " + result.stdout);

// Redis operations
redis_set("status", "running");
let status = redis_get("status");
print("Status: " + status);
```
- **Rust toolchain** (`cargo`, `rustc`) installed.
- **Rhai** interpreter (`herodo`) built and available.
- **Git** for version control.
- **Cargo login** for publishing to crates.io.

## License

Licensed under the Apache License 2.0. See [LICENSE](LICENSE) for details.
See `LICENSE` for details.

---

**Happy coding!**
cargo_instructions.md: 0 changes (new file)
config/README.md: 14 changes (new file)
@@ -0,0 +1,14 @@
# Environment Configuration

To set up your environment variables:

1. Copy the template file to `env.sh`:

```bash
cp config/myenv_templ.sh config/env.sh
```

2. Edit `config/env.sh` and fill in your specific values for the variables.

3. This file (`config/env.sh`) is excluded from version control by the project's `.gitignore` configuration, ensuring your sensitive information remains local and is never committed to the repository.
config/myenv_templ.sh: 6 changes (new file)
@@ -0,0 +1,6 @@


export OPENROUTER_API_KEY=""
export GROQ_API_KEY=""
export CEREBRAS_API_KEY=""
export OPENAI_API_KEY="sk-xxxxxxx"
@@ -1,6 +1,7 @@
// Example of using the network modules in SAL through Rhai
// Shows TCP port checking, HTTP URL validation, and SSH command execution


// Function to print section header
fn section(title) {
    print("\n");

@@ -19,14 +20,14 @@ let host = "localhost";
let port = 22;
print(`Checking if port ${port} is open on ${host}...`);
let is_open = tcp.check_port(host, port);
print(`Port ${port} is ${is_open ? "open" : "closed"}`);
print(`Port ${port} is ${if is_open { "open" } else { "closed" }}`);

// Check multiple ports
let ports = [22, 80, 443];
print(`Checking multiple ports on ${host}...`);
let port_results = tcp.check_ports(host, ports);
for result in port_results {
    print(`Port ${result.port} is ${result.is_open ? "open" : "closed"}`);
    print(`Port ${result.port} is ${if result.is_open { "open" } else { "closed" }}`);
}

// HTTP connectivity checks

@@ -39,7 +40,7 @@ let http = net::new_http_connector();
let url = "https://www.example.com";
print(`Checking if ${url} is reachable...`);
let is_reachable = http.check_url(url);
print(`${url} is ${is_reachable ? "reachable" : "unreachable"}`);
print(`${url} is ${if is_reachable { "reachable" } else { "unreachable" }}`);

// Check the status code of a URL
print(`Checking status code of ${url}...`);

@@ -68,7 +69,7 @@ if is_open {
    let ssh = net::new_ssh_builder()
        .host("localhost")
        .port(22)
        .user(os::get_env("USER") || "root")
        .user(if os::get_env("USER") != () { os::get_env("USER") } else { "root" })
        .timeout(10)
        .build();
@@ -1,7 +1,7 @@
print("Running a basic command using run().do()...");
print("Running a basic command using run().execute()...");

// Execute a simple command
let result = run("echo Hello from run_basic!").do();
let result = run("echo Hello from run_basic!").execute();

// Print the command result
print(`Command: echo Hello from run_basic!`);

@@ -13,6 +13,6 @@ print(`Stderr:\n${result.stderr}`);
// Example of a command that might fail (if 'nonexistent_command' doesn't exist)
// This will halt execution by default because ignore_error() is not used.
// print("Running a command that will fail (and should halt)...");
// let fail_result = run("nonexistent_command").do(); // This line will cause the script to halt if the command doesn't exist
// let fail_result = run("nonexistent_command").execute(); // This line will cause the script to halt if the command doesn't exist

print("Basic run() example finished.");
@@ -2,7 +2,7 @@ print("Running a command that will fail, but ignoring the error...");
// Run a command that exits with a non-zero code (will fail)
// Using .ignore_error() prevents the script from halting
let result = run("exit 1").ignore_error().do();
let result = run("exit 1").ignore_error().execute();

print(`Command finished.`);
print(`Success: ${result.success}`); // This should be false

@@ -22,7 +22,7 @@ print("\nScript continued execution after the potentially failing command.");
// Example of a command that might fail due to OS error (e.g., command not found)
// This *might* still halt depending on how the underlying Rust function handles it,
// as ignore_error() primarily prevents halting on *command* non-zero exit codes.
// let os_error_result = run("nonexistent_command_123").ignore_error().do();
// let os_error_result = run("nonexistent_command_123").ignore_error().execute();
// print(`OS Error Command Success: ${os_error_result.success}`);
// print(`OS Error Command Exit Code: ${os_error_result.code}`);
@@ -1,4 +1,4 @@
print("Running a command using run().log().do()...");
print("Running a command using run().log().execute()...");

// The .log() method will print the command string to the console before execution.
// This is useful for debugging or tracing which commands are being run.
@@ -1,8 +1,8 @@
print("Running a command using run().silent().do()...\n");
print("Running a command using run().silent().execute()...\n");

// This command will print to standard output and standard error
// However, because .silent() is used, the output will not appear in the console directly
let result = run("echo 'This should be silent stdout.'; echo 'This should be silent stderr.' >&2; exit 0").silent().do();
let result = run("echo 'This should be silent stdout.'; echo 'This should be silent stderr.' >&2; exit 0").silent().execute();

// The output is still captured in the CommandResult
print(`Command finished.`);

@@ -12,7 +12,7 @@ print(`Captured Stdout:\\n${result.stdout}`);
print(`Captured Stderr:\\n${result.stderr}`);

// Example of a silent command that fails (but won't halt because we only suppress output)
// let fail_result = run("echo 'This is silent failure stderr.' >&2; exit 1").silent().do();
// let fail_result = run("echo 'This is silent failure stderr.' >&2; exit 1").silent().execute();
// print(`Failed command finished (silent):`);
// print(`Success: ${fail_result.success}`);
// print(`Exit Code: ${fail_result.code}`);
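These hunks rename the Rhai command-builder terminator from `.do()` to `.execute()`. For comparison, here is a hedged Rust sketch of the same flow through `sal_process::run` as used in the README quick start; the `success`, `code`, `stdout`, and `stderr` field names are taken from the Rhai `CommandResult` above and are an assumption for the Rust type.

```rust
// Sketch only: the run_basic example driven from Rust instead of Rhai.
// `run::command` comes from the README quick start; the result fields are
// assumed to match the Rhai CommandResult shown above.
use sal_process::run;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let result = run::command("echo Hello from run_basic!")?;
    println!("Success: {}", result.success);
    println!("Exit code: {}", result.code);
    println!("Stdout:\n{}", result.stdout);
    println!("Stderr:\n{}", result.stderr);
    Ok(())
}
```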
examples/rfsclient/README.md: 43 changes (new file)
@@ -0,0 +1,43 @@
# RFS Client Rhai Examples

This folder contains Rhai examples that use the SAL RFS client wrappers registered by `sal::rhai::register(&mut engine)` and executed by the `herodo` binary.

## Quick start

Run the auth + upload + download example (uses hardcoded credentials and `/etc/hosts` as input):

```bash
cargo run -p herodo -- examples/rfsclient/auth_and_upload.rhai
```

By default, the script:

- Uses base URL `http://127.0.0.1:8080`
- Uses credentials `user` / `password`
- Uploads the file `/etc/hosts`
- Downloads to `/tmp/rfs_example_out.txt`

To customize, edit `examples/rfsclient/auth_and_upload.rhai` near the top and change `BASE_URL`, `USER`, `PASS`, and file paths.

## What the example does

- Creates the RFS client: `rfs_create_client(BASE_URL, USER, PASS, TIMEOUT)`
- Health check: `rfs_health_check()`
- Authenticates: `rfs_authenticate()`
- Uploads a file: `rfs_upload_file(local_path, chunk_size, verify)` → returns file hash
- Downloads it back: `rfs_download_file(file_id_or_hash, dest_path, verify)` → returns unit (throws on error)

See `examples/rfsclient/auth_and_upload.rhai` for details.

## Using the Rust client directly (optional)

If you want to use the Rust API (without Rhai), depend on `sal-rfs-client` and see:

- `packages/clients/rfsclient/src/client.rs` (`RfsClient`)
- `packages/clients/rfsclient/src/types.rs` (config and option types)
- `packages/clients/rfsclient/examples/` (example usage)

## Troubleshooting

- Auth failures: verify credentials and that the server requires/authenticates them.
- Connection errors: verify the base URL is reachable from your machine.
examples/rfsclient/auth_and_upload.rhai: 41 changes (new file)
@@ -0,0 +1,41 @@
// RFS Client: Auth + Upload + Download example
// Prereqs:
// - RFS server reachable at RFS_BASE_URL
// - Valid credentials in env: RFS_USER, RFS_PASS
// - Run with herodo so the SAL Rhai modules are registered

// NOTE: env_get not available in this runtime; hardcode or replace with your env loader
let BASE_URL = "http://127.0.0.1:8080";
let USER = "user";
let PASS = "password";
let TIMEOUT = 30; // seconds

if BASE_URL == "" { throw "Set BASE_URL in the script"; }

// Create client
let ok = rfs_create_client(BASE_URL, USER, PASS, TIMEOUT);
if !ok { throw "Failed to create RFS client"; }

// Optional health check
let health = rfs_health_check();
print(`RFS health: ${health}`);

// Authenticate (required for some operations)
let auth_ok = rfs_authenticate();
if !auth_ok { throw "Authentication failed"; }

// Upload a local file
// Use an existing readable file to avoid needing os_write_file module
let local_file = "/etc/hosts";
// rfs_upload_file(file_path, chunk_size, verify)
let hash = rfs_upload_file(local_file, 0, false);
print(`Uploaded file hash: ${hash}`);

// Download it back
let out_path = "/tmp/rfs_example_out.txt";
// rfs_download_file(file_id, output_path, verify) returns unit and throws on error
rfs_download_file(hash, out_path, false);

print(`Downloaded to: ${out_path}`);

true
examples_rust/ai/Cargo.toml: 15 changes (new file)
@@ -0,0 +1,15 @@
[package]
name = "openrouter_example"
version = "0.1.0"
edition = "2021"

[workspace]

[[bin]]
name = "openrouter_example"
path = "openrouter_example.rs"

[dependencies]
codemonkey = { path = "../../packages/ai/codemonkey" }
openai-api-rs = "6.0.8"
tokio = { version = "1.0", features = ["full"] }
examples_rust/ai/openrouter_example.rs: 47 changes (new file)
@@ -0,0 +1,47 @@
use codemonkey::{create_ai_provider, AIProviderType, CompletionRequestBuilder, Message, MessageRole, Content};
use std::error::Error;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {

    let (mut provider, provider_type) = create_ai_provider(AIProviderType::OpenRouter)?;

    let messages = vec![Message {
        role: MessageRole::user,
        content: Content::Text("Explain the concept of a factory design pattern in Rust.".to_string()),
        name: None,
        tool_calls: None,
        tool_call_id: None,
    }];

    println!("Sending request to OpenRouter...");
    let response = CompletionRequestBuilder::new(
        &mut *provider,
        "openai/gpt-oss-120b".to_string(), // Model name as specified by the user
        messages,
        provider_type, // Pass the provider_type
    )
    .temperature(1.0)
    .max_tokens(8192)
    .top_p(1.0)
    .reasoning_effort("medium")
    .stream(false)
    .openrouter_options(|builder| {
        builder.provider(
            codemonkey::OpenRouterProviderOptionsBuilder::new()
                .order(vec!["cerebras"])
                .build(),
        )
    })
    .completion()
    .await?;

    for choice in response.choices {
        if let Some(content) = choice.message.content {
            print!("{}", content);
        }
    }
    println!();

    Ok(())
}
examples_rust/ai/run.sh: 13 changes (new executable file)
@@ -0,0 +1,13 @@
#!/bin/bash
set -e

# Change to directory where this script is located
cd "$(dirname "${BASH_SOURCE[0]}")"

source ../../config/myenv.sh

# Build the example
cargo build

# Run the example
cargo run --bin openrouter_example
@@ -3,7 +3,7 @@
//! This library loads the Rhai engine, registers all SAL modules,
//! and executes Rhai scripts from a specified directory in sorted order.

use rhai::Engine;
use rhai::{Engine, Scope};
use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};

@@ -30,6 +30,19 @@ pub fn run(script_path: &str) -> Result<(), Box<dyn Error>> {
    // Create a new Rhai engine
    let mut engine = Engine::new();

    // TODO: if we create a scope here we could clean up all the different functions and types registered with the engine
    // We should generalize the way we add things to the scope for each module separately
    let mut scope = Scope::new();
    // Conditionally add Hetzner client only when env config is present
    if let Ok(cfg) = sal::hetzner::config::Config::from_env() {
        let hetzner_client = sal::hetzner::api::Client::new(cfg);
        scope.push("hetzner", hetzner_client);
    }
    // This makes it easy to call e.g. `hetzner.get_server()` or `mycelium.get_connected_peers()`
    // --> without the need of manually creating a client for each one first
    // --> could be conditionally compiled to only use those we need (we only push the things to the scope that we actually need to run the script)

    // Register println function for output
    engine.register_fn("println", |s: &str| println!("{}", s));

@@ -78,19 +91,20 @@ pub fn run(script_path: &str) -> Result<(), Box<dyn Error>> {
        let script = fs::read_to_string(&script_file)?;

        // Execute the script
        match engine.eval::<rhai::Dynamic>(&script) {
            Ok(result) => {
                println!("Script executed successfully");
                if !result.is_unit() {
                    println!("Result: {}", result);
                }
            }
            Err(err) => {
                eprintln!("Error executing script: {}", err);
                // Exit with error code when a script fails
                process::exit(1);
            }
        }
        // match engine.eval::<rhai::Dynamic>(&script) {
        //     Ok(result) => {
        //         println!("Script executed successfully");
        //         if !result.is_unit() {
        //             println!("Result: {}", result);
        //         }
        //     }
        //     Err(err) => {
        //         eprintln!("Error executing script: {}", err);
        //         // Exit with error code when a script fails
        //         process::exit(1);
        //     }
        // }
        engine.run_with_scope(&mut scope, &script)?;
    }

    println!("\nAll scripts executed successfully!");
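The TODO in this hunk asks for a generalized way to add per-module objects to the Rhai `Scope`. Here is a hedged sketch of that pattern; the helper name is hypothetical, and only the Hetzner branch mirrors code that actually exists in this diff.

```rust
// Sketch only: one way to generalize the conditional scope registration above.
// `push_available_clients` is a hypothetical helper; the Hetzner calls are the
// ones introduced in this hunk, and other clients would follow the same shape.
use rhai::Scope;

fn push_available_clients(scope: &mut Scope) {
    if let Ok(cfg) = sal::hetzner::config::Config::from_env() {
        // Only scripts run with valid env config see the `hetzner` object.
        scope.push("hetzner", sal::hetzner::api::Client::new(cfg));
    }
    // Additional clients (e.g. mycelium) would be pushed here behind their own
    // configuration checks, so the scope only contains usable objects.
}
```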
packages/ai/codemonkey/Cargo.toml: 10 changes (new file)
@@ -0,0 +1,10 @@
[package]
name = "codemonkey"
version = "0.1.0"
edition = "2021"

[dependencies]
tokio = { version = "1", features = ["full"] }
async-trait = "0.1.80"
openrouter-rs = "0.4.5"
serde = { version = "1.0", features = ["derive"] }
packages/ai/codemonkey/src/lib.rs: 216 changes (new file)
@@ -0,0 +1,216 @@
use async_trait::async_trait;
use openrouter_rs::{OpenRouterClient, api::chat::{ChatCompletionRequest, Message}, types::completion::CompletionsResponse};
use std::env;
use std::error::Error;

// Re-export MessageRole for easier use in client code
pub use openrouter_rs::types::Role as MessageRole;

#[async_trait]
pub trait AIProvider {
    async fn completion(
        &mut self,
        request: CompletionRequest,
    ) -> Result<CompletionsResponse, Box<dyn Error>>;
}

pub struct CompletionRequest {
    pub model: String,
    pub messages: Vec<Message>,
    pub temperature: Option<f64>,
    pub max_tokens: Option<i64>,
    pub top_p: Option<f64>,
    pub stream: Option<bool>,
    pub stop: Option<Vec<String>>,
}

pub struct CompletionRequestBuilder<'a> {
    provider: &'a mut dyn AIProvider,
    model: String,
    messages: Vec<Message>,
    temperature: Option<f64>,
    max_tokens: Option<i64>,
    top_p: Option<f64>,
    stream: Option<bool>,
    stop: Option<Vec<String>>,
    provider_type: AIProviderType,
}

impl<'a> CompletionRequestBuilder<'a> {
    pub fn new(provider: &'a mut dyn AIProvider, model: String, messages: Vec<Message>, provider_type: AIProviderType) -> Self {
        Self {
            provider,
            model,
            messages,
            temperature: None,
            max_tokens: None,
            top_p: None,
            stream: None,
            stop: None,
            provider_type,
        }
    }

    pub fn temperature(mut self, temperature: f64) -> Self {
        self.temperature = Some(temperature);
        self
    }

    pub fn max_tokens(mut self, max_tokens: i64) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }

    pub fn top_p(mut self, top_p: f64) -> Self {
        self.top_p = Some(top_p);
        self
    }

    pub fn stream(mut self, stream: bool) -> Self {
        self.stream = Some(stream);
        self
    }

    pub fn stop(mut self, stop: Vec<String>) -> Self {
        self.stop = Some(stop);
        self
    }

    pub async fn completion(self) -> Result<CompletionsResponse, Box<dyn Error>> {
        let request = CompletionRequest {
            model: self.model,
            messages: self.messages,
            temperature: self.temperature,
            max_tokens: self.max_tokens,
            top_p: self.top_p,
            stream: self.stream,
            stop: self.stop,
        };
        self.provider.completion(request).await
    }
}

pub struct GroqAIProvider {
    client: OpenRouterClient,
}

#[async_trait]
impl AIProvider for GroqAIProvider {
    async fn completion(
        &mut self,
        request: CompletionRequest,
    ) -> Result<CompletionsResponse, Box<dyn Error>> {
        let chat_request = ChatCompletionRequest::builder()
            .model(request.model)
            .messages(request.messages)
            .temperature(request.temperature.unwrap_or(1.0))
            .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048))
            .top_p(request.top_p.unwrap_or(1.0))
            .build()?;

        let result = self.client.send_chat_completion(&chat_request).await?;
        Ok(result)
    }
}

pub struct OpenAIProvider {
    client: OpenRouterClient,
}

#[async_trait]
impl AIProvider for OpenAIProvider {
    async fn completion(
        &mut self,
        request: CompletionRequest,
    ) -> Result<CompletionsResponse, Box<dyn Error>> {
        let chat_request = ChatCompletionRequest::builder()
            .model(request.model)
            .messages(request.messages)
            .temperature(request.temperature.unwrap_or(1.0))
            .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048))
            .top_p(request.top_p.unwrap_or(1.0))
            .build()?;

        let result = self.client.send_chat_completion(&chat_request).await?;
        Ok(result)
    }
}

pub struct OpenRouterAIProvider {
    client: OpenRouterClient,
}

#[async_trait]
impl AIProvider for OpenRouterAIProvider {
    async fn completion(
        &mut self,
        request: CompletionRequest,
    ) -> Result<CompletionsResponse, Box<dyn Error>> {
        let chat_request = ChatCompletionRequest::builder()
            .model(request.model)
            .messages(request.messages)
            .temperature(request.temperature.unwrap_or(1.0))
            .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048))
            .top_p(request.top_p.unwrap_or(1.0))
            .build()?;

        let result = self.client.send_chat_completion(&chat_request).await?;
        Ok(result)
    }
}

pub struct CerebrasAIProvider {
    client: OpenRouterClient,
}

#[async_trait]
impl AIProvider for CerebrasAIProvider {
    async fn completion(
        &mut self,
        request: CompletionRequest,
    ) -> Result<CompletionsResponse, Box<dyn Error>> {
        let chat_request = ChatCompletionRequest::builder()
            .model(request.model)
            .messages(request.messages)
            .temperature(request.temperature.unwrap_or(1.0))
            .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048))
            .top_p(request.top_p.unwrap_or(1.0))
            .build()?;

        let result = self.client.send_chat_completion(&chat_request).await?;
        Ok(result)
    }
}

#[derive(PartialEq)]
pub enum AIProviderType {
    Groq,
    OpenAI,
    OpenRouter,
    Cerebras,
}

pub fn create_ai_provider(provider_type: AIProviderType) -> Result<(Box<dyn AIProvider>, AIProviderType), Box<dyn Error>> {
    match provider_type {
        AIProviderType::Groq => {
            let api_key = env::var("GROQ_API_KEY")?;
            let client = OpenRouterClient::builder().api_key(api_key).build()?;
            Ok((Box::new(GroqAIProvider { client }), AIProviderType::Groq))
        }
        AIProviderType::OpenAI => {
            let api_key = env::var("OPENAI_API_KEY")?;
            let client = OpenRouterClient::builder().api_key(api_key).build()?;
            Ok((Box::new(OpenAIProvider { client }), AIProviderType::OpenAI))
        }
        AIProviderType::OpenRouter => {
            let api_key = env::var("OPENROUTER_API_KEY")?;
            let client = OpenRouterClient::builder().api_key(api_key).build()?;
            Ok((Box::new(OpenRouterAIProvider { client }), AIProviderType::OpenRouter))
        }
        AIProviderType::Cerebras => {
            let api_key = env::var("CEREBRAS_API_KEY")?;
            let client = OpenRouterClient::builder().api_key(api_key).build()?;
            Ok((Box::new(CerebrasAIProvider { client }), AIProviderType::Cerebras))
        }
    }
}
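The four `AIProvider` impls above duplicate the same `ChatCompletionRequest` construction and defaults. A hedged sketch of how that mapping could be factored out; `to_chat_request` is a hypothetical helper that reuses only the `openrouter_rs` builder calls already present in this file.

```rust
// Sketch only: shared request mapping for the provider impls above. Assumes it
// lives in the same module as `CompletionRequest` (defined in this file).
use openrouter_rs::api::chat::ChatCompletionRequest;
use std::error::Error;

fn to_chat_request(request: CompletionRequest) -> Result<ChatCompletionRequest, Box<dyn Error>> {
    Ok(ChatCompletionRequest::builder()
        .model(request.model)
        .messages(request.messages)
        .temperature(request.temperature.unwrap_or(1.0))
        .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048))
        .top_p(request.top_p.unwrap_or(1.0))
        .build()?)
}
```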
packages/clients/hetznerclient/Cargo.toml: 12 changes (new file)
@@ -0,0 +1,12 @@
[package]
name = "sal-hetzner"
version = "0.1.0"
edition = "2024"

[dependencies]
prettytable = "0.10.0"
reqwest.workspace = true
rhai = { workspace = true, features = ["serde"] }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
thiserror.workspace = true
packages/clients/hetznerclient/src/api/error.rs: 54 changes (new file)
@@ -0,0 +1,54 @@
use std::fmt;

use serde::Deserialize;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum AppError {
    #[error("Request failed: {0}")]
    RequestError(#[from] reqwest::Error),
    #[error("API error: {0}")]
    ApiError(ApiError),
    #[error("Deserialization Error: {0:?}")]
    SerdeJsonError(#[from] serde_json::Error),
}

#[derive(Debug, Deserialize)]
pub struct ApiError {
    pub status: u16,
    pub message: String,
}

impl From<reqwest::blocking::Response> for ApiError {
    fn from(value: reqwest::blocking::Response) -> Self {
        ApiError {
            status: value.status().into(),
            message: value.text().unwrap_or("The API call returned an error.".to_string()),
        }
    }
}

impl fmt::Display for ApiError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        #[derive(Deserialize)]
        struct HetznerApiError {
            code: String,
            message: String,
        }

        #[derive(Deserialize)]
        struct HetznerApiErrorWrapper {
            error: HetznerApiError,
        }

        if let Ok(wrapper) = serde_json::from_str::<HetznerApiErrorWrapper>(&self.message) {
            write!(
                f,
                "Status: {}, Code: {}, Message: {}",
                self.status, wrapper.error.code, wrapper.error.message
            )
        } else {
            write!(f, "Status: {}: {}", self.status, self.message)
        }
    }
}
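`ApiError`'s `Display` impl first tries to parse the stored body as Hetzner's `{ "error": { "code", "message" } }` wrapper and falls back to printing the raw body. A hedged sketch exercising both branches; the sample bodies are illustrative, not captured API responses.

```rust
// Sketch only: demonstrates the two Display branches of ApiError from error.rs.
// The JSON body is illustrative; field names follow the wrapper struct above.
fn main() {
    let wrapped = ApiError {
        status: 404,
        message: r#"{"error":{"code":"SERVER_NOT_FOUND","message":"server not found"}}"#.to_string(),
    };
    println!("{}", wrapped); // Status: 404, Code: SERVER_NOT_FOUND, Message: server not found

    let plain = ApiError {
        status: 500,
        message: "upstream failure".to_string(),
    };
    println!("{}", plain); // Status: 500: upstream failure
}
```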
packages/clients/hetznerclient/src/api/mod.rs: 513 changes (new file)
@@ -0,0 +1,513 @@
|
||||
pub mod error;
|
||||
pub mod models;
|
||||
|
||||
use self::models::{
|
||||
Boot, Rescue, Server, SshKey, ServerAddonProduct, ServerAddonProductWrapper,
|
||||
AuctionServerProduct, AuctionServerProductWrapper, AuctionTransaction,
|
||||
AuctionTransactionWrapper, BootWrapper, Cancellation, CancellationWrapper,
|
||||
OrderServerBuilder, OrderServerProduct, OrderServerProductWrapper, RescueWrapped,
|
||||
ServerWrapper, SshKeyWrapper, Transaction, TransactionWrapper,
|
||||
ServerAddonTransaction, ServerAddonTransactionWrapper,
|
||||
OrderServerAddonBuilder,
|
||||
};
|
||||
use crate::api::error::ApiError;
|
||||
use crate::config::Config;
|
||||
use error::AppError;
|
||||
use reqwest::blocking::Client as HttpClient;
|
||||
use serde_json::json;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Client {
|
||||
http_client: HttpClient,
|
||||
config: Config,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
pub fn new(config: Config) -> Self {
|
||||
Self {
|
||||
http_client: HttpClient::new(),
|
||||
config,
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_response<T>(&self, response: reqwest::blocking::Response) -> Result<T, AppError>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
{
|
||||
let status = response.status();
|
||||
let body = response.text()?;
|
||||
|
||||
if status.is_success() {
|
||||
serde_json::from_str::<T>(&body).map_err(Into::into)
|
||||
} else {
|
||||
Err(AppError::ApiError(ApiError {
|
||||
status: status.as_u16(),
|
||||
message: body,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_server(&self, server_number: i32) -> Result<Server, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/server/{}", self.config.api_url, server_number))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: ServerWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.server)
|
||||
}
|
||||
|
||||
pub fn get_servers(&self) -> Result<Vec<Server>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/server", self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<ServerWrapper> = self.handle_response(response)?;
|
||||
let servers = wrapped.into_iter().map(|sw| sw.server).collect();
|
||||
Ok(servers)
|
||||
}
|
||||
|
||||
pub fn update_server_name(&self, server_number: i32, name: &str) -> Result<Server, AppError> {
|
||||
let params = [("server_name", name)];
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!("{}/server/{}", self.config.api_url, server_number))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.form(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: ServerWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.server)
|
||||
}
|
||||
|
||||
pub fn get_cancellation_data(&self, server_number: i32) -> Result<Cancellation, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!(
|
||||
"{}/server/{}/cancellation",
|
||||
self.config.api_url, server_number
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: CancellationWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.cancellation)
|
||||
}
|
||||
|
||||
pub fn cancel_server(
|
||||
&self,
|
||||
server_number: i32,
|
||||
cancellation_date: &str,
|
||||
) -> Result<Cancellation, AppError> {
|
||||
let params = [("cancellation_date", cancellation_date)];
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!(
|
||||
"{}/server/{}/cancellation",
|
||||
self.config.api_url, server_number
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.form(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: CancellationWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.cancellation)
|
||||
}
|
||||
|
||||
pub fn withdraw_cancellation(&self, server_number: i32) -> Result<(), AppError> {
|
||||
self.http_client
|
||||
.delete(format!(
|
||||
"{}/server/{}/cancellation",
|
||||
self.config.api_url, server_number
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_ssh_keys(&self) -> Result<Vec<SshKey>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/key", self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<SshKeyWrapper> = self.handle_response(response)?;
|
||||
let keys = wrapped.into_iter().map(|sk| sk.key).collect();
|
||||
Ok(keys)
|
||||
}
|
||||
|
||||
pub fn get_ssh_key(&self, fingerprint: &str) -> Result<SshKey, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/key/{}", self.config.api_url, fingerprint))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: SshKeyWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.key)
|
||||
}
|
||||
|
||||
pub fn add_ssh_key(&self, name: &str, data: &str) -> Result<SshKey, AppError> {
|
||||
let params = [("name", name), ("data", data)];
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!("{}/key", self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.form(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: SshKeyWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.key)
|
||||
}
|
||||
|
||||
pub fn update_ssh_key_name(&self, fingerprint: &str, name: &str) -> Result<SshKey, AppError> {
|
||||
let params = [("name", name)];
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!("{}/key/{}", self.config.api_url, fingerprint))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.form(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: SshKeyWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.key)
|
||||
}
|
||||
|
||||
pub fn delete_ssh_key(&self, fingerprint: &str) -> Result<(), AppError> {
|
||||
self.http_client
|
||||
.delete(format!("{}/key/{}", self.config.api_url, fingerprint))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
pub fn get_boot_configuration(&self, server_number: i32) -> Result<Boot, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/boot/{}", self.config.api_url, server_number))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: BootWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.boot)
|
||||
}
|
||||
|
||||
pub fn get_rescue_boot_configuration(&self, server_number: i32) -> Result<Rescue, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!(
|
||||
"{}/boot/{}/rescue",
|
||||
self.config.api_url, server_number
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: RescueWrapped = self.handle_response(response)?;
|
||||
Ok(wrapped.rescue)
|
||||
}
|
||||
|
||||
pub fn enable_rescue_mode(
|
||||
&self,
|
||||
server_number: i32,
|
||||
os: &str,
|
||||
authorized_keys: Option<&[String]>,
|
||||
) -> Result<Rescue, AppError> {
|
||||
let mut params = vec![("os", os)];
|
||||
if let Some(keys) = authorized_keys {
|
||||
for key in keys {
|
||||
params.push(("authorized_key[]", key));
|
||||
}
|
||||
}
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!(
|
||||
"{}/boot/{}/rescue",
|
||||
self.config.api_url, server_number
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.form(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: RescueWrapped = self.handle_response(response)?;
|
||||
Ok(wrapped.rescue)
|
||||
}
|
||||
|
||||
pub fn disable_rescue_mode(&self, server_number: i32) -> Result<Rescue, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.delete(format!(
|
||||
"{}/boot/{}/rescue",
|
||||
self.config.api_url, server_number
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: RescueWrapped = self.handle_response(response)?;
|
||||
Ok(wrapped.rescue)
|
||||
}
|
||||
|
||||
pub fn get_server_products(
|
||||
&self,
|
||||
) -> Result<Vec<OrderServerProduct>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/order/server/product", &self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<OrderServerProductWrapper> = self.handle_response(response)?;
|
||||
let products = wrapped.into_iter().map(|sop| sop.product).collect();
|
||||
Ok(products)
|
||||
}
|
||||
|
||||
pub fn get_server_product_by_id(
|
||||
&self,
|
||||
product_id: &str,
|
||||
) -> Result<OrderServerProduct, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!(
|
||||
"{}/order/server/product/{}",
|
||||
&self.config.api_url, product_id
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: OrderServerProductWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.product)
|
||||
}
|
||||
pub fn order_server(&self, order: OrderServerBuilder) -> Result<Transaction, AppError> {
|
||||
let mut params = json!({
|
||||
"product_id": order.product_id,
|
||||
"dist": order.dist,
|
||||
"location": order.location,
|
||||
"authorized_key": order.authorized_keys.unwrap_or_default(),
|
||||
});
|
||||
|
||||
if let Some(addons) = order.addons {
|
||||
params["addon"] = json!(addons);
|
||||
}
|
||||
|
||||
if let Some(test) = order.test {
|
||||
if test {
|
||||
params["test"] = json!(test);
|
||||
}
|
||||
}
|
||||
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!("{}/order/server/transaction", &self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.json(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: TransactionWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.transaction)
|
||||
}
|
||||
|
||||
pub fn get_transaction_by_id(&self, transaction_id: &str) -> Result<Transaction, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!(
|
||||
"{}/order/server/transaction/{}",
|
||||
&self.config.api_url, transaction_id
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: TransactionWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.transaction)
|
||||
}
|
||||
pub fn get_transactions(&self) -> Result<Vec<Transaction>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/order/server/transaction", &self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<TransactionWrapper> = self.handle_response(response)?;
|
||||
let transactions = wrapped.into_iter().map(|t| t.transaction).collect();
|
||||
Ok(transactions)
|
||||
}
|
||||
pub fn get_auction_server_products(&self) -> Result<Vec<AuctionServerProduct>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!(
|
||||
"{}/order/server_market/product",
|
||||
&self.config.api_url
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<AuctionServerProductWrapper> = self.handle_response(response)?;
|
||||
let products = wrapped.into_iter().map(|asp| asp.product).collect();
|
||||
Ok(products)
|
||||
}
|
||||
pub fn get_auction_server_product_by_id(&self, product_id: &str) -> Result<AuctionServerProduct, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/order/server_market/product/{}", &self.config.api_url, product_id))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: AuctionServerProductWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.product)
|
||||
}
|
||||
pub fn get_auction_transactions(&self) -> Result<Vec<AuctionTransaction>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/order/server_market/transaction", &self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<AuctionTransactionWrapper> = self.handle_response(response)?;
|
||||
let transactions = wrapped.into_iter().map(|t| t.transaction).collect();
|
||||
Ok(transactions)
|
||||
}
|
||||
|
||||
pub fn get_auction_transaction_by_id(&self, transaction_id: &str) -> Result<AuctionTransaction, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/order/server_market/transaction/{}", &self.config.api_url, transaction_id))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: AuctionTransactionWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.transaction)
|
||||
}
|
||||
|
||||
pub fn get_server_addon_products(
|
||||
&self,
|
||||
server_number: i64,
|
||||
) -> Result<Vec<ServerAddonProduct>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!(
|
||||
"{}/order/server_addon/{}/product",
|
||||
&self.config.api_url, server_number
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<ServerAddonProductWrapper> = self.handle_response(response)?;
|
||||
let products = wrapped.into_iter().map(|sap| sap.product).collect();
|
||||
Ok(products)
|
||||
}
|
||||
|
||||
pub fn order_auction_server(
|
||||
&self,
|
||||
product_id: i64,
|
||||
authorized_keys: Vec<String>,
|
||||
dist: Option<String>,
|
||||
arch: Option<String>,
|
||||
lang: Option<String>,
|
||||
comment: Option<String>,
|
||||
addons: Option<Vec<String>>,
|
||||
test: Option<bool>,
|
||||
) -> Result<AuctionTransaction, AppError> {
|
||||
let mut params: Vec<(&str, String)> = Vec::new();
|
||||
|
||||
params.push(("product_id", product_id.to_string()));
|
||||
|
||||
for key in &authorized_keys {
|
||||
params.push(("authorized_key[]", key.clone()));
|
||||
}
|
||||
|
||||
if let Some(dist) = dist {
|
||||
params.push(("dist", dist));
|
||||
}
|
||||
if let Some(arch) = arch {
|
||||
params.push(("@deprecated arch", arch));
|
||||
}
|
||||
if let Some(lang) = lang {
|
||||
params.push(("lang", lang));
|
||||
}
|
||||
if let Some(comment) = comment {
|
||||
params.push(("comment", comment));
|
||||
}
|
||||
if let Some(addons) = addons {
|
||||
for addon in addons {
|
||||
params.push(("addon[]", addon));
|
||||
}
|
||||
}
|
||||
if let Some(test) = test {
|
||||
params.push(("test", test.to_string()));
|
||||
}
|
||||
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!("{}/order/server_market/transaction", &self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.form(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: AuctionTransactionWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.transaction)
|
||||
}
|
||||
|
||||
pub fn get_server_addon_transactions(&self) -> Result<Vec<ServerAddonTransaction>, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!("{}/order/server_addon/transaction", &self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: Vec<ServerAddonTransactionWrapper> = self.handle_response(response)?;
|
||||
let transactions = wrapped.into_iter().map(|satw| satw.transaction).collect();
|
||||
Ok(transactions)
|
||||
}
|
||||
|
||||
pub fn get_server_addon_transaction_by_id(
|
||||
&self,
|
||||
transaction_id: &str,
|
||||
) -> Result<ServerAddonTransaction, AppError> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(format!(
|
||||
"{}/order/server_addon/transaction/{}",
|
||||
&self.config.api_url, transaction_id
|
||||
))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.send()?;
|
||||
|
||||
let wrapped: ServerAddonTransactionWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.transaction)
|
||||
}
|
||||
|
||||
pub fn order_server_addon(
|
||||
&self,
|
||||
order: OrderServerAddonBuilder,
|
||||
) -> Result<ServerAddonTransaction, AppError> {
|
||||
let mut params = json!({
|
||||
"server_number": order.server_number,
|
||||
"product_id": order.product_id,
|
||||
});
|
||||
|
||||
if let Some(reason) = order.reason {
|
||||
params["reason"] = json!(reason);
|
||||
}
|
||||
if let Some(gateway) = order.gateway {
|
||||
params["gateway"] = json!(gateway);
|
||||
}
|
||||
if let Some(test) = order.test {
|
||||
if test {
|
||||
params["test"] = json!(test);
|
||||
}
|
||||
}
|
||||
|
||||
let response = self
|
||||
.http_client
|
||||
.post(format!("{}/order/server_addon/transaction", &self.config.api_url))
|
||||
.basic_auth(&self.config.username, Some(&self.config.password))
|
||||
.form(¶ms)
|
||||
.send()?;
|
||||
|
||||
let wrapped: ServerAddonTransactionWrapper = self.handle_response(response)?;
|
||||
Ok(wrapped.transaction)
|
||||
}
|
||||
}
|
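The ordering calls above are thin wrappers over the Robot `/order/...` endpoints that return the unwrapped inner models. A minimal usage sketch follows; the crate path `sal_hetzner`, the `Client::new(Config)` constructor, the product id, and the SSH key are assumptions or placeholders, since only the method bodies appear in this diff.

```rust
use sal_hetzner::api::Client;
use sal_hetzner::config::Config;

fn list_and_order(client: &mut Client) {
    // List the current server-market (auction) offers.
    let products = client
        .get_auction_server_products()
        .expect("listing auction products failed");
    println!("{} auction products available", products.len());

    // Place a test order for one of them; every optional parameter except the
    // comment and the `test` flag is left unset.
    let tx = client
        .order_auction_server(
            1_234_567,                                         // product_id (placeholder)
            vec!["ssh-ed25519 AAAA... user@host".to_string()], // authorized_keys
            None,                                              // dist
            None,                                              // arch (deprecated)
            None,                                              // lang
            Some("ordered via sal-hetzner".to_string()),       // comment
            None,                                              // addons
            Some(true),                                        // test order only
        )
        .expect("ordering failed");
    println!("created market transaction {}", tx.id);
}

fn main() {
    let config = Config::from_env().expect("HETZNER_USERNAME / HETZNER_PASSWORD must be set");
    // Assumed constructor; not shown in this diff.
    let mut client = Client::new(config);
    list_and_order(&mut client);
}
```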
1894
packages/clients/hetznerclient/src/api/models.rs
Normal file
File diff suppressed because it is too large
25
packages/clients/hetznerclient/src/config.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
use std::env;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Config {
|
||||
pub username: String,
|
||||
pub password: String,
|
||||
pub api_url: String,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn from_env() -> Result<Self, String> {
|
||||
let username = env::var("HETZNER_USERNAME")
|
||||
.map_err(|_| "HETZNER_USERNAME environment variable not set".to_string())?;
|
||||
let password = env::var("HETZNER_PASSWORD")
|
||||
.map_err(|_| "HETZNER_PASSWORD environment variable not set".to_string())?;
|
||||
let api_url = env::var("HETZNER_API_URL")
|
||||
.unwrap_or_else(|_| "https://robot-ws.your-server.de".to_string());
|
||||
|
||||
Ok(Config {
|
||||
username,
|
||||
password,
|
||||
api_url,
|
||||
})
|
||||
}
|
||||
}
|
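`from_env` fails only when one of the two credential variables is missing; `HETZNER_API_URL` is optional and falls back to the public Robot endpoint. A small sketch, assuming the crate is imported as `sal_hetzner`:

```rust
use sal_hetzner::config::Config;

fn main() {
    // Normally these come from the environment; they are set here only to keep
    // the sketch self-contained. HETZNER_API_URL defaults to
    // https://robot-ws.your-server.de when unset.
    std::env::set_var("HETZNER_USERNAME", "robot-user");
    std::env::set_var("HETZNER_PASSWORD", "secret");

    match Config::from_env() {
        Ok(cfg) => println!("talking to {} as {}", cfg.api_url, cfg.username),
        Err(msg) => eprintln!("configuration error: {msg}"),
    }
}
```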
3
packages/clients/hetznerclient/src/lib.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod api;
|
||||
pub mod config;
|
||||
pub mod rhai;
|
63
packages/clients/hetznerclient/src/rhai/boot.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
use crate::api::{
|
||||
models::{Boot, Rescue},
|
||||
Client,
|
||||
};
|
||||
use rhai::{plugin::*, Engine};
|
||||
|
||||
pub fn register(engine: &mut Engine) {
|
||||
let boot_module = exported_module!(boot_api);
|
||||
engine.register_global_module(boot_module.into());
|
||||
}
|
||||
|
||||
#[export_module]
|
||||
pub mod boot_api {
|
||||
use super::*;
|
||||
use rhai::EvalAltResult;
|
||||
|
||||
#[rhai_fn(name = "get_boot_configuration", return_raw)]
|
||||
pub fn get_boot_configuration(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
) -> Result<Boot, Box<EvalAltResult>> {
|
||||
client
|
||||
.get_boot_configuration(server_number as i32)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_rescue_boot_configuration", return_raw)]
|
||||
pub fn get_rescue_boot_configuration(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
) -> Result<Rescue, Box<EvalAltResult>> {
|
||||
client
|
||||
.get_rescue_boot_configuration(server_number as i32)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "enable_rescue_mode", return_raw)]
|
||||
pub fn enable_rescue_mode(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
os: &str,
|
||||
authorized_keys: rhai::Array,
|
||||
) -> Result<Rescue, Box<EvalAltResult>> {
|
||||
let keys: Vec<String> = authorized_keys
|
||||
.into_iter()
|
||||
// Assumes every entry in the array is a string; a non-string entry will panic here.
.map(|k| k.into_string().unwrap())
|
||||
.collect();
|
||||
|
||||
client
|
||||
.enable_rescue_mode(server_number as i32, os, Some(&keys))
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "disable_rescue_mode", return_raw)]
|
||||
pub fn disable_rescue_mode(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
) -> Result<Rescue, Box<EvalAltResult>> {
|
||||
client
|
||||
.disable_rescue_mode(server_number as i32)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
}
|
54
packages/clients/hetznerclient/src/rhai/mod.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
use rhai::{Engine, EvalAltResult};
|
||||
|
||||
use crate::api::models::{
|
||||
AuctionServerProduct, AuctionTransaction, AuctionTransactionProduct, AuthorizedKey, Boot,
|
||||
Cancellation, Cpanel, HostKey, Linux, OrderAuctionServerBuilder, OrderServerAddonBuilder,
|
||||
OrderServerBuilder, OrderServerProduct, Plesk, Rescue, Server, ServerAddonProduct,
|
||||
ServerAddonResource, ServerAddonTransaction, SshKey, Transaction, TransactionProduct, Vnc,
|
||||
Windows,
|
||||
};
|
||||
|
||||
pub mod boot;
|
||||
pub mod printing;
|
||||
pub mod server;
|
||||
pub mod server_ordering;
|
||||
pub mod ssh_keys;
|
||||
|
||||
// Register the Hetzner Rhai module: custom types plus the API function modules.
|
||||
pub fn register_hetzner_module(engine: &mut Engine) -> Result<(), Box<EvalAltResult>> {
|
||||
// TODO: register types
|
||||
engine.build_type::<Server>();
|
||||
engine.build_type::<SshKey>();
|
||||
engine.build_type::<Boot>();
|
||||
engine.build_type::<Rescue>();
|
||||
engine.build_type::<Linux>();
|
||||
engine.build_type::<Vnc>();
|
||||
engine.build_type::<Windows>();
|
||||
engine.build_type::<Plesk>();
|
||||
engine.build_type::<Cpanel>();
|
||||
engine.build_type::<Cancellation>();
|
||||
engine.build_type::<OrderServerProduct>();
|
||||
engine.build_type::<Transaction>();
|
||||
engine.build_type::<AuthorizedKey>();
|
||||
engine.build_type::<TransactionProduct>();
|
||||
engine.build_type::<HostKey>();
|
||||
engine.build_type::<AuctionServerProduct>();
|
||||
engine.build_type::<AuctionTransaction>();
|
||||
engine.build_type::<AuctionTransactionProduct>();
|
||||
engine.build_type::<OrderAuctionServerBuilder>();
|
||||
engine.build_type::<OrderServerBuilder>();
|
||||
engine.build_type::<ServerAddonProduct>();
|
||||
engine.build_type::<ServerAddonTransaction>();
|
||||
engine.build_type::<ServerAddonResource>();
|
||||
engine.build_type::<OrderServerAddonBuilder>();
|
||||
|
||||
server::register(engine);
|
||||
ssh_keys::register(engine);
|
||||
boot::register(engine);
|
||||
server_ordering::register(engine);
|
||||
|
||||
// TODO: push hetzner to scope as value client:
|
||||
// scope.push("hetzner", client);
|
||||
|
||||
Ok(())
|
||||
}
|
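`register_hetzner_module` only registers types and functions; every registered function takes a `Client` as its first argument, so a script still needs a client value in its scope (the commented-out `scope.push` above). A sketch of that wiring, assuming the crate path `sal_hetzner`, a `Client::new(Config)` constructor, and that `Client` can be cloned into a Rhai scope:

```rust
use rhai::{Engine, Scope};
use sal_hetzner::api::Client;
use sal_hetzner::config::Config;
use sal_hetzner::rhai::register_hetzner_module;

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = Engine::new();
    register_hetzner_module(&mut engine)?;

    // Make a client available to scripts under the name `hetzner`.
    let client = Client::new(Config::from_env().expect("missing Hetzner credentials"));
    let mut scope = Scope::new();
    scope.push("hetzner", client);

    // `get_servers` is registered by server::register and can be called method-style.
    engine.run_with_scope(
        &mut scope,
        r#"
            let servers = hetzner.get_servers();
            print("got " + servers.len() + " servers");
        "#,
    )?;
    Ok(())
}
```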
43
packages/clients/hetznerclient/src/rhai/printing/mod.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
use rhai::{Array, Engine};
|
||||
use crate::api::models::{OrderServerProduct, AuctionServerProduct, AuctionTransaction, ServerAddonProduct, ServerAddonTransaction, Server, SshKey};
|
||||
|
||||
mod servers_table;
|
||||
mod ssh_keys_table;
|
||||
mod server_ordering_table;
|
||||
|
||||
// This will be called when we print(...) or pretty_print() an Array (with Dynamic values)
|
||||
pub fn pretty_print_dispatch(array: Array) {
|
||||
if array.is_empty() {
|
||||
println!("<empty table>");
|
||||
return;
|
||||
}
|
||||
|
||||
let first = &array[0];
|
||||
|
||||
if first.is::<Server>() {
|
||||
println!("Yeah first is server!");
|
||||
servers_table::pretty_print_servers(array);
|
||||
} else if first.is::<SshKey>() {
|
||||
ssh_keys_table::pretty_print_ssh_keys(array);
|
||||
} else if first.is::<OrderServerProduct>() {
|
||||
server_ordering_table::pretty_print_server_products(array);
|
||||
} else if first.is::<AuctionServerProduct>() {
|
||||
server_ordering_table::pretty_print_auction_server_products(array);
|
||||
} else if first.is::<AuctionTransaction>() {
|
||||
server_ordering_table::pretty_print_auction_transactions(array);
|
||||
} else if first.is::<ServerAddonProduct>() {
|
||||
server_ordering_table::pretty_print_server_addon_products(array);
|
||||
} else if first.is::<ServerAddonTransaction>() {
|
||||
server_ordering_table::pretty_print_server_addon_transactions(array);
|
||||
} else {
|
||||
// Generic fallback for other types
|
||||
for item in array {
|
||||
println!("{}", item.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register(engine: &mut Engine) {
|
||||
engine.register_fn("pretty_print", pretty_print_dispatch);
|
||||
}
|
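`pretty_print_dispatch` chooses a table renderer from the concrete type of the first element and otherwise prints each item with its `to_string()`. A tiny sketch of the fallback branch, assuming the crate path `sal_hetzner`; neither value below matches any of the specially handled types:

```rust
use rhai::Dynamic;
use sal_hetzner::rhai::printing::pretty_print_dispatch;

fn main() {
    // Neither element is a Server, SshKey, product, or transaction model, so the
    // dispatcher falls through to the generic per-item branch.
    let array: rhai::Array = vec![Dynamic::from("hello".to_string()), Dynamic::from(42_i64)];
    pretty_print_dispatch(array);
}
```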
@@ -0,0 +1,293 @@
|
||||
use prettytable::{row, Table};
|
||||
use crate::api::models::{OrderServerProduct, ServerAddonProduct, ServerAddonTransaction, ServerAddonResource};
|
||||
|
||||
pub fn pretty_print_server_products(products: rhai::Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"ID",
|
||||
"Name",
|
||||
"Description",
|
||||
"Traffic",
|
||||
"Location",
|
||||
"Price (Net)",
|
||||
"Price (Gross)",
|
||||
]);
|
||||
|
||||
for product_dyn in products {
|
||||
if let Some(product) = product_dyn.try_cast::<OrderServerProduct>() {
|
||||
let mut price_net = "N/A".to_string();
|
||||
let mut price_gross = "N/A".to_string();
|
||||
|
||||
if let Some(first_price) = product.prices.first() {
|
||||
price_net = first_price.price.net.clone();
|
||||
price_gross = first_price.price.gross.clone();
|
||||
}
|
||||
|
||||
table.add_row(row![
|
||||
product.id,
|
||||
product.name,
|
||||
product.description.join(", "),
|
||||
product.traffic,
|
||||
product.location.join(", "),
|
||||
price_net,
|
||||
price_gross,
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
||||
|
||||
pub fn pretty_print_auction_server_products(products: rhai::Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"ID",
|
||||
"Name",
|
||||
"Description",
|
||||
"Traffic",
|
||||
"Distributions",
|
||||
"Architectures",
|
||||
"Languages",
|
||||
"CPU",
|
||||
"CPU Benchmark",
|
||||
"Memory Size (GB)",
|
||||
"HDD Size (GB)",
|
||||
"HDD Text",
|
||||
"HDD Count",
|
||||
"Datacenter",
|
||||
"Network Speed",
|
||||
"Price (Net)",
|
||||
"Price (Hourly Net)",
|
||||
"Price (Setup Net)",
|
||||
"Price (VAT)",
|
||||
"Price (Hourly VAT)",
|
||||
"Price (Setup VAT)",
|
||||
"Fixed Price",
|
||||
"Next Reduce (seconds)",
|
||||
"Next Reduce Date",
|
||||
"Orderable Addons",
|
||||
]);
|
||||
|
||||
for product_dyn in products {
|
||||
if let Some(product) = product_dyn.try_cast::<crate::api::models::AuctionServerProduct>() {
|
||||
let mut addons_table = Table::new();
|
||||
addons_table.add_row(row![b => "ID", "Name", "Min", "Max", "Prices"]);
|
||||
for addon in &product.orderable_addons {
|
||||
let mut addon_prices_table = Table::new();
|
||||
addon_prices_table.add_row(row![b => "Location", "Net", "Gross", "Hourly Net", "Hourly Gross", "Setup Net", "Setup Gross"]);
|
||||
for price in &addon.prices {
|
||||
addon_prices_table.add_row(row![
|
||||
price.location,
|
||||
price.price.net,
|
||||
price.price.gross,
|
||||
price.price.hourly_net,
|
||||
price.price.hourly_gross,
|
||||
price.price_setup.net,
|
||||
price.price_setup.gross
|
||||
]);
|
||||
}
|
||||
addons_table.add_row(row![
|
||||
addon.id,
|
||||
addon.name,
|
||||
addon.min,
|
||||
addon.max,
|
||||
addon_prices_table
|
||||
]);
|
||||
}
|
||||
|
||||
table.add_row(row![
|
||||
product.id,
|
||||
product.name,
|
||||
product.description.join(", "),
|
||||
product.traffic,
|
||||
product.dist.join(", "),
|
||||
product.dist.join(", "),
|
||||
product.lang.join(", "),
|
||||
product.cpu,
|
||||
product.cpu_benchmark,
|
||||
product.memory_size,
|
||||
product.hdd_size,
|
||||
product.hdd_text,
|
||||
product.hdd_count,
|
||||
product.datacenter,
|
||||
product.network_speed,
|
||||
product.price,
|
||||
product.price_hourly.as_deref().unwrap_or("N/A"),
|
||||
product.price_setup,
|
||||
product.price_with_vat,
|
||||
product.price_hourly_with_vat.as_deref().unwrap_or("N/A"),
|
||||
product.price_setup_with_vat,
|
||||
product.fixed_price,
|
||||
product.next_reduce,
|
||||
product.next_reduce_date,
|
||||
addons_table,
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
||||
|
||||
pub fn pretty_print_server_addon_products(products: rhai::Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"ID",
|
||||
"Name",
|
||||
"Type",
|
||||
"Location",
|
||||
"Price (Net)",
|
||||
"Price (Gross)",
|
||||
"Hourly Net",
|
||||
"Hourly Gross",
|
||||
"Setup Net",
|
||||
"Setup Gross",
|
||||
]);
|
||||
|
||||
for product_dyn in products {
|
||||
if let Some(product) = product_dyn.try_cast::<ServerAddonProduct>() {
|
||||
table.add_row(row![
|
||||
product.id,
|
||||
product.name,
|
||||
product.product_type,
|
||||
product.price.location,
|
||||
product.price.price.net,
|
||||
product.price.price.gross,
|
||||
product.price.price.hourly_net,
|
||||
product.price.price.hourly_gross,
|
||||
product.price.price_setup.net,
|
||||
product.price.price_setup.gross,
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
||||
|
||||
pub fn pretty_print_auction_transactions(transactions: rhai::Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"ID",
|
||||
"Date",
|
||||
"Status",
|
||||
"Server Number",
|
||||
"Server IP",
|
||||
"Comment",
|
||||
"Product ID",
|
||||
"Product Name",
|
||||
"Product Traffic",
|
||||
"Product Distributions",
|
||||
"Product Architectures",
|
||||
"Product Languages",
|
||||
"Product CPU",
|
||||
"Product CPU Benchmark",
|
||||
"Product Memory Size (GB)",
|
||||
"Product HDD Size (GB)",
|
||||
"Product HDD Text",
|
||||
"Product HDD Count",
|
||||
"Product Datacenter",
|
||||
"Product Network Speed",
|
||||
"Product Fixed Price",
|
||||
"Product Next Reduce (seconds)",
|
||||
"Product Next Reduce Date",
|
||||
"Addons",
|
||||
]);
|
||||
|
||||
for transaction_dyn in transactions {
|
||||
if let Some(transaction) = transaction_dyn.try_cast::<crate::api::models::AuctionTransaction>() {
|
||||
let _authorized_keys_table = {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b => "Name", "Fingerprint", "Type", "Size"]);
|
||||
for key in &transaction.authorized_key {
|
||||
table.add_row(row![
|
||||
key.key.name.as_deref().unwrap_or("N/A"),
|
||||
key.key.fingerprint.as_deref().unwrap_or("N/A"),
|
||||
key.key.key_type.as_deref().unwrap_or("N/A"),
|
||||
key.key.size.map_or("N/A".to_string(), |s| s.to_string())
|
||||
]);
|
||||
}
|
||||
table
|
||||
};
|
||||
|
||||
let _host_keys_table = {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b => "Fingerprint", "Type", "Size"]);
|
||||
for key in &transaction.host_key {
|
||||
table.add_row(row![
|
||||
key.key.fingerprint.as_deref().unwrap_or("N/A"),
|
||||
key.key.key_type.as_deref().unwrap_or("N/A"),
|
||||
key.key.size.map_or("N/A".to_string(), |s| s.to_string())
|
||||
]);
|
||||
}
|
||||
table
|
||||
};
|
||||
|
||||
table.add_row(row![
|
||||
transaction.id,
|
||||
transaction.date,
|
||||
transaction.status,
|
||||
transaction.server_number.map_or("N/A".to_string(), |id| id.to_string()),
|
||||
transaction.server_ip.as_deref().unwrap_or("N/A"),
|
||||
transaction.comment.as_deref().unwrap_or("N/A"),
|
||||
transaction.product.id,
|
||||
transaction.product.name,
|
||||
transaction.product.traffic,
|
||||
transaction.product.dist,
|
||||
transaction.product.arch.as_deref().unwrap_or("N/A"),
|
||||
transaction.product.lang,
|
||||
transaction.product.cpu,
|
||||
transaction.product.cpu_benchmark,
|
||||
transaction.product.memory_size,
|
||||
transaction.product.hdd_size,
|
||||
transaction.product.hdd_text,
|
||||
transaction.product.hdd_count,
|
||||
transaction.product.datacenter,
|
||||
transaction.product.network_speed,
|
||||
transaction.product.fixed_price.unwrap_or_default().to_string(),
|
||||
transaction
|
||||
.product
|
||||
.next_reduce
|
||||
.map_or("N/A".to_string(), |r| r.to_string()),
|
||||
transaction
|
||||
.product
|
||||
.next_reduce_date
|
||||
.as_deref()
|
||||
.unwrap_or("N/A"),
|
||||
transaction.addons.join(", "),
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
||||
|
||||
pub fn pretty_print_server_addon_transactions(transactions: rhai::Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"ID",
|
||||
"Date",
|
||||
"Status",
|
||||
"Server Number",
|
||||
"Product ID",
|
||||
"Product Name",
|
||||
"Product Price",
|
||||
"Resources",
|
||||
]);
|
||||
|
||||
for transaction_dyn in transactions {
|
||||
if let Some(transaction) = transaction_dyn.try_cast::<ServerAddonTransaction>() {
|
||||
let mut resources_table = Table::new();
|
||||
resources_table.add_row(row![b => "Type", "ID"]);
|
||||
for resource in &transaction.resources {
|
||||
resources_table.add_row(row![resource.resource_type, resource.id]);
|
||||
}
|
||||
|
||||
table.add_row(row![
|
||||
transaction.id,
|
||||
transaction.date,
|
||||
transaction.status,
|
||||
transaction.server_number,
|
||||
transaction.product.id,
|
||||
transaction.product.name,
|
||||
transaction.product.price.to_string(),
|
||||
resources_table,
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
@@ -0,0 +1,30 @@
|
||||
use prettytable::{row, Table};
|
||||
use rhai::Array;
|
||||
|
||||
use super::Server;
|
||||
|
||||
pub fn pretty_print_servers(servers: Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"Number",
|
||||
"Name",
|
||||
"IP",
|
||||
"Product",
|
||||
"DC",
|
||||
"Status"
|
||||
]);
|
||||
|
||||
for server_dyn in servers {
|
||||
if let Some(server) = server_dyn.try_cast::<Server>() {
|
||||
table.add_row(row![
|
||||
server.server_number.to_string(),
|
||||
server.server_name,
|
||||
server.server_ip.unwrap_or("N/A".to_string()),
|
||||
server.product,
|
||||
server.dc,
|
||||
server.status
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
@@ -0,0 +1,26 @@
|
||||
use prettytable::{row, Table};
|
||||
use super::SshKey;
|
||||
|
||||
pub fn pretty_print_ssh_keys(keys: rhai::Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"Name",
|
||||
"Fingerprint",
|
||||
"Type",
|
||||
"Size",
|
||||
"Created At"
|
||||
]);
|
||||
|
||||
for key_dyn in keys {
|
||||
if let Some(key) = key_dyn.try_cast::<SshKey>() {
|
||||
table.add_row(row![
|
||||
key.name,
|
||||
key.fingerprint,
|
||||
key.key_type,
|
||||
key.size.to_string(),
|
||||
key.created_at
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
76
packages/clients/hetznerclient/src/rhai/server.rs
Normal file
@@ -0,0 +1,76 @@
|
||||
use crate::api::{Client, models::Server};
|
||||
use rhai::{Array, Dynamic, plugin::*};
|
||||
|
||||
pub fn register(engine: &mut Engine) {
|
||||
let server_module = exported_module!(server_api);
|
||||
engine.register_global_module(server_module.into());
|
||||
}
|
||||
|
||||
#[export_module]
|
||||
pub mod server_api {
|
||||
use crate::api::models::Cancellation;
|
||||
|
||||
use super::*;
|
||||
use rhai::EvalAltResult;
|
||||
|
||||
#[rhai_fn(name = "get_server", return_raw)]
|
||||
pub fn get_server(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
) -> Result<Server, Box<EvalAltResult>> {
|
||||
client
|
||||
.get_server(server_number as i32)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_servers", return_raw)]
|
||||
pub fn get_servers(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
|
||||
let servers = client
|
||||
.get_servers()
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
println!("number of SERVERS we got: {:#?}", servers.len());
|
||||
Ok(servers.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "update_server_name", return_raw)]
|
||||
pub fn update_server_name(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
name: &str,
|
||||
) -> Result<Server, Box<EvalAltResult>> {
|
||||
client
|
||||
.update_server_name(server_number as i32, name)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_cancellation_data", return_raw)]
|
||||
pub fn get_cancellation_data(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
) -> Result<Cancellation, Box<EvalAltResult>> {
|
||||
client
|
||||
.get_cancellation_data(server_number as i32)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "cancel_server", return_raw)]
|
||||
pub fn cancel_server(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
cancellation_date: &str,
|
||||
) -> Result<Cancellation, Box<EvalAltResult>> {
|
||||
client
|
||||
.cancel_server(server_number as i32, cancellation_date)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "withdraw_cancellation", return_raw)]
|
||||
pub fn withdraw_cancellation(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
) -> Result<(), Box<EvalAltResult>> {
|
||||
client
|
||||
.withdraw_cancellation(server_number as i32)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
}
|
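These wrappers forward to server-management methods on the underlying `Client` (`get_server`, `update_server_name`, `cancel_server`, `withdraw_cancellation`), converting the script-side `i64` numbers to `i32`. A sketch of calling those methods directly from Rust; the crate path, the `Client::new(Config)` constructor, and the concrete server number are assumptions inferred from the wrapper calls above:

```rust
use sal_hetzner::api::Client;
use sal_hetzner::config::Config;

fn rename_then_cancel(client: &mut Client) {
    let server = client.get_server(321).expect("server lookup failed");
    println!("managing server {}", server.server_number);

    client
        .update_server_name(321, "build-host-01")
        .expect("rename failed");

    // Schedule a cancellation, then withdraw it again.
    client
        .cancel_server(321, "2025-12-31")
        .expect("cancellation failed");
    client
        .withdraw_cancellation(321)
        .expect("withdrawing the cancellation failed");
}

fn main() {
    let mut client = Client::new(Config::from_env().expect("missing Hetzner credentials"));
    rename_then_cancel(&mut client);
}
```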
170
packages/clients/hetznerclient/src/rhai/server_ordering.rs
Normal file
@@ -0,0 +1,170 @@
|
||||
use crate::api::{
|
||||
Client,
|
||||
models::{
|
||||
AuctionServerProduct, AuctionTransaction, OrderAuctionServerBuilder, OrderServerBuilder,
|
||||
OrderServerProduct, ServerAddonProduct, ServerAddonTransaction, Transaction,
|
||||
},
|
||||
};
|
||||
use rhai::{Array, Dynamic, plugin::*};
|
||||
|
||||
pub fn register(engine: &mut Engine) {
|
||||
let server_order_module = exported_module!(server_order_api);
|
||||
engine.register_global_module(server_order_module.into());
|
||||
}
|
||||
|
||||
#[export_module]
|
||||
pub mod server_order_api {
|
||||
use crate::api::models::OrderServerAddonBuilder;
|
||||
|
||||
#[rhai_fn(name = "get_server_products", return_raw)]
|
||||
pub fn get_server_ordering_product_overview(
|
||||
client: &mut Client,
|
||||
) -> Result<Array, Box<EvalAltResult>> {
|
||||
let overview_servers = client
|
||||
.get_server_products()
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(overview_servers.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_server_product_by_id", return_raw)]
|
||||
pub fn get_server_ordering_product_by_id(
|
||||
client: &mut Client,
|
||||
product_id: &str,
|
||||
) -> Result<OrderServerProduct, Box<EvalAltResult>> {
|
||||
let product = client
|
||||
.get_server_product_by_id(product_id)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(product)
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "order_server", return_raw)]
|
||||
pub fn order_server(
|
||||
client: &mut Client,
|
||||
order: OrderServerBuilder,
|
||||
) -> Result<Transaction, Box<EvalAltResult>> {
|
||||
let transaction = client
|
||||
.order_server(order)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_transaction_by_id", return_raw)]
|
||||
pub fn get_transaction_by_id(
|
||||
client: &mut Client,
|
||||
transaction_id: &str,
|
||||
) -> Result<Transaction, Box<EvalAltResult>> {
|
||||
let transaction = client
|
||||
.get_transaction_by_id(transaction_id)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_transactions", return_raw)]
|
||||
pub fn get_transactions(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
|
||||
let transactions = client
|
||||
.get_transactions()
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transactions.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_auction_server_products", return_raw)]
|
||||
pub fn get_auction_server_products(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
|
||||
let products = client
|
||||
.get_auction_server_products()
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(products.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_auction_server_product_by_id", return_raw)]
|
||||
pub fn get_auction_server_product_by_id(
|
||||
client: &mut Client,
|
||||
product_id: &str,
|
||||
) -> Result<AuctionServerProduct, Box<EvalAltResult>> {
|
||||
let product = client
|
||||
.get_auction_server_product_by_id(product_id)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(product)
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_auction_transactions", return_raw)]
|
||||
pub fn get_auction_transactions(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
|
||||
let transactions = client
|
||||
.get_auction_transactions()
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transactions.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_auction_transaction_by_id", return_raw)]
|
||||
pub fn get_auction_transaction_by_id(
|
||||
client: &mut Client,
|
||||
transaction_id: &str,
|
||||
) -> Result<AuctionTransaction, Box<EvalAltResult>> {
|
||||
let transaction = client
|
||||
.get_auction_transaction_by_id(transaction_id)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_server_addon_products", return_raw)]
|
||||
pub fn get_server_addon_products(
|
||||
client: &mut Client,
|
||||
server_number: i64,
|
||||
) -> Result<Array, Box<EvalAltResult>> {
|
||||
let products = client
|
||||
.get_server_addon_products(server_number)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(products.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_server_addon_transactions", return_raw)]
|
||||
pub fn get_server_addon_transactions(
|
||||
client: &mut Client,
|
||||
) -> Result<Array, Box<EvalAltResult>> {
|
||||
let transactions = client
|
||||
.get_server_addon_transactions()
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transactions.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_server_addon_transaction_by_id", return_raw)]
|
||||
pub fn get_server_addon_transaction_by_id(
|
||||
client: &mut Client,
|
||||
transaction_id: &str,
|
||||
) -> Result<ServerAddonTransaction, Box<EvalAltResult>> {
|
||||
let transaction = client
|
||||
.get_server_addon_transaction_by_id(transaction_id)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "order_auction_server", return_raw)]
|
||||
pub fn order_auction_server(
|
||||
client: &mut Client,
|
||||
order: OrderAuctionServerBuilder,
|
||||
) -> Result<AuctionTransaction, Box<EvalAltResult>> {
|
||||
println!("Builder struct being used to order server: {:#?}", order);
|
||||
let transaction = client.order_auction_server(
|
||||
order.product_id,
|
||||
order.authorized_keys.unwrap_or(vec![]),
|
||||
order.dist,
|
||||
None,
|
||||
order.lang,
|
||||
order.comment,
|
||||
order.addon,
|
||||
order.test,
|
||||
).map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "order_server_addon", return_raw)]
|
||||
pub fn order_server_addon(
|
||||
client: &mut Client,
|
||||
order: OrderServerAddonBuilder,
|
||||
) -> Result<ServerAddonTransaction, Box<EvalAltResult>> {
|
||||
println!("Builder struct being used to order server addon: {:#?}", order);
|
||||
let transaction = client
|
||||
.order_server_addon(order)
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(transaction)
|
||||
}
|
||||
}
|
89
packages/clients/hetznerclient/src/rhai/ssh_keys.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
use crate::api::{Client, models::SshKey};
|
||||
use prettytable::{Table, row};
|
||||
use rhai::{Array, Dynamic, Engine, plugin::*};
|
||||
|
||||
pub fn register(engine: &mut Engine) {
|
||||
let ssh_keys_module = exported_module!(ssh_keys_api);
|
||||
engine.register_global_module(ssh_keys_module.into());
|
||||
}
|
||||
|
||||
#[export_module]
|
||||
pub mod ssh_keys_api {
|
||||
use super::*;
|
||||
use rhai::EvalAltResult;
|
||||
|
||||
#[rhai_fn(name = "get_ssh_keys", return_raw)]
|
||||
pub fn get_ssh_keys(client: &mut Client) -> Result<Array, Box<EvalAltResult>> {
|
||||
let ssh_keys = client
|
||||
.get_ssh_keys()
|
||||
.map_err(|e| Into::<Box<EvalAltResult>>::into(e.to_string()))?;
|
||||
Ok(ssh_keys.into_iter().map(Dynamic::from).collect())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "get_ssh_key", return_raw)]
|
||||
pub fn get_ssh_key(
|
||||
client: &mut Client,
|
||||
fingerprint: &str,
|
||||
) -> Result<SshKey, Box<EvalAltResult>> {
|
||||
client
|
||||
.get_ssh_key(fingerprint)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "add_ssh_key", return_raw)]
|
||||
pub fn add_ssh_key(
|
||||
client: &mut Client,
|
||||
name: &str,
|
||||
data: &str,
|
||||
) -> Result<SshKey, Box<EvalAltResult>> {
|
||||
client
|
||||
.add_ssh_key(name, data)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "update_ssh_key_name", return_raw)]
|
||||
pub fn update_ssh_key_name(
|
||||
client: &mut Client,
|
||||
fingerprint: &str,
|
||||
name: &str,
|
||||
) -> Result<SshKey, Box<EvalAltResult>> {
|
||||
client
|
||||
.update_ssh_key_name(fingerprint, name)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "delete_ssh_key", return_raw)]
|
||||
pub fn delete_ssh_key(
|
||||
client: &mut Client,
|
||||
fingerprint: &str,
|
||||
) -> Result<(), Box<EvalAltResult>> {
|
||||
client
|
||||
.delete_ssh_key(fingerprint)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
#[rhai_fn(name = "pretty_print")]
|
||||
pub fn pretty_print_ssh_keys(keys: Array) {
|
||||
let mut table = Table::new();
|
||||
table.add_row(row![b =>
|
||||
"Name",
|
||||
"Fingerprint",
|
||||
"Type",
|
||||
"Size",
|
||||
"Created At"
|
||||
]);
|
||||
|
||||
for key_dyn in keys {
|
||||
if let Some(key) = key_dyn.try_cast::<SshKey>() {
|
||||
table.add_row(row![
|
||||
key.name,
|
||||
key.fingerprint,
|
||||
key.key_type,
|
||||
key.size.to_string(),
|
||||
key.created_at
|
||||
]);
|
||||
}
|
||||
}
|
||||
table.printstd();
|
||||
}
|
||||
}
|
@@ -9,22 +9,22 @@ license = "Apache-2.0"
|
||||
|
||||
[dependencies]
|
||||
# HTTP client for async requests
|
||||
reqwest = { version = "0.12.15", features = ["json"] }
|
||||
reqwest = { workspace = true }
|
||||
# JSON handling
|
||||
serde_json = "1.0"
|
||||
serde_json = { workspace = true }
|
||||
# Base64 encoding/decoding for message payloads
|
||||
base64 = "0.22.1"
|
||||
base64 = { workspace = true }
|
||||
# Async runtime
|
||||
tokio = { version = "1.45.0", features = ["full"] }
|
||||
tokio = { workspace = true }
|
||||
# Rhai scripting support
|
||||
rhai = { version = "1.12.0", features = ["sync"] }
|
||||
rhai = { workspace = true }
|
||||
# Logging
|
||||
log = "0.4"
|
||||
log = { workspace = true }
|
||||
# URL encoding for API parameters
|
||||
urlencoding = "2.1.3"
|
||||
urlencoding = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
# For async testing
|
||||
tokio-test = "0.4.4"
|
||||
tokio-test = { workspace = true }
|
||||
# For temporary files in tests
|
||||
tempfile = "3.5"
|
||||
tempfile = { workspace = true }
|
@@ -11,24 +11,24 @@ categories = ["database", "api-bindings"]
|
||||
|
||||
[dependencies]
|
||||
# PostgreSQL client dependencies
|
||||
postgres = "0.19.4"
|
||||
postgres-types = "0.2.5"
|
||||
tokio-postgres = "0.7.8"
|
||||
postgres = { workspace = true }
|
||||
postgres-types = { workspace = true }
|
||||
tokio-postgres = { workspace = true }
|
||||
|
||||
# Connection pooling
|
||||
r2d2 = "0.8.10"
|
||||
r2d2_postgres = "0.18.2"
|
||||
r2d2 = { workspace = true }
|
||||
r2d2_postgres = { workspace = true }
|
||||
|
||||
# Utility dependencies
|
||||
lazy_static = "1.4.0"
|
||||
thiserror = "2.0.12"
|
||||
lazy_static = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
|
||||
# Rhai scripting support
|
||||
rhai = { version = "1.12.0", features = ["sync"] }
|
||||
rhai = { workspace = true }
|
||||
|
||||
# SAL dependencies
|
||||
sal-virt = { path = "../virt" }
|
||||
sal-virt = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.5"
|
||||
tokio-test = "0.4.4"
|
||||
tempfile = { workspace = true }
|
||||
tokio-test = { workspace = true }
|
@@ -11,11 +11,11 @@ categories = ["database", "caching", "api-bindings"]
|
||||
|
||||
[dependencies]
|
||||
# Core Redis functionality
|
||||
redis = "0.31.0"
|
||||
lazy_static = "1.4.0"
|
||||
redis = { workspace = true }
|
||||
lazy_static = { workspace = true }
|
||||
|
||||
# Rhai integration (optional)
|
||||
rhai = { version = "1.12.0", features = ["sync"], optional = true }
|
||||
rhai = { workspace = true, optional = true }
|
||||
|
||||
[features]
|
||||
default = ["rhai"]
|
||||
@@ -23,4 +23,4 @@ rhai = ["dep:rhai"]
|
||||
|
||||
[dev-dependencies]
|
||||
# For testing
|
||||
tempfile = "3.5"
|
||||
tempfile = { workspace = true }
|
26
packages/clients/rfsclient/Cargo.toml
Normal file
@@ -0,0 +1,26 @@
|
||||
[package]
|
||||
name = "sal-rfs-client"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
description = "SAL RFS Client - Client library for Remote File System server"
|
||||
repository = "https://git.threefold.info/herocode/sal"
|
||||
license = "Apache-2.0"
|
||||
keywords = ["rfs", "client", "filesystem", "remote"]
|
||||
categories = ["filesystem", "api-bindings"]
|
||||
|
||||
[dependencies]
|
||||
openapi = { path = "./openapi" }
|
||||
thiserror.workspace = true
|
||||
url.workspace = true
|
||||
reqwest = { workspace = true, features = ["json", "multipart"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json.workspace = true
|
||||
log.workspace = true
|
||||
bytes.workspace = true
|
||||
futures.workspace = true
|
||||
rhai.workspace = true
|
||||
lazy_static.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.0"
|
195
packages/clients/rfsclient/README.md
Normal file
@@ -0,0 +1,195 @@
|
||||
# RFS Client
|
||||
|
||||
A Rust client library for interacting with the Remote File System (RFS) server.
|
||||
|
||||
## Overview
|
||||
|
||||
This client library provides a user-friendly wrapper around the OpenAPI-generated client code. It offers high-level abstractions for common operations such as:
|
||||
|
||||
- Authentication and session management
|
||||
- File uploads and downloads with progress tracking
|
||||
- Block-level operations and verification
|
||||
- FList creation, monitoring, and management
|
||||
- Timeout configuration and error handling
|
||||
|
||||
## Structure
|
||||
|
||||
The library is organized as follows:
|
||||
|
||||
- `client.rs`: Main client implementation with methods for interacting with the RFS server
|
||||
- `error.rs`: Error types and handling
|
||||
- `types.rs`: Type definitions and utilities
|
||||
|
||||
## Quick Start
|
||||
|
||||
```rust
|
||||
use rfs_client::RfsClient;
|
||||
use rfs_client::types::{ClientConfig, Credentials};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Create a client with custom configuration
|
||||
let config = ClientConfig {
|
||||
base_url: "http://localhost:8080".to_string(),
|
||||
credentials: Some(Credentials {
|
||||
username: "user".to_string(),
|
||||
password: "password".to_string(),
|
||||
}),
|
||||
timeout_seconds: 60,
|
||||
};
|
||||
|
||||
let mut client = RfsClient::new(config);
|
||||
|
||||
// Authenticate
|
||||
client.authenticate().await?;
|
||||
println!("Authentication successful");
|
||||
|
||||
// Upload a file
|
||||
let file_path = "/path/to/file.txt";
|
||||
let file_hash = client.upload_file(file_path, None).await?;
|
||||
println!("File uploaded with hash: {}", file_hash);
|
||||
|
||||
// Download the file
|
||||
let output_path = "/path/to/output.txt";
|
||||
client.download_file(&file_hash, output_path, None).await?;
|
||||
println!("File downloaded to {}", output_path);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
## Feature Examples
|
||||
|
||||
### Authentication
|
||||
|
||||
```rust
|
||||
// Create a client with authentication
|
||||
let config = ClientConfig {
|
||||
base_url: "http://localhost:8080".to_string(),
|
||||
credentials: Some(Credentials {
|
||||
username: "user".to_string(),
|
||||
password: "password".to_string(),
|
||||
}),
|
||||
timeout_seconds: 30,
|
||||
};
|
||||
|
||||
let mut client = RfsClient::new(config);
|
||||
|
||||
// Authenticate with the server
|
||||
client.authenticate().await?;
|
||||
if client.is_authenticated() {
|
||||
println!("Authentication successful");
|
||||
}
|
||||
```
|
||||
|
||||
### File Management
|
||||
|
||||
```rust
|
||||
// Upload a file with options
|
||||
let upload_options = UploadOptions {
|
||||
chunk_size: Some(1024 * 1024), // 1MB chunks
|
||||
verify: true,
|
||||
};
|
||||
|
||||
let file_hash = client.upload_file("/path/to/file.txt", Some(upload_options)).await?;
|
||||
|
||||
// Download the file
|
||||
let download_options = DownloadOptions {
|
||||
verify: true,
|
||||
};
|
||||
|
||||
client.download_file(&file_hash, "/path/to/output.txt", Some(download_options)).await?;
|
||||
```
|
||||
|
||||
### FList Operations
|
||||
|
||||
```rust
|
||||
// Create an FList from a Docker image
|
||||
let options = FlistOptions {
|
||||
auth: None,
|
||||
username: None,
|
||||
password: None,
|
||||
email: None,
|
||||
server_address: Some("docker.io".to_string()),
|
||||
identity_token: None,
|
||||
registry_token: None,
|
||||
};
|
||||
|
||||
let job_id = client.create_flist("alpine:latest", Some(options)).await?;
|
||||
|
||||
// Wait for FList creation with progress tracking
|
||||
let wait_options = WaitOptions {
|
||||
timeout_seconds: 60,
|
||||
poll_interval_ms: 1000,
|
||||
progress_callback: Some(Box::new(|state| {
|
||||
println!("Progress: FList state is now {:?}", state);
|
||||
})),
|
||||
};
|
||||
|
||||
let final_state = client.wait_for_flist_creation(&job_id, Some(wait_options)).await?;
|
||||
|
||||
// List available FLists
|
||||
let flists = client.list_flists().await?;
|
||||
|
||||
// Preview an FList
|
||||
let preview = client.preview_flist("flists/user/alpine-latest.fl").await?;
|
||||
|
||||
// Download an FList
|
||||
client.download_flist("flists/user/alpine-latest.fl", "/tmp/downloaded_flist.fl").await?;
|
||||
```
|
||||
|
||||
### Block Management
|
||||
|
||||
```rust
|
||||
// List blocks
|
||||
let blocks_list = client.list_blocks(None).await?;
|
||||
|
||||
// Check if a block exists
|
||||
let exists = client.check_block("block_hash").await?;
|
||||
|
||||
// Get block content
|
||||
let block_content = client.get_block("block_hash").await?;
|
||||
|
||||
// Upload a block
|
||||
let block_hash = client.upload_block("file_hash", 0, data).await?;
|
||||
|
||||
// Verify blocks
|
||||
let request = VerifyBlocksRequest { blocks: verify_blocks };
|
||||
let verify_result = client.verify_blocks(request).await?;
|
||||
```
|
||||
|
||||
## Complete Examples
|
||||
|
||||
For more detailed examples, check the `examples` directory:
|
||||
|
||||
- `authentication.rs`: Authentication and health check examples
|
||||
- `file_management.rs`: File upload and download with verification
|
||||
- `flist_operations.rs`: Complete FList creation, monitoring, listing, preview, and download
|
||||
- `block_management.rs`: Block-level operations including listing, verification, and upload
|
||||
- `wait_for_flist.rs`: Advanced FList creation with progress monitoring
|
||||
|
||||
Run an example with:
|
||||
|
||||
```bash
|
||||
cargo run --example flist_operations
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
This library wraps the OpenAPI-generated client located in the `openapi` directory. The OpenAPI client was generated using the OpenAPI Generator CLI.
|
||||
|
||||
To build the library:
|
||||
|
||||
```bash
|
||||
cargo build
|
||||
```
|
||||
|
||||
To run tests:
|
||||
|
||||
```bash
|
||||
cargo test -- --test-threads=1
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
Apache-2.0
|
42
packages/clients/rfsclient/examples/authentication.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
use sal_rfs_client::types::{ClientConfig, Credentials};
|
||||
use sal_rfs_client::RfsClient;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Create a client with authentication credentials
|
||||
let config = ClientConfig {
|
||||
base_url: "http://localhost:8080".to_string(),
|
||||
credentials: Some(Credentials {
|
||||
username: "user".to_string(),
|
||||
password: "password".to_string(),
|
||||
}),
|
||||
timeout_seconds: 30,
|
||||
};
|
||||
|
||||
let mut client = RfsClient::new(config);
|
||||
println!("Client created with authentication credentials");
|
||||
|
||||
// Authenticate with the server
|
||||
client.authenticate().await?;
|
||||
if client.is_authenticated() {
|
||||
println!("Authentication successful");
|
||||
} else {
|
||||
println!("Authentication failed");
|
||||
}
|
||||
|
||||
// Create a client without authentication
|
||||
let config_no_auth = ClientConfig {
|
||||
base_url: "http://localhost:8080".to_string(),
|
||||
credentials: None,
|
||||
timeout_seconds: 30,
|
||||
};
|
||||
|
||||
let client_no_auth = RfsClient::new(config_no_auth);
|
||||
println!("Client created without authentication credentials");
|
||||
|
||||
// Check health endpoint (doesn't require authentication)
|
||||
let health = client_no_auth.health_check().await?;
|
||||
println!("Server health: {:?}", health);
|
||||
|
||||
Ok(())
|
||||
}
|
153
packages/clients/rfsclient/examples/block_management.rs
Normal file
@@ -0,0 +1,153 @@
|
||||
use openapi::models::{VerifyBlock, VerifyBlocksRequest};
|
||||
use sal_rfs_client::types::{ClientConfig, Credentials};
|
||||
use sal_rfs_client::RfsClient;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Create a client with authentication
|
||||
let config = ClientConfig {
|
||||
base_url: "http://localhost:8080".to_string(),
|
||||
credentials: Some(Credentials {
|
||||
username: "user".to_string(),
|
||||
password: "password".to_string(),
|
||||
}),
|
||||
timeout_seconds: 60,
|
||||
};
|
||||
|
||||
let mut client = RfsClient::new(config);
|
||||
|
||||
// Authenticate with the server
|
||||
client.authenticate().await?;
|
||||
println!("Authentication successful");
|
||||
|
||||
// Create a test file to upload for block testing
|
||||
let test_file_path = "/tmp/block_test.txt";
|
||||
let test_content = "This is a test file for RFS client block management";
|
||||
std::fs::write(test_file_path, test_content)?;
|
||||
println!("Created test file at {}", test_file_path);
|
||||
|
||||
// Upload the file to get blocks
|
||||
println!("Uploading file to get blocks...");
|
||||
let file_hash = client.upload_file(test_file_path, None).await?;
|
||||
println!("File uploaded with hash: {}", file_hash);
|
||||
|
||||
// Get blocks by file hash
|
||||
println!("Getting blocks for file hash: {}", file_hash);
|
||||
let blocks = client.get_blocks_by_hash(&file_hash).await?;
|
||||
println!("Found {} blocks for the file", blocks.blocks.len());
|
||||
|
||||
// Print block information
|
||||
for (i, block_data) in blocks.blocks.iter().enumerate() {
|
||||
println!(
|
||||
"Block {}: Hash={}, Index={}",
|
||||
i, block_data.hash, block_data.index
|
||||
);
|
||||
}
|
||||
|
||||
// Verify blocks with complete information
|
||||
println!("Verifying blocks...");
|
||||
|
||||
// Create a list of VerifyBlock objects with complete information
|
||||
let verify_blocks = blocks
|
||||
.blocks
|
||||
.iter()
|
||||
.map(|block| {
|
||||
VerifyBlock {
|
||||
block_hash: block.hash.clone(),
|
||||
block_index: block.index,
|
||||
file_hash: file_hash.clone(), // Using the actual file hash
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Create the request with the complete block information
|
||||
for block in verify_blocks.iter() {
|
||||
println!("Block: {}", block.block_hash);
|
||||
println!("Block index: {}", block.block_index);
|
||||
println!("File hash: {}", block.file_hash);
|
||||
}
|
||||
let request = VerifyBlocksRequest {
|
||||
blocks: verify_blocks,
|
||||
};
|
||||
|
||||
// Send the verification request
|
||||
let verify_result = client.verify_blocks(request).await?;
|
||||
println!(
|
||||
"Verification result: {} missing blocks",
|
||||
verify_result.missing.len()
|
||||
);
|
||||
for block in verify_result.missing.iter() {
|
||||
println!("Missing block: {}", block);
|
||||
}
|
||||
|
||||
// List blocks (list_blocks_handler)
|
||||
println!("\n1. Listing all blocks with pagination...");
|
||||
let blocks_list = client.list_blocks(None).await?;
|
||||
println!("Server has {} blocks in total", blocks_list.len());
|
||||
if !blocks_list.is_empty() {
|
||||
let first_few = blocks_list
|
||||
.iter()
|
||||
.take(3)
|
||||
.map(|s| s.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
println!("First few blocks: {}", first_few);
|
||||
}
|
||||
|
||||
// Check if a block exists (check_block_handler)
|
||||
if !blocks.blocks.is_empty() {
|
||||
let block_to_check = &blocks.blocks[0].hash;
|
||||
println!("\n2. Checking if block exists: {}", block_to_check);
|
||||
let exists = client.check_block(block_to_check).await?;
|
||||
println!("Block exists: {}", exists);
|
||||
}
|
||||
|
||||
// Get block downloads statistics (get_block_downloads_handler)
|
||||
if !blocks.blocks.is_empty() {
|
||||
let block_to_check = &blocks.blocks[0].hash;
|
||||
println!(
|
||||
"\n3. Getting download statistics for block: {}",
|
||||
block_to_check
|
||||
);
|
||||
let downloads = client.get_block_downloads(block_to_check).await?;
|
||||
println!(
|
||||
"Block has been downloaded {} times",
|
||||
downloads.downloads_count
|
||||
);
|
||||
}
|
||||
|
||||
// Get a specific block content (get_block_handler)
|
||||
if !blocks.blocks.is_empty() {
|
||||
let block_to_get = &blocks.blocks[0].hash;
|
||||
println!("\n4. Getting content for block: {}", block_to_get);
|
||||
let block_content = client.get_block(block_to_get).await?;
|
||||
println!("Retrieved block with {} bytes", block_content.len());
|
||||
}
|
||||
|
||||
// Get user blocks (get_user_blocks_handler)
|
||||
println!("\n6. Listing user blocks...");
|
||||
let user_blocks = client.get_user_blocks(Some(1), Some(10)).await?;
|
||||
println!(
|
||||
"User has {} blocks (showing page 1 with 10 per page)",
|
||||
user_blocks.total
|
||||
);
|
||||
for block in user_blocks.blocks.iter().take(3) {
|
||||
println!(" - Block: {}, Size: {}", block.hash, block.size);
|
||||
}
|
||||
|
||||
// Upload a block (upload_block_handler)
|
||||
println!("\n7. Uploading a new test block...");
|
||||
let test_block_data = b"This is test block data for direct block upload";
|
||||
let new_file_hash = "test_file_hash_for_block_upload";
|
||||
let block_index = 0;
|
||||
let block_hash = client
|
||||
.upload_block(new_file_hash, block_index, test_block_data.to_vec())
|
||||
.await?;
|
||||
println!("Uploaded block with hash: {}", block_hash);
|
||||
|
||||
// Clean up
|
||||
std::fs::remove_file(test_file_path)?;
|
||||
println!("Test file cleaned up");
|
||||
|
||||
Ok(())
|
||||
}
|
66
packages/clients/rfsclient/examples/file_management.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use sal_rfs_client::types::{ClientConfig, Credentials, DownloadOptions, UploadOptions};
|
||||
use sal_rfs_client::RfsClient;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Create a client with authentication
|
||||
let config = ClientConfig {
|
||||
base_url: "http://localhost:8080".to_string(),
|
||||
credentials: Some(Credentials {
|
||||
username: "user".to_string(),
|
||||
password: "password".to_string(),
|
||||
}),
|
||||
timeout_seconds: 60,
|
||||
};
|
||||
|
||||
let mut client = RfsClient::new(config);
|
||||
|
||||
// Authenticate with the server
|
||||
client.authenticate().await?;
|
||||
println!("Authentication successful");
|
||||
|
||||
// Create a test file to upload
|
||||
let test_file_path = "/tmp/test_upload.txt";
|
||||
std::fs::write(test_file_path, "This is a test file for RFS client upload")?;
|
||||
println!("Created test file at {}", test_file_path);
|
||||
|
||||
// Upload the file with options
|
||||
println!("Uploading file...");
|
||||
let upload_options = UploadOptions {
|
||||
chunk_size: Some(1024 * 1024), // 1MB chunks
|
||||
verify: true,
|
||||
};
|
||||
|
||||
let file_hash = client
|
||||
.upload_file(test_file_path, Some(upload_options))
|
||||
.await?;
|
||||
println!("File uploaded with hash: {}", file_hash);
|
||||
|
||||
// Download the file
|
||||
let download_path = "/tmp/test_download.txt";
|
||||
println!("Downloading file to {}...", download_path);
|
||||
|
||||
let download_options = DownloadOptions { verify: true };
|
||||
|
||||
client
|
||||
.download_file(&file_hash, download_path, Some(download_options))
|
||||
.await?;
|
||||
println!("File downloaded to {}", download_path);
|
||||
|
||||
// Verify the downloaded file matches the original
|
||||
let original_content = std::fs::read_to_string(test_file_path)?;
|
||||
let downloaded_content = std::fs::read_to_string(download_path)?;
|
||||
|
||||
if original_content == downloaded_content {
|
||||
println!("File contents match! Download successful.");
|
||||
} else {
|
||||
println!("ERROR: File contents do not match!");
|
||||
}
|
||||
|
||||
// Clean up test files
|
||||
std::fs::remove_file(test_file_path)?;
|
||||
std::fs::remove_file(download_path)?;
|
||||
println!("Test files cleaned up");
|
||||
|
||||
Ok(())
|
||||
}
|
176
packages/clients/rfsclient/examples/flist_operations.rs
Normal file
@@ -0,0 +1,176 @@
|
||||
use sal_rfs_client::types::{ClientConfig, Credentials, FlistOptions, WaitOptions};
|
||||
use sal_rfs_client::RfsClient;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let parent_dir = "flists";
    // Create a client with authentication
    let config = ClientConfig {
        base_url: "http://localhost:8080".to_string(),
        credentials: Some(Credentials {
            username: "user".to_string(),
            password: "password".to_string(),
        }),
        timeout_seconds: 60,
    };

    let mut client = RfsClient::new(config);

    // Authenticate with the server
    client.authenticate().await?;
    println!("Authentication successful");

    println!("\n1. CREATE FLIST - Creating an FList from a Docker image");
    let image_name = "alpine:latest";
    println!("Creating FList for image: {}", image_name);

    // Use FlistOptions to specify additional parameters
    let options = FlistOptions {
        auth: None,
        username: None,
        password: None,
        email: None,
        server_address: Some("docker.io".to_string()),
        identity_token: None,
        registry_token: None,
    };

    // Create the FList and handle potential conflict error
    let job_id = match client.create_flist(&image_name, Some(options)).await {
        Ok(id) => {
            println!("FList creation started with job ID: {}", id);
            Some(id)
        }
        Err(e) => {
            if e.to_string().contains("Conflict") {
                println!("FList already exists");
                None
            } else {
                return Err(e.into());
            }
        }
    };

    // 2. Check FList state if we have a job ID
    if let Some(job_id) = &job_id {
        println!("\n2. GET FLIST STATE - Checking FList creation state");
        let state = client.get_flist_state(job_id).await?;
        println!("Current FList state: {:?}", state.flist_state);

        // 3. Wait for FList creation with progress reporting
        println!("\n3. WAIT FOR FLIST CREATION - Waiting for FList to be created with progress reporting");
        let wait_options = WaitOptions {
            timeout_seconds: 60, // Shorter timeout for the example
            poll_interval_ms: 1000,
            progress_callback: Some(Box::new(|state| {
                println!("Progress: FList state is now {:?}", state);
                // No return value needed (returns unit type)
            })),
        };

        // Wait for the FList to be created (with a timeout)
        match client
            .wait_for_flist_creation(job_id, Some(wait_options))
            .await
        {
            Ok(final_state) => {
                println!("FList creation completed with state: {:?}", final_state);
            }
            Err(e) => {
                println!("Error waiting for FList creation: {}", e);
                // Continue with the example even if waiting fails
            }
        };
    }

    // 4. List all available FLists
    println!("\n4. LIST FLISTS - Listing all available FLists");

    // Variable to store the FList path for preview and download
    let mut flist_path_for_preview: Option<String> = None;

    match client.list_flists().await {
        Ok(flists) => {
            println!("Found {} FList categories", flists.len());

            for (category, files) in &flists {
                println!("Category: {}", category);
                for file in files.iter().take(2) {
                    // Show only first 2 files per category
                    println!(" - {} (size: {} bytes)", file.name, file.size);

                    // Save the first FList path for preview
                    if flist_path_for_preview.is_none() {
                        let path = format!("{}/{}/{}", parent_dir, category, file.name);
                        flist_path_for_preview = Some(path);
                    }
                }
                if files.len() > 2 {
                    println!(" - ... and {} more files", files.len() - 2);
                }
            }

            // 5. Preview an FList if we found one
            if let Some(ref flist_path) = flist_path_for_preview {
                println!("\n5. PREVIEW FLIST - Previewing FList: {}", flist_path);
                match client.preview_flist(flist_path).await {
                    Ok(preview) => {
                        println!("FList preview for {}:", flist_path);
                        println!(" - Checksum: {}", preview.checksum);
                        println!(" - Metadata: {}", preview.metadata);

                        // Display content (list of strings)
                        if !preview.content.is_empty() {
                            println!(" - Content entries:");
                            for (i, entry) in preview.content.iter().enumerate().take(5) {
                                println!(" {}. {}", i + 1, entry);
                            }
                            if preview.content.len() > 5 {
                                println!(" ... and {} more entries", preview.content.len() - 5);
                            }
                        }
                    }
                    Err(e) => println!("Error previewing FList: {}", e),
                }
            } else {
                println!("No FLists available for preview");
            }
        }
        Err(e) => println!("Error listing FLists: {}", e),
    }

    // 6. DOWNLOAD FLIST - Downloading an FList to a local file
    if let Some(ref flist_path) = flist_path_for_preview {
        println!("\n6. DOWNLOAD FLIST - Downloading FList: {}", flist_path);

        // Create a temporary output path for the downloaded FList
        let output_path = "/tmp/downloaded_flist.fl";

        match client.download_flist(flist_path, output_path).await {
            Ok(_) => {
                println!("FList successfully downloaded to {}", output_path);

                // Get file size
                match std::fs::metadata(output_path) {
                    Ok(metadata) => println!("Downloaded file size: {} bytes", metadata.len()),
                    Err(e) => println!("Error getting file metadata: {}", e),
                }
            }
            Err(e) => println!("Error downloading FList: {}", e),
        }
    } else {
        println!("\n6. DOWNLOAD FLIST - No FList available for download");
    }

    println!("\nAll FList operations demonstrated:");
    println!("1. create_flist - Create a new FList from a Docker image");
    println!("2. get_flist_state - Check the state of an FList creation job");
    println!(
        "3. wait_for_flist_creation - Wait for an FList to be created with progress reporting"
    );
    println!("4. list_flists - List all available FLists");
    println!("5. preview_flist - Preview the content of an FList");
    println!("6. download_flist - Download an FList to a local file");

    Ok(())
}
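A minimal sketch, not part of the committed example, of the same Conflict handling factored into a reusable helper: the name ensure_flist and the timeout values are illustrative assumptions, while RfsClient, WaitOptions, create_flist and wait_for_flist_creation are exactly the items the example already uses.

use sal_rfs_client::types::WaitOptions;
use sal_rfs_client::RfsClient;

// Sketch: create the FList only if the server does not already have one, then wait for it.
// Treating a "Conflict" error as "already exists" mirrors step 1 of the example above.
async fn ensure_flist(
    client: &mut RfsClient,
    image_name: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    let job_id = match client.create_flist(image_name, None).await {
        Ok(id) => Some(id),
        Err(e) if e.to_string().contains("Conflict") => None,
        Err(e) => return Err(e.into()),
    };

    if let Some(job_id) = &job_id {
        // Arbitrary example values; tune as needed.
        let wait = WaitOptions {
            timeout_seconds: 300,
            poll_interval_ms: 1000,
            progress_callback: None,
        };
        client
            .wait_for_flist_creation(job_id, Some(wait))
            .await
            .map_err(|e| -> Box<dyn std::error::Error> { Box::new(e) })?;
    }
    Ok(())
}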
64
packages/clients/rfsclient/examples/wait_for_flist.rs
Normal file
@@ -0,0 +1,64 @@
use openapi::models::FlistState;
use sal_rfs_client::types::{ClientConfig, Credentials, WaitOptions};
use sal_rfs_client::RfsClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Create a client with authentication
    let config = ClientConfig {
        base_url: "http://localhost:8080".to_string(),
        credentials: Some(Credentials {
            username: "user".to_string(),
            password: "password".to_string(),
        }),
        timeout_seconds: 60,
    };

    let mut client = RfsClient::new(config);

    // Authenticate with the server
    client.authenticate().await?;
    println!("Authentication successful");

    // Create an FList from a Docker image
    let image_name = "redis:latest";
    println!("Creating FList for image: {}", image_name);

    let job_id = client.create_flist(&image_name, None).await?;
    println!("FList creation started with job ID: {}", job_id);

    // Set up options for waiting with progress reporting
    let options = WaitOptions {
        timeout_seconds: 600,   // 10 minutes timeout
        poll_interval_ms: 2000, // Check every 2 seconds
        progress_callback: Some(Box::new(|state| match state {
            FlistState::FlistStateInProgress(info) => {
                println!(
                    "Progress: {:.1}% - {}",
                    info.in_progress.progress, info.in_progress.msg
                );
            }
            FlistState::FlistStateStarted(_) => {
                println!("FList creation started...");
            }
            FlistState::FlistStateAccepted(_) => {
                println!("FList creation request accepted...");
            }
            _ => println!("State: {:?}", state),
        })),
    };

    // Wait for the FList to be created
    println!("Waiting for FList creation to complete...");

    // Use ? operator to propagate errors properly
    let state = client
        .wait_for_flist_creation(&job_id, Some(options))
        .await
        .map_err(|e| -> Box<dyn std::error::Error> { Box::new(e) })?;

    println!("FList created successfully!");
    println!("Final state: {:?}", state);

    Ok(())
}
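A minimal sketch, not part of the committed example, of the natural follow-up once wait_for_flist_creation returns: find the new FList with list_flists and fetch it with download_flist, as steps 4 and 6 of the flist_operations example do. The helper name download_matching_flist, the "flists" parent directory, and the error boxing via map_err are assumptions modelled on the two examples rather than documented guarantees.

use sal_rfs_client::RfsClient;

// Sketch: locate an FList whose name contains `needle` and download it to `output_path`.
// The "flists" parent/category/name layout is the one assumed by flist_operations.rs.
async fn download_matching_flist(
    client: &mut RfsClient,
    needle: &str,
    output_path: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    let flists = client
        .list_flists()
        .await
        .map_err(|e| -> Box<dyn std::error::Error> { Box::new(e) })?;

    for (category, files) in &flists {
        for file in files {
            if file.name.contains(needle) {
                let path = format!("flists/{}/{}", category, file.name);
                client
                    .download_flist(&path, output_path)
                    .await
                    .map_err(|e| -> Box<dyn std::error::Error> { Box::new(e) })?;
                println!("Downloaded {} to {}", path, output_path);
                return Ok(());
            }
        }
    }
    println!("No FList matching {:?} found", needle);
    Ok(())
}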
1
packages/clients/rfsclient/openapi.json
Normal file
File diff suppressed because one or more lines are too long
3
packages/clients/rfsclient/openapi/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
/target/
**/*.rs.bk
Cargo.lock
23
packages/clients/rfsclient/openapi/.openapi-generator-ignore
Normal file
@@ -0,0 +1,23 @@
# OpenAPI Generator Ignore
# Generated by openapi-generator https://github.com/openapitools/openapi-generator

# Use this file to prevent files from being overwritten by the generator.
# The patterns follow closely to .gitignore or .dockerignore.

# As an example, the C# client generator defines ApiClient.cs.
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
#ApiClient.cs

# You can match any string of characters against a directory, file or extension with a single asterisk (*):
#foo/*/qux
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux

# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
#foo/**/qux
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux

# You can also negate patterns with an exclamation (!).
# For example, you can ignore all files in a docs folder with the file extension .md:
#docs/*.md
# Then explicitly reverse the ignore rule for a single file:
#!docs/README.md
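As a concrete illustration of the pattern syntax above, a hypothetical ignore list for this crate could pin a few generated files so that re-running openapi-generator does not overwrite local edits (the file names come from the FILES manifest below; whether to pin any of them is a project decision this diff does not make):

# Hypothetical entries - protect locally edited files from regeneration
git_push.sh
.travis.yml
docs/*.md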
125
packages/clients/rfsclient/openapi/.openapi-generator/FILES
Normal file
@@ -0,0 +1,125 @@
.gitignore
.travis.yml
Cargo.toml
README.md
docs/AuthenticationApi.md
docs/BlockDownloadsResponse.md
docs/BlockInfo.md
docs/BlockManagementApi.md
docs/BlockUploadedResponse.md
docs/BlocksResponse.md
docs/DirListTemplate.md
docs/DirLister.md
docs/ErrorTemplate.md
docs/FileDownloadRequest.md
docs/FileInfo.md
docs/FileManagementApi.md
docs/FileUploadResponse.md
docs/FlistBody.md
docs/FlistManagementApi.md
docs/FlistState.md
docs/FlistStateAccepted.md
docs/FlistStateCreated.md
docs/FlistStateInProgress.md
docs/FlistStateInfo.md
docs/FlistStateResponse.md
docs/FlistStateStarted.md
docs/HealthResponse.md
docs/Job.md
docs/ListBlocksParams.md
docs/ListBlocksResponse.md
docs/PreviewResponse.md
docs/ResponseError.md
docs/ResponseErrorBadRequest.md
docs/ResponseErrorConflict.md
docs/ResponseErrorForbidden.md
docs/ResponseErrorNotFound.md
docs/ResponseErrorTemplateError.md
docs/ResponseErrorUnauthorized.md
docs/ResponseResult.md
docs/ResponseResultBlockUploaded.md
docs/ResponseResultDirTemplate.md
docs/ResponseResultFileUploaded.md
docs/ResponseResultFlistCreated.md
docs/ResponseResultFlistState.md
docs/ResponseResultFlists.md
docs/ResponseResultPreviewFlist.md
docs/ResponseResultRes.md
docs/ResponseResultSignedIn.md
docs/SignInBody.md
docs/SignInResponse.md
docs/SystemApi.md
docs/TemplateErr.md
docs/TemplateErrBadRequest.md
docs/TemplateErrInternalServerError.md
docs/TemplateErrNotFound.md
docs/UploadBlockParams.md
docs/UserBlockInfo.md
docs/UserBlocksResponse.md
docs/VerifyBlock.md
docs/VerifyBlocksRequest.md
docs/VerifyBlocksResponse.md
docs/WebsiteServingApi.md
git_push.sh
src/apis/authentication_api.rs
src/apis/block_management_api.rs
src/apis/configuration.rs
src/apis/file_management_api.rs
src/apis/flist_management_api.rs
src/apis/mod.rs
src/apis/system_api.rs
src/apis/website_serving_api.rs
src/lib.rs
src/models/block_downloads_response.rs
src/models/block_info.rs
src/models/block_uploaded_response.rs
src/models/blocks_response.rs
src/models/dir_list_template.rs
src/models/dir_lister.rs
src/models/error_template.rs
src/models/file_download_request.rs
src/models/file_info.rs
src/models/file_upload_response.rs
src/models/flist_body.rs
src/models/flist_state.rs
src/models/flist_state_accepted.rs
src/models/flist_state_created.rs
src/models/flist_state_in_progress.rs
src/models/flist_state_info.rs
src/models/flist_state_response.rs
src/models/flist_state_started.rs
src/models/health_response.rs
src/models/job.rs
src/models/list_blocks_params.rs
src/models/list_blocks_response.rs
src/models/mod.rs
src/models/preview_response.rs
src/models/response_error.rs
src/models/response_error_bad_request.rs
src/models/response_error_conflict.rs
src/models/response_error_forbidden.rs
src/models/response_error_not_found.rs
src/models/response_error_template_error.rs
src/models/response_error_unauthorized.rs
src/models/response_result.rs
src/models/response_result_block_uploaded.rs
src/models/response_result_dir_template.rs
src/models/response_result_file_uploaded.rs
src/models/response_result_flist_created.rs
src/models/response_result_flist_state.rs
src/models/response_result_flists.rs
src/models/response_result_preview_flist.rs
src/models/response_result_res.rs
src/models/response_result_signed_in.rs
src/models/sign_in_body.rs
src/models/sign_in_response.rs
src/models/template_err.rs
src/models/template_err_bad_request.rs
src/models/template_err_internal_server_error.rs
src/models/template_err_not_found.rs
src/models/upload_block_params.rs
src/models/user_block_info.rs
src/models/user_blocks_response.rs
src/models/verify_block.rs
src/models/verify_blocks_request.rs
src/models/verify_blocks_response.rs
@@ -0,0 +1 @@
7.13.0
Some files were not shown because too many files have changed in this diff