@@ -16,3 +16,9 @@ serde = { version = "1.0", features = ["derive"] }
redis = { version = "0.23.0", features = ["tokio-comp"] }
tokio = { version = "1.28.0", features = ["full"] }
sal = { git = "https://git.ourworld.tf/herocode/sal.git", branch = "main" }
chacha20poly1305 = "0.10.1"
blake3 = "1.3.1"
csv = "1.1"
rand = "0.9.1"
ipfs-api-backend-hyper = "0.6"
ipfs-api = { version = "0.17.0", default-features = false, features = ["with-hyper-tls"] }
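The new dependencies pair the `ipfs-api` facade with the `ipfs-api-backend-hyper` transport (TLS via `with-hyper-tls`). As a minimal sketch of this client in use, assuming a local IPFS daemon on the default port 5001 (mirroring the `IpfsClient::default()` and `add` calls that appear later in this diff):

```rust
use ipfs_api::{IpfsApi, IpfsClient};
use std::io::Cursor;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // IpfsClient::default() targets the local daemon at http://localhost:5001
    let client = IpfsClient::default();
    // add() takes any Read source; the response carries the content hash (CID)
    let res = client.add(Cursor::new(b"hello ipfs".to_vec())).await?;
    println!("added: {}", res.hash);
    Ok(())
}
```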
@@ -6,6 +6,9 @@ use crate::error::{DocTreeError, Result};
use crate::storage::RedisStorage;
use crate::utils::{name_fix, markdown_to_html, ensure_md_extension};
use crate::include::process_includes;
use rand::Rng;
use ipfs_api::{IpfsApi, IpfsClient};
// use chacha20poly1305::aead::KeyInit; // 0.10 replaced the old `NewAead` trait

/// Collection represents a collection of markdown pages and files
#[derive(Clone)]
@@ -423,6 +426,136 @@ impl Collection {
    }
}

impl Collection {
    /// Exports files and images from the collection to IPFS and generates a CSV manifest.
    ///
    /// Content encryption with ChaCha20Poly1305 is scaffolded below but currently
    /// disabled, so files are added to IPFS unencrypted.
    ///
    /// # Arguments
    ///
    /// * `output_csv_path` - The path to the output CSV file.
    ///
    /// # Returns
    ///
    /// Ok(()) on success or an error.
    pub async fn export_to_ipfs(&self, output_csv_path: &Path) -> Result<()> {
        use blake3::Hasher;
        use csv::Writer;
        use tokio::fs::File;
        use tokio::io::AsyncReadExt;
        // Imports for the (currently disabled) encryption path:
        // use chacha20poly1305::{ChaCha20Poly1305, aead::Aead};
        // use chacha20poly1305::aead::generic_array::GenericArray;
        // use rand::rngs::OsRng;

        // Create the output directory if it doesn't exist
        if let Some(parent) = output_csv_path.parent() {
            tokio::fs::create_dir_all(parent).await.map_err(DocTreeError::IoError)?;
        }

        // Create the CSV writer and write the header row
        let mut writer = Writer::from_path(output_csv_path)
            .map_err(|e| DocTreeError::CsvError(e.to_string()))?;
        writer
            .write_record(&["collectionname", "filename", "blakehash", "ipfshash", "size"])
            .map_err(|e| DocTreeError::CsvError(e.to_string()))?;

        // Connect to the local IPFS API (IpfsClient::default() targets http://localhost:5001)
        let ipfs = IpfsClient::default();

        // Get the list of pages and files
        let pages = self.page_list()?;
        let files = self.file_list()?;

        // Combine the lists
        let mut entries = pages;
        entries.extend(files);

        for entry_name in entries {
            // Get the relative path from Redis
            let relative_path = self
                .storage
                .get_collection_entry(&self.name, &entry_name)
                .map_err(|_| DocTreeError::FileNotFound(entry_name.clone()))?;

            let file_path = self.path.join(&relative_path);

            // Read the file content, skipping entries that cannot be opened or read
            let mut file = match File::open(&file_path).await {
                Ok(file) => file,
                Err(e) => {
                    eprintln!("Error opening file {:?}: {}", file_path, e);
                    continue;
                }
            };
            let mut content = Vec::new();
            let size = match file.read_to_end(&mut content).await {
                Ok(size) => size,
                Err(e) => {
                    eprintln!("Error reading file {:?}: {}", file_path, e);
                    continue;
                }
            };

            // Calculate the Blake3 hash of the content
            let mut hasher = Hasher::new();
            hasher.update(&content);
            let blake_hash = hasher.finalize();
            let blake_hash_hex = blake_hash.to_hex().to_string();

            // Use the Blake3 hash as the ChaCha20Poly1305 key (encryption currently disabled)
            let _key = blake_hash.as_bytes();
            // let cipher = ChaCha20Poly1305::new_from_slice(&_key[..32])
            //     .map_err(|_| DocTreeError::EncryptionError("Invalid key size".to_string()))?;

            // Generate a random nonce
            // let mut nonce = [0u8; 12];
            // OsRng.fill_bytes(&mut nonce);

            // Encrypt the content
            // let encrypted_content = match cipher.encrypt(GenericArray::from_slice(&nonce), content.as_ref()) {
            //     Ok(encrypted) => encrypted,
            //     Err(e) => {
            //         eprintln!("Error encrypting file {:?}: {}", file_path, e);
            //         continue;
            //     }
            // };

            // Add the (currently unencrypted) content to IPFS
            let ipfs_path = match ipfs.add(std::io::Cursor::new(content)).await {
                Ok(path) => path,
                Err(e) => {
                    eprintln!("Error adding file to IPFS {:?}: {}", file_path, e);
                    continue;
                }
            };
            let ipfs_hash = ipfs_path.hash.to_string();

            // Write the record to the CSV manifest
            if let Err(e) = writer.write_record(&[
                &self.name,
                &relative_path,
                &blake_hash_hex,
                &ipfs_hash,
                &size.to_string(),
            ]) {
                eprintln!("Error writing CSV record for {:?}: {}", file_path, e);
                continue;
            }
        }

        // Flush the CSV writer
        writer.flush().map_err(|e| DocTreeError::CsvError(e.to_string()))?;

        Ok(())
    }
}

impl CollectionBuilder {
    /// Set the storage backend
    ///

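The encryption path in `export_to_ipfs` above is scaffolded but commented out. Below is a minimal sketch, assuming the pinned chacha20poly1305 0.10 API (`KeyInit`/`Aead`/`AeadCore` traits, `aead::OsRng`), of how it could be wired up. `encrypt_content` is a hypothetical helper, not part of this diff, and the returned nonce would have to be persisted (for example as an extra CSV column) or the ciphertext can never be decrypted:

```rust
use chacha20poly1305::{
    aead::{Aead, AeadCore, KeyInit, OsRng},
    ChaCha20Poly1305,
};

/// Hypothetical helper: encrypt `content` with a key derived from its Blake3 hash.
/// Returns (nonce, ciphertext); the caller must store the nonce for decryption.
fn encrypt_content(blake_key: &[u8; 32], content: &[u8]) -> Result<(Vec<u8>, Vec<u8>), String> {
    // The 32-byte Blake3 hash is exactly the ChaCha20Poly1305 key size
    let cipher = ChaCha20Poly1305::new_from_slice(blake_key)
        .map_err(|_| "Invalid key size".to_string())?;
    // 96-bit random nonce, as ChaCha20Poly1305 requires
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);
    let ciphertext = cipher
        .encrypt(&nonce, content)
        .map_err(|e| format!("Encryption failed: {}", e))?;
    Ok((nonce.to_vec(), ciphertext))
}
```

Deriving the key from the content's own hash is convergent encryption: anyone who can guess the plaintext can re-derive the key, so this hides content only from readers who do not already have it.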
@@ -530,6 +530,35 @@ impl DocTree {

        Ok(())
    }

    /// Exports all collections to IPFS and generates one CSV manifest per collection.
    ///
    /// # Arguments
    ///
    /// * `output_dir` - The directory to save the output CSV files.
    ///
    /// # Returns
    ///
    /// Ok(()) on success or an error.
    pub async fn export_collections_to_ipfs<P: AsRef<Path>>(&self, output_dir: P) -> Result<()> {
        use tokio::fs;

        let output_dir = output_dir.as_ref();

        // Create the output directory if it doesn't exist
        fs::create_dir_all(output_dir).await.map_err(DocTreeError::IoError)?;

        for (name, collection) in &self.collections {
            let csv_file_path = output_dir.join(format!("{}.csv", name));
            println!("DEBUG: Exporting collection '{}' to IPFS and generating CSV at {:?}", name, csv_file_path);
            if let Err(e) = collection.export_to_ipfs(&csv_file_path).await {
                eprintln!("Error exporting collection '{}': {}", name, e);
                // Continue with the next collection
            }
        }

        Ok(())
    }
}

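A sketch of driving the new export from an async caller. Constructing the `DocTree` is outside this diff, so the function below assumes an already-built instance:

```rust
use std::path::Path;

// Assumes a DocTree built elsewhere (e.g. via DocTreeBuilder, not shown in this diff).
async fn export_all(tree: &DocTree) -> Result<()> {
    // Writes one <collection-name>.csv manifest per collection into ./export,
    // adding each collection's files to the local IPFS node along the way.
    tree.export_collections_to_ipfs(Path::new("./export")).await
}
```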
impl DocTreeBuilder {
@@ -713,9 +742,6 @@ impl DocTreeBuilder {
    }




    /// Create a new DocTree instance
    ///
    /// For backward compatibility, it also accepts path and name parameters

@@ -42,6 +42,18 @@ pub enum DocTreeError {
    /// Redis error
    #[error("Redis error: {0}")]
    RedisError(String),

    /// CSV error
    #[error("CSV error: {0}")]
    CsvError(String),

    /// IPFS error
    #[error("IPFS error: {0}")]
    IpfsError(String),

    /// Encryption error
    #[error("Encryption error: {0}")]
    EncryptionError(String),
}

/// Result type alias for doctree operations

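The `#[error(...)]` attributes indicate `DocTreeError` derives `thiserror::Error` (declared earlier in error.rs, outside this hunk), so the new variants format like this (hypothetical usage):

```rust
// Assuming the thiserror derive on DocTreeError:
let e = DocTreeError::IpfsError("connection refused".to_string());
assert_eq!(e.to_string(), "IPFS error: connection refused");
```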
@@ -4,7 +4,6 @@
//! and processing includes between documents.

// Import lazy_static for global state
extern crate lazy_static;

mod error;
mod storage;