feat: migrate to tracing

This commit is contained in: (branch list not captured in this export)
2025-06-29 11:07:06 +03:00
parent 672e2fff59
commit f067da9835
6 changed files with 239 additions and 40 deletions

View File

@@ -1,5 +1,6 @@
use anyhow::{Context, Result, bail};
use std::fs;
use tracing::{info, instrument};
pub mod cli;
pub mod config;
@@ -9,17 +10,18 @@ use cli::Args;
use config::Config;
/// The main application logic.
#[instrument(skip_all, fields(
config_file = %args.input.display(),
output_dir = %args.output.display()
))]
pub async fn run(args: Args) -> Result<()> {
println!("▶️ Starting ruleset processor...");
println!(" Config file: {}", args.input.display());
println!(" Output directory: {}", args.output.display());
println!(" Concurrency level: {}", args.concurrency);
info!("Starting ruleset processor");
// Load and parse the configuration file into strongly-typed structs.
// Load and parse the configuration file.
let config = Config::load(&args.input)
.with_context(|| format!("Failed to load config from {}", args.input.display()))?;
// Ensure the output directory exists before any operations.
// Ensure the output directory exists.
fs::create_dir_all(&args.output).with_context(|| {
format!(
"Failed to create output directory '{}'",
@@ -27,33 +29,34 @@ pub async fn run(args: Args) -> Result<()> {
)
})?;
// Determine the set of files that should exist based on the config.
// Determine the set of files that should exist.
let expected_files = config
.get_expected_files(&args.output)
.context("Failed to determine expected files from config")?;
// Clean up any files in the output directory that are not in our expected set.
// Clean up any stale files.
downloader::cleanup_stale_files(&args.output, &expected_files)?;
// Proceed to download files defined in the config.
let urls_to_download = config.extract_urls();
if urls_to_download.is_empty() {
println!("\n✔️ No rule sets with URLs found. Process complete.");
info!("No rule sets with URLs found in config. Process complete.");
return Ok(());
}
println!(
"\n✔️ Found {} rule sets to download/update.",
urls_to_download.len()
info!(
count = urls_to_download.len(),
"Found rule sets to download/update."
);
// Download all files concurrently.
let download_report =
downloader::download_all_rules(&urls_to_download, &args.output, args.concurrency).await?;
println!("\n✅ Download process finished.");
println!(
" {} successful, {} failed.",
download_report.successful, download_report.failed
info!(
successful = download_report.successful,
failed = download_report.failed,
"Download process finished."
);
// If any downloads failed, abort with an error message.
@@ -64,6 +67,6 @@ pub async fn run(args: Args) -> Result<()> {
);
}
println!("\n✔️ Ruleset synchronization complete.");
info!("Ruleset synchronization completed successfully.");
Ok(())
}