reorganize debug components

This commit is contained in:
yggverse 2025-07-09 14:39:37 +03:00
parent 47e6899ccd
commit 7da1df7137
5 changed files with 65 additions and 97 deletions

View file

@ -56,13 +56,7 @@ aquatic-crawler --infohash /path/to/info-hash-ipv4.bin\
``` bash ``` bash
-d, --debug <DEBUG> -d, --debug <DEBUG>
Debug level Print debug output
* `e` - error
* `i` - info
* `t` - trace (run with `RUST_LOG=librqbit=trace`)
[default: ei]
--infohash <INFOHASH> --infohash <INFOHASH>
Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API

View file

@ -3,13 +3,9 @@ use clap::Parser;
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
#[command(version, about, long_about = None)] #[command(version, about, long_about = None)]
pub struct Config { pub struct Config {
/// Debug level /// Print debug output
/// #[arg(short, long, default_value_t = false)]
/// * `e` - error pub debug: bool,
/// * `i` - info
/// * `t` - trace (run with `RUST_LOG=librqbit=trace`)
#[arg(short, long, default_value_t = String::from("ei"))]
pub debug: String,
/// Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API /// Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API
/// ///

View file

@ -1,30 +0,0 @@
mod level;
use level::Level;
pub struct Debug(Vec<Level>);
impl Debug {
pub fn init(levels: &str) -> anyhow::Result<Self> {
let mut l = Vec::with_capacity(levels.len());
for s in levels.to_lowercase().chars() {
l.push(Level::parse(s)?);
}
Ok(Self(l))
}
pub fn error(&self, message: &str) {
if self.0.contains(&Level::Error) {
eprintln!("[{}] [error] {message}", now());
}
}
pub fn info(&self, message: &str) {
if self.0.contains(&Level::Info) {
println!("[{}] [info] {message}", now());
}
}
}
/// Current local time as an RFC 3339 timestamp, used to prefix log lines.
fn now() -> String {
    chrono::Local::now().to_rfc3339()
}

View file

@ -1,22 +0,0 @@
use anyhow::{Result, bail};

/// A single debug output level, selected per character on the CLI.
#[derive(PartialEq)]
pub enum Level {
    Error,
    Info,
    Trace,
}

impl Level {
    /// Map a CLI flag character to its level:
    /// `'e'` → [`Level::Error`], `'i'` → [`Level::Info`], `'t'` → [`Level::Trace`].
    ///
    /// Any other character fails with an "Unsupported debug value" error.
    ///
    /// NOTE(review): parsing `'t'` has the side effect of installing the
    /// global tracing subscriber; `tracing_subscriber::fmt::init()` panics
    /// if a global default is already set, so passing `'t'` more than once
    /// (or after another subscriber was installed) would abort — confirm
    /// callers only pass `'t'` at most once.
    pub fn parse(value: char) -> Result<Self> {
        match value {
            'e' => Ok(Self::Error),
            'i' => Ok(Self::Info),
            't' => {
                tracing_subscriber::fmt::init();
                Ok(Self::Trace)
            }
            _ => bail!("Unsupported debug value `{value}`!"),
        }
    }
}

View file

@ -1,6 +1,5 @@
mod api; mod api;
mod config; mod config;
mod debug;
mod format; mod format;
mod index; mod index;
mod peers; mod peers;
@ -11,7 +10,6 @@ mod trackers;
use anyhow::Result; use anyhow::Result;
use config::Config; use config::Config;
use debug::Debug;
use index::Index; use index::Index;
use librqbit::{ use librqbit::{
AddTorrent, AddTorrentOptions, AddTorrentResponse, ByteBufOwned, ConnectionOptions, AddTorrent, AddTorrentOptions, AddTorrentResponse, ByteBufOwned, ConnectionOptions,
@ -31,7 +29,9 @@ async fn main() -> Result<()> {
// init components // init components
let config = Config::parse(); let config = Config::parse();
let debug = Debug::init(&config.debug)?; if config.debug {
tracing_subscriber::fmt::init()
}
let peers = Peers::init(&config.initial_peer)?; let peers = Peers::init(&config.initial_peer)?;
let preload = preload::init( let preload = preload::init(
config.preload, config.preload,
@ -73,7 +73,7 @@ async fn main() -> Result<()> {
.await?; .await?;
// begin // begin
debug.info("Crawler started"); println!("Crawler started");
let mut index = Index::init( let mut index = Index::init(
config.index_capacity, config.index_capacity,
config.index_timeout, config.index_timeout,
@ -82,19 +82,25 @@ async fn main() -> Result<()> {
config.export_rss.is_some() && config.index_list, config.export_rss.is_some() && config.index_list,
); );
loop { loop {
debug.info("Index queue begin..."); if config.debug {
println!("\tQueue crawl begin...")
}
index.refresh(); index.refresh();
for source in &config.infohash { for source in &config.infohash {
debug.info(&format!("Index source `{source}`...")); if config.debug {
println!("\tIndex source `{source}`...")
}
// grab latest info-hashes from this source // grab latest info-hashes from this source
// * aquatic server may update the stats at this moment, handle result manually // * aquatic server may update the stats at this moment, handle result manually
for i in match api::get(source, config.index_capacity) { for i in match api::get(source, config.index_capacity) {
Some(i) => i, Some(i) => i,
None => { None => {
// skip without panic // skip without panic
debug.error(&format!( if config.debug {
"The feed `{source}` has an incomplete format (or is still updating); skip." eprintln!(
)); "The feed `{source}` has an incomplete format (or is still updating); skip."
)
}
continue; continue;
} }
} { } {
@ -104,7 +110,9 @@ async fn main() -> Result<()> {
if index.has(&i) { if index.has(&i) {
continue; continue;
} }
debug.info(&format!("Index `{i}`...")); if config.debug {
println!("\t\tIndex `{i}`...")
}
// run the crawler in single thread for performance reasons, // run the crawler in single thread for performance reasons,
// use `timeout` argument option to skip the dead connections. // use `timeout` argument option to skip the dead connections.
match time::timeout( match time::timeout(
@ -158,17 +166,21 @@ async fn main() -> Result<()> {
if p.max_filesize.is_some_and(|limit| { if p.max_filesize.is_some_and(|limit| {
only_files_size + info.len > limit only_files_size + info.len > limit
}) { }) {
debug.info(&format!( if config.debug {
"Total files size limit `{i}` reached!" println!(
)); "\t\t\ttotal files size limit `{i}` reached!"
)
}
break; break;
} }
if p.max_filecount if p.max_filecount
.is_some_and(|limit| only_files.len() + 1 > limit) .is_some_and(|limit| only_files.len() + 1 > limit)
{ {
debug.info(&format!( if config.debug {
"Total files count limit for `{i}` reached!" println!(
)); "\t\t\ttotal files count limit for `{i}` reached!"
)
}
break; break;
} }
only_files_size += info.len; only_files_size += info.len;
@ -181,7 +193,7 @@ async fn main() -> Result<()> {
} }
} }
if let Some(ref t) = torrent { if let Some(ref t) = torrent {
save_torrent_file(t, &debug, &i, &m.torrent_bytes) save_torrent_file(t, &i, &m.torrent_bytes, config.debug)
} }
( (
@ -203,17 +215,25 @@ async fn main() -> Result<()> {
p.cleanup(&i, Some(only_files_keep))? p.cleanup(&i, Some(only_files_keep))?
} }
if config.debug {
println!("\t\t\tadd `{i}` to index.")
}
index.insert(i, only_files_size, size, list, name) index.insert(i, only_files_size, size, list, name)
} }
Ok(AddTorrentResponse::ListOnly(r)) => { Ok(AddTorrentResponse::ListOnly(r)) => {
if let Some(ref t) = torrent { if let Some(ref t) = torrent {
save_torrent_file(t, &debug, &i, &r.torrent_bytes) save_torrent_file(t, &i, &r.torrent_bytes, config.debug)
} }
// @TODO // @TODO
// use `r.info` for Memory, SQLite, // use `r.info` for Memory, SQLite,
// Manticore and other alternative storage type // Manticore and other alternative storage type
if config.debug {
println!("\t\t\tadd `{i}` to index.")
}
index.insert( index.insert(
i, i,
0, 0,
@ -224,9 +244,13 @@ async fn main() -> Result<()> {
} }
// unexpected as should be deleted // unexpected as should be deleted
Ok(AddTorrentResponse::AlreadyManaged(..)) => panic!(), Ok(AddTorrentResponse::AlreadyManaged(..)) => panic!(),
Err(e) => debug.info(&format!("Skip `{i}`: `{e}`.")), Err(e) => eprintln!("Failed to resolve `{i}`: `{e}`."),
}, },
Err(e) => debug.info(&format!("Skip `{i}`: `{e}`.")), Err(e) => {
if config.debug {
println!("\t\t\tfailed to resolve `{i}`: `{e}`")
}
}
} }
} }
} }
@ -261,23 +285,29 @@ async fn main() -> Result<()> {
{ {
panic!("Preload content size {} bytes reached!", 0) panic!("Preload content size {} bytes reached!", 0)
} }
debug.info(&format!( if config.debug {
"Index completed, {} total, await {} seconds to continue...", println!(
index.len(), "Queue completed, {} total, await {} seconds to continue...",
config.sleep, index.len(),
)); config.sleep,
)
}
std::thread::sleep(Duration::from_secs(config.sleep)); std::thread::sleep(Duration::from_secs(config.sleep));
} }
} }
/// Shared handler function to save resolved torrents as file /// Shared handler function to save resolved torrents as file
fn save_torrent_file(t: &Torrent, d: &Debug, i: &str, b: &[u8]) { fn save_torrent_file(t: &Torrent, i: &str, b: &[u8], d: bool) {
match t.persist(i, b) { match t.persist(i, b) {
Ok(r) => d.info(&match r { Ok(r) => {
Some(p) => format!("Add torrent file `{}`", p.to_string_lossy()), if d {
None => format!("Torrent file `{i}` already exists"), match r {
}), Some(p) => println!("\t\t\tadd torrent file `{}`", p.to_string_lossy()),
Err(e) => d.error(&format!("Error on save torrent file `{i}`: {e}")), None => println!("\t\t\ttorrent file `{i}` already exists"),
}
}
}
Err(e) => eprintln!("Error on save torrent file `{i}`: {e}"),
} }
} }