add comments

This commit is contained in:
yggverse 2025-08-10 16:31:20 +03:00
parent 6285b850ce
commit 8a9c928b98
2 changed files with 12 additions and 8 deletions

View file

@ -6,6 +6,12 @@ use url::Url;
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
#[command(version, about, long_about = None)] #[command(version, about, long_about = None)]
pub struct Config { pub struct Config {
/// Directory path to store preloaded data (e.g. `.torrent` files)
///
/// * it's probably the same location as `public` dir for the [btracker](https://github.com/YGGverse/btracker) frontend
#[arg(long)]
pub preload: PathBuf,
/// Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API /// Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API
/// ///
/// * PR#233 feature ([Wiki](https://github.com/YGGverse/aquatic-crawler/wiki/Aquatic)) /// * PR#233 feature ([Wiki](https://github.com/YGGverse/aquatic-crawler/wiki/Aquatic))
@ -46,10 +52,6 @@ pub struct Config {
#[arg(long)] #[arg(long)]
pub bind: Option<String>, pub bind: Option<String>,
/// Directory path to store preloaded data (e.g. `.torrent` files)
#[arg(long)]
pub preload: PathBuf,
/// Preload only files match regex pattern (list only without preload by default) /// Preload only files match regex pattern (list only without preload by default)
/// * see also `preload_max_filesize`, `preload_max_filecount` options /// * see also `preload_max_filesize`, `preload_max_filecount` options
/// ///
@ -76,7 +78,7 @@ pub struct Config {
#[arg(long)] #[arg(long)]
pub proxy_url: Option<Url>, pub proxy_url: Option<Url>,
// Peer options // Tune up the peer processor
#[arg(long)] #[arg(long)]
pub peer_connect_timeout: Option<u64>, pub peer_connect_timeout: Option<u64>,
@ -87,6 +89,8 @@ pub struct Config {
pub peer_keep_alive_interval: Option<u64>, pub peer_keep_alive_interval: Option<u64>,
/// Estimated info-hash index capacity /// Estimated info-hash index capacity
///
/// * use for memory optimization, depending on tracker volumes
#[arg(long, default_value_t = 1000)] #[arg(long, default_value_t = 1000)]
pub index_capacity: usize, pub index_capacity: usize,
@ -100,7 +104,7 @@ pub struct Config {
/// Limit download speed (b/s) /// Limit download speed (b/s)
#[arg(long)] #[arg(long)]
pub download_limit: Option<u32>, pub download_limit: Option<u32>, // * reminder: upload feature is not planned by the crawler impl
/// Skip long-thinking connections, /// Skip long-thinking connections,
/// try to handle the other hashes in this queue after `n` seconds /// try to handle the other hashes in this queue after `n` seconds

View file

@ -181,7 +181,7 @@ async fn main() -> Result<()> {
); );
session session
.delete(librqbit::api::TorrentIdOrHash::Id(id), false) .delete(librqbit::api::TorrentIdOrHash::Id(id), false)
.await?; .await?; // * do not collect billions of slow torrents in the session pool
continue; continue;
} }
log::debug!("torrent `{i}` preload completed."); log::debug!("torrent `{i}` preload completed.");
@ -194,7 +194,7 @@ async fn main() -> Result<()> {
preload.commit(&i, bytes, Some(keep_files))?; preload.commit(&i, bytes, Some(keep_files))?;
session session
.delete(librqbit::api::TorrentIdOrHash::Id(id), false) .delete(librqbit::api::TorrentIdOrHash::Id(id), false)
.await?; .await?; // torrent data was moved on commit; there's no sense in keeping it
log::debug!("torrent `{i}` resolved.") log::debug!("torrent `{i}` resolved.")
} }
Ok(_) => panic!(), Ok(_) => panic!(),