collect unique IDs from the multiple sources to handle

This commit is contained in:
yggverse 2025-08-18 21:00:41 +03:00
parent 3643d456d0
commit 79643b84d2

View file

@ -66,6 +66,9 @@ async fn main() -> Result<()> {
loop {
let time_queue = Local::now();
log::debug!("queue crawl begin at {time_queue}...");
// build unique ID index from the multiple info-hash sources
let mut queue = HashSet::with_capacity(config.index_capacity);
for source in &config.infohash {
log::debug!("index source `{source}`...");
// grab latest info-hashes from this source
@ -76,13 +79,18 @@ async fn main() -> Result<()> {
i
}
None => {
// skip without panic
log::warn!(
"the feed `{source}` has an incomplete format (or is still updating); skip."
);
continue;
continue; // skip without panic
}
} {
queue.insert(i);
}
}
// handle
for i in queue {
// convert to string once
let h = i.as_string();
if preload.contains_torrent(&h)? {
@ -116,10 +124,7 @@ async fn main() -> Result<()> {
list_only: false,
// the destination folder to preload files match `preload_regex`
// * e.g. images for audio albums
output_folder: preload
.tmp_dir(&h, true)?
.to_str()
.map(|s| s.to_string()),
output_folder: preload.tmp_dir(&h, true)?.to_str().map(|s| s.to_string()),
..Default::default()
}),
),
@ -194,10 +199,7 @@ async fn main() -> Result<()> {
log::debug!("torrent `{h}` preload completed.");
// persist torrent bytes and preloaded content,
// cleanup tmp (see rqbit#408)
log::debug!(
"persist torrent `{h}` with `{}` files...",
keep_files.len()
);
log::debug!("persist torrent `{h}` with `{}` files...", keep_files.len());
preload.commit(&h, bytes, Some(keep_files))?;
session
.delete(librqbit::api::TorrentIdOrHash::Id(id), false)
@ -220,7 +222,6 @@ async fn main() -> Result<()> {
}
}
}
}
log::info!(
"queue completed at {time_queue} (time: {} / uptime: {} / banned: {}) await {} seconds to continue...",
Local::now()