remove extra info-hash copy and validation, as the hash is already validated by the API parser

This commit is contained in:
yggverse 2025-08-08 04:14:57 +03:00
parent 704a2e5c29
commit 673aa05a58
2 changed files with 18 additions and 20 deletions

View file

@ -89,18 +89,18 @@ async fn main() -> Result<()> {
}
} {
// convert to string once
let is = i.as_string();
let i = i.as_string();
if preload.contains_torrent(&i)? {
continue;
}
log::debug!("Index `{is}`...");
log::debug!("Index `{i}`...");
// run the crawler in single thread for performance reasons,
// use `timeout` argument option to skip the dead connections.
match time::timeout(
Duration::from_secs(config.add_torrent_timeout),
session.add_torrent(
AddTorrent::from_url(magnet(
&is,
&i,
if config.tracker.is_empty() {
None
} else {
@ -143,20 +143,20 @@ async fn main() -> Result<()> {
.is_some_and(|limit| only_files.len() + 1 > limit)
{
log::debug!(
"file count limit reached, skip `{id}` for `{is}`"
"file count limit reached, skip `{id}` for `{i}`"
);
break;
}
if preload.max_filesize.is_some_and(|limit| info.len > limit) {
log::debug!(
"file size limit reached, skip `{id}` for `{is}`"
"file size limit reached, skip `{id}` for `{i}`"
);
continue;
}
if preload.regex.as_ref().is_some_and(|r| {
!r.is_match(&info.relative_filename.to_string_lossy())
}) {
log::debug!("regex filter, skip `{id}` for `{is}`");
log::debug!("regex filter, skip `{id}` for `{i}`");
continue;
}
assert!(keep_files.insert(info.relative_filename.clone()));
@ -175,12 +175,12 @@ async fn main() -> Result<()> {
session
.delete(librqbit::api::TorrentIdOrHash::Id(id), false)
.await?;
log::debug!("torrent `{is}` indexed.")
log::debug!("torrent `{i}` indexed.")
}
Ok(_) => panic!(),
Err(e) => log::debug!("Failed to resolve `{is}`: `{e}`."),
Err(e) => log::debug!("Failed to resolve `{i}`: `{e}`."),
},
Err(e) => log::debug!("failed to resolve `{is}`: `{e}`"),
Err(e) => log::debug!("failed to resolve `{i}`: `{e}`"),
}
}
}

View file

@ -1,5 +1,4 @@
use anyhow::{Result, bail};
use librqbit::dht::Id20;
use regex::Regex;
use std::{collections::HashSet, fs, path::PathBuf};
@ -37,14 +36,13 @@ impl Preload {
/// cleanup tmp data on success (see rqbit#408)
pub fn commit(
&self,
info_hash: &Id20,
info_hash: &str,
torrent_bytes: Vec<u8>,
persist_files: Option<HashSet<PathBuf>>,
) -> Result<()> {
let i = info_hash.as_string();
// persist preload files
let mut d = PathBuf::from(&self.root);
d.push(&i);
d.push(info_hash);
if d.exists() {
// clean previous data
fs::remove_dir_all(&d)?;
@ -88,7 +86,7 @@ impl Preload {
log::debug!("clean tmp data `{}`", tmp.to_string_lossy())
}
// persist torrent bytes to file (on previous operations success)
let t = self.torrent(i);
let t = self.torrent(info_hash);
fs::write(&t, torrent_bytes)?;
log::debug!("persist torrent bytes for `{}`", t.to_string_lossy());
Ok(())
@ -98,9 +96,9 @@ impl Preload {
/// Get absolute path to the temporary directory
/// * optionally creates directory if not exists
pub fn tmp(&self, info_hash: &Id20, is_create: bool) -> Result<PathBuf> {
pub fn tmp(&self, info_hash: &str, is_create: bool) -> Result<PathBuf> {
let mut p = PathBuf::from(&self.root);
p.push(tmp_component(info_hash.as_string()));
p.push(tmp_component(info_hash));
if p.is_file() {
bail!("Output directory `{}` is file", p.to_string_lossy())
}
@ -117,12 +115,12 @@ impl Preload {
}
/// Check the given hash is contain resolved torrent file
pub fn contains_torrent(&self, info_hash: &Id20) -> Result<bool> {
Ok(fs::exists(self.torrent(info_hash.as_string()))?)
pub fn contains_torrent(&self, info_hash: &str) -> Result<bool> {
Ok(fs::exists(self.torrent(info_hash))?)
}
/// Get absolute path to the torrent file
fn torrent(&self, info_hash: String) -> PathBuf {
fn torrent(&self, info_hash: &str) -> PathBuf {
let mut p = PathBuf::from(&self.root);
p.push(format!("{info_hash}.torrent"));
p
@ -130,6 +128,6 @@ impl Preload {
}
/// Build constant path component
fn tmp_component(info_hash: String) -> String {
fn tmp_component(info_hash: &str) -> String {
format!(".{info_hash}")
}