remove extra info-hash copy and validation, as the value is already validated by the API parser

This commit is contained in:
yggverse 2025-08-08 04:14:57 +03:00
parent 704a2e5c29
commit 673aa05a58
2 changed files with 18 additions and 20 deletions

View file

@ -89,18 +89,18 @@ async fn main() -> Result<()> {
} }
} { } {
// convert to string once // convert to string once
let is = i.as_string(); let i = i.as_string();
if preload.contains_torrent(&i)? { if preload.contains_torrent(&i)? {
continue; continue;
} }
log::debug!("Index `{is}`..."); log::debug!("Index `{i}`...");
// run the crawler in single thread for performance reasons, // run the crawler in single thread for performance reasons,
// use `timeout` argument option to skip the dead connections. // use `timeout` argument option to skip the dead connections.
match time::timeout( match time::timeout(
Duration::from_secs(config.add_torrent_timeout), Duration::from_secs(config.add_torrent_timeout),
session.add_torrent( session.add_torrent(
AddTorrent::from_url(magnet( AddTorrent::from_url(magnet(
&is, &i,
if config.tracker.is_empty() { if config.tracker.is_empty() {
None None
} else { } else {
@ -143,20 +143,20 @@ async fn main() -> Result<()> {
.is_some_and(|limit| only_files.len() + 1 > limit) .is_some_and(|limit| only_files.len() + 1 > limit)
{ {
log::debug!( log::debug!(
"file count limit reached, skip `{id}` for `{is}`" "file count limit reached, skip `{id}` for `{i}`"
); );
break; break;
} }
if preload.max_filesize.is_some_and(|limit| info.len > limit) { if preload.max_filesize.is_some_and(|limit| info.len > limit) {
log::debug!( log::debug!(
"file size limit reached, skip `{id}` for `{is}`" "file size limit reached, skip `{id}` for `{i}`"
); );
continue; continue;
} }
if preload.regex.as_ref().is_some_and(|r| { if preload.regex.as_ref().is_some_and(|r| {
!r.is_match(&info.relative_filename.to_string_lossy()) !r.is_match(&info.relative_filename.to_string_lossy())
}) { }) {
log::debug!("regex filter, skip `{id}` for `{is}`"); log::debug!("regex filter, skip `{id}` for `{i}`");
continue; continue;
} }
assert!(keep_files.insert(info.relative_filename.clone())); assert!(keep_files.insert(info.relative_filename.clone()));
@ -175,12 +175,12 @@ async fn main() -> Result<()> {
session session
.delete(librqbit::api::TorrentIdOrHash::Id(id), false) .delete(librqbit::api::TorrentIdOrHash::Id(id), false)
.await?; .await?;
log::debug!("torrent `{is}` indexed.") log::debug!("torrent `{i}` indexed.")
} }
Ok(_) => panic!(), Ok(_) => panic!(),
Err(e) => log::debug!("Failed to resolve `{is}`: `{e}`."), Err(e) => log::debug!("Failed to resolve `{i}`: `{e}`."),
}, },
Err(e) => log::debug!("failed to resolve `{is}`: `{e}`"), Err(e) => log::debug!("failed to resolve `{i}`: `{e}`"),
} }
} }
} }

View file

@ -1,5 +1,4 @@
use anyhow::{Result, bail}; use anyhow::{Result, bail};
use librqbit::dht::Id20;
use regex::Regex; use regex::Regex;
use std::{collections::HashSet, fs, path::PathBuf}; use std::{collections::HashSet, fs, path::PathBuf};
@ -37,14 +36,13 @@ impl Preload {
/// cleanup tmp data on success (see rqbit#408) /// cleanup tmp data on success (see rqbit#408)
pub fn commit( pub fn commit(
&self, &self,
info_hash: &Id20, info_hash: &str,
torrent_bytes: Vec<u8>, torrent_bytes: Vec<u8>,
persist_files: Option<HashSet<PathBuf>>, persist_files: Option<HashSet<PathBuf>>,
) -> Result<()> { ) -> Result<()> {
let i = info_hash.as_string();
// persist preload files // persist preload files
let mut d = PathBuf::from(&self.root); let mut d = PathBuf::from(&self.root);
d.push(&i); d.push(info_hash);
if d.exists() { if d.exists() {
// clean previous data // clean previous data
fs::remove_dir_all(&d)?; fs::remove_dir_all(&d)?;
@ -88,7 +86,7 @@ impl Preload {
log::debug!("clean tmp data `{}`", tmp.to_string_lossy()) log::debug!("clean tmp data `{}`", tmp.to_string_lossy())
} }
// persist torrent bytes to file (on previous operations success) // persist torrent bytes to file (on previous operations success)
let t = self.torrent(i); let t = self.torrent(info_hash);
fs::write(&t, torrent_bytes)?; fs::write(&t, torrent_bytes)?;
log::debug!("persist torrent bytes for `{}`", t.to_string_lossy()); log::debug!("persist torrent bytes for `{}`", t.to_string_lossy());
Ok(()) Ok(())
@ -98,9 +96,9 @@ impl Preload {
/// Get absolute path to the temporary directory /// Get absolute path to the temporary directory
/// * optionally creates directory if not exists /// * optionally creates directory if not exists
pub fn tmp(&self, info_hash: &Id20, is_create: bool) -> Result<PathBuf> { pub fn tmp(&self, info_hash: &str, is_create: bool) -> Result<PathBuf> {
let mut p = PathBuf::from(&self.root); let mut p = PathBuf::from(&self.root);
p.push(tmp_component(info_hash.as_string())); p.push(tmp_component(info_hash));
if p.is_file() { if p.is_file() {
bail!("Output directory `{}` is file", p.to_string_lossy()) bail!("Output directory `{}` is file", p.to_string_lossy())
} }
@ -117,12 +115,12 @@ impl Preload {
} }
/// Check the given hash is contain resolved torrent file /// Check the given hash is contain resolved torrent file
pub fn contains_torrent(&self, info_hash: &Id20) -> Result<bool> { pub fn contains_torrent(&self, info_hash: &str) -> Result<bool> {
Ok(fs::exists(self.torrent(info_hash.as_string()))?) Ok(fs::exists(self.torrent(info_hash))?)
} }
/// Get absolute path to the torrent file /// Get absolute path to the torrent file
fn torrent(&self, info_hash: String) -> PathBuf { fn torrent(&self, info_hash: &str) -> PathBuf {
let mut p = PathBuf::from(&self.root); let mut p = PathBuf::from(&self.root);
p.push(format!("{info_hash}.torrent")); p.push(format!("{info_hash}.torrent"));
p p
@ -130,6 +128,6 @@ impl Preload {
} }
/// Build constant path component /// Build constant path component
fn tmp_component(info_hash: String) -> String { fn tmp_component(info_hash: &str) -> String {
format!(".{info_hash}") format!(".{info_hash}")
} }