use shared btracker-scrape library, update btracker-fs API to v0.2

This commit is contained in:
yggverse 2025-09-09 14:39:31 +03:00
parent 6bc739c0b3
commit ea9c632bad
7 changed files with 40 additions and 156 deletions

View file

@ -55,9 +55,9 @@ impl Feed {
/// Append `item` to the feed `channel`
pub fn push(&mut self, torrent: Torrent) {
let info_hash = torrent.info_hash.as_string();
self.buffer.push_str(&format!(
"<item><guid>{}</guid><title>{}</title><link>{}</link>",
torrent.info_hash,
"<item><guid>{info_hash}</guid><title>{}</title><link>{}</link>",
escape(
&torrent
.name
@ -65,7 +65,7 @@ impl Feed {
.map(|b| b.to_string())
.unwrap_or("?".into()) // @TODO
),
self.canonical.link(&torrent.info_hash)
self.canonical.link(&info_hash)
));
self.buffer.push_str("<description>");

View file

@ -4,16 +4,16 @@ extern crate rocket;
mod config;
mod feed;
mod meta;
mod scraper;
mod scrape;
mod torrent;
use btracker_fs::public::{Order, Public, Sort};
use btracker_scrape::Scrape;
use config::Config;
use feed::Feed;
use meta::Meta;
use rocket::{State, http::Status, response::content::RawXml, serde::Serialize};
use rocket_dyn_templates::{Template, context};
use scraper::{Scrape, Scraper};
use std::str::FromStr;
use torrent::Torrent;
@ -21,7 +21,7 @@ use torrent::Torrent;
fn index(
search: Option<&str>,
page: Option<usize>,
scraper: &State<Scraper>,
scrape: &State<Scrape>,
public: &State<Public>,
meta: &State<Meta>,
) -> Result<Template, Status> {
@ -32,7 +32,7 @@ fn index(
files: Option<usize>,
indexed: String,
magnet: String,
scrape: Option<Scrape>,
scrape: Option<scrape::Result>,
size: usize,
torrent: Torrent,
}
@ -82,7 +82,7 @@ fn index(
files: torrent.files(),
indexed: torrent.time.format(&meta.format_time).to_string(),
magnet: torrent.magnet(meta.trackers.as_ref()),
scrape: scraper.scrape(&torrent.info_hash),
scrape: scrape::get(scrape, torrent.info_hash.0),
size: torrent.size as usize, // required by `filesizeformat` impl
torrent
}),
@ -104,10 +104,11 @@ fn index(
fn info(
info_hash: &str,
public: &State<Public>,
scraper: &State<Scraper>,
scrape: &State<Scrape>,
meta: &State<Meta>,
) -> Result<Template, Status> {
match public.torrent(librqbit_core::Id20::from_str(info_hash).map_err(|_| Status::NotFound)?) {
let i = librqbit_core::Id20::from_str(info_hash).map_err(|_| Status::NotFound)?;
match public.torrent(i) {
Some(t) => {
#[derive(Serialize)]
#[serde(crate = "rocket::serde")]
@ -140,7 +141,7 @@ fn info(
.map(|f| {
let p = f.path();
F {
href: public.href(&torrent.info_hash, &p),
href: public.href(&torrent.info_hash.as_string(), &p),
path: p,
size: f.length as usize, // required by `filesizeformat` impl
}
@ -149,7 +150,7 @@ fn info(
}),
indexed: torrent.time.format(&meta.format_time).to_string(),
magnet: torrent.magnet(meta.trackers.as_ref()),
scrape: scraper.scrape(&torrent.info_hash),
scrape: scrape::get(scrape, i.0),
size: torrent.size as usize, // required by `filesizeformat` impl
torrent
},
@ -195,7 +196,7 @@ fn rocket() -> _ {
if config.canonical_url.is_none() {
warn!("Canonical URL option is required for the RSS feed by the specification!") // @TODO
}
let scraper = Scraper::init(
let scrape = Scrape::init(
config
.scrape
.map(|u| {
@ -234,8 +235,8 @@ fn rocket() -> _ {
rocket::Config::release_default()
}
})
.manage(scraper)
.manage(Public::init(config.public.clone(), config.list_limit, config.capacity).unwrap())
.manage(scrape)
.manage(Public::init(&config.public, config.list_limit, config.capacity).unwrap())
.manage(Meta {
canonical: config.canonical_url,
description: config.description,

15
src/scrape.rs Normal file
View file

@ -0,0 +1,15 @@
/// Serializable scrape statistics exposed to Rocket templates.
///
/// Converted from the shared `btracker-scrape` library's result by
/// [`get`]; `Default` yields all-zero counters.
/// NOTE: the name intentionally shadows `std::result::Result` inside this
/// module — callers refer to it as `scrape::Result`.
#[derive(rocket::serde::Serialize, Default)]
#[serde(crate = "rocket::serde")]
pub struct Result {
    pub leechers: u32,
    pub peers: u32,
    pub seeders: u32,
}
/// Look up scrape statistics for the torrent identified by the raw
/// 20-byte info-hash `id`, converting the shared-library value into the
/// template-serializable [`Result`]. Returns `None` when the scraper has
/// no data for `id`.
pub fn get(scrape: &super::Scrape, id: [u8; 20]) -> Option<Result> {
    let stats = scrape.get(id)?;
    Some(Result {
        leechers: stats.leechers,
        peers: stats.peers,
        seeders: stats.seeders,
    })
}

View file

@ -1,40 +0,0 @@
mod udp;
use rocket::serde::Serialize;
use std::{net::SocketAddr, str::FromStr};
use udp::Udp;
/// Aggregated tracker scrape counters, summed over every transport that
/// answered. `Default` yields all-zero counters.
#[derive(Serialize, Default)]
#[serde(crate = "rocket::serde")]
pub struct Scrape {
    pub leechers: u32,
    pub peers: u32,
    pub seeders: u32,
}
/// Transport dispatcher for tracker scrapes.
///
/// Currently only the UDP transport (BEP 15) is implemented; `None`
/// means no scrape endpoints were configured.
pub struct Scraper {
    udp: Option<Udp>,
    // tcp: @TODO
}
impl Scraper {
pub fn init(udp: Option<(Vec<SocketAddr>, Vec<SocketAddr>)>) -> Self {
Self {
udp: udp.map(|(local, remote)| Udp::init(local, remote)),
}
}
pub fn scrape(&self, info_hash: &str) -> Option<Scrape> {
self.udp.as_ref()?;
let mut t = Scrape::default();
if let Some(ref u) = self.udp {
let r = u
.scrape(librqbit_core::Id20::from_str(info_hash).ok()?)
.ok()?; // @TODO handle
t.leechers += r.leechers;
t.peers += r.peers;
t.seeders += r.seeders;
}
Some(t)
}
}

View file

@ -1,96 +0,0 @@
use super::Scrape;
use librqbit_core::hash_id::Id20;
use rand::Rng;
use std::{
io::Error,
net::{SocketAddr, UdpSocket},
time::Duration,
};
/// One bound local socket together with the remote tracker addresses it
/// serves (filtered to the socket's IP family at init time).
struct Route {
    socket: UdpSocket,
    remote: Vec<SocketAddr>,
}

/// UDP tracker scraper (BEP 15): one `Route` per configured local bind.
pub struct Udp(Vec<Route>);
impl Udp {
    /// Bind one socket per `local` address with a 3-second read timeout.
    /// Each socket is routed only to the `remote` tracker addresses of
    /// its own IP family.
    ///
    /// # Panics
    /// Panics when a local address cannot be bound or the timeout cannot
    /// be set (startup-time configuration errors).
    pub fn init(local: Vec<SocketAddr>, remote: Vec<SocketAddr>) -> Self {
        Self(
            local
                .into_iter()
                .map(|l| {
                    let socket = UdpSocket::bind(l).unwrap();
                    socket
                        .set_read_timeout(Some(Duration::from_secs(3)))
                        .unwrap();
                    Route {
                        socket,
                        // Keep only remotes of the same family as the bind.
                        remote: remote
                            .iter()
                            .filter(|r| r.is_ipv4() == l.is_ipv4())
                            .cloned()
                            .collect(),
                    }
                })
                .collect(),
        )
    }

    /// Scrape `info_hash` from every configured tracker (BEP 15) and sum
    /// the counters over all responses.
    ///
    /// # Errors
    /// Returns an I/O error on send/receive failure, timeout, or a
    /// malformed/mismatched tracker response (the original code hit
    /// `todo!()` panics on short responses instead).
    pub fn scrape(&self, info_hash: Id20) -> Result<Scrape, Error> {
        let mut t = Scrape::default();
        for route in &self.0 {
            for remote in &route.remote {
                // Handshake: the 16-byte connect response carries the
                // connection id in bytes 8..16.
                route.socket.send_to(&connection_request(), remote)?;
                let mut b = [0u8; 16];
                if route.socket.recv(&mut b)? < 16 {
                    return Err(Error::other("connect response shorter than 16 bytes"));
                }
                // Action 0 = connect; anything else is a protocol error.
                if u32::from_be_bytes(b[0..4].try_into().unwrap()) != 0 {
                    return Err(Error::other("unexpected action in connect response"));
                }
                let connection_id = u64::from_be_bytes(b[8..16].try_into().unwrap());
                let transaction_id = rand::rng().random::<u32>();
                route.socket.send_to(
                    &scrape_request(connection_id, transaction_id, &[info_hash]),
                    remote,
                )?;
                let mut b = [0u8; 1024];
                // 8-byte header + one 12-byte stats triple.
                if route.socket.recv(&mut b)? < 20 {
                    return Err(Error::other("scrape response shorter than 20 bytes"));
                }
                // Action 2 = scrape; the transaction id must echo ours.
                if u32::from_be_bytes(b[0..4].try_into().unwrap()) != 2
                    || u32::from_be_bytes(b[4..8].try_into().unwrap()) != transaction_id
                {
                    return Err(Error::other("unexpected scrape response header"));
                }
                // BEP 15 per-hash order is seeders, completed, leechers;
                // the original read 12..16 as leechers and 16..20 as peers,
                // mislabeling both. "completed" is the closest match for
                // the `peers` counter here — confirm intended semantics.
                t.seeders += u32::from_be_bytes(b[8..12].try_into().unwrap());
                t.peers += u32::from_be_bytes(b[12..16].try_into().unwrap());
                t.leechers += u32::from_be_bytes(b[16..20].try_into().unwrap());
            }
        }
        Ok(t)
    }
}
/// Build a 16-byte BEP 15 connect request: magic protocol id, action 0
/// (connect), and a freshly generated random transaction id.
fn connection_request() -> Vec<u8> {
    const PROTOCOL_ID: u64 = 0x41727101980;
    const ACTION_CONNECT: u32 = 0;
    let transaction_id: u32 = rand::rng().random();
    let mut packet = Vec::new();
    packet.extend_from_slice(&PROTOCOL_ID.to_be_bytes());
    packet.extend_from_slice(&ACTION_CONNECT.to_be_bytes());
    packet.extend_from_slice(&transaction_id.to_be_bytes());
    packet
}
/// Build a BEP 15 scrape request for `info_hashes` using an established
/// `connection_id` and the caller-chosen `transaction_id`.
///
/// # Panics
/// Panics when more than 74 hashes are passed — a single scrape packet
/// carries up to about 74 torrents
/// (https://www.bittorrent.org/beps/bep_0015.html). The guard now runs
/// before any bytes are written (the original `todo!()` fired after the
/// header was already built).
fn scrape_request(connection_id: u64, transaction_id: u32, info_hashes: &[Id20]) -> Vec<u8> {
    assert!(
        info_hashes.len() <= 74,
        "a scrape request may carry at most 74 info-hashes"
    );
    // 16-byte header + 20 bytes per info-hash.
    let mut b = Vec::with_capacity(16 + 20 * info_hashes.len());
    b.extend_from_slice(&connection_id.to_be_bytes());
    b.extend_from_slice(&2u32.to_be_bytes()); // action 2 = scrape
    b.extend_from_slice(&transaction_id.to_be_bytes());
    for hash in info_hashes {
        b.extend_from_slice(&hash.0);
    }
    b
}

View file

@ -2,7 +2,10 @@ mod file;
use chrono::{DateTime, Utc};
use file::File;
use librqbit_core::torrent_metainfo::{self, TorrentMetaV1Owned};
use librqbit_core::{
Id20,
torrent_metainfo::{self, TorrentMetaV1Owned},
};
use rocket::serde::Serialize;
#[derive(Clone, Debug, Serialize)]
@ -13,7 +16,7 @@ pub struct Torrent {
pub created_by: Option<String>,
pub creation_date: Option<DateTime<Utc>>,
pub files: Option<Vec<File>>,
pub info_hash: String,
pub info_hash: Id20,
pub is_private: bool,
pub length: Option<u64>,
pub name: Option<String>,
@ -29,7 +32,7 @@ impl Torrent {
let i: TorrentMetaV1Owned =
torrent_metainfo::torrent_from_bytes(bytes).map_err(|e| e.to_string())?;
Ok(Torrent {
info_hash: i.info_hash.as_string(),
info_hash: i.info_hash,
announce: i.announce.map(|a| a.to_string()),
comment: i.comment.map(|c| c.to_string()),
created_by: i.created_by.map(|c| c.to_string()),
@ -76,7 +79,7 @@ impl Torrent {
}
pub fn magnet(&self, trackers: Option<&Vec<url::Url>>) -> String {
let mut b = format!("magnet:?xt=urn:btih:{}", self.info_hash);
let mut b = format!("magnet:?xt=urn:btih:{}", self.info_hash.as_string());
if let Some(ref n) = self.name {
b.push_str("&dn=");
b.push_str(&urlencoding::encode(n))