// btracker/src/main.rs
// 2025-08-11 17:12:41 +03:00
// 255 lines, 8.6 KiB, Rust
#[macro_use]
extern crate rocket;
mod config;
mod feed;
mod meta;
mod public;
mod scraper;
mod torrent;
use config::Config;
use feed::Feed;
use meta::Meta;
use plurify::Plurify;
use public::{Order, Public, Sort};
use rocket::{State, http::Status, response::content::RawXml, serde::Serialize};
use rocket_dyn_templates::{Template, context};
use scraper::{Scrape, Scraper};
use std::str::FromStr;
use torrent::Torrent;
/// Front page: paginated, optionally filtered list of public torrents,
/// newest-modified first, rendered through the `index` template.
///
/// Query parameters:
/// * `search` — optional filter string forwarded to the public storage.
/// * `page`   — optional 1-based page number; absent means the first page.
///
/// Returns 500 when the public torrent storage cannot be read.
#[get("/?<search>&<page>")]
fn index(
    search: Option<&str>,
    page: Option<usize>,
    scraper: &State<Scraper>,
    public: &State<Public>,
    meta: &State<Meta>,
) -> Result<Template, Status> {
    // One rendered table row: the parsed torrent plus its pre-formatted fields.
    #[derive(Serialize)]
    #[serde(crate = "rocket::serde")]
    struct R {
        created: Option<String>, // creation date formatted with `meta.format_time`
        files: String,
        indexed: String,
        magnet: String,
        scrape: Option<Scrape>, // live tracker stats, if the scraper has them
        size: String,
        torrent: Torrent,
    }
    // Fetch one page of results; `total` is the overall match count used for
    // pagination below. The offset converts the 1-based `page` into a 0-based
    // row offset (`page=0` is tolerated and also maps to offset 0).
    let (total, torrents) = public
        .torrents(
            search,
            Some((Sort::Modified, Order::Desc)),
            page.map(|p| if p > 0 { p - 1 } else { p } * public.default_limit),
            Some(public.default_limit),
        )
        .map_err(|e| {
            error!("Torrents public storage read error: `{e}`");
            Status::InternalServerError
        })?;
    Ok(Template::render(
        "index",
        context! {
            // Build the document title: "<query>SSearchS" + "Page NS" +
            // site title + "S<description>" (description only on page 1).
            // `S` is the global separator constant defined at the bottom of
            // this file.
            title: {
                let mut t = String::new();
                if let Some(q) = search && !q.is_empty() {
                    t.push_str(q);
                    t.push_str(S);
                    t.push_str("Search");
                    t.push_str(S)
                }
                if let Some(p) = page && p > 1 {
                    t.push_str(&format!("Page {p}"));
                    t.push_str(S)
                }
                t.push_str(&meta.title);
                if let Some(ref description) = meta.description && page.is_none_or(|p| p == 1) {
                    t.push_str(S);
                    t.push_str(description)
                }
                t
            },
            meta: meta.inner(),
            // Previous-page link; pages <= 2 link back to the bare URL (no
            // `page` parameter). NOTE(review): when `page=1` this still emits
            // a link (to the same page) — confirm the template hides it there.
            back: page.map(|p| uri!(index(search, if p > 2 { Some(p - 1) } else { None }))),
            // Next-page link, absent once the current page covers `total`.
            next: if page.unwrap_or(1) * public.default_limit >= total { None }
                else { Some(uri!(index(search, Some(page.map_or(2, |p| p + 1))))) },
            rows: torrents
                .into_iter()
                // Skip (but log) rows whose stored bytes fail to parse.
                .filter_map(|t| match Torrent::from_public(&t.bytes, t.time) {
                    Ok(torrent) => Some(R {
                        created: torrent.creation_date.map(|t| t.format(&meta.format_time).to_string()),
                        files: torrent.files(),
                        indexed: torrent.time.format(&meta.format_time).to_string(),
                        magnet: torrent.magnet(meta.trackers.as_ref()),
                        scrape: scraper.scrape(&torrent.info_hash),
                        size: torrent.size(),
                        torrent
                    }),
                    Err(e) => {
                        error!("Torrent storage read error: `{e}`");
                        None
                    }
                })
                .collect::<Vec<R>>(),
            pagination_totals: format!(
                "Page {} / {} ({total} {} total)",
                page.unwrap_or(1),
                (total as f64 / public.default_limit as f64).ceil(),
                total.plurify(&["torrent", "torrents", "torrents"])
            ),
            search
        },
    ))
}
#[get("/<info_hash>")]
fn info(
info_hash: &str,
public: &State<Public>,
scraper: &State<Scraper>,
meta: &State<Meta>,
) -> Result<Template, Status> {
match public.torrent(librqbit_core::Id20::from_str(info_hash).map_err(|_| Status::NotFound)?) {
Some(t) => {
#[derive(Serialize)]
#[serde(crate = "rocket::serde")]
struct F {
href: Option<String>,
path: String,
size: String,
}
let torrent = Torrent::from_public(&t.bytes, t.time).map_err(|e| {
error!("Torrent parse error: `{e}`");
Status::InternalServerError
})?;
Ok(Template::render(
"info",
context! {
title: {
let mut t = String::new();
if let Some(ref name) = torrent.name {
t.push_str(name);
t.push_str(S)
}
t.push_str(&meta.title);
t
},
meta: meta.inner(),
created: torrent.creation_date.map(|t| t.format(&meta.format_time).to_string()),
files_total: torrent.files(),
files_list: torrent.files.as_ref().map(|f| {
f.iter()
.map(|f| {
let p = f.path();
F {
href: public.href(&torrent.info_hash, &p),
path: p,
size: f.size(),
}
})
.collect::<Vec<F>>()
}),
indexed: torrent.time.format(&meta.format_time).to_string(),
magnet: torrent.magnet(meta.trackers.as_ref()),
scrape: scraper.scrape(&torrent.info_hash),
size: torrent.size(),
torrent
},
))
}
None => Err(Status::NotFound),
}
}
/// RSS feed of the most recently modified public torrents (one page worth).
///
/// Returns 500 when the public storage cannot be read or a stored torrent
/// fails to parse.
#[get("/rss")]
fn rss(meta: &State<Meta>, public: &State<Public>) -> Result<RawXml<String>, Status> {
    let mut feed = Feed::new(
        &meta.title,
        meta.description.as_deref(),
        meta.canonical.clone(),
        1024, // @TODO
    );
    // Same ordering and page size as the front page; the total count is not
    // needed here, only the rows.
    let (_, torrents) = public
        .torrents(
            None,
            Some((Sort::Modified, Order::Desc)),
            None,
            Some(public.default_limit),
        )
        .map_err(|e| {
            error!("Torrent public storage read error: `{e}`");
            Status::InternalServerError
        })?;
    for record in torrents {
        let torrent = Torrent::from_public(&record.bytes, record.time).map_err(|e| {
            error!("Torrent parse error: `{e}`");
            Status::InternalServerError
        })?;
        feed.push(torrent)
    }
    Ok(RawXml(feed.commit()))
}
#[launch]
fn rocket() -> _ {
use clap::Parser;
let config = Config::parse();
if config.canonical_url.is_none() {
warn!("Canonical URL option is required for the RSS feed by the specification!") // @TODO
}
let scraper = Scraper::init(
config
.scrape
.map(|u| {
u.into_iter()
.map(|url| {
use std::str::FromStr;
if url.scheme() == "tcp" {
todo!("TCP scrape is not implemented")
}
if url.scheme() != "udp" {
todo!("Scheme `{}` is not supported", url.scheme())
}
std::net::SocketAddr::new(
std::net::IpAddr::from_str(
url.host_str()
.expect("Required valid host value")
.trim_start_matches('[')
.trim_end_matches(']'),
)
.unwrap(),
url.port().expect("Required valid port value"),
)
})
.collect()
})
.map(|a| (config.udp, a)),
);
rocket::build()
.attach(Template::fairing())
.configure(rocket::Config {
port: config.port,
address: config.host,
..if config.debug {
rocket::Config::debug_default()
} else {
rocket::Config::release_default()
}
})
.manage(scraper)
.manage(Public::init(config.public.clone(), config.list_limit, config.capacity).unwrap())
.manage(Meta {
canonical: config.canonical_url,
description: config.description,
format_time: config.format_time,
title: config.title,
trackers: config.tracker,
version: env!("CARGO_PKG_VERSION").into(),
})
.mount("/", rocket::fs::FileServer::from(config.public))
.mount("/", routes![index, rss, info])
}
// Separator inserted between `<title>` segments in the `index` and `info`
// handlers. NOTE(review): it is empty, so the segments run together
// ("querySearchTitle" with no delimiter) — presumably a visible separator
// such as " - " was intended; confirm against the deployed pages.
const S: &str = "";