mirror of
https://github.com/YGGverse/btracker.git
synced 2026-03-31 09:05:30 +00:00
reorganize Torrent structs, implement parser, isolate system errors output, remove file listing from rss feed (will be moved to the torrent page)
This commit is contained in:
parent
dc7585311e
commit
9291328401
5 changed files with 156 additions and 155 deletions
27
src/feed.rs
27
src/feed.rs
|
|
@ -1,4 +1,4 @@
|
|||
use crate::format;
|
||||
use crate::{Torrent, format};
|
||||
use url::Url;
|
||||
|
||||
/// Export crawl index to the RSS file
|
||||
|
|
@ -57,7 +57,7 @@ impl Feed {
|
|||
}
|
||||
|
||||
/// Append `item` to the feed `channel`
|
||||
pub fn push(&self, buffer: &mut String, torrent: crate::storage::Torrent) {
|
||||
pub fn push(&self, buffer: &mut String, torrent: Torrent) {
|
||||
buffer.push_str(&format!(
|
||||
"<item><guid>{}</guid><title>{}</title><link>{}</link>",
|
||||
&torrent.info_hash,
|
||||
|
|
@ -71,11 +71,9 @@ impl Feed {
|
|||
escape(format::magnet(&torrent.info_hash, self.trackers.as_ref()))
|
||||
));
|
||||
|
||||
if let Some(d) = item_description(torrent.size, torrent.files) {
|
||||
buffer.push_str("<description>");
|
||||
buffer.push_str(&escape(d));
|
||||
buffer.push_str("</description>")
|
||||
}
|
||||
buffer.push_str("<description>");
|
||||
buffer.push_str(&escape(format::bytes(torrent.size)));
|
||||
buffer.push_str("</description>");
|
||||
|
||||
buffer.push_str("<pubDate>");
|
||||
buffer.push_str(&torrent.time.to_rfc2822());
|
||||
|
|
@ -99,18 +97,3 @@ fn escape(subject: String) -> String {
|
|||
.replace('"', "&amp;quot;")
|
||||
.replace("'", "&amp;#39;")
|
||||
}
|
||||
|
||||
fn item_description(size: u64, list: Option<Vec<crate::storage::File>>) -> Option<String> {
|
||||
let mut b = Vec::with_capacity(list.as_ref().map(|l| l.len()).unwrap_or_default() + 1);
|
||||
b.push(format::bytes(size));
|
||||
if let Some(files) = list {
|
||||
for file in files {
|
||||
b.push(format!(
|
||||
"{} ({})",
|
||||
file.name.as_deref().unwrap_or("?"), // @TODO invalid encoding
|
||||
format::bytes(file.length)
|
||||
))
|
||||
}
|
||||
}
|
||||
Some(b.join("\n"))
|
||||
}
|
||||
|
|
|
|||
62
src/main.rs
62
src/main.rs
|
|
@ -6,6 +6,7 @@ mod feed;
|
|||
mod format;
|
||||
mod scraper;
|
||||
mod storage;
|
||||
mod torrent;
|
||||
|
||||
use config::Config;
|
||||
use feed::Feed;
|
||||
|
|
@ -17,7 +18,8 @@ use rocket::{
|
|||
};
|
||||
use rocket_dyn_templates::{Template, context};
|
||||
use scraper::{Scrape, Scraper};
|
||||
use storage::{Order, Sort, Storage, Torrent};
|
||||
use storage::{Order, Sort, Storage};
|
||||
use torrent::Torrent;
|
||||
use url::Url;
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
|
|
@ -40,6 +42,7 @@ fn index(
|
|||
meta: &State<Meta>,
|
||||
) -> Result<Template, Custom<String>> {
|
||||
use plurify::Plurify;
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
struct Row {
|
||||
|
|
@ -51,13 +54,18 @@ fn index(
|
|||
size: String,
|
||||
torrent: Torrent,
|
||||
}
|
||||
|
||||
let (total, torrents) = storage
|
||||
.torrents(
|
||||
Some((Sort::Modified, Order::Desc)),
|
||||
page.map(|p| if p > 0 { p - 1 } else { p } * storage.default_limit),
|
||||
Some(storage.default_limit),
|
||||
)
|
||||
.map_err(|e| Custom(Status::InternalServerError, e.to_string()))?;
|
||||
.map_err(|e| {
|
||||
error!("Torrents storage read error: `{e}`");
|
||||
Custom(Status::InternalServerError, E.to_string())
|
||||
})?;
|
||||
|
||||
Ok(Template::render(
|
||||
"index",
|
||||
context! {
|
||||
|
|
@ -67,19 +75,25 @@ fn index(
|
|||
else { Some(uri!(index(Some(page.map_or(2, |p| p + 1))))) },
|
||||
rows: torrents
|
||||
.into_iter()
|
||||
.map(|torrent| Row {
|
||||
created: torrent
|
||||
.creation_date
|
||||
.map(|t| t.format(&meta.format_time).to_string()),
|
||||
indexed: torrent.time.format(&meta.format_time).to_string(),
|
||||
magnet: format::magnet(&torrent.info_hash, meta.trackers.as_ref()),
|
||||
scrape: scraper.scrape(&torrent.info_hash),
|
||||
size: format::bytes(torrent.size),
|
||||
files: torrent.files.as_ref().map_or("1 file".into(), |f| {
|
||||
let l = f.len();
|
||||
format!("{l} {}", l.plurify(&["file", "files", "files"]))
|
||||
.filter_map(|t| match Torrent::from_storage(&t.bytes, t.time) {
|
||||
Ok(torrent) => Some(Row {
|
||||
created: torrent
|
||||
.creation_date
|
||||
.map(|t| t.format(&meta.format_time).to_string()),
|
||||
indexed: torrent.time.format(&meta.format_time).to_string(),
|
||||
magnet: format::magnet(&torrent.info_hash, meta.trackers.as_ref()),
|
||||
scrape: scraper.scrape(&torrent.info_hash),
|
||||
size: format::bytes(torrent.size),
|
||||
files: torrent.files.as_ref().map_or("1 file".into(), |f| {
|
||||
let l = f.len();
|
||||
format!("{l} {}", l.plurify(&["file", "files", "files"]))
|
||||
}),
|
||||
torrent,
|
||||
}),
|
||||
torrent,
|
||||
Err(e) => {
|
||||
error!("Torrent storage read error: `{e}`");
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<Row>>(),
|
||||
pagination_totals: format!(
|
||||
|
|
@ -95,16 +109,25 @@ fn index(
|
|||
#[get("/rss")]
|
||||
fn rss(feed: &State<Feed>, storage: &State<Storage>) -> Result<RawXml<String>, Custom<String>> {
|
||||
let mut b = feed.transaction(1024); // @TODO
|
||||
for torrent in storage
|
||||
for t in storage
|
||||
.torrents(
|
||||
Some((Sort::Modified, Order::Desc)),
|
||||
None,
|
||||
Some(storage.default_limit),
|
||||
)
|
||||
.map_err(|e| Custom(Status::InternalServerError, e.to_string()))?
|
||||
.map_err(|e| {
|
||||
error!("Torrent storage read error: `{e}`");
|
||||
Custom(Status::InternalServerError, E.to_string())
|
||||
})?
|
||||
.1
|
||||
{
|
||||
feed.push(&mut b, torrent)
|
||||
feed.push(
|
||||
&mut b,
|
||||
Torrent::from_storage(&t.bytes, t.time).map_err(|e| {
|
||||
error!("Torrent parse error: `{e}`");
|
||||
Custom(Status::InternalServerError, E.to_string())
|
||||
})?,
|
||||
)
|
||||
}
|
||||
Ok(RawXml(feed.commit(b)))
|
||||
}
|
||||
|
|
@ -147,7 +170,7 @@ fn rocket() -> _ {
|
|||
})
|
||||
.map(|a| (config.udp, a)),
|
||||
);
|
||||
let storage = Storage::init(config.preload, config.list_limit, config.capacity).unwrap(); // @TODO handle
|
||||
let storage = Storage::init(config.preload, config.list_limit, config.capacity).unwrap();
|
||||
rocket::build()
|
||||
.attach(Template::fairing())
|
||||
.configure(rocket::Config {
|
||||
|
|
@ -173,3 +196,6 @@ fn rocket() -> _ {
|
|||
.mount("/", rocket::fs::FileServer::from(config.statics))
|
||||
.mount("/", routes![index, rss])
|
||||
}
|
||||
|
||||
/// Public placeholder text for the `Status::InternalServerError`
|
||||
const E: &str = "Oops!";
|
||||
|
|
|
|||
135
src/storage.rs
135
src/storage.rs
|
|
@ -1,8 +1,7 @@
|
|||
use chrono::{DateTime, Utc};
|
||||
use librqbit_core::{torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
|
||||
use rocket::serde::Serialize;
|
||||
use std::{
|
||||
fs::{self, DirEntry},
|
||||
io::Error,
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
|
|
@ -19,29 +18,8 @@ pub enum Order {
|
|||
Desc,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct File {
|
||||
pub name: Option<String>,
|
||||
pub length: u64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct Torrent {
|
||||
pub announce: Option<String>,
|
||||
pub comment: Option<String>,
|
||||
pub created_by: Option<String>,
|
||||
pub creation_date: Option<DateTime<Utc>>,
|
||||
pub files: Option<Vec<File>>,
|
||||
pub info_hash: String,
|
||||
pub is_private: bool,
|
||||
pub length: Option<u64>,
|
||||
pub name: Option<String>,
|
||||
pub publisher_url: Option<String>,
|
||||
pub publisher: Option<String>,
|
||||
pub size: u64,
|
||||
/// File (modified)
|
||||
pub bytes: Vec<u8>,
|
||||
pub time: DateTime<Utc>,
|
||||
}
|
||||
|
||||
|
|
@ -76,92 +54,24 @@ impl Storage {
|
|||
sort_order: Option<(Sort, Order)>,
|
||||
start: Option<usize>,
|
||||
limit: Option<usize>,
|
||||
) -> Result<(usize, Vec<Torrent>), String> {
|
||||
) -> Result<(usize, Vec<Torrent>), Error> {
|
||||
let f = self.files(sort_order)?;
|
||||
let t = f.len();
|
||||
let l = limit.unwrap_or(t);
|
||||
let mut b = Vec::with_capacity(l);
|
||||
for file in f.into_iter().skip(start.unwrap_or_default()).take(l) {
|
||||
if file
|
||||
.path()
|
||||
.extension()
|
||||
.is_none_or(|e| e.is_empty() || e.to_string_lossy() != "torrent")
|
||||
{
|
||||
return Err("Unexpected file extension".into());
|
||||
}
|
||||
let i: TorrentMetaV1Owned = torrent_metainfo::torrent_from_bytes(
|
||||
&fs::read(file.path()).map_err(|e| e.to_string())?,
|
||||
)
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
for file in f
|
||||
.into_iter()
|
||||
.skip(start.unwrap_or_default())
|
||||
.take(l)
|
||||
.filter(|f| {
|
||||
f.path()
|
||||
.extension()
|
||||
.is_some_and(|e| !e.is_empty() && e.to_string_lossy() == "torrent")
|
||||
})
|
||||
{
|
||||
b.push(Torrent {
|
||||
info_hash: i.info_hash.as_string(),
|
||||
announce: i.announce.map(|a| a.to_string()),
|
||||
comment: i.comment.map(|c| c.to_string()),
|
||||
created_by: i.created_by.map(|c| c.to_string()),
|
||||
creation_date: i
|
||||
.creation_date
|
||||
.map(|t| DateTime::from_timestamp_nanos(t as i64)),
|
||||
size: i.info.length.unwrap_or_default()
|
||||
+ i.info
|
||||
.files
|
||||
.as_ref()
|
||||
.map(|files| files.iter().map(|f| f.length).sum::<u64>())
|
||||
.unwrap_or_default(),
|
||||
files: i.info.files.map(|files| {
|
||||
let limit = 1000; // @TODO
|
||||
let mut b = Vec::with_capacity(files.len());
|
||||
let mut i = files.iter();
|
||||
let mut t = 0;
|
||||
for f in i.by_ref() {
|
||||
if t < limit {
|
||||
t += 1;
|
||||
b.push(File {
|
||||
name: String::from_utf8(
|
||||
f.path
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(n, b)| {
|
||||
if n == 0 {
|
||||
b.0.to_vec()
|
||||
} else {
|
||||
let mut p = vec![b'/'];
|
||||
p.extend(b.0.to_vec());
|
||||
p
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
.ok(),
|
||||
length: f.length,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
// limit reached: count sizes left and use placeholder as the last item name
|
||||
let mut l = 0;
|
||||
for f in i.by_ref() {
|
||||
l += f.length
|
||||
}
|
||||
b.push(File {
|
||||
name: Some("...".to_string()),
|
||||
length: l,
|
||||
});
|
||||
break;
|
||||
}
|
||||
b[..t].sort_by(|a, b| a.name.cmp(&b.name)); // @TODO optional
|
||||
b
|
||||
}),
|
||||
publisher_url: i.publisher_url.map(|u| u.to_string()),
|
||||
publisher: i.publisher.map(|p| p.to_string()),
|
||||
is_private: i.info.private,
|
||||
length: i.info.length,
|
||||
name: i.info.name.map(|e| e.to_string()),
|
||||
time: file
|
||||
.metadata()
|
||||
.map_err(|e| e.to_string())?
|
||||
.modified()
|
||||
.map_err(|e| e.to_string())?
|
||||
.into(),
|
||||
bytes: fs::read(file.path())?,
|
||||
time: file.metadata()?.modified()?.into(),
|
||||
})
|
||||
}
|
||||
Ok((t, b))
|
||||
|
|
@ -169,17 +79,12 @@ impl Storage {
|
|||
|
||||
// Helpers
|
||||
|
||||
fn files(&self, sort_order: Option<(Sort, Order)>) -> Result<Vec<DirEntry>, String> {
|
||||
fn files(&self, sort_order: Option<(Sort, Order)>) -> Result<Vec<DirEntry>, Error> {
|
||||
let mut b = Vec::with_capacity(self.default_capacity);
|
||||
for entry in fs::read_dir(&self.root).map_err(|e| e.to_string())? {
|
||||
let e = entry.map_err(|e| e.to_string())?;
|
||||
match e.file_type() {
|
||||
Ok(t) => {
|
||||
if t.is_file() {
|
||||
b.push((e.metadata().unwrap().modified().unwrap(), e))
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("{}", e.to_string()),
|
||||
for entry in fs::read_dir(&self.root)? {
|
||||
let e = entry?;
|
||||
if e.file_type()?.is_file() {
|
||||
b.push((e.metadata()?.modified()?, e))
|
||||
}
|
||||
}
|
||||
if let Some((sort, order)) = sort_order {
|
||||
|
|
|
|||
79
src/torrent.rs
Normal file
79
src/torrent.rs
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
mod file;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use file::File;
|
||||
use librqbit_core::{torrent_metainfo, torrent_metainfo::TorrentMetaV1Owned};
|
||||
use rocket::serde::Serialize;
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct Torrent {
|
||||
pub announce: Option<String>,
|
||||
pub comment: Option<String>,
|
||||
pub created_by: Option<String>,
|
||||
pub creation_date: Option<DateTime<Utc>>,
|
||||
pub files: Option<Vec<File>>,
|
||||
pub info_hash: String,
|
||||
pub is_private: bool,
|
||||
pub length: Option<u64>,
|
||||
pub name: Option<String>,
|
||||
pub publisher_url: Option<String>,
|
||||
pub publisher: Option<String>,
|
||||
pub size: u64,
|
||||
/// File (modified)
|
||||
pub time: DateTime<Utc>,
|
||||
}
|
||||
|
||||
impl Torrent {
|
||||
pub fn from_storage(bytes: &[u8], time: DateTime<Utc>) -> Result<Self, String> {
|
||||
let i: TorrentMetaV1Owned =
|
||||
torrent_metainfo::torrent_from_bytes(bytes).map_err(|e| e.to_string())?;
|
||||
Ok(Torrent {
|
||||
info_hash: i.info_hash.as_string(),
|
||||
announce: i.announce.map(|a| a.to_string()),
|
||||
comment: i.comment.map(|c| c.to_string()),
|
||||
created_by: i.created_by.map(|c| c.to_string()),
|
||||
creation_date: i
|
||||
.creation_date
|
||||
.map(|t| DateTime::from_timestamp_nanos(t as i64)),
|
||||
size: i.info.length.unwrap_or_default()
|
||||
+ i.info
|
||||
.files
|
||||
.as_ref()
|
||||
.map(|files| files.iter().map(|f| f.length).sum::<u64>())
|
||||
.unwrap_or_default(),
|
||||
files: i.info.files.map(|files| {
|
||||
let mut b = Vec::with_capacity(files.len());
|
||||
for f in files.iter() {
|
||||
b.push(File {
|
||||
name: String::from_utf8(
|
||||
f.path
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(n, b)| {
|
||||
if n == 0 {
|
||||
b.0.to_vec()
|
||||
} else {
|
||||
let mut p = vec![b'/'];
|
||||
p.extend(b.0.to_vec());
|
||||
p
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
.ok(),
|
||||
length: f.length,
|
||||
})
|
||||
}
|
||||
b.sort_by(|a, b| a.name.cmp(&b.name)); // @TODO optional
|
||||
b
|
||||
}),
|
||||
publisher_url: i.publisher_url.map(|u| u.to_string()),
|
||||
publisher: i.publisher.map(|p| p.to_string()),
|
||||
is_private: i.info.private,
|
||||
length: i.info.length,
|
||||
name: i.info.name.map(|e| e.to_string()),
|
||||
time,
|
||||
})
|
||||
}
|
||||
}
|
||||
8
src/torrent/file.rs
Normal file
8
src/torrent/file.rs
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
use rocket::serde::Serialize;
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct File {
|
||||
pub name: Option<String>,
|
||||
pub length: u64,
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue