initial commit

This commit is contained in:
yggverse 2025-09-06 18:31:49 +03:00
parent e7fe14be8a
commit 8bf5e7a21f
7 changed files with 234 additions and 1 deletions

1
.github/FUNDING.yml vendored Normal file
View file

@ -0,0 +1 @@
custom: https://yggverse.github.io/#donate

24
.github/workflows/linux.yml vendored Normal file
View file

@ -0,0 +1,24 @@
# CI: format check, lint, build, and test on every push / PR to main.
name: Linux

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

env:
  CARGO_TERM_COLOR: always
  # fail the build on any compiler warning
  RUSTFLAGS: -Dwarnings

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: rustup update
      - run: cargo fmt --all -- --check
      - run: cargo clippy --all-targets
      - run: cargo build --verbose
      - run: cargo test --verbose

2
.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
/target
Cargo.lock

19
Cargo.toml Normal file
View file

@ -0,0 +1,19 @@
[package]
name = "btracker-fs"
version = "0.1.0"
edition = "2024"
license = "MIT"
readme = "README.md"
description = "Shared filesystem API for the βtracker project components"
keywords = ["btracker", "bittorrent", "aquatic-crawler", "librqbit", "fs"]
categories = ["network-programming"]
repository = "https://github.com/yggverse/btracker-fs"
# homepage = "https://yggverse.github.io"

[features]
# `public` gates compilation of the src/public.rs module (off by default)
# default = ["public"]
public = []

[dependencies]
# chrono: UTC timestamps for torrent files ("serde" for serializable types)
chrono = { version = "0.4.41", features = ["serde"] }
# librqbit-core: info-hash type and torrent metainfo parsing
librqbit-core = "5.0"

View file

@ -1,2 +1,7 @@
# btracker-fs
Shared filesystem API for the βtracker project
![Linux](https://github.com/yggverse/btracker-fs/actions/workflows/linux.yml/badge.svg)
[![Dependencies](https://deps.rs/repo/github/yggverse/btracker-fs/status.svg)](https://deps.rs/repo/github/yggverse/btracker-fs)
[![crates.io](https://img.shields.io/crates/v/btracker-fs.svg)](https://crates.io/crates/btracker-fs)
Shared filesystem API for the [βtracker](https://github.com/yggverse/btracker) project components

2
src/lib.rs Normal file
View file

@ -0,0 +1,2 @@
// Public-directory filesystem API; compiled only when the `public` feature
// is enabled (see `[features]` in Cargo.toml).
#[cfg(feature = "public")]
pub mod public;

180
src/public.rs Normal file
View file

@ -0,0 +1,180 @@
use chrono::{DateTime, Utc};
use std::{fs, io::Error, path::PathBuf, time::SystemTime};
/// Field by which a directory listing is sorted.
///
/// Fieldless enum, so `Copy`/`Eq` derives are free and make the type easier
/// to pass around and compare.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum Sort {
    /// Sort by file modification time (currently the only supported key).
    #[default]
    Modified,
}
/// Direction applied to the selected [`Sort`] key.
///
/// Fieldless enum, so `Copy`/`Eq` derives are free and make the type easier
/// to pass around and compare.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum Order {
    /// Ascending (oldest first for `Sort::Modified`).
    #[default]
    Asc,
    /// Descending (newest first for `Sort::Modified`).
    Desc,
}
/// A `.torrent` file loaded from the public root directory.
///
/// Public API type, so it derives `Debug` (diagnostics) and `Clone`.
#[derive(Clone, Debug)]
pub struct Torrent {
    /// Raw bytes of the `.torrent` file as read from disk.
    pub bytes: Vec<u8>,
    /// Filesystem modification time of the file, converted to UTC.
    pub time: DateTime<Utc>,
}
/// Read-only view over a directory of publicly served `.torrent` files.
///
/// Construct via `Public::init`, which validates and canonicalizes the root.
#[derive(Clone, Debug)]
pub struct Public {
    // Capacity hint used when collecting directory entries in `files()`.
    default_capacity: usize,
    // NOTE(review): not read by any method in this file; presumably a
    // default page size for API consumers — confirm against callers.
    pub default_limit: usize,
    // Canonicalized root directory (set by `init`).
    root: PathBuf,
}
impl Public {
    // Constructors

    /// Create a `Public` handle over `root`.
    ///
    /// `root` must be an existing directory; it is canonicalized so that the
    /// path-containment check in [`Self::href`] is reliable.
    ///
    /// * `default_limit` — stored for callers; not read by methods here
    ///   (presumably a default page size — confirm against consumers)
    /// * `default_capacity` — capacity hint for the internal file list
    ///
    /// Returns `Err` with a message when `root` is not a directory or
    /// cannot be canonicalized.
    pub fn init(
        root: PathBuf,
        default_limit: usize,
        default_capacity: usize,
    ) -> Result<Self, String> {
        if !root.is_dir() {
            return Err("Public root is not a directory".into());
        }
        Ok(Self {
            default_capacity,
            default_limit,
            root: root.canonicalize().map_err(|e| e.to_string())?,
        })
    }

    // Getters

    /// Load `<info_hash>.torrent` from the root together with its
    /// modification time; `None` when the file is missing or unreadable.
    pub fn torrent(&self, info_hash: librqbit_core::Id20) -> Option<Torrent> {
        let path = self.root.join(format!("{}.{E}", info_hash.as_string()));
        Some(Torrent {
            bytes: fs::read(&path).ok()?,
            time: path.metadata().ok()?.modified().ok()?.into(),
        })
    }

    /// List torrents matching `keyword`, optionally sorted, returning
    /// `(total_matches, page)` where the page is selected by `start`/`limit`.
    ///
    /// # Errors
    /// Propagates I/O errors from reading the directory or torrent files.
    pub fn torrents(
        &self,
        keyword: Option<&str>,
        sort_order: Option<(Sort, Order)>,
        start: Option<usize>,
        limit: Option<usize>,
    ) -> Result<(usize, Vec<Torrent>), Error> {
        let files = self.files(keyword, sort_order)?;
        let total = files.len();
        let limit = limit.unwrap_or(total);
        // cap the capacity by `total` so an oversized caller-supplied limit
        // cannot trigger a huge up-front allocation
        let mut torrents = Vec::with_capacity(limit.min(total));
        for file in files.into_iter().skip(start.unwrap_or_default()).take(limit) {
            torrents.push(Torrent {
                bytes: fs::read(file.path)?,
                time: file.modified.into(),
            })
        }
        Ok((total, torrents))
    }

    /// Resolve `info_hash/path` into a root-relative href.
    ///
    /// Returns `None` when the target does not exist or, after symlink and
    /// `..` resolution, escapes the public root (path-traversal guard; the
    /// root itself was canonicalized in `init`, so `starts_with` is sound).
    pub fn href(&self, info_hash: &str, path: &str) -> Option<String> {
        let mut relative = PathBuf::from(info_hash);
        relative.push(path);
        // `canonicalize` fails for nonexistent paths, so a successful result
        // is guaranteed to exist — no separate `exists()` check is needed
        let canonical = self.root.join(&relative).canonicalize().ok()?;
        if canonical.starts_with(&self.root) {
            Some(relative.to_string_lossy().into())
        } else {
            None
        }
    }

    // Helpers

    /// Collect `.torrent` files from the root, filtered by `keyword` and
    /// optionally sorted by modification time.
    ///
    /// A keyword matches when every separator-delimited term (see `S`)
    /// occurs case-insensitively in at least one metadata field: info-hash,
    /// name, comment, created-by, publisher, publisher URL, announce or
    /// announce-list entry, or a file path inside the torrent. A keyword
    /// consisting only of separators matches everything.
    fn files(
        &self,
        keyword: Option<&str>,
        sort_order: Option<(Sort, Order)>,
    ) -> Result<Vec<File>, Error> {
        let mut files = Vec::with_capacity(self.default_capacity);
        for dir_entry in fs::read_dir(&self.root)? {
            let entry = dir_entry?;
            let path = entry.path();
            // only plain files carrying the `.torrent` extension
            if !path.is_file() || path.extension().is_none_or(|e| e != E) {
                continue;
            }
            // skip entries that fail the keyword filter: parse the torrent
            // metainfo and require every query term to hit some field
            if let Some(k) = keyword
                && !k.trim_matches(S).is_empty()
                && !librqbit_core::torrent_metainfo::torrent_from_bytes(&fs::read(&path)?)
                    .is_ok_and(|m: librqbit_core::torrent_metainfo::TorrentMetaV1Owned| {
                        k.split(S)
                            .filter(|s| !s.is_empty())
                            .map(|s| s.trim().to_lowercase())
                            .all(|q| {
                                m.info_hash.as_string().to_lowercase().contains(&q)
                                    || m.info
                                        .name
                                        .as_ref()
                                        .is_some_and(|n| n.to_string().to_lowercase().contains(&q))
                                    || m.comment
                                        .as_ref()
                                        .is_some_and(|c| c.to_string().to_lowercase().contains(&q))
                                    || m.created_by
                                        .as_ref()
                                        .is_some_and(|c| c.to_string().to_lowercase().contains(&q))
                                    || m.publisher
                                        .as_ref()
                                        .is_some_and(|p| p.to_string().to_lowercase().contains(&q))
                                    || m.publisher_url
                                        .as_ref()
                                        .is_some_and(|u| u.to_string().to_lowercase().contains(&q))
                                    || m.announce
                                        .as_ref()
                                        .is_some_and(|a| a.to_string().to_lowercase().contains(&q))
                                    || m.announce_list.iter().any(|l| {
                                        l.iter().any(|a| a.to_string().to_lowercase().contains(&q))
                                    })
                                    || m.info.files.as_ref().is_some_and(|f| {
                                        f.iter().any(|f| {
                                            let mut p = PathBuf::new();
                                            f.full_path(&mut p).is_ok_and(|_| {
                                                p.to_string_lossy().to_lowercase().contains(&q)
                                            })
                                        })
                                    })
                            })
                    })
            {
                continue;
            }
            files.push(File {
                modified: entry.metadata()?.modified()?,
                path,
            })
        }
        if let Some((sort, order)) = sort_order {
            match sort {
                Sort::Modified => match order {
                    Order::Asc => files.sort_by(|a, b| a.modified.cmp(&b.modified)),
                    Order::Desc => files.sort_by(|a, b| b.modified.cmp(&a.modified)),
                },
            }
        }
        Ok(files)
    }
}
// Local members

/// Torrent file extension (compared against `Path::extension`, so no dot)
const E: &str = "torrent";

/// Search keyword separators used to split a query string into terms
const S: &[char] = &[
    '_', '-', ':', ';', ',', '(', ')', '[', ']', '/', '!', '?', ' ', // @TODO make optional
];
/// Internal record for a `.torrent` file found in the public root:
/// its path plus the modification time used for sorting and `Torrent::time`.
struct File {
    // Filesystem modification timestamp of the file
    modified: SystemTime,
    // Absolute path of the file inside the public root
    path: PathBuf,
}