mirror of
https://github.com/YGGverse/aquatic-crawler.git
synced 2026-03-31 17:15:35 +00:00
use binary api
This commit is contained in:
parent
15c8d8c350
commit
65e6deaabc
6 changed files with 44 additions and 15 deletions
|
|
@ -17,8 +17,6 @@ clap = { version = "4.5", features = ["derive"] }
|
||||||
hyper-util = "0.1"
|
hyper-util = "0.1"
|
||||||
librqbit = {version = "9.0.0-beta.0", features = ["disable-upload"]}
|
librqbit = {version = "9.0.0-beta.0", features = ["disable-upload"]}
|
||||||
regex = "1.11"
|
regex = "1.11"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
|
||||||
serde_json = "1.0"
|
|
||||||
tokio = { version = "1.45", features = ["full"] }
|
tokio = { version = "1.45", features = ["full"] }
|
||||||
tracing-subscriber = "0.3"
|
tracing-subscriber = "0.3"
|
||||||
url = "2.5"
|
url = "2.5"
|
||||||
|
|
|
||||||
10
README.md
10
README.md
|
|
@ -18,7 +18,7 @@ Crawler for [Aquatic](https://github.com/greatest-ape/aquatic) BitTorrent tracke
|
||||||
* [x] 1
|
* [x] 1
|
||||||
* [ ] 2
|
* [ ] 2
|
||||||
* Import sources
|
* Import sources
|
||||||
* [x] IPv4 / IPv6 info-hash JSON/API (requires [PR#233](https://github.com/greatest-ape/aquatic/pull/233))
|
* [x] IPv4 / IPv6 info-hash binary API (requires [PR#233](https://github.com/greatest-ape/aquatic/pull/233))
|
||||||
* [x] local file path
|
* [x] local file path
|
||||||
* [ ] remote URL
|
* [ ] remote URL
|
||||||
* Export options
|
* Export options
|
||||||
|
|
@ -40,9 +40,9 @@ Crawler for [Aquatic](https://github.com/greatest-ape/aquatic) BitTorrent tracke
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
aquatic-crawler --infohash /path/to/info-hash-ipv4.json\
|
aquatic-crawler --infohash /path/to/info-hash-ipv4.bin\
|
||||||
--infohash /path/to/info-hash-ipv6.json\
|
--infohash /path/to/info-hash-ipv6.bin\
|
||||||
--infohash /path/to/another-source.json\
|
--infohash /path/to/another-source.bin\
|
||||||
--tracker udp://host1:port\
|
--tracker udp://host1:port\
|
||||||
--tracker udp://host2:port\
|
--tracker udp://host2:port\
|
||||||
--preload /path/to/directory\
|
--preload /path/to/directory\
|
||||||
|
|
@ -62,7 +62,7 @@ aquatic-crawler --infohash /path/to/info-hash-ipv4.json\
|
||||||
[default: ei]
|
[default: ei]
|
||||||
|
|
||||||
--infohash <INFOHASH>
|
--infohash <INFOHASH>
|
||||||
Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker JSON/API
|
Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API
|
||||||
|
|
||||||
* PR#233 feature
|
* PR#233 feature
|
||||||
|
|
||||||
|
|
|
||||||
44
src/api.rs
44
src/api.rs
|
|
@ -1,17 +1,49 @@
|
||||||
/// Parse info-hashes from the source file path.
///
/// Reads the Aquatic tracker binary API feed: a flat sequence of 20-byte
/// (BitTorrent v1) info-hashes with no framing. Each record is returned
/// hex-encoded (lowercase, 40 chars). Returns `None` if the feed file is
/// not reachable or an I/O error occurs mid-read; a trailing partial
/// record (file length not a multiple of 20) is silently ignored, since
/// the tracker may still be appending to the file.
pub fn get(path: &str) -> Option<Vec<String>> {
    use std::io::Read;
    if path.contains("://") {
        todo!("URL sources yet not supported")
    }
    const L: usize = 20; // v1 only
    let mut f = std::io::BufReader::new(std::fs::File::open(path).ok()?);
    let mut r = Vec::new();
    let mut b = [0u8; L]; // reused record buffer: no per-iteration allocation
    loop {
        // `read_exact` (unlike a single `read`) retries short reads, so a
        // partial read mid-file cannot silently truncate the feed.
        match f.read_exact(&mut b) {
            Ok(()) => r.push(b.iter().map(|i| format!("{i:02x}")).collect::<String>()),
            // Clean EOF or a trailing partial record ends the feed.
            Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => break,
            // Any other I/O failure: treat the source as unreachable.
            Err(_) => return None,
        }
    }
    Some(r)
}
|
||||||
|
|
||||||
/// Exercise `get` against the three feed states: empty, two records,
/// and an unreachable file.
#[test]
fn test() {
    use std::fs;

    // Platform-neutral temp location instead of hard-coded "/tmp" paths —
    // this removes the need for a cfg-gated todo!() on non-unix targets.
    let dir = std::env::temp_dir();
    let p0 = dir.join("aquatic-crawler-api-test-0.bin");
    let p1 = dir.join("aquatic-crawler-api-test-1.bin");
    let p2 = dir.join("aquatic-crawler-api-test-2.bin"); // never created

    fs::write(&p0, [0u8; 0]).unwrap(); // empty feed -> Some(empty)
    fs::write(&p1, [1u8; 40]).unwrap(); // 20 + 20 bytes -> two hashes

    assert!(get(p0.to_str().unwrap()).is_some_and(|b| b.is_empty()));
    assert!(get(p1.to_str().unwrap()).is_some_and(|b| b.len() == 2));
    // Missing file: the source is unreachable, so the feed is None.
    assert!(get(p2.to_str().unwrap()).is_none());

    fs::remove_file(p0).unwrap();
    fs::remove_file(p1).unwrap();
}
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ pub struct Config {
|
||||||
#[arg(short, long, default_value_t = String::from("ei"))]
|
#[arg(short, long, default_value_t = String::from("ei"))]
|
||||||
pub debug: String,
|
pub debug: String,
|
||||||
|
|
||||||
/// Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker JSON/API
|
/// Absolute path(s) or URL(s) to import infohashes from the Aquatic tracker binary API
|
||||||
///
|
///
|
||||||
/// * PR#233 feature
|
/// * PR#233 feature
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
|
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
["1","2","3"]
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue