Mirror of https://github.com/YGGverse/aquatic.git, synced 2026-04-02 18:55:32 +00:00
aquatic_http: fix issue with incorrect parsing of info_hash, peer_id
This commit is contained in:
parent 136a79ce8d
commit f28abbb7f6

2 changed files with 48 additions and 5 deletions
TODO.md

@@ -12,8 +12,6 @@
   what error return type to use then
 * compact peer representation in announce response: is implementation correct?
 * scrape info hash parsing: multiple ought to be accepted
-* info hashes, peer ids: check that whole deserialization and url decoding
-  works as it should. There are suspicously many `\u{fffd}`
 * move stuff to common crate with ws: what about Request/InMessage etc?
 * don't overdo this
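A note on the removed TODO item: `\u{fffd}` is the Unicode replacement character, which lossy UTF-8 decoding substitutes for bytes that do not form valid UTF-8. That is exactly what was happening to raw info_hash and peer_id bytes, and the item is removed alongside the fix below. A one-line illustration (mine, not part of the commit):

fn main() {
    // A lone 0xfa byte is not valid UTF-8, so lossy decoding yields U+FFFD.
    assert_eq!(String::from_utf8_lossy(&[0xfa]), "\u{fffd}");
}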
The second changed file is not named in this view; its hunks are in impl Request, the HTTP request parser:

@@ -148,10 +148,11 @@ impl Request {
         let mut split_parts= path.splitn(2, '?');
 
         let path = split_parts.next()?;
-        let query_string = split_parts.next()?;
+        let query_string = Self::preprocess_query_string(split_parts.next()?);
 
         if path == "/announce" {
-            let result: Result<AnnounceRequest, serde_urlencoded::de::Error> = serde_urlencoded::from_str(query_string);
+            let result: Result<AnnounceRequest, serde_urlencoded::de::Error> =
+                serde_urlencoded::from_str(&query_string);
 
             if let Err(ref err) = result {
                 log::debug!("error: {}", err);
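Why preprocessing is needed at all: serde_urlencoded percent-decodes escapes and then treats the result as UTF-8 (it is built on url's form_urlencoded parser, which decodes lossily), so a raw non-ASCII byte such as 0xfa can never round-trip and comes out as the replacement character. A minimal sketch of the failure mode; this example is mine, not part of the commit, and assumes the crate's documented support for deserializing into Vec<(String, String)>:

fn main() {
    // Without the preprocessing step, "%fa" lossily decodes to U+FFFD,
    // not to byte 0xfa, so 20-byte info hashes arrive corrupted.
    let pairs: Vec<(String, String)> =
        serde_urlencoded::from_str("id=%fa").unwrap();

    assert_eq!(pairs[0].1, "\u{fffd}");
}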
@@ -159,7 +160,8 @@ impl Request {
 
             result.ok().map(Request::Announce)
         } else {
-            let result: Result<ScrapeRequest, serde_urlencoded::de::Error> = serde_urlencoded::from_str(query_string);
+            let result: Result<ScrapeRequest, serde_urlencoded::de::Error> =
+                serde_urlencoded::from_str(&query_string);
 
             if let Err(ref err) = result {
                 log::debug!("error: {}", err);
@@ -168,6 +170,49 @@ impl Request {
 
             result.ok().map(Request::Scrape)
         }
     }
+
+    /// The info hashes and peer id's that are received are url-encoded byte
+    /// by byte, e.g., %fa for byte 0xfa. However, they are parsed as an UTF-8
+    /// string, meaning that non-ascii bytes are invalid characters. Therefore,
+    /// these bytes must be converted to their equivalent multi-byte UTF-8
+    /// encodings first.
+    fn preprocess_query_string(query_string: &str) -> String {
+        let mut processed = String::new();
+
+        for (i, part) in query_string.split('%').enumerate(){
+            println!("{}", part);
+
+            if i == 0 {
+                processed.push_str(part);
+            } else if part.len() >= 2 {
+                let mut two_first = String::with_capacity(2);
+                let mut rest = String::new();
+
+                for (j, c) in part.chars().enumerate(){
+                    if j < 2 {
+                        two_first.push(c);
+                    } else {
+                        rest.push(c);
+                    }
+                }
+
+                let byte = u8::from_str_radix(&two_first, 16).unwrap();
+
+                let mut tmp = [0u8; 4];
+
+                let slice = (byte as char).encode_utf8(&mut tmp);
+
+                for byte in slice.bytes(){
+                    processed.push('%');
+                    processed.push_str(&format!("{:02x}", byte));
+                }
+
+                processed.push_str(&rest);
+            }
+        }
+
+        processed
+    }
 }
 
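For readers skimming the diff, here is a self-contained sketch of the same byte-to-UTF-8 re-escaping technique, runnable outside the tracker. Names mirror the commit, but the hex-error fallback, the char-boundary guard, and the removal of the stray println! debug line are my additions, not part of the commit:

/// Re-escape each "%XX" byte escape as the percent-encoded UTF-8 bytes of
/// the char U+00XX, so that a UTF-8-based urlencoded parser decodes back
/// to chars whose code points equal the original raw bytes.
fn preprocess_query_string(query_string: &str) -> String {
    let mut processed = String::new();

    for (i, part) in query_string.split('%').enumerate() {
        if i == 0 {
            // Text before the first '%' contains no escapes.
            processed.push_str(part);
        } else if part.len() >= 2 && part.is_char_boundary(2) {
            let (hex, rest) = part.split_at(2);

            match u8::from_str_radix(hex, 16) {
                Ok(byte) => {
                    // "%fa" -> char U+00FA -> UTF-8 [0xc3, 0xba] -> "%c3%ba"
                    let mut buf = [0u8; 4];

                    for b in (byte as char).encode_utf8(&mut buf).bytes() {
                        processed.push_str(&format!("%{:02x}", b));
                    }

                    processed.push_str(rest);
                }
                // Unlike the commit (which calls unwrap()), pass malformed
                // escapes through unchanged instead of panicking.
                Err(_) => {
                    processed.push('%');
                    processed.push_str(part);
                }
            }
        } else {
            // Unlike the commit (which drops these), keep short trailing
            // parts such as a lone '%' or '%a'.
            processed.push('%');
            processed.push_str(part);
        }
    }

    processed
}

fn main() {
    // Non-ASCII byte 0xfa is re-escaped as two UTF-8 bytes; ASCII 0x12
    // still encodes as a single byte and passes through unchanged.
    assert_eq!(
        preprocess_query_string("info_hash=%fa%12abc"),
        "info_hash=%c3%ba%12abc"
    );
}

Two things a reviewer might flag in the committed version: u8::from_str_radix(...).unwrap() panics on a malformed escape such as "%zz" in an attacker-controlled query string, and parts shorter than two characters (a trailing "%" or "%a") are silently dropped rather than passed through.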