diff --git a/README.md b/README.md
index eec3d12..d9ac1ff 100644
--- a/README.md
+++ b/README.md
@@ -162,14 +162,16 @@ ipv6_only = false
 use_tls = false
 tls_pkcs12_path = ''
 tls_pkcs12_password = ''
-max_scrape_torrents = 255
-max_offers = 10
-peer_announce_interval = 120
 poll_event_capacity = 4096
 poll_timeout_milliseconds = 50
 websocket_max_message_size = 65536
 websocket_max_frame_size = 16384
 
+[protocol]
+max_scrape_torrents = 255
+max_offers = 10
+peer_announce_interval = 120
+
 [handlers]
 max_requests_per_iter = 10000
 channel_recv_timeout_microseconds = 200
diff --git a/aquatic_ws/src/lib/config.rs b/aquatic_ws/src/lib/config.rs
index 5defc63..505de45 100644
--- a/aquatic_ws/src/lib/config.rs
+++ b/aquatic_ws/src/lib/config.rs
@@ -31,6 +31,7 @@ pub struct Config {
     pub socket_workers: usize,
     pub log_level: LogLevel,
     pub network: NetworkConfig,
+    pub protocol: ProtocolConfig,
     pub handlers: HandlerConfig,
     pub cleaning: CleaningConfig,
     pub privileges: PrivilegeConfig,
@@ -46,12 +47,6 @@ pub struct NetworkConfig {
     pub use_tls: bool,
     pub tls_pkcs12_path: String,
     pub tls_pkcs12_password: String,
-    /// Maximum number of torrents to accept in scrape request
-    pub max_scrape_torrents: usize, // FIXME: should this really be in NetworkConfig?
-    /// Maximum number of offers to accept in announce request
-    pub max_offers: usize, // FIXME: should this really be in NetworkConfig?
-    /// Ask peers to announce this often (seconds)
-    pub peer_announce_interval: usize, // FIXME: should this really be in NetworkConfig?
     pub poll_event_capacity: usize,
     pub poll_timeout_milliseconds: u64,
     pub websocket_max_message_size: usize,
@@ -69,6 +64,18 @@ pub struct HandlerConfig {
 }
 
 
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[serde(default)]
+pub struct ProtocolConfig {
+    /// Maximum number of torrents to accept in scrape request
+    pub max_scrape_torrents: usize,
+    /// Maximum number of offers to accept in announce request
+    pub max_offers: usize,
+    /// Ask peers to announce this often (seconds)
+    pub peer_announce_interval: usize,
+}
+
+
 #[derive(Clone, Debug, Serialize, Deserialize)]
 #[serde(default)]
 pub struct CleaningConfig {
@@ -99,6 +106,7 @@ impl Default for Config {
             socket_workers: 1,
             log_level: LogLevel::default(),
             network: NetworkConfig::default(),
+            protocol: ProtocolConfig::default(),
             handlers: HandlerConfig::default(),
             cleaning: CleaningConfig::default(),
             privileges: PrivilegeConfig::default(),
@@ -115,9 +123,6 @@ impl Default for NetworkConfig {
             use_tls: false,
             tls_pkcs12_path: "".into(),
             tls_pkcs12_password: "".into(),
-            max_scrape_torrents: 255, // FIXME: what value is reasonable?
-            max_offers: 10,
-            peer_announce_interval: 120,
             poll_event_capacity: 4096,
             poll_timeout_milliseconds: 50,
             websocket_max_message_size: 64 * 1024,
@@ -127,6 +132,17 @@
 }
 
 
+impl Default for ProtocolConfig {
+    fn default() -> Self {
+        Self {
+            max_scrape_torrents: 255, // FIXME: what value is reasonable?
+            max_offers: 10,
+            peer_announce_interval: 120,
+        }
+    }
+}
+
+
 impl Default for HandlerConfig {
     fn default() -> Self {
         Self {
diff --git a/aquatic_ws/src/lib/handler.rs b/aquatic_ws/src/lib/handler.rs
index 71413c9..d0bf910 100644
--- a/aquatic_ws/src/lib/handler.rs
+++ b/aquatic_ws/src/lib/handler.rs
@@ -153,7 +153,7 @@ pub fn handle_announce_requests(
         if let Some(offers) = request.offers {
             // FIXME: config: also maybe check this when parsing request
             let max_num_peers_to_take = offers.len()
-                .min(config.network.max_offers);
+                .min(config.protocol.max_offers);
 
             #[inline]
             fn f(peer: &Peer) -> Peer {
@@ -206,7 +206,7 @@ pub fn handle_announce_requests(
                 info_hash,
                 complete: torrent_data.num_seeders,
                 incomplete: torrent_data.num_leechers,
-                announce_interval: config.network.peer_announce_interval,
+                announce_interval: config.protocol.peer_announce_interval,
             });
 
             messages_out.push((sender_meta, response));
@@ -222,7 +222,7 @@ pub fn handle_scrape_requests(
 ){
     messages_out.extend(requests.map(|(meta, request)| {
         let num_to_take = request.info_hashes.len().min(
-            config.network.max_scrape_torrents
+            config.protocol.max_scrape_torrents
         );
 
         let mut response = ScrapeResponse {