From 7ee588a565e8746f6195adfea890ee78a7e96dd7 Mon Sep 17 00:00:00 2001
From: Jose Celano
Date: Tue, 31 Jan 2023 18:20:27 +0000
Subject: [PATCH] refactor(test): [#159] refactor tests for scrape request

---
 cSpell.json                     |   4 +-
 src/tracker/peer.rs             |   2 +-
 tests/common/fixtures.rs        |  12 ++
 tests/http/asserts.rs           |  16 ++-
 tests/http/bencode.rs           |  14 ++
 tests/http/requests/announce.rs |   2 +-
 tests/http/responses/scrape.rs  | 223 +++++++++++++++++++++++---------
 tests/http_tracker.rs           |  76 +++++------
 8 files changed, 235 insertions(+), 114 deletions(-)

diff --git a/cSpell.json b/cSpell.json
index 5d0a6e1f1..dc51c87c5 100644
--- a/cSpell.json
+++ b/cSpell.json
@@ -72,6 +72,8 @@
     "Vagaa",
     "Vuze",
     "Xtorrent",
-    "Xunlei"
+    "Xunlei",
+    "xxxxxxxxxxxxxxxxxxxxd",
+    "yyyyyyyyyyyyyyyyyyyyd"
   ]
 }
diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs
index 22889381f..3f639f970 100644
--- a/src/tracker/peer.rs
+++ b/src/tracker/peer.rs
@@ -20,7 +20,7 @@ pub struct Peer {
     #[serde(with = "NumberOfBytesDef")]
     pub downloaded: NumberOfBytes,
     #[serde(with = "NumberOfBytesDef")]
-    pub left: NumberOfBytes,
+    pub left: NumberOfBytes, // The number of bytes this peer still has to download
     #[serde(with = "AnnounceEventDef")]
     pub event: AnnounceEvent,
 }
diff --git a/tests/common/fixtures.rs b/tests/common/fixtures.rs
index 0ff6798f6..5e644c45f 100644
--- a/tests/common/fixtures.rs
+++ b/tests/common/fixtures.rs
@@ -21,6 +21,18 @@ impl PeerBuilder {
         self
     }
 
+    #[allow(dead_code)]
+    pub fn with_bytes_pending_to_download(mut self, left: i64) -> Self {
+        self.peer.left = NumberOfBytes(left);
+        self
+    }
+
+    #[allow(dead_code)]
+    pub fn build(self) -> Peer {
+        self.into()
+    }
+
+    #[allow(dead_code)]
     pub fn into(self) -> Peer {
         self.peer
     }
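The `with_bytes_pending_to_download` fixture drives the scrape counters exercised later in this patch: a peer with `left > 0` is reported as a leecher (`incomplete`), while `left == 0` marks a seeder (`complete`), which is what the updated scrape test below relies on. A minimal sketch of the new builder chain, not part of the patch (the fn name is illustrative; paths assume the test crate layout shown above):

```rust
use torrust_tracker::tracker::peer;

use crate::common::fixtures::PeerBuilder;

// Illustrative helper only: builds a peer with one byte still pending,
// which the tracker counts under "incomplete" in scrape responses.
fn example_leecher() -> peer::Peer {
    PeerBuilder::default()
        .with_peer_id(&peer::Id(*b"-qB00000000000000001"))
        .with_bytes_pending_to_download(1)
        .build()
}
```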
diff --git a/tests/http/asserts.rs b/tests/http/asserts.rs
index 4e2214317..b8ccfee22 100644
--- a/tests/http/asserts.rs
+++ b/tests/http/asserts.rs
@@ -1,6 +1,7 @@
 use reqwest::Response;
 
 use super::responses::announce::{Announce, Compact, DeserializedCompact};
+use super::responses::scrape;
 use crate::http::responses::error::Error;
 
 pub async fn assert_empty_announce_response(response: Response) {
@@ -17,7 +18,7 @@ pub async fn assert_announce_response(response: Response, expected_announce_resp
     assert_eq!(announce_response, *expected_announce_response);
 }
 
-/// Sample bencoded response as byte array:
+/// Sample bencoded announce response as byte array:
 ///
 /// ```text
 /// b"d8:intervali120e12:min intervali120e8:completei2e10:incompletei0e5:peers6:~\0\0\x01\x1f\x90e6:peers60:e"
 /// ```
@@ -40,6 +41,19 @@ pub async fn assert_compact_announce_response(response: Response, expected_respo
     assert_eq!(actual_response, *expected_response);
 }
 
+/// Sample bencoded scrape response as byte array:
+///
+/// ```text
+/// b"d5:filesd20:\x9c8B\"\x13\xe3\x0b\xff!+0\xc3`\xd2o\x9a\x02\x13d\"d8:completei1e10:downloadedi0e10:incompletei0eeee"
+/// ```
+pub async fn assert_scrape_response(response: Response, expected_response: &scrape::Response) {
+    assert_eq!(response.status(), 200);
+
+    let scrape_response = scrape::Response::try_from_bytes(&response.bytes().await.unwrap()).unwrap();
+
+    assert_eq!(scrape_response, *expected_response);
+}
+
 pub async fn assert_is_announce_response(response: Response) {
     assert_eq!(response.status(), 200);
     let body = response.text().await.unwrap();
diff --git a/tests/http/bencode.rs b/tests/http/bencode.rs
index b67b278d7..d107089cf 100644
--- a/tests/http/bencode.rs
+++ b/tests/http/bencode.rs
@@ -1 +1,15 @@
 pub type ByteArray20 = [u8; 20];
+
+pub struct InfoHash(ByteArray20);
+
+impl InfoHash {
+    pub fn new(vec: &[u8]) -> Self {
+        let mut byte_array_20: ByteArray20 = Default::default();
+        byte_array_20.clone_from_slice(vec);
+        Self(byte_array_20)
+    }
+
+    pub fn bytes(&self) -> ByteArray20 {
+        self.0
+    }
+}
diff --git a/tests/http/requests/announce.rs b/tests/http/requests/announce.rs
index 5656d8f1d..a8ebc95f8 100644
--- a/tests/http/requests/announce.rs
+++ b/tests/http/requests/announce.rs
@@ -225,7 +225,7 @@ impl QueryParams {
 
     pub fn remove_optional_params(&mut self) {
         // todo: make them optional with the Option<...> in the AnnounceQuery struct
-        // if they are really optional. SO that we can crete a minimal AnnounceQuery
+        // if they are really optional. So that we can create a minimal AnnounceQuery
         // instead of removing the optional params afterwards.
         //
         // The original specification on:
diff --git a/tests/http/responses/scrape.rs b/tests/http/responses/scrape.rs
index 450006815..c9081a10f 100644
--- a/tests/http/responses/scrape.rs
+++ b/tests/http/responses/scrape.rs
@@ -4,84 +4,36 @@ use std::str;
 
 use serde::{self, Deserialize, Serialize};
 use serde_bencode::value::Value;
 
-use crate::http::bencode::ByteArray20;
+use crate::http::bencode::{ByteArray20, InfoHash};
 
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Default)]
 pub struct Response {
     pub files: HashMap<ByteArray20, File>,
 }
 
 impl Response {
-    pub fn from_bytes(bytes: &[u8]) -> Self {
+    pub fn try_from_bytes(bytes: &[u8]) -> Result<Self, BencodeParseError> {
         let scrape_response: DeserializedResponse = serde_bencode::from_bytes(bytes).unwrap();
-        Self::from(scrape_response)
+        Self::try_from(scrape_response)
+    }
+
+    pub fn empty() -> Self {
+        Self::default()
     }
 }
 
 #[derive(Serialize, Deserialize, Debug, PartialEq)]
 pub struct File {
-    pub complete: i64,
-    pub downloaded: i64,
-    pub incomplete: i64,
+    pub complete: i64,   // The number of active peers that have completed downloading
+    pub downloaded: i64, // The number of peers that have ever completed downloading
+    pub incomplete: i64, // The number of active peers that have not completed downloading
 }
 
-impl From<DeserializedResponse> for Response {
-    fn from(scrape_response: DeserializedResponse) -> Self {
-        // todo:
-        // - Use `try_from` trait instead of `from`.
-        // - Improve error messages.
-        // - Extract parser function out of the trait.
-        // - Extract parser for each nested element.
-        // - Extract function to instantiate [u8; 20] from Vec.
-        let mut files: HashMap<ByteArray20, File> = HashMap::new();
-
-        match scrape_response.files {
-            Value::Dict(dict) => {
-                for file_element in dict {
-                    let info_hash_byte_vec = file_element.0;
-                    let file_value = file_element.1;
-
-                    let file = match &file_value {
-                        Value::Dict(dict) => {
-                            let mut file = File {
-                                complete: 0,
-                                downloaded: 0,
-                                incomplete: 0,
-                            };
-
-                            for file_field in dict {
-                                let value = match file_field.1 {
-                                    Value::Int(number) => *number,
-                                    _ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Int>"),
-                                };
-
-                                if file_field.0 == b"complete" {
-                                    file.complete = value;
-                                } else if file_field.0 == b"downloaded" {
-                                    file.downloaded = value;
-                                } else if file_field.0 == b"incomplete" {
-                                    file.incomplete = value;
-                                } else {
-                                    panic!("Error parsing bencoded scrape response. Invalid field");
-                                }
-                            }
-
-                            file
-                        }
-                        _ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Dict>"),
-                    };
-
-                    // Clone Vec into [u8; 20]
-                    let mut info_hash_byte_array: [u8; 20] = Default::default();
-                    info_hash_byte_array.clone_from_slice(info_hash_byte_vec.as_slice());
-
-                    files.insert(info_hash_byte_array, file);
-                }
-            }
-            _ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Dict>"),
-        }
+impl TryFrom<DeserializedResponse> for Response {
+    type Error = BencodeParseError;
 
-        Self { files }
+    fn try_from(scrape_response: DeserializedResponse) -> Result<Self, Self::Error> {
+        parse_bencoded_response(&scrape_response.files)
     }
 }
@@ -89,3 +41,148 @@ impl From<DeserializedResponse> for Response {
 struct DeserializedResponse {
     pub files: Value,
 }
+
+pub struct ResponseBuilder {
+    response: Response,
+}
+
+impl ResponseBuilder {
+    pub fn default() -> Self {
+        Self {
+            response: Response::empty(),
+        }
+    }
+
+    pub fn add_file(mut self, info_hash_bytes: ByteArray20, file: File) -> Self {
+        self.response.files.insert(info_hash_bytes, file);
+        self
+    }
+
+    pub fn build(self) -> Response {
+        self.response
+    }
+}
+
+#[derive(Debug)]
+pub enum BencodeParseError {
+    InvalidValueExpectedDict { value: Value },
+    InvalidValueExpectedInt { value: Value },
+    InvalidFileField { value: Value },
+    MissingFileField { field_name: String },
+}
+
+/// It parses a bencoded scrape response into a `Response` struct.
+///
+/// For example:
+///
+/// ```text
+/// d5:filesd20:xxxxxxxxxxxxxxxxxxxxd8:completei11e10:downloadedi13772e10:incompletei19e
+/// 20:yyyyyyyyyyyyyyyyyyyyd8:completei21e10:downloadedi206e10:incompletei20eee
+/// ```
+///
+/// Response (JSON encoded for readability):
+///
+/// ```text
+/// {
+///   'files': {
+///     'xxxxxxxxxxxxxxxxxxxx': {'complete': 11, 'downloaded': 13772, 'incomplete': 19},
+///     'yyyyyyyyyyyyyyyyyyyy': {'complete': 21, 'downloaded': 206, 'incomplete': 20}
+///   }
+/// }
+/// ```
+fn parse_bencoded_response(value: &Value) -> Result<Response, BencodeParseError> {
+    let mut files: HashMap<ByteArray20, File> = HashMap::new();
+
+    match value {
+        Value::Dict(dict) => {
+            for file_element in dict {
+                let info_hash_byte_vec = file_element.0;
+                let file_value = file_element.1;
+
+                let file = parse_bencoded_file(file_value).unwrap();
+
+                files.insert(InfoHash::new(info_hash_byte_vec).bytes(), file);
+            }
+        }
+        _ => return Err(BencodeParseError::InvalidValueExpectedDict { value: value.clone() }),
+    }
+
+    Ok(Response { files })
+}
+
+/// It parses a bencoded dictionary into a `File` struct.
+///
+/// For example:
+///
+/// ```text
+/// d8:completei11e10:downloadedi13772e10:incompletei19ee
+/// ```
+///
+/// into:
+///
+/// ```text
+/// File {
+///     complete: 11,
+///     downloaded: 13772,
+///     incomplete: 19,
+/// }
+/// ```
+fn parse_bencoded_file(value: &Value) -> Result<File, BencodeParseError> {
+    let file = match &value {
+        Value::Dict(dict) => {
+            let mut complete = None;
+            let mut downloaded = None;
+            let mut incomplete = None;
+
+            for file_field in dict {
+                let field_name = file_field.0;
+
+                let field_value = match file_field.1 {
+                    Value::Int(number) => Ok(*number),
+                    _ => Err(BencodeParseError::InvalidValueExpectedInt {
+                        value: file_field.1.clone(),
+                    }),
+                }?;
+
+                if field_name == b"complete" {
+                    complete = Some(field_value);
+                } else if field_name == b"downloaded" {
+                    downloaded = Some(field_value);
+                } else if field_name == b"incomplete" {
+                    incomplete = Some(field_value);
+                } else {
+                    return Err(BencodeParseError::InvalidFileField {
+                        value: file_field.1.clone(),
+                    });
+                }
+            }
+
+            if complete.is_none() {
+                return Err(BencodeParseError::MissingFileField {
+                    field_name: "complete".to_string(),
+                });
+            }
+
+            if downloaded.is_none() {
+                return Err(BencodeParseError::MissingFileField {
+                    field_name: "downloaded".to_string(),
+                });
+            }
+
+            if incomplete.is_none() {
+                return Err(BencodeParseError::MissingFileField {
+                    field_name: "incomplete".to_string(),
+                });
+            }
+
+            File {
+                complete: complete.unwrap(),
+                downloaded: downloaded.unwrap(),
+                incomplete: incomplete.unwrap(),
+            }
+        }
+        _ => return Err(BencodeParseError::InvalidValueExpectedDict { value: value.clone() }),
+    };
+
+    Ok(file)
+}
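Taken together, the new pieces replace the panicking `From` parser with a fallible one plus a builder for expected values. A minimal usage sketch, not part of the patch (the fn name is illustrative; the bencoded sample reuses the layout from the doc comments above):

```rust
use crate::http::responses::scrape::{File, Response, ResponseBuilder};

// Illustrative only: parses a one-file scrape response (20 'x' bytes as
// the info-hash) and compares it against a builder-made expected value.
fn example_scrape_round_trip() {
    let bytes = b"d5:filesd20:xxxxxxxxxxxxxxxxxxxxd8:completei11e10:downloadedi13772e10:incompletei19eeee";

    // The refactor returns a Result here instead of converting in a From impl.
    let parsed = Response::try_from_bytes(bytes).unwrap();

    let expected = ResponseBuilder::default()
        .add_file(
            *b"xxxxxxxxxxxxxxxxxxxx",
            File {
                complete: 11,
                downloaded: 13772,
                incomplete: 19,
            },
        )
        .build();

    assert_eq!(parsed, expected);
}
```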
diff --git a/tests/http_tracker.rs b/tests/http_tracker.rs
index 099e1d360..888da393a 100644
--- a/tests/http_tracker.rs
+++ b/tests/http_tracker.rs
@@ -5,19 +5,6 @@
 mod common;
 mod http;
 
 mod http_tracker_server {
-    use std::str::FromStr;
-
-    use percent_encoding::NON_ALPHANUMERIC;
-    use torrust_tracker::protocol::info_hash::InfoHash;
-
-    #[test]
-    fn calculate_info_hash_param() {
-        let info_hash = InfoHash::from_str("3b245504cf5f11bbdbe1201cea6a6bf45aee1bc0").unwrap();
-
-        let param = percent_encoding::percent_encode(&info_hash.0, NON_ALPHANUMERIC).to_string();
-
-        assert_eq!(param, "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0");
-    }
 
     mod for_all_config_modes {
 
@@ -331,7 +318,7 @@
             // Peer 1
             let previously_announced_peer = PeerBuilder::default()
                 .with_peer_id(&peer::Id(*b"-qB00000000000000001"))
-                .into();
+                .build();
 
             // Add the Peer 1
             http_tracker_server.add_torrent(&info_hash, &previously_announced_peer).await;
@@ -365,7 +352,7 @@
             let http_tracker_server = start_public_http_tracker().await;
 
             let info_hash = InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").unwrap();
-            let peer = PeerBuilder::default().into();
+            let peer = PeerBuilder::default().build();
 
             // Add a peer
             http_tracker_server.add_torrent(&info_hash, &peer).await;
@@ -396,7 +383,7 @@
             // Peer 1
             let previously_announced_peer = PeerBuilder::default()
                 .with_peer_id(&peer::Id(*b"-qB00000000000000001"))
-                .into();
+                .build();
 
             // Add the Peer 1
             http_tracker_server.add_torrent(&info_hash, &previously_announced_peer).await;
@@ -435,7 +422,7 @@
             // Peer 1
             let previously_announced_peer = PeerBuilder::default()
                 .with_peer_id(&peer::Id(*b"-qB00000000000000001"))
-                .into();
+                .build();
 
             // Add the Peer 1
             http_tracker_server.add_torrent(&info_hash, &previously_announced_peer).await;
@@ -684,17 +671,16 @@
             // Vuze (bittorrent client) docs:
             // https://wiki.vuze.com/w/Scrape
 
-            use std::collections::HashMap;
             use std::str::FromStr;
 
             use torrust_tracker::protocol::info_hash::InfoHash;
             use torrust_tracker::tracker::peer;
 
             use crate::common::fixtures::PeerBuilder;
-            use crate::http::asserts::assert_internal_server_error_response;
+            use crate::http::asserts::{assert_internal_server_error_response, assert_scrape_response};
             use crate::http::client::Client;
             use crate::http::requests;
-            use crate::http::responses::scrape::{File, Response};
+            use crate::http::responses::scrape::{File, ResponseBuilder};
             use crate::http::server::start_public_http_tracker;
 
             #[tokio::test]
@@ -707,20 +693,21 @@
 
             #[tokio::test]
             async fn should_return_the_scrape_response() {
-                let http_tracker_server = start_public_http_tracker().await;
+                let http_tracker = start_public_http_tracker().await;
 
                 let info_hash = InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").unwrap();
 
-                // Peer
-                let previously_announced_peer = PeerBuilder::default()
-                    .with_peer_id(&peer::Id(*b"-qB00000000000000001"))
-                    .into();
-
-                // Add the Peer
-                http_tracker_server.add_torrent(&info_hash, &previously_announced_peer).await;
+                http_tracker
+                    .add_torrent(
+                        &info_hash,
+                        &PeerBuilder::default()
+                            .with_peer_id(&peer::Id(*b"-qB00000000000000001"))
+                            .with_bytes_pending_to_download(1)
+                            .build(),
+                    )
+                    .await;
 
-                // Scrape the tracker
-                let response = Client::new(http_tracker_server.get_connection_info())
+                let response = Client::new(http_tracker.get_connection_info())
                     .scrape(
                         &requests::scrape::QueryBuilder::default()
                             .with_one_info_hash(&info_hash)
                             .query(),
                     )
                     .await;
 
-                // todo: extract scrape response builder or named constructor.
-                // A builder with an "add_file(info_hash_bytes: &[u8], file: File)" method could be a good solution.
-                let mut files = HashMap::new();
-                files.insert(
-                    info_hash.bytes(),
-                    File {
-                        complete: 1,
-                        downloaded: 0,
-                        incomplete: 0,
-                    },
-                );
-                let expected_scrape_response = Response { files };
+                let expected_scrape_response = ResponseBuilder::default()
+                    .add_file(
+                        info_hash.bytes(),
+                        File {
+                            complete: 0,
+                            downloaded: 0,
+                            incomplete: 1,
+                        },
+                    )
+                    .build();
 
-                // todo: extract assert
-                assert_eq!(response.status(), 200);
-                let bytes = response.bytes().await.unwrap();
-                let scrape_response = Response::from_bytes(&bytes);
-                assert_eq!(scrape_response, expected_scrape_response);
+                assert_scrape_response(response, &expected_scrape_response).await;
             }
         }
     }
 
@@ -776,6 +757,7 @@
         }
 
         #[tokio::test]
+
         async fn should_allow_announcing_a_whitelisted_torrent() {
             let http_tracker_server = start_whitelisted_http_tracker().await;