refactor(test): [#159] refactor tests for scrape request
josecelano committed Jan 31, 2023
1 parent c89a1f3 commit 7ee588a
Showing 8 changed files with 235 additions and 114 deletions.
4 changes: 3 additions & 1 deletion cSpell.json
@@ -72,6 +72,8 @@
"Vagaa",
"Vuze",
"Xtorrent",
"Xunlei"
"Xunlei",
"xxxxxxxxxxxxxxxxxxxxd",
"yyyyyyyyyyyyyyyyyyyyd"
]
}
2 changes: 1 addition & 1 deletion src/tracker/peer.rs
@@ -20,7 +20,7 @@ pub struct Peer {
#[serde(with = "NumberOfBytesDef")]
pub downloaded: NumberOfBytes,
#[serde(with = "NumberOfBytesDef")]
pub left: NumberOfBytes,
pub left: NumberOfBytes, // The number of bytes this peer still has to download
#[serde(with = "AnnounceEventDef")]
pub event: AnnounceEvent,
}
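The new comment documents the announce semantics of the `left` field: a peer reporting zero bytes left has finished the torrent and is counted as a seeder (a "complete" peer in scrape responses). A minimal sketch of that check, assuming `NumberOfBytes` exposes its inner `i64` as a public tuple field, as the test fixtures below suggest:

// Sketch only, not part of this commit: a peer with nothing left to
// download has completed the torrent, i.e. it is a seeder.
fn is_seeder(peer: &Peer) -> bool {
    peer.left.0 == 0
}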
12 changes: 12 additions & 0 deletions tests/common/fixtures.rs
@@ -21,6 +21,18 @@ impl PeerBuilder {
self
}

#[allow(dead_code)]
pub fn with_bytes_pending_to_download(mut self, left: i64) -> Self {
self.peer.left = NumberOfBytes(left);
self
}

#[allow(dead_code)]
pub fn build(self) -> Peer {
self.into()
}

#[allow(dead_code)]
pub fn into(self) -> Peer {
self.peer
}
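With the new builder method, a test can describe a peer's download state without spelling out every `Peer` field. A usage sketch, assuming the `PeerBuilder::default()` constructor already provided by this fixtures module:

// Sketch only: build a seeder (a peer with nothing left to download).
let seeder: Peer = PeerBuilder::default()
    .with_bytes_pending_to_download(0)
    .build();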
16 changes: 15 additions & 1 deletion tests/http/asserts.rs
@@ -1,6 +1,7 @@
use reqwest::Response;

use super::responses::announce::{Announce, Compact, DeserializedCompact};
use super::responses::scrape;
use crate::http::responses::error::Error;

pub async fn assert_empty_announce_response(response: Response) {
@@ -17,7 +18,7 @@ pub async fn assert_announce_response(response: Response, expected_announce_resp
assert_eq!(announce_response, *expected_announce_response);
}

/// Sample bencoded response as byte array:
/// Sample bencoded announce response as byte array:
///
/// ```text
/// b"d8:intervali120e12:min intervali120e8:completei2e10:incompletei0e5:peers6:~\0\0\x01\x1f\x90e6:peers60:e"
@@ -40,6 +41,19 @@ pub async fn assert_compact_announce_response(response: Response, expected_respo
assert_eq!(actual_response, *expected_response);
}

/// Sample bencoded scrape response as byte array:
///
/// ```text
/// b"d5:filesd20:\x9c8B\"\x13\xe3\x0b\xff!+0\xc3`\xd2o\x9a\x02\x13d\"d8:completei1e10:downloadedi0e10:incompletei0eeee"
/// ```
pub async fn assert_scrape_response(response: Response, expected_response: &scrape::Response) {
assert_eq!(response.status(), 200);

let scrape_response = scrape::Response::try_from_bytes(&response.bytes().await.unwrap()).unwrap();

assert_eq!(scrape_response, *expected_response);
}

pub async fn assert_is_announce_response(response: Response) {
assert_eq!(response.status(), 200);
let body = response.text().await.unwrap();
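The new assertion keeps scrape tests symmetrical with the announce ones: the test performs the scrape request, builds the expected `scrape::Response`, and hands both to the helper. A hedged sketch of the intended call site, using the `ResponseBuilder` and `InfoHash` helpers added later in this commit (the `response` and `info_hash` variables are assumed to come from the test setup):

// Sketch only: how a scrape test is expected to use the new assertion.
let expected_response = ResponseBuilder::default()
    .add_file(
        info_hash.bytes(),
        File { complete: 1, downloaded: 0, incomplete: 0 },
    )
    .build();

assert_scrape_response(response, &expected_response).await;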
14 changes: 14 additions & 0 deletions tests/http/bencode.rs
@@ -1 +1,15 @@
pub type ByteArray20 = [u8; 20];

pub struct InfoHash(ByteArray20);

impl InfoHash {
pub fn new(vec: &[u8]) -> Self {
let mut byte_array_20: ByteArray20 = Default::default();
byte_array_20.clone_from_slice(vec);
Self(byte_array_20)
}

pub fn bytes(&self) -> ByteArray20 {
self.0
}
}
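`InfoHash` is a thin wrapper that turns the `Vec<u8>` keys coming out of the bencoded `files` dictionary into fixed-size `ByteArray20` keys for the response `HashMap`. Note that `clone_from_slice` panics when the slice is not exactly 20 bytes long, which is acceptable in test code. A small sketch of the conversion:

// Sketch only: converting a 20-byte bencoded dictionary key.
let key: Vec<u8> = b"xxxxxxxxxxxxxxxxxxxx".to_vec();
let info_hash_bytes: ByteArray20 = InfoHash::new(&key).bytes();
assert_eq!(info_hash_bytes.len(), 20);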
2 changes: 1 addition & 1 deletion tests/http/requests/announce.rs
@@ -225,7 +225,7 @@ impl QueryParams {

pub fn remove_optional_params(&mut self) {
// todo: make them optional with the Option<...> in the AnnounceQuery struct
// if they are really optional. SO that we can crete a minimal AnnounceQuery
// if they are really optional. So that we can create a minimal AnnounceQuery
// instead of removing the optional params afterwards.
//
// The original specification on:
223 changes: 160 additions & 63 deletions tests/http/responses/scrape.rs
@@ -4,88 +4,185 @@ use std::str;
use serde::{self, Deserialize, Serialize};
use serde_bencode::value::Value;

use crate::http::bencode::ByteArray20;
use crate::http::bencode::{ByteArray20, InfoHash};

#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Default)]
pub struct Response {
pub files: HashMap<ByteArray20, File>,
}

impl Response {
pub fn from_bytes(bytes: &[u8]) -> Self {
pub fn try_from_bytes(bytes: &[u8]) -> Result<Self, BencodeParseError> {
let scrape_response: DeserializedResponse = serde_bencode::from_bytes(bytes).unwrap();
Self::from(scrape_response)
Self::try_from(scrape_response)
}

pub fn empty() -> Self {
Self::default()
}
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct File {
pub complete: i64,
pub downloaded: i64,
pub incomplete: i64,
pub complete: i64, // The number of active peers that have completed downloading
pub downloaded: i64, // The number of peers that have ever completed downloading
pub incomplete: i64, // The number of active peers that have not completed downloading
}

impl From<DeserializedResponse> for Response {
fn from(scrape_response: DeserializedResponse) -> Self {
// todo:
// - Use `try_from` trait instead of `from`.
// - Improve error messages.
// - Extract parser function out of the trait.
// - Extract parser for each nested element.
// - Extract function to instantiate [u8; 20] from Vec<u8>.
let mut files: HashMap<ByteArray20, File> = HashMap::new();

match scrape_response.files {
Value::Dict(dict) => {
for file_element in dict {
let info_hash_byte_vec = file_element.0;
let file_value = file_element.1;

let file = match &file_value {
Value::Dict(dict) => {
let mut file = File {
complete: 0,
downloaded: 0,
incomplete: 0,
};

for file_field in dict {
let value = match file_field.1 {
Value::Int(number) => *number,
_ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <i64>"),
};

if file_field.0 == b"complete" {
file.complete = value;
} else if file_field.0 == b"downloaded" {
file.downloaded = value;
} else if file_field.0 == b"incomplete" {
file.incomplete = value;
} else {
panic!("Error parsing bencoded scrape response. Invalid <File> field");
}
}

file
}
_ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Value::Dict>"),
};

// Clone Vec<u8> into [u8; 20]
let mut info_hash_byte_array: [u8; 20] = Default::default();
info_hash_byte_array.clone_from_slice(info_hash_byte_vec.as_slice());

files.insert(info_hash_byte_array, file);
}
}
_ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Value::Dict>"),
}
impl TryFrom<DeserializedResponse> for Response {
type Error = BencodeParseError;

Self { files }
fn try_from(scrape_response: DeserializedResponse) -> Result<Self, Self::Error> {
parse_bencoded_response(&scrape_response.files)
}
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct DeserializedResponse {
pub files: Value,
}

pub struct ResponseBuilder {
response: Response,
}

impl ResponseBuilder {
pub fn default() -> Self {
Self {
response: Response::empty(),
}
}

pub fn add_file(mut self, info_hash_bytes: ByteArray20, file: File) -> Self {
self.response.files.insert(info_hash_bytes, file);
self
}

pub fn build(self) -> Response {
self.response
}
}

#[derive(Debug)]
pub enum BencodeParseError {
InvalidValueExpectedDict { value: Value },
InvalidValueExpectedInt { value: Value },
InvalidFileField { value: Value },
MissingFileField { field_name: String },
}

/// It parses a bencoded scrape response into a `Response` struct.
///
/// For example:
///
/// ```text
/// d5:filesd20:xxxxxxxxxxxxxxxxxxxxd8:completei11e10:downloadedi13772e10:incompletei19e
/// 20:yyyyyyyyyyyyyyyyyyyyd8:completei21e10:downloadedi206e10:incompletei20eee
/// ```
///
/// Response (JSON encoded for readability):
///
/// ```text
/// {
/// 'files': {
/// 'xxxxxxxxxxxxxxxxxxxx': {'complete': 11, 'downloaded': 13772, 'incomplete': 19},
/// 'yyyyyyyyyyyyyyyyyyyy': {'complete': 21, 'downloaded': 206, 'incomplete': 20}
/// }
/// }
/// ```
fn parse_bencoded_response(value: &Value) -> Result<Response, BencodeParseError> {
let mut files: HashMap<ByteArray20, File> = HashMap::new();

match value {
Value::Dict(dict) => {
for file_element in dict {
let info_hash_byte_vec = file_element.0;
let file_value = file_element.1;

let file = parse_bencoded_file(file_value).unwrap();

files.insert(InfoHash::new(info_hash_byte_vec).bytes(), file);
}
}
_ => return Err(BencodeParseError::InvalidValueExpectedDict { value: value.clone() }),
}

Ok(Response { files })
}

/// It parses a bencoded dictionary into a `File` struct.
///
/// For example:
///
///
/// ```text
/// d8:completei11e10:downloadedi13772e10:incompletei19ee
/// ```
///
/// into:
///
/// ```text
/// File {
/// complete: 11,
/// downloaded: 13772,
/// incomplete: 19,
/// }
/// ```
fn parse_bencoded_file(value: &Value) -> Result<File, BencodeParseError> {
let file = match &value {
Value::Dict(dict) => {
let mut complete = None;
let mut downloaded = None;
let mut incomplete = None;

for file_field in dict {
let field_name = file_field.0;

let field_value = match file_field.1 {
Value::Int(number) => Ok(*number),
_ => Err(BencodeParseError::InvalidValueExpectedInt {
value: file_field.1.clone(),
}),
}?;

if field_name == b"complete" {
complete = Some(field_value);
} else if field_name == b"downloaded" {
downloaded = Some(field_value);
} else if field_name == b"incomplete" {
incomplete = Some(field_value);
} else {
return Err(BencodeParseError::InvalidFileField {
value: file_field.1.clone(),
});
}
}

if complete.is_none() {
return Err(BencodeParseError::MissingFileField {
field_name: "complete".to_string(),
});
}

if downloaded.is_none() {
return Err(BencodeParseError::MissingFileField {
field_name: "downloaded".to_string(),
});
}

if incomplete.is_none() {
return Err(BencodeParseError::MissingFileField {
field_name: "incomplete".to_string(),
});
}

File {
complete: complete.unwrap(),
downloaded: downloaded.unwrap(),
incomplete: incomplete.unwrap(),
}
}
_ => return Err(BencodeParseError::InvalidValueExpectedDict { value: value.clone() }),
};

Ok(file)
}
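Putting the pieces together, a test can now parse a raw bencoded scrape response and compare it against an expected value built with `ResponseBuilder`. A sketch, assuming the sample bytes below (one file with 11 seeders, 13772 completed downloads and 19 leechers) are well formed:

// Sketch only: round-tripping a well-formed bencoded scrape response.
let bytes = b"d5:filesd20:xxxxxxxxxxxxxxxxxxxxd8:completei11e10:downloadedi13772e10:incompletei19eeee";

let parsed = Response::try_from_bytes(bytes).unwrap();

let expected = ResponseBuilder::default()
    .add_file(
        InfoHash::new(b"xxxxxxxxxxxxxxxxxxxx").bytes(),
        File { complete: 11, downloaded: 13772, incomplete: 19 },
    )
    .build();

assert_eq!(parsed, expected);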