Commit
test(http): [torrust#159] scaffolding to test scrape responses in http tracker
josecelano committed Jan 30, 2023
1 parent d7610ef commit dc304e7
Showing 12 changed files with 280 additions and 12 deletions.
3 changes: 2 additions & 1 deletion tests/http/asserts.rs
@@ -1,6 +1,7 @@
use reqwest::Response;

use super::responses::announce::{Announce, Compact, DecodedCompact, Error};
use super::responses::announce::{Announce, Compact, DecodedCompact};
use crate::http::responses::error::Error;

pub async fn assert_empty_announce_response(response: Response) {
assert_eq!(response.status(), 200);
1 change: 1 addition & 0 deletions tests/http/bencode.rs
@@ -0,0 +1 @@
pub type ByteArray20 = [u8; 20];
15 changes: 12 additions & 3 deletions tests/http/client.rs
@@ -4,7 +4,8 @@ use reqwest::{Client as ReqwestClient, Response};
use torrust_tracker::tracker::auth::KeyId;

use super::connection_info::ConnectionInfo;
use super::requests::announce::Query;
use super::requests::announce::{self, Query};
use super::requests::scrape;

/// HTTP Tracker Client
pub struct Client {
@@ -47,10 +48,14 @@ impl Client {
}
}

pub async fn announce(&self, query: &Query) -> Response {
pub async fn announce(&self, query: &announce::Query) -> Response {
self.get(&self.build_announce_path_and_query(query)).await
}

pub async fn scrape(&self, query: &scrape::Query) -> Response {
self.get(&self.build_scrape_path_and_query(query)).await
}

pub async fn announce_with_header(&self, query: &Query, key_id: &str, value: &str) -> Response {
self.get_with_header(&self.build_announce_path_and_query(query), key_id, value)
.await
@@ -69,10 +74,14 @@ impl Client {
.unwrap()
}

fn build_announce_path_and_query(&self, query: &Query) -> String {
fn build_announce_path_and_query(&self, query: &announce::Query) -> String {
format!("{}?{query}", self.build_path("announce"))
}

fn build_scrape_path_and_query(&self, query: &scrape::Query) -> String {
format!("{}?{query}", self.build_path("scrape"))
}

fn build_path(&self, path: &str) -> String {
match &self.key_id {
Some(key_id) => format!("{path}/{key_id}"),
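With the new `scrape` method, the client composes the scrape path and the query's `Display` output into a single GET. A hedged illustration of the resulting request follows; the host and port depend on the test server (the request docs further down use 127.0.0.1:7070), and the query string comes from `requests::scrape::Query`:

```text
GET http://127.0.0.1:7070/scrape?info_hash=%9C8B%22%13%E3%0B%FF%21%2B0%C3%60%D2o%9A%02%13d%22
```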
1 change: 1 addition & 0 deletions tests/http/mod.rs
@@ -1,4 +1,5 @@
pub mod asserts;
pub mod bencode;
pub mod client;
pub mod connection_info;
pub mod requests;
3 changes: 2 additions & 1 deletion tests/http/requests/announce.rs
@@ -7,6 +7,8 @@ use serde_repr::Serialize_repr;
use torrust_tracker::protocol::info_hash::InfoHash;
use torrust_tracker::tracker::peer::Id;

use crate::http::bencode::ByteArray20;

pub struct Query {
pub info_hash: ByteArray20,
pub peer_addr: IpAddr,
@@ -47,7 +49,6 @@ impl Query {
}

pub type BaseTenASCII = u64;
pub type ByteArray20 = [u8; 20];
pub type PortNumber = u16;

pub enum Event {
1 change: 1 addition & 0 deletions tests/http/requests/mod.rs
@@ -1 +1,2 @@
pub mod announce;
pub mod scrape;
108 changes: 108 additions & 0 deletions tests/http/requests/scrape.rs
@@ -0,0 +1,108 @@
use std::fmt;
use std::str::FromStr;

use percent_encoding::NON_ALPHANUMERIC;
use torrust_tracker::protocol::info_hash::InfoHash;

use crate::http::bencode::ByteArray20;

pub struct Query {
pub info_hash: Vec<ByteArray20>,
}

impl fmt::Display for Query {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.build())
}
}

/// HTTP Tracker Scrape Request:
///
/// <https://www.bittorrent.org/beps/bep_0048.html>
impl Query {
/// It builds the URL query component for the scrape request.
///
/// This custom URL query params encoding is needed because `reqwest` does not allow
/// byte arrays in query parameters. More info on this issue:
///
/// <https://github.com/seanmonstar/reqwest/issues/1613>
pub fn build(&self) -> String {
self.params().to_string()
}

pub fn params(&self) -> QueryParams {
QueryParams::from(self)
}
}

pub struct QueryBuilder {
scrape_query: Query,
}

impl QueryBuilder {
pub fn default() -> QueryBuilder {
let default_scrape_query = Query {
info_hash: [InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").unwrap().0].to_vec(),
};
Self {
scrape_query: default_scrape_query,
}
}

pub fn with_one_info_hash(mut self, info_hash: &InfoHash) -> Self {
self.scrape_query.info_hash = [info_hash.0].to_vec();
self
}

pub fn query(self) -> Query {
self.scrape_query
}
}

/// It contains all the GET parameters that can be used in an HTTP Scrape request.
///
/// The `info_hash` param is the percent-encoded 20-byte info hash.
///
/// Sample Scrape URL with all the GET parameters:
///
/// For `IpV4`:
///
/// ```text
/// http://127.0.0.1:7070/scrape?info_hash=%9C8B%22%13%E3%0B%FF%21%2B0%C3%60%D2o%9A%02%13d%22
/// ```
///
/// For `IpV6`:
///
/// ```text
/// http://[::1]:7070/scrape?info_hash=%9C8B%22%13%E3%0B%FF%21%2B0%C3%60%D2o%9A%02%13d%22
/// ```
///
/// You can add as many info hashes as you want by repeating the `info_hash` param.
pub struct QueryParams {
pub info_hash: Vec<String>,
}

impl std::fmt::Display for QueryParams {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let query = self
.info_hash
.iter()
.map(|info_hash| format!("info_hash={}", &info_hash))
.collect::<Vec<String>>()
.join("&");

write!(f, "{query}")
}
}

impl QueryParams {
pub fn from(scrape_query: &Query) -> Self {
let info_hashes = scrape_query
.info_hash
.iter()
.map(|info_hash_bytes| percent_encoding::percent_encode(info_hash_bytes, NON_ALPHANUMERIC).to_string())
.collect::<Vec<String>>();

Self { info_hash: info_hashes }
}
}
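As a usage sketch (not part of this diff), the builder and the custom percent-encoding combine as follows. It assumes the snippet lives in the same tests crate so the `crate::http::...` path resolves; the expected string matches the sample URLs in the doc comment above.

```rust
use std::str::FromStr;

use torrust_tracker::protocol::info_hash::InfoHash;

use crate::http::requests::scrape::QueryBuilder;

#[test]
fn it_should_percent_encode_the_info_hash_in_the_scrape_query() {
    let info_hash = InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").unwrap();

    // `Query` implements `Display` by delegating to `build()`, which
    // percent-encodes each 20-byte info hash with `NON_ALPHANUMERIC`.
    let query = QueryBuilder::default().with_one_info_hash(&info_hash).query();

    assert_eq!(
        query.to_string(),
        "info_hash=%9C8B%22%13%E3%0B%FF%21%2B0%C3%60%D2o%9A%02%13d%22"
    );
}
```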
6 changes: 0 additions & 6 deletions tests/http/responses/announce.rs
@@ -105,9 +105,3 @@ impl From<Compact> for DecodedCompact {
}
}
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Error {
#[serde(rename = "failure reason")]
pub failure_reason: String,
}
7 changes: 7 additions & 0 deletions tests/http/responses/error.rs
@@ -0,0 +1,7 @@
use serde::{self, Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Error {
#[serde(rename = "failure reason")]
pub failure_reason: String,
}
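A minimal sketch (not in this diff) of the bencoded failure body this struct is meant to deserialize; the message text is illustrative. It assumes the snippet lives in the tests crate, where `serde_bencode` is already in use.

```rust
use crate::http::responses::error::Error;

#[test]
fn it_should_deserialize_a_bencoded_error_response() {
    // d 14:failure reason 17:invalid info hash e (spaces added here for readability only)
    let bytes = b"d14:failure reason17:invalid info hashe";

    let error: Error = serde_bencode::from_bytes(bytes).unwrap();

    assert_eq!(error.failure_reason, "invalid info hash");
}
```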
2 changes: 2 additions & 0 deletions tests/http/responses/mod.rs
@@ -1 +1,3 @@
pub mod announce;
pub mod error;
pub mod scrape;
91 changes: 91 additions & 0 deletions tests/http/responses/scrape.rs
@@ -0,0 +1,91 @@
use std::collections::HashMap;
use std::str;

use serde::{self, Deserialize, Serialize};
use serde_bencode::value::Value;

use crate::http::bencode::ByteArray20;

#[derive(Debug, PartialEq)]
pub struct Response {
pub files: HashMap<ByteArray20, File>,
}

impl Response {
pub fn from_bytes(bytes: &[u8]) -> Self {
let scrape_response: DeserializedResponse = serde_bencode::from_bytes(bytes).unwrap();
Self::from(scrape_response)
}
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct File {
pub complete: i64,
pub downloaded: i64,
pub incomplete: i64,
}

impl From<DeserializedResponse> for Response {
fn from(scrape_response: DeserializedResponse) -> Self {
// todo:
// - Use `try_from` trait instead of `from`.
// - Improve error messages.
// - Extract parser function out of the trait.
// - Extract parser for each nested element.
// - Extract function to instantiate [u8; 20] from Vec<u8>.
let mut files: HashMap<ByteArray20, File> = HashMap::new();

match scrape_response.files {
Value::Dict(dict) => {
for file_element in dict {
let info_hash_byte_vec = file_element.0;
let file_value = file_element.1;

let file = match &file_value {
Value::Dict(dict) => {
let mut file = File {
complete: 0,
downloaded: 0,
incomplete: 0,
};

for file_field in dict {
let value = match file_field.1 {
Value::Int(number) => *number,
_ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <i64>"),
};

if file_field.0 == b"complete" {
file.complete = value;
} else if file_field.0 == b"downloaded" {
file.downloaded = value;
} else if file_field.0 == b"incomplete" {
file.incomplete = value;
} else {
panic!("Error parsing bencoded scrape response. Invalid <File> field");
}
}

file
}
_ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Value::Dict>"),
};

// Clone Vec<u8> into [u8; 20]
let mut info_hash_byte_array: [u8; 20] = Default::default();
info_hash_byte_array.clone_from_slice(info_hash_byte_vec.as_slice());

files.insert(info_hash_byte_array, file);
}
}
_ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Value::Dict>"),
}

Self { files }
}
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct DeserializedResponse {
pub files: Value,
}
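For reference, a sketch (not in this diff) of the BEP 48 bencoded payload that `Response::from_bytes` is expected to parse. The info hash bytes are the sample hash used throughout this commit; the counters are illustrative.

```rust
use crate::http::responses::scrape::{File, Response};

#[test]
fn it_should_parse_a_bencoded_scrape_response() {
    // Layout: d 5:files d 20:<info hash bytes> d 8:complete i1e 10:downloaded i0e 10:incomplete i0e e e e
    // (spaces added here for readability only; the real payload has none)
    let mut bytes: Vec<u8> = b"d5:filesd20:".to_vec();
    bytes.extend_from_slice(&[
        0x9c, 0x38, 0x42, 0x22, 0x13, 0xe3, 0x0b, 0xff, 0x21, 0x2b,
        0x30, 0xc3, 0x60, 0xd2, 0x6f, 0x9a, 0x02, 0x13, 0x64, 0x22,
    ]);
    bytes.extend_from_slice(b"d8:completei1e10:downloadedi0e10:incompletei0eeee");

    let response = Response::from_bytes(&bytes);

    let file = response.files.values().next().unwrap();
    assert_eq!(
        file,
        &File {
            complete: 1,
            downloaded: 0,
            incomplete: 0,
        }
    );
}
```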
54 changes: 53 additions & 1 deletion tests/http_tracker.rs
@@ -676,16 +676,25 @@ mod http_tracker_server {

mod receiving_an_scrape_request {

// Scrape specification:
// Scrape documentation:
//
// BEP 48. Tracker Protocol Extension: Scrape
// https://www.bittorrent.org/beps/bep_0048.html
//
// Vuze (bittorrent client) docs:
// https://wiki.vuze.com/w/Scrape

use std::collections::HashMap;
use std::str::FromStr;

use torrust_tracker::protocol::info_hash::InfoHash;
use torrust_tracker::tracker::peer;

use crate::common::fixtures::PeerBuilder;
use crate::http::asserts::assert_internal_server_error_response;
use crate::http::client::Client;
use crate::http::requests;
use crate::http::responses::scrape::{File, Response};
use crate::http::server::start_public_http_tracker;

#[tokio::test]
@@ -695,6 +704,49 @@ mod http_tracker_server {

assert_internal_server_error_response(response).await;
}

#[tokio::test]
async fn should_return_the_scrape_response() {
let http_tracker_server = start_public_http_tracker().await;

let info_hash = InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").unwrap();

// Peer
let previously_announced_peer = PeerBuilder::default()
.with_peer_id(&peer::Id(*b"-qB00000000000000001"))
.into();

// Add the Peer
http_tracker_server.add_torrent(&info_hash, &previously_announced_peer).await;

// Scrape the tracker
let response = Client::new(http_tracker_server.get_connection_info())
.scrape(
&requests::scrape::QueryBuilder::default()
.with_one_info_hash(&info_hash)
.query(),
)
.await;

// todo: extract scrape response builder or named constructor.
// A builder with an "add_file(info_hash_bytes: &[u8], file: File)" method could be a good solution.
let mut files = HashMap::new();
files.insert(
info_hash.0,
File {
complete: 1,
downloaded: 0,
incomplete: 0,
},
);
let expected_scrape_response = Response { files };

// todo: extract assert
assert_eq!(response.status(), 200);
let bytes = response.bytes().await.unwrap();
let scrape_response = Response::from_bytes(&bytes);
assert_eq!(scrape_response, expected_scrape_response);
}
}
}
