forked from torrust/torrust-tracker
test(http): [torrust#159] scaffolding to test scrape responses in http tracker
1 parent d7610ef · commit dc304e7
Showing 12 changed files with 280 additions and 12 deletions.
@@ -0,0 +1 @@
pub type ByteArray20 = [u8; 20];
@@ -1,4 +1,5 @@
pub mod asserts;
pub mod bencode;
pub mod client;
pub mod connection_info;
pub mod requests;
@@ -1 +1,2 @@
pub mod announce;
pub mod scrape;
@@ -0,0 +1,108 @@
use std::fmt;
use std::str::FromStr;

use percent_encoding::NON_ALPHANUMERIC;
use torrust_tracker::protocol::info_hash::InfoHash;

use crate::http::bencode::ByteArray20;

pub struct Query {
    pub info_hash: Vec<ByteArray20>,
}

impl fmt::Display for Query {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.build())
    }
}

/// HTTP Tracker Scrape Request:
///
/// <https://www.bittorrent.org/beps/bep_0048.html>
impl Query {
    /// It builds the URL query component for the scrape request.
    ///
    /// This custom URL query params encoding is needed because `reqwest` does not allow
    /// byte arrays in query parameters. More info on this issue:
    ///
    /// <https://github.com/seanmonstar/reqwest/issues/1613>
    pub fn build(&self) -> String {
        self.params().to_string()
    }

    pub fn params(&self) -> QueryParams {
        QueryParams::from(self)
    }
}

pub struct QueryBuilder {
    scrape_query: Query,
}

impl QueryBuilder {
    pub fn default() -> QueryBuilder {
        let default_scrape_query = Query {
            info_hash: [InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").unwrap().0].to_vec(),
        };
        Self {
            scrape_query: default_scrape_query,
        }
    }

    pub fn with_one_info_hash(mut self, info_hash: &InfoHash) -> Self {
        self.scrape_query.info_hash = [info_hash.0].to_vec();
        self
    }

    pub fn query(self) -> Query {
        self.scrape_query
    }
}

/// It contains all the GET parameters that can be used in an HTTP Scrape request.
///
/// The `info_hash` param is the percent-encoded 20-byte info hash.
///
/// Sample Scrape URL with all the GET parameters:
///
/// For `IpV4`:
///
/// ```text
/// http://127.0.0.1:7070/scrape?info_hash=%9C8B%22%13%E3%0B%FF%21%2B0%C3%60%D2o%9A%02%13d%22
/// ```
///
/// For `IpV6`:
///
/// ```text
/// http://[::1]:7070/scrape?info_hash=%9C8B%22%13%E3%0B%FF%21%2B0%C3%60%D2o%9A%02%13d%22
/// ```
///
/// You can add as many info hashes as you want by adding the same param again.
pub struct QueryParams {
    pub info_hash: Vec<String>,
}

impl std::fmt::Display for QueryParams {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let query = self
            .info_hash
            .iter()
            .map(|info_hash| format!("info_hash={}", &info_hash))
            .collect::<Vec<String>>()
            .join("&");

        write!(f, "{query}")
    }
}

impl QueryParams {
    pub fn from(scrape_query: &Query) -> Self {
        let info_hashes = scrape_query
            .info_hash
            .iter()
            .map(|info_hash_bytes| percent_encoding::percent_encode(info_hash_bytes, NON_ALPHANUMERIC).to_string())
            .collect::<Vec<String>>();

        Self { info_hash: info_hashes }
    }
}
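Taken together, the builder and the two `Display` impls produce the percent-encoded query shown in the doc comment above. A minimal usage sketch follows, assuming the new module is reachable from the test suites as `crate::http::requests::scrape` (the diff does not show the file path):

```rust
use std::str::FromStr;

use torrust_tracker::protocol::info_hash::InfoHash;

// Hypothetical path; the commit does not show where this module is mounted.
use crate::http::requests::scrape::QueryBuilder;

fn scrape_url_for_one_torrent() -> String {
    let info_hash = InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").unwrap();

    // `query()` consumes the builder; `Display` percent-encodes every 20-byte info hash.
    let query = QueryBuilder::default().with_one_info_hash(&info_hash).query();

    // e.g. http://127.0.0.1:7070/scrape?info_hash=%9C8B%22%13%E3%0B%FF%21%2B0%C3%60%D2o%9A%02%13d%22
    format!("http://127.0.0.1:7070/scrape?{query}")
}
```

Since `reqwest` cannot take raw byte arrays as query parameters, the tests append the pre-encoded query string to the URL themselves.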
@@ -0,0 +1,7 @@
use serde::{self, Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Error {
    #[serde(rename = "failure reason")]
    pub failure_reason: String,
}
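A small sketch of how this error type could be exercised, parsing a hand-written bencoded failure body with `serde_bencode::from_bytes` (the module path below is hypothetical):

```rust
// Hypothetical path for the struct added above.
use crate::http::responses::error::Error;

fn parse_failure_reason() {
    // Bencoded dictionary: d 14:failure reason 17:Invalid info hash e
    let bytes = b"d14:failure reason17:Invalid info hashe";

    let error: Error = serde_bencode::from_bytes(bytes).unwrap();

    assert_eq!(error.failure_reason, "Invalid info hash");
}
```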
@@ -1 +1,3 @@
pub mod announce;
pub mod error;
pub mod scrape;
@@ -0,0 +1,91 @@
use std::collections::HashMap;
use std::str;

use serde::{self, Deserialize, Serialize};
use serde_bencode::value::Value;

use crate::http::bencode::ByteArray20;

#[derive(Debug, PartialEq)]
pub struct Response {
    pub files: HashMap<ByteArray20, File>,
}

impl Response {
    pub fn from_bytes(bytes: &[u8]) -> Self {
        let scrape_response: DeserializedResponse = serde_bencode::from_bytes(bytes).unwrap();
        Self::from(scrape_response)
    }
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct File {
    pub complete: i64,
    pub downloaded: i64,
    pub incomplete: i64,
}

impl From<DeserializedResponse> for Response {
    fn from(scrape_response: DeserializedResponse) -> Self {
        // todo:
        // - Use `try_from` trait instead of `from`.
        // - Improve error messages.
        // - Extract parser function out of the trait.
        // - Extract parser for each nested element.
        // - Extract function to instantiate [u8; 20] from Vec<u8>.
        let mut files: HashMap<ByteArray20, File> = HashMap::new();

        match scrape_response.files {
            Value::Dict(dict) => {
                for file_element in dict {
                    let info_hash_byte_vec = file_element.0;
                    let file_value = file_element.1;

                    let file = match &file_value {
                        Value::Dict(dict) => {
                            let mut file = File {
                                complete: 0,
                                downloaded: 0,
                                incomplete: 0,
                            };

                            for file_field in dict {
                                let value = match file_field.1 {
                                    Value::Int(number) => *number,
                                    _ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <i64>"),
                                };

                                if file_field.0 == b"complete" {
                                    file.complete = value;
                                } else if file_field.0 == b"downloaded" {
                                    file.downloaded = value;
                                } else if file_field.0 == b"incomplete" {
                                    file.incomplete = value;
                                } else {
                                    panic!("Error parsing bencoded scrape response. Invalid <File> field");
                                }
                            }

                            file
                        }
                        _ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Value::Dict>"),
                    };

                    // Clone Vec<u8> into [u8; 20]
                    let mut info_hash_byte_array: [u8; 20] = Default::default();
                    info_hash_byte_array.clone_from_slice(info_hash_byte_vec.as_slice());

                    files.insert(info_hash_byte_array, file);
                }
            }
            _ => panic!("Error parsing bencoded scrape response. Invalid value. Expected <Value::Dict>"),
        }

        Self { files }
    }
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct DeserializedResponse {
    pub files: Value,
}