Skip to content

Commit

Permalink
Merge #663: Tracker Checker: check HTTP trackers
Browse files Browse the repository at this point in the history
4456203 feat: [#640] Tracker Checker: scrape check (Jose Celano)
cb5bb68 feat: [#640] Tracker Checker: announce check (Jose Celano)

Pull request description:

  Implement checks for HTTP Trackers in the Tracker Checker.

  - [x] Add check for `announce` request.
  - [x] Add check for `scrape` request.

ACKs for top commit:
  josecelano:
    ACK 4456203

Tree-SHA512: edd0883513d4dec1041cec36c23ba11b867eaf49f9b3dda35165f7fc13f1875bea1990678982681907e0ff243ea64e6507fc34f1df78f7cc86ef50e873811999
  • Loading branch information
josecelano committed Jan 30, 2024
2 parents 005a8cf + 4456203 commit 95c5aa4
Show file tree
Hide file tree
Showing 2 changed files with 120 additions and 25 deletions.
128 changes: 103 additions & 25 deletions src/console/clients/checker/service.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,19 @@
use std::net::SocketAddr;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;

use colored::Colorize;
use reqwest::{Client, Url};
use reqwest::{Client as HttpClient, Url};

use super::config::Configuration;
use super::console::Console;
use crate::console::clients::checker::printer::Printer;
use crate::shared::bit_torrent::info_hash::InfoHash;
use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
use crate::shared::bit_torrent::tracker::http::client::{requests, Client};

pub struct Service {
pub(crate) config: Arc<Configuration>,
Expand All @@ -19,7 +25,7 @@ pub type CheckResult = Result<(), CheckError>;
#[derive(Debug)]
pub enum CheckError {
UdpError,
HttpError,
HttpError { url: Url },
HealthCheckError { url: Url },
}

Expand All @@ -30,10 +36,15 @@ impl Service {
/// Runs every configured check (UDP trackers, HTTP trackers, health-check
/// endpoints) and returns one `CheckResult` per executed check.
///
/// UDP tracker checks are still a placeholder and do not contribute results.
pub async fn run_checks(&self) -> Vec<CheckResult> {
    self.console.println("Running checks for trackers ...");

    // Accumulator threaded through the HTTP and health checks; each check
    // pushes exactly one Ok/Err entry.
    let mut check_results = vec![];

    self.check_udp_trackers();

    self.check_http_trackers(&mut check_results).await;

    self.run_health_checks(&mut check_results).await;

    check_results
}

fn check_udp_trackers(&self) {
Expand All @@ -44,63 +55,130 @@ impl Service {
}
}

fn check_http_trackers(&self) {
/// Runs the `announce` and `scrape` checks against every configured HTTP
/// tracker, printing one status line per check and recording each outcome
/// in `check_results`.
async fn check_http_trackers(&self, check_results: &mut Vec<CheckResult>) {
    self.console.println("HTTP trackers ...");

    for http_tracker in &self.config.http_trackers {
        let colored_tracker_url = http_tracker.to_string().yellow();

        // Announce check: report, then record the outcome.
        let announce_result = self.check_http_announce(http_tracker).await;
        match &announce_result {
            Ok(()) => self
                .console
                .println(&format!("{} - Announce at {} is OK", "✓".green(), colored_tracker_url)),
            Err(_) => self
                .console
                .println(&format!("{} - Announce at {} is failing", "✗".red(), colored_tracker_url)),
        }
        check_results.push(announce_result);

        // Scrape check: same reporting scheme as the announce check.
        let scrape_result = self.check_http_scrape(http_tracker).await;
        match &scrape_result {
            Ok(()) => self
                .console
                .println(&format!("{} - Scrape at {} is OK", "✓".green(), colored_tracker_url)),
            Err(_) => self
                .console
                .println(&format!("{} - Scrape at {} is failing", "✗".red(), colored_tracker_url)),
        }
        check_results.push(scrape_result);
    }
}

async fn run_health_checks(&self) -> Vec<CheckResult> {
async fn run_health_checks(&self, check_results: &mut Vec<CheckResult>) {
self.console.println("Health checks ...");

let mut check_results = vec![];

for health_check_url in &self.config.health_checks {
match self.run_health_check(health_check_url.clone()).await {
Ok(()) => check_results.push(Ok(())),
Err(err) => check_results.push(Err(err)),
}
}

check_results
}

/// Placeholder check for a UDP tracker: only prints a status line for now.
///
/// Real announce/scrape requests are not implemented yet, hence the "TODO"
/// marker in the printed output.
fn check_udp_tracker(&self, address: &SocketAddr) {
    // todo:
    // - Make announce request
    // - Make scrape request

    let colored_address = address.to_string().yellow();

    self.console.println(&format!(
        "{} - UDP tracker at udp://{} is OK ({})",
        "✓".green(),
        colored_address,
        "TODO".red(),
    ));
}

fn check_http_tracker(&self, url: &Url) {
// todo:
// - Make announce request
// - Make scrape request
self.console
.println(&format!("{} - HTTP tracker at {} is OK (TODO)", "✓".green(), url));
async fn check_http_announce(&self, url: &Url) -> Result<(), CheckError> {
let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");

let response = Client::new(url.clone())
.announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
.await;

if let Ok(body) = response.bytes().await {
if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) {
Ok(())
} else {
Err(CheckError::HttpError { url: url.clone() })
}
} else {
Err(CheckError::HttpError { url: url.clone() })
}
}

/// Checks the `scrape` endpoint of an HTTP tracker.
///
/// Sends a scrape request for a fixed, well-formed info-hash and considers
/// the check successful if the response body parses as a bencoded scrape
/// response.
///
/// # Errors
///
/// Returns [`CheckError::HttpError`] with the tracker URL if the request
/// fails or the body cannot be parsed.
async fn check_http_scrape(&self, url: &Url) -> Result<(), CheckError> {
    let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
    let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");

    let response = Client::new(url.clone()).scrape(&query).await;

    match response.bytes().await {
        Ok(body) => match scrape::Response::try_from_bencoded(&body) {
            Ok(_scrape_response) => Ok(()),
            Err(_) => Err(CheckError::HttpError { url: url.clone() }),
        },
        Err(_) => Err(CheckError::HttpError { url: url.clone() }),
    }
}

async fn run_health_check(&self, url: Url) -> Result<(), CheckError> {
let client = Client::builder().timeout(Duration::from_secs(5)).build().unwrap();
let client = HttpClient::builder().timeout(Duration::from_secs(5)).build().unwrap();

let colored_url = url.to_string().yellow();

match client.get(url.clone()).send().await {
Ok(response) => {
if response.status().is_success() {
self.console
.println(&format!("{} - Health API at {} is OK", "✓".green(), url));
.println(&format!("{} - Health API at {} is OK", "✓".green(), colored_url));
Ok(())
} else {
self.console
.eprintln(&format!("{} - Health API at {} failing: {:?}", "✗".red(), url, response));
self.console.eprintln(&format!(
"{} - Health API at {} is failing: {:?}",
"✗".red(),
colored_url,
response
));
Err(CheckError::HealthCheckError { url })
}
}
Err(err) => {
self.console
.eprintln(&format!("{} - Health API at {} failing: {:?}", "✗".red(), url, err));
self.console.eprintln(&format!(
"{} - Health API at {} is failing: {:?}",
"✗".red(),
colored_url,
err
));
Err(CheckError::HealthCheckError { url })
}
}
Expand Down
17 changes: 17 additions & 0 deletions src/shared/bit_torrent/tracker/http/client/requests/scrape.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,23 @@ impl TryFrom<&[String]> for Query {
}
}

/// Builds a scrape [`Query`] from a vector of hex-encoded info-hash strings.
///
/// # Errors
///
/// Returns a `ConversionError` wrapping the offending string if any element
/// is not a valid info-hash.
impl TryFrom<Vec<String>> for Query {
    type Error = ConversionError;

    fn try_from(info_hashes: Vec<String>) -> Result<Self, Self::Error> {
        // Validate each info-hash, short-circuiting on the first invalid one.
        // Consuming the strings lets the error take ownership of the bad
        // value, avoiding the clone the loop-based version needed.
        let validated_info_hashes = info_hashes
            .into_iter()
            .map(|info_hash| {
                InfoHash::from_str(&info_hash)
                    .map(|validated| validated.0)
                    .map_err(|_| ConversionError(info_hash))
            })
            .collect::<Result<Vec<ByteArray20>, _>>()?;

        Ok(Self {
            info_hash: validated_info_hashes,
        })
    }
}

/// HTTP Tracker Scrape Request:
///
/// <https://www.bittorrent.org/beps/bep_0048.html>
Expand Down

0 comments on commit 95c5aa4

Please sign in to comment.