Skip to content

Commit

Permalink
Fix crash when retrying HTTP requests: handle `RequestBuilder::try_clone()` returning `None` gracefully instead of panicking via `expect`.
Browse files Browse the repository at this point in the history
  • Loading branch information
Edu4rdSHL committed Oct 17, 2023
1 parent 0a3288c commit cd2dc1a
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 43 deletions.
71 changes: 34 additions & 37 deletions src/httplib.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,19 @@
use crate::structs::{HTTPFilters, LibOptions};
use async_recursion::async_recursion;
use rand::{distributions::Alphanumeric, thread_rng as rng, Rng};
use reqwest::{
header::{CONTENT_LENGTH, CONTENT_TYPE},
redirect::Policy,
Client, Response, Url,
};
use std::collections::{HashMap, HashSet};
use {
crate::{structs::HttpData, utils},
crate::{
structs::{HTTPFilters, HttpData, LibOptions},
utils,
},
async_recursion::async_recursion,
futures::stream::StreamExt,
reqwest::{self, header::USER_AGENT},
rand::{distributions::Alphanumeric, thread_rng as rng, Rng},
reqwest::{
self,
header::{CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT},
redirect::Policy,
Client, Response, Url,
},
scraper::{Html, Selector},
std::collections::{HashMap, HashSet},
};

#[allow(clippy::too_many_arguments)]
Expand All @@ -31,12 +33,12 @@ pub async fn return_http_data(options: &LibOptions) -> HashMap<String, HttpData>
// Create futures
let https_send_fut = options
.client
.get(&format!("https://{}", host))
.get(format!("https://{host}"))
.header(USER_AGENT, &user_agent);

let http_send_fut = options
.client
.get(&format!("http://{}", host))
.get(format!("http://{host}"))
.header(USER_AGENT, &user_agent);

let mut http_data = HttpData::default();
Expand All @@ -47,24 +49,18 @@ pub async fn return_http_data(options: &LibOptions) -> HashMap<String, HttpData>
if options.retries != 1 {
let mut counter = 0;
while counter < options.retries {
if let Ok(resp) = https_send_fut
.try_clone()
.expect("Failed to clone https future")
.send()
.await
{
response = Some(resp);
break;
} else if let Ok(resp) = http_send_fut
.try_clone()
.expect("Failed to clone http future")
.send()
.await
{
response = Some(resp);
break;
if let Some(resp) = https_send_fut.try_clone() {
if let Ok(resp) = resp.send().await {
response = Some(resp);
break;
}
} else if let Some(resp) = http_send_fut.try_clone() {
if let Ok(resp) = resp.send().await {
response = Some(resp);
break;
}
counter += 1;
}
counter += 1;
}
} else if let Ok(resp) = https_send_fut.send().await {
response = Some(resp);
Expand Down Expand Up @@ -126,7 +122,8 @@ pub fn return_http_client(timeout: u64, max_redirects: usize) -> Client {
.expect("Failed to create HTTP client")
}

#[must_use] pub fn return_url(mut url: Url) -> String {
#[must_use]
pub fn return_url(mut url: Url) -> String {
url.set_path("");
url.set_query(None);
url.to_string()
Expand All @@ -152,10 +149,10 @@ pub async fn assign_response_data(
.unwrap_or_default()
.to_string()
} else {
"".to_string()
String::new()
};

http_data.headers = format!("{:?}", headers);
http_data.headers = format!("{headers:?}");

let full_body = resp.text().await.unwrap_or_default();

Expand Down Expand Up @@ -197,7 +194,7 @@ pub fn return_title_and_body(http_data: &mut HttpData, body: &str) {
}
}
Err(err) => {
eprintln!("Failed to parse selector: {:?}", err);
eprintln!("Failed to parse selector: {err:?}");
}
}

Expand All @@ -211,7 +208,7 @@ pub fn return_title_and_body(http_data: &mut HttpData, body: &str) {
}
}
Err(err) => {
eprintln!("Failed to parse selector: {:?}", err);
eprintln!("Failed to parse selector: {err:?}");
}
}

Expand Down Expand Up @@ -255,8 +252,8 @@ pub async fn return_filters_data(

let data = return_http_data(&lib_options).await;

data.iter()
.map(|(_, http_data)| {
data.values()
.map(|http_data| {
http_filters
.bad_http_lengths
.append(&mut vec![http_data.content_length.to_string()]);
Expand Down
9 changes: 3 additions & 6 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
use std::collections::HashSet;

use fhc::{httplib, structs::LibOptions};

use {
clap::{value_t, App, Arg},
fhc::utils,
fhc::{httplib, structs::LibOptions, utils},
std::collections::HashSet,
tokio::{
self,
io::{self, AsyncReadExt},
Expand Down Expand Up @@ -128,7 +125,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
let domain = value_t!(matches, "domain", String).unwrap();
buffer
.lines()
.map(|word| format!("{}.{}", word, domain))
.map(|word| format!("{word}.{domain}"))
.collect()
} else {
buffer.lines().map(str::to_owned).collect()
Expand Down

0 comments on commit cd2dc1a

Please sign in to comment.