From c38ae0d2d0efb381762831a8340f57f0519e1e64 Mon Sep 17 00:00:00 2001 From: Jerboa-app Date: Mon, 20 May 2024 19:41:26 +0100 Subject: [PATCH] Removes count from Hit, Adds documentation --- src/content/mod.rs | 9 ++------ src/content/sitemap.rs | 20 +++++++++++++++-- src/filesystem/file.rs | 2 ++ src/filesystem/folder.rs | 4 ++++ src/integrations/mod.rs | 1 + src/integrations/webhook.rs | 1 + src/server/api/mod.rs | 2 +- src/server/stats/digest.rs | 45 ++++++++++++++++++++----------------- src/server/stats/file.rs | 10 ++++----- src/server/stats/hits.rs | 34 +++++++++++++++++++--------- src/task/mod.rs | 2 +- tests/stats/2024-03-24 | 16 ------------- tests/stats/2024-03-25 | 1 - tests/test_stats.rs | 13 +++++------ 14 files changed, 88 insertions(+), 72 deletions(-) diff --git a/src/content/mod.rs b/src/content/mod.rs index 543e646..18a6b34 100644 --- a/src/content/mod.rs +++ b/src/content/mod.rs @@ -19,15 +19,10 @@ pub mod sitemap; /// Store web content /// -/// CF [crate::content::pages::page::Page] and [crate::content::resources::resource::Resource] -/// -/// - [Content::uri] is the served uri of the content (webaddress) -/// - [Content::body] is a byte body -/// - [Content::disk_path] is a path to the locally stored file on disk representing [Content::body] -/// - [Content::cache_period_seconds] is the cache-control max-age -/// /// - The body is unpopulated until [Content::load_from_file] is called /// - The body may be converted to a utf8 string using [Content::utf8_body] +/// - A hash of the file is used to check it is stale, used by [Observed] +/// - Content may have different server side and browser side cache ages #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Content { diff --git a/src/content/sitemap.rs b/src/content/sitemap.rs index 197100c..70d779f 100644 --- a/src/content/sitemap.rs +++ b/src/content/sitemap.rs @@ -13,6 +13,11 @@ use crate::server::https::parse_uri; use super::{get_content, mime_type::Mime, Content}; +/// A tree structure 
representing a uri stem and content +/// convertible to a [Router] which monitors the content if +/// [crate::config::ContentConfig::static_content] is false. If +/// so and the server cache has expired ([crate::config::ContentConfig::server_cache_period_seconds]) +/// then content is automatically refreshed when served pub struct ContentTree { uri_stem: String, @@ -104,7 +109,8 @@ impl ContentTree } } - /// Implements writing to an xml conforming to https://www.sitemaps.org/protocol.html + /// Implements writing to an xml conforming to + /// <https://www.sitemaps.org/protocol.html> with <urlset> and <url> pub fn to_xml(&self, domain: String) -> Vec { if self.contents.is_empty() && self.children.is_empty() @@ -172,6 +178,13 @@ impl ContentTree } } +/// Represents the structure of a site. +/// If no sitemap.xml or robots.txt is present +/// these will be generated by calling [SiteMap::to_xml] +/// and inserting the resulting sitemap.xml +/// +/// Convertible to a router, see [ContentTree] for dynamic +/// options pub struct SiteMap { contents: ContentTree, @@ -191,6 +204,8 @@ impl SiteMap self.contents.push(content.uri.clone(), content); } + /// Searches the content path from [SiteMap::new] for [Content] + /// robots.txt and sitemap.xml can be generated and added here pub fn build ( &mut self, @@ -287,7 +302,8 @@ impl SiteMap } } - /// Implements writing to an xml conforming to https://www.sitemaps.org/protocol.html + /// Implements writing to an xml conforming to + /// <https://www.sitemaps.org/protocol.html> with <urlset> and <url> pub fn to_xml(&self) -> Vec { let mut buffer = Vec::new(); diff --git a/src/filesystem/file.rs b/src/filesystem/file.rs index 635f619..2bb2b83 100644 --- a/src/filesystem/file.rs +++ b/src/filesystem/file.rs @@ -23,6 +23,8 @@ pub trait File fn path(&self) -> String; } +/// Something that may be observed for changes +/// [Observed::is_stale] is typically a hash function pub trait Observed { fn is_stale(&self) -> bool; diff --git a/src/filesystem/folder.rs b/src/filesystem/folder.rs index a2ec6a8..7238a2f 100644 --- a/src/filesystem/folder.rs +++ 
b/src/filesystem/folder.rs @@ -15,6 +15,7 @@ impl fmt::Display for ListDirError { } } +/// List all files in path pub fn list_dir(path: String) -> Result { match std::fs::read_dir(path) @@ -30,6 +31,7 @@ pub fn list_dir(path: String) -> Result } } +/// Return parsed [std::ffi::OsString] from [DirEntry] pub fn dir_entry_to_path(d: DirEntry) -> Option { let file_os_string = d.file_name(); @@ -45,6 +47,7 @@ pub fn dir_entry_to_path(d: DirEntry) -> Option } } +/// List all subdirectories of a path pub fn list_sub_dirs(path: String) -> Vec { let mut found_dirs: Vec = vec![]; @@ -100,6 +103,7 @@ pub fn list_sub_dirs(path: String) -> Vec found_dirs } +/// List all files conforming to an [Option] [Regex] pub fn list_dir_by(pattern: Option, path: String) -> Vec { match std::fs::read_dir(path.clone()) diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index e0e6893..8d2b10e 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -13,6 +13,7 @@ pub mod webhook; /// Uses openssl to verify the request body via the given hmac_token /// - hmac_header_key is the location in the https header for the digest +/// - hmac_token must be 64 byte Hex, as should be the sent HMAC digest pub fn is_authentic ( headers: &HeaderMap, diff --git a/src/integrations/webhook.rs b/src/integrations/webhook.rs index 78adf4d..2e82e43 100644 --- a/src/integrations/webhook.rs +++ b/src/integrations/webhook.rs @@ -6,6 +6,7 @@ pub struct Webhook addr: String } +/// Wraps a webhook url impl Webhook { diff --git a/src/server/api/mod.rs b/src/server/api/mod.rs index 11f5a3b..38b0f69 100644 --- a/src/server/api/mod.rs +++ b/src/server/api/mod.rs @@ -15,7 +15,7 @@ use super::stats::hits::HitStats. pub trait ApiRequest { /// Validate a request's hmac given a token read from config.json - /// - See [crate::config::Config] and [crate::web::is_authentic] + /// - See [crate::config::Config] and [crate::integrations::is_authentic] fn is_authentic(headers: HeaderMap, body: Bytes) -> 
StatusCode; /// Deserialise the Bytes body from JSON fn deserialise_payload(&mut self, headers: HeaderMap, body: Bytes) -> StatusCode; diff --git a/src/server/stats/digest.rs b/src/server/stats/digest.rs index 7d1eaf2..b4b545a 100644 --- a/src/server/stats/digest.rs +++ b/src/server/stats/digest.rs @@ -6,15 +6,16 @@ use crate::{config::{Config, CONFIG_PATH}, content::is_page, util::matches_one}; use super::hits::{collect_hits, HitStats}; +/// A digest of hit statistics #[derive(Debug, Clone, PartialEq)] pub struct Digest { - pub top_hitters: Vec<(String, u16)>, - pub top_pages: Vec<(String, u16)>, - pub top_resources: Vec<(String, u16)>, - pub hits_by_hour_utc: [u16; 24], - pub total_hits: u16, - pub unique_hits: u16 + pub top_hitters: Vec<(String, usize)>, + pub top_pages: Vec<(String, usize)>, + pub top_resources: Vec<(String, usize)>, + pub hits_by_hour_utc: [usize; 24], + pub total_hits: usize, + pub unique_hits: usize } impl Digest @@ -33,6 +34,7 @@ impl Digest } } +/// Collect hits cached and from local files into a [Digest] pub fn process_hits(path: String, from: Option>, to: Option>, top_n: Option, stats: Option) -> Digest { @@ -52,9 +54,9 @@ pub fn process_hits(path: String, from: Option>, to: Optio None => (vec![], config.domain) }; - let mut hitters: HashMap = HashMap::new(); - let mut pages: HashMap = HashMap::new(); - let mut resources: HashMap = HashMap::new(); + let mut hitters: HashMap = HashMap::new(); + let mut pages: HashMap = HashMap::new(); + let mut resources: HashMap = HashMap::new(); for hit in collect_hits(path, stats, from, to) { @@ -65,10 +67,10 @@ pub fn process_hits(path: String, from: Option>, to: Optio match hitters.contains_key(&hit.ip_hash) { - true => {hitters.insert(hit.ip_hash.clone(), hit.count+hitters[&hit.ip_hash]);}, + true => {hitters.insert(hit.ip_hash.clone(), hit.count()+hitters[&hit.ip_hash]);}, false => { - hitters.insert(hit.ip_hash, hit.count); + hitters.insert(hit.ip_hash.clone(), hit.count()); digest.unique_hits += 
1; } } @@ -77,20 +79,20 @@ pub fn process_hits(path: String, from: Option>, to: Optio { match pages.contains_key(&hit.path) { - true => {pages.insert(hit.path.clone(), hit.count+pages[&hit.path]);}, - false => {pages.insert(hit.path, hit.count);} + true => {pages.insert(hit.path.clone(), hit.count()+pages[&hit.path]);}, + false => {pages.insert(hit.path.clone(), hit.count());} } } else { match resources.contains_key(&hit.path) { - true => {resources.insert(hit.path.clone(), hit.count+resources[&hit.path]);}, - false => {resources.insert(hit.path, hit.count);} + true => {resources.insert(hit.path.clone(), hit.count()+resources[&hit.path]);}, + false => {resources.insert(hit.path.clone(), hit.count());} } } - digest.total_hits += hit.count; + digest.total_hits += hit.count(); for time in hit.times { @@ -105,13 +107,13 @@ pub fn process_hits(path: String, from: Option>, to: Optio } } - let mut all_hitters: Vec<(String, u16)> = hitters.into_iter().collect(); - let mut all_pages: Vec<(String, u16)> = pages.into_iter().collect(); - let mut all_resources: Vec<(String, u16)> = resources.into_iter().collect(); + let mut all_hitters: Vec<(String, usize)> = hitters.into_iter().collect(); + let mut all_pages: Vec<(String, usize)> = pages.into_iter().collect(); + let mut all_resources: Vec<(String, usize)> = resources.into_iter().collect(); for data in vec![&mut all_hitters, &mut all_pages, &mut all_resources] { - data.sort_by(|a: &(String, u16), b: &(String, u16)| a.1.cmp(&b.1)); + data.sort_by(|a: &(String, usize), b: &(String, usize)| a.1.cmp(&b.1)); data.reverse(); } @@ -157,6 +159,7 @@ pub fn process_hits(path: String, from: Option>, to: Optio } +/// Post a [Digest] as a formatted message to Discord pub fn digest_message(digest: Digest, from: Option>, to: Option>) -> String { let mut msg = String::new(); @@ -204,7 +207,7 @@ pub fn digest_message(digest: Digest, from: Option>, to: O msg } -pub fn hits_by_hour_text_graph(hits: [u16; 24], symbol: char, size: u8) -> String 
+pub fn hits_by_hour_text_graph(hits: [usize; 24], symbol: char, size: u8) -> String { let mut graph = String::new(); diff --git a/src/server/stats/file.rs b/src/server/stats/file.rs index 5423dc1..7d76a68 100644 --- a/src/server/stats/file.rs +++ b/src/server/stats/file.rs @@ -2,17 +2,17 @@ use crate::{config::{Config, CONFIG_PATH}, filesystem::file::{read_file_utf8, wr use super::hits::{Hit, HitStats}; +/// Save hits to disk. +/// Each stats file takes the current date YYYY-MM-DD as +/// its file name, if multiple saves occur on the same date +/// the file is appended to +/// See [crate::server::stats::HitStats] pub struct StatsFile { pub hits: Vec, pub path: Option } -/// Save hits to disk. -/// Each stats files takes the current date YYYY-MM-DD as -/// its file name, if multiple saves occur on the same date -/// the file is appended to -/// See [crate::server::stats::HitStats] impl StatsFile { pub fn new() -> StatsFile diff --git a/src/server/stats/hits.rs b/src/server/stats/hits.rs index 8053848..f0d24ba 100644 --- a/src/server/stats/hits.rs +++ b/src/server/stats/hits.rs @@ -1,25 +1,39 @@ -use std::{collections::HashMap, fs::create_dir, net::{IpAddr, Ipv4Addr, SocketAddr}, sync::Arc, time::Instant}; +use std::{collections::HashMap, net::{IpAddr, Ipv4Addr, SocketAddr}, sync::Arc, time::Instant}; use axum::{extract::{ConnectInfo, State}, http::Request, middleware::Next, response::Response}; use chrono::DateTime; use openssl::sha::sha512; use reqwest::StatusCode; use serde::{Deserialize, Serialize}; -use tokio::sync::{Mutex, MutexGuard}; +use tokio::sync::Mutex; -use crate::{config::{Config, CONFIG_PATH}, filesystem::{file::{read_file_utf8, write_file_bytes}, folder::list_dir_by}, util::{compress, date_to_rfc3339, dump_bytes}}; +use crate::{config::{Config, CONFIG_PATH}, filesystem::{file::read_file_utf8, folder::list_dir_by}, util::{date_to_rfc3339, dump_bytes}}; use super::digest::Digest; +/// A hit defined by a uri and an ip (hashed) at given times +/// - 
[Hit::times] the unique hit times of equivalent events (controlled by [crate::config::StatsConfig::hit_cooloff_seconds]) +/// - [Hit::path] the uri defining this hit +/// - [Hit::ip_hash] the sha512 hash of the ip defining this hit +/// +/// Further hits inside the [crate::config::StatsConfig::hit_cooloff_seconds] period will be ignored #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct Hit { - pub count: u16, pub times: Vec, pub path: String, pub ip_hash: String } +impl Hit +{ + pub fn count(&self) -> usize + { + self.times.len().clone() + } +} + +/// A collection of Hits indexed by a sha512 of the ip and uri #[derive(Debug, Clone)] pub struct HitStats { @@ -39,6 +53,7 @@ impl HitStats } } +/// Launches a thread to log statistics for this request pub async fn log_stats ( ConnectInfo(addr): ConnectInfo, @@ -59,6 +74,7 @@ pub async fn log_stats Ok(next.run(request).await) } +/// Updates hit statistics with this request pub async fn process_hit ( addr: SocketAddr, @@ -112,7 +128,6 @@ pub async fn process_hit return } hit.times.push(chrono::offset::Utc::now().to_rfc3339()); - hit.count += 1; hit }, Err(_e) => {hit} @@ -122,7 +137,7 @@ pub async fn process_hit }, false => { - Hit {path: uri, count: 1, times: vec![chrono::offset::Utc::now().to_rfc3339()], ip_hash: dump_bytes(&ip_hash)} + Hit {path: uri, times: vec![chrono::offset::Utc::now().to_rfc3339()], ip_hash: dump_bytes(&ip_hash)} } }; @@ -138,6 +153,7 @@ pub async fn process_hit ), Some("PERFORMANCE".to_string())); } +/// Gathers [Hit]s both from disk and those cached in [HitStats] pub fn collect_hits(path: String, stats: Option, from: Option>, to: Option>) -> Vec { let stats_files = list_dir_by(None, path); @@ -189,7 +205,6 @@ pub fn collect_hits(path: String, stats: Option, from: Option = vec![]; for i in 0..hit.times.len() { @@ -200,13 +215,12 @@ pub fn collect_hits(path: String, stats: Option, from: Option to) { - count += 1; times.push(hit.times[i].clone()); } } - if count > 0 + if 
times.len() > 0 { - let h = Hit {count, times, ip_hash: hit.ip_hash.clone(), path: hit.path.clone()}; + let h = Hit {times, ip_hash: hit.ip_hash.clone(), path: hit.path.clone()}; hits.push(h.clone()); } } diff --git a/src/task/mod.rs b/src/task/mod.rs index 47eb720..900d58d 100644 --- a/src/task/mod.rs +++ b/src/task/mod.rs @@ -66,7 +66,7 @@ impl TaskPool /// Returns a duration to wait for the next runnable process /// and an information string about that process including - /// a [DateTime] when it will be run + /// a [`DateTime`] when it will be run pub async fn waiting_for(&self) -> (tokio::time::Duration, String) { if self.tasks.len() == 0 diff --git a/tests/stats/2024-03-24 b/tests/stats/2024-03-24 index 6e90087..3805ccb 100644 --- a/tests/stats/2024-03-24 +++ b/tests/stats/2024-03-24 @@ -1,6 +1,5 @@ [ { - "count": 1, "times": [ "2024-03-24T04:12:44.736120969+00:00" ], @@ -8,7 +7,6 @@ "ip_hash": "75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8" }, { - "count": 1, "times": [ "2024-03-24T05:47:19.588565149+00:00" ], @@ -16,7 +14,6 @@ "ip_hash": "0740AB1D2E51ECC4841138C8FE89541B69093095D5F25D83ECD2CC4BC5E57736F6AE59EAAD3E6E81734562553F47EC8F37DBDD89595A9725E21560EBF8134457" }, { - "count": 4, "times": [ "2024-03-23T11:48:30.126043279+00:00", "2024-03-23T17:00:09.696503341+00:00", @@ -27,7 +24,6 @@ "ip_hash": "FCAE05F5141CB54454F2389800F331645AB26895D575D0DBE66B053CC980E8175553A306EF6CCE72ED9CA3EA6EBFEA8EA0C46DC375BA1B00CBCAC38E41B5D8CC" }, { - "count": 1, "times": [ "2024-03-24T08:34:54.419188478+00:00" ], @@ -35,7 +31,6 @@ "ip_hash": "DB811697A278796AAB70225183BAD4A0810EE9656E78AD5D127A5149D1F2EEA925CF1B703F8B56B4A3F72FD8BFE9ABF66CA8022B8878D2915B341F42F5DDB684" }, { - "count": 1, "times": [ "2024-03-23T23:49:29.465472690+00:00" ], @@ -43,7 +38,6 @@ "ip_hash": 
"DDBA68E3F6476AFD49639227FF84A76B4DA0A8F236E308114186407349D6C799EB12CCF5BD9509765BD11A23B06A09635E37921FCB923B4D8FCB8BE982F59458" }, { - "count": 1, "times": [ "2024-03-23T11:07:05.375822282+00:00" ], @@ -51,7 +45,6 @@ "ip_hash": "6844DB8F8E03A2D4141525D8545671A577588327B757F331185F1B71E5CEBDF2BF582546E5C86F5AE929BEB47A1A347EA2D3665983032CCCE02C905E07CFF75F" }, { - "count": 1, "times": [ "2024-03-23T23:49:29.420774481+00:00" ], @@ -59,7 +52,6 @@ "ip_hash": "DDBA68E3F6476AFD49639227FF84A76B4DA0A8F236E308114186407349D6C799EB12CCF5BD9509765BD11A23B06A09635E37921FCB923B4D8FCB8BE982F59458" }, { - "count": 1, "times": [ "2024-03-24T00:28:32.666460208+00:00" ], @@ -67,7 +59,6 @@ "ip_hash": "75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8" }, { - "count": 1, "times": [ "2024-03-23T17:52:37.318752807+00:00" ], @@ -75,7 +66,6 @@ "ip_hash": "DC6E1BEAA1CED685985E8B4BEF91B55EA0AFC48EB6FD6FCC7527D1A32D16AB807CADB7C95BADF3B408A11F07FDE08C6F9CA89758E18DC2E5852D35353016CC86" }, { - "count": 2, "times": [ "2024-03-23T20:29:06.704843460+00:00", "2024-03-23T23:21:17.970437006+00:00" @@ -84,7 +74,6 @@ "ip_hash": "A9464538D1B20CA644A7CC6D0E9C8ADCF59C79CBF1581C61451F794E1B4D3A5A9AF3DE6E9C4133865B7A1B29CAF3EF9A4AEEF0F4CABCCA41C54C0F2ADCCFBA5F" }, { - "count": 1, "times": [ "2024-03-24T05:33:06.776551190+00:00" ], @@ -92,7 +81,6 @@ "ip_hash": "AD3391E311EFBA83596CC7DF9CD96581F9382E27584B5A39B813AF634F8637B7602ADF725F768C37BC9184A375641CA61CD9D72E868997556799FBDF6E845878" }, { - "count": 1, "times": [ "2024-03-23T20:06:05.495202418+00:00" ], @@ -100,7 +88,6 @@ "ip_hash": "75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8" }, { - "count": 1, "times": [ "2024-03-23T17:52:39.067812916+00:00" ], @@ -108,7 +95,6 @@ "ip_hash": 
"DC6E1BEAA1CED685985E8B4BEF91B55EA0AFC48EB6FD6FCC7527D1A32D16AB807CADB7C95BADF3B408A11F07FDE08C6F9CA89758E18DC2E5852D35353016CC86" }, { - "count": 1, "times": [ "2024-03-24T05:07:09.643633348+00:00" ], @@ -116,7 +102,6 @@ "ip_hash": "75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8" }, { - "count": 1, "times": [ "2024-03-23T21:14:53.620892240+00:00" ], @@ -124,7 +109,6 @@ "ip_hash": "75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8" }, { - "count": 1, "times": [ "2024-03-23T23:49:29.266306916+00:00" ], diff --git a/tests/stats/2024-03-25 b/tests/stats/2024-03-25 index 9bc0f11..41c733b 100644 --- a/tests/stats/2024-03-25 +++ b/tests/stats/2024-03-25 @@ -1,6 +1,5 @@ [ { - "count": 1, "times": [ "2024-03-25T04:12:44.736120969+00:00" ], diff --git a/tests/test_stats.rs b/tests/test_stats.rs index c655a08..783d19a 100644 --- a/tests/test_stats.rs +++ b/tests/test_stats.rs @@ -37,7 +37,7 @@ mod test_stats_graph #[test] fn test_text_graph() { - let hits: [u16; 24] = core::array::from_fn(|i| i as u16); + let hits: [usize; 24] = core::array::from_fn(|i| i); let graph = hits_by_hour_text_graph(hits, '-', 24); println!("{}", graph); assert_eq!(graph, GRAPH); @@ -51,7 +51,6 @@ mod test_stats_graph let mut hit = Hit { - count: 1, times: vec!["2024-03-25T04:12:44.736120969+00:00".to_string()], path: "/login.php/'%3E%3Csvg/onload=confirm%60xss%60%3E".to_owned(), ip_hash: "75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8".to_owned() @@ -71,7 +70,6 @@ mod test_stats_graph hit = Hit { - count: 1, times: vec!["2024-03-24T04:12:44.736120969+00:00".to_string()], path: "/login.php/'%3E%3Csvg/onload=confirm%60xss%60%3E".to_owned(), ip_hash: 
"75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8".to_owned() @@ -90,11 +88,11 @@ mod test_stats_graph assert_eq!(digest.total_hits, 21); assert_eq!(digest.hits_by_hour_utc, [1, 0, 0, 0, 2, 4, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 0, 3, 1, 0, 2, 1, 0, 0]); - assert_eq!(digest.top_hitters.first().unwrap(), &("75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8".to_string(), 6 as u16)); + assert_eq!(digest.top_hitters.first().unwrap(), &("75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8".to_string(), 6)); assert_eq!(digest.top_pages.first().unwrap(), &("/".to_string(), 5)); - assert!(digest.top_resources.contains(&("/login.php/'%3E%3Csvg/onload=confirm%60xss%60%3E".to_string(), 2 as u16))); - assert!(digest.top_resources.contains(&("https://jerboa.app/console.js".to_string(), 2 as u16))); - assert!(digest.top_resources.contains(&("/admin/.env".to_string(), 2 as u16))); + assert!(digest.top_resources.contains(&("/login.php/'%3E%3Csvg/onload=confirm%60xss%60%3E".to_string(), 2))); + assert!(digest.top_resources.contains(&("https://jerboa.app/console.js".to_string(), 2))); + assert!(digest.top_resources.contains(&("/admin/.env".to_string(), 2))); } #[test] @@ -128,7 +126,6 @@ mod test_stats_graph let hit = Hit { - count: 1, times: vec!["2024-03-24T04:12:44.736120969+00:00".to_string()], path: "/login.php/'%3E%3Csvg/onload=confirm%60xss%60%3E".to_owned(), ip_hash: "75A05052881EA1D68995532845978B4090012883F99354EFF67AD4E1ED5FF1833F4A2EC893181EAA00B94B9CD35E1E1DD581B7F80FEF2EFF45B75D529A080BD8".to_owned()