diff --git a/project-words.txt b/project-words.txt index a76aa985..289364ad 100644 --- a/project-words.txt +++ b/project-words.txt @@ -21,6 +21,7 @@ hexlify httpseeds imagoodboy imdl +indexmap infohash jsonwebtoken leechers diff --git a/src/app.rs b/src/app.rs index 3d1f818f..2322d139 100644 --- a/src/app.rs +++ b/src/app.rs @@ -11,11 +11,9 @@ use crate::bootstrap::logging; use crate::cache::image::manager::ImageCacheService; use crate::common::AppData; use crate::config::Configuration; -use crate::databases::database::connect_database; -use crate::mailer::MailerService; -use crate::routes; -use crate::tracker::service::Service; +use crate::databases::database; use crate::tracker::statistics_importer::StatisticsImporter; +use crate::{mailer, routes, tracker}; pub struct Running { pub api_server: Server, @@ -43,12 +41,12 @@ pub async fn run(configuration: Configuration) -> Running { // Build app dependencies - let database = Arc::new(connect_database(&database_connect_url).await.expect("Database error.")); + let database = Arc::new(database::connect(&database_connect_url).await.expect("Database error.")); let auth = Arc::new(AuthorizationService::new(cfg.clone(), database.clone())); - let tracker_service = Arc::new(Service::new(cfg.clone(), database.clone()).await); + let tracker_service = Arc::new(tracker::service::Service::new(cfg.clone(), database.clone()).await); let tracker_statistics_importer = Arc::new(StatisticsImporter::new(cfg.clone(), tracker_service.clone(), database.clone()).await); - let mailer_service = Arc::new(MailerService::new(cfg.clone()).await); + let mailer_service = Arc::new(mailer::Service::new(cfg.clone()).await); let image_cache_service = Arc::new(ImageCacheService::new(cfg.clone()).await); // Build app container @@ -92,7 +90,7 @@ pub async fn run(configuration: Configuration) -> Running { .wrap(Cors::permissive()) .app_data(web::Data::new(app_data.clone())) .wrap(middleware::Logger::default()) - .configure(routes::init_routes) + .configure(routes::init) }) .bind((ip, net_port)) .expect("can't bind server to socket address"); diff --git a/src/auth.rs b/src/auth.rs index 8c0f2c27..a8fbf76b 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -7,7 +7,7 @@ use crate::config::Configuration; use crate::databases::database::Database; use crate::errors::ServiceError; use crate::models::user::{UserClaims, UserCompact}; -use crate::utils::clock::current_time; +use crate::utils::clock; pub struct AuthorizationService { cfg: Arc, @@ -19,19 +19,25 @@ impl AuthorizationService { AuthorizationService { cfg, database } } + /// Create JSON Web Token pub async fn sign_jwt(&self, user: UserCompact) -> String { let settings = self.cfg.settings.read().await; // create JWT that expires in two weeks let key = settings.auth.secret_key.as_bytes(); // TODO: create config option for setting the token validity in seconds - let exp_date = current_time() + 1_209_600; // two weeks from now + let exp_date = clock::now() + 1_209_600; // two weeks from now let claims = UserClaims { user, exp: exp_date }; - encode(&Header::default(), &claims, &EncodingKey::from_secret(key)).unwrap() + encode(&Header::default(), &claims, &EncodingKey::from_secret(key)).expect("argument `Header` should match `EncodingKey`") } + /// Verify JSON Web Token + /// + /// # Errors + /// + /// This function will return an error if the JWT is invalid or expired.
pub async fn verify_jwt(&self, token: &str) -> Result { let settings = self.cfg.settings.read().await; @@ -41,7 +47,7 @@ impl AuthorizationService { &Validation::new(Algorithm::HS256), ) { Ok(token_data) => { - if token_data.claims.exp < current_time() { + if token_data.claims.exp < clock::now() { return Err(ServiceError::TokenExpired); } Ok(token_data.claims) @@ -50,12 +56,21 @@ impl AuthorizationService { } } + /// Get Claims from Request + /// + /// # Errors + /// + /// This function will return a `ServiceError::TokenNotFound` if `HeaderValue` is `None`. + /// This function will pass through the `ServiceError::TokenInvalid` if unable to verify the JWT. pub async fn get_claims_from_request(&self, req: &HttpRequest) -> Result { - let _auth = req.headers().get("Authorization"); - match _auth { - Some(_) => { - let _split: Vec<&str> = _auth.unwrap().to_str().unwrap().split("Bearer").collect(); - let token = _split[1].trim(); + match req.headers().get("Authorization") { + Some(auth) => { + let split: Vec<&str> = auth + .to_str() + .expect("variable `auth` contains data that is not visible ASCII chars.") + .split("Bearer") + .collect(); + let token = split[1].trim(); match self.verify_jwt(token).await { Ok(claims) => Ok(claims), @@ -66,6 +81,11 @@ impl AuthorizationService { } } + /// Get User (in compact form) from Request + /// + /// # Errors + /// + /// This function will return a `ServiceError::UserNotFound` if unable to get the user from the database. pub async fn get_user_compact_from_request(&self, req: &HttpRequest) -> Result { let claims = self.get_claims_from_request(req).await?; diff --git a/src/bin/upgrade.rs b/src/bin/upgrade.rs index 874f0fad..8fb1ee0c 100644 --- a/src/bin/upgrade.rs +++ b/src/bin/upgrade.rs @@ -2,9 +2,9 @@ //! It updates the application from version v1.0.0 to v2.0.0. //! You can execute it with: `cargo run --bin upgrade ./data.db ./data_v2.db ./uploads` -use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::upgrader::run_upgrader; +use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::upgrader::run; #[actix_web::main] async fn main() { - run_upgrader().await; + run().await; } diff --git a/src/cache/cache.rs b/src/cache/cache.rs index 8573ba0d..ce842448 100644 --- a/src/cache/cache.rs +++ b/src/cache/cache.rs @@ -27,6 +27,7 @@ pub struct BytesCache { } impl BytesCache { + #[must_use] pub fn new() -> Self { Self { bytes_table: IndexMap::new(), @@ -36,6 +37,7 @@ impl BytesCache { } // With a total capacity in bytes. + #[must_use] pub fn with_capacity(capacity: usize) -> Self { let mut new = Self::new(); @@ -45,6 +47,7 @@ impl BytesCache { } // With a limit for individual entry sizes. + #[must_use] pub fn with_entry_size_limit(entry_size_limit: usize) -> Self { let mut new = Self::new(); @@ -77,6 +80,7 @@ impl BytesCache { } // Size of all the entry bytes combined.
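Editor's note on the `Authorization` parsing in `get_claims_from_request` above: `split("Bearer")` followed by `split[1]` still panics when the header does not contain the `Bearer` prefix, because `collect` then yields a single-element `Vec`. A minimal sketch of a non-panicking alternative; the `bearer_token` helper is hypothetical and not part of this patch:

```rust
/// Hypothetical helper: extract the token from an `Authorization` header
/// value without panicking when the `Bearer` prefix is missing.
fn bearer_token(header_value: &str) -> Option<&str> {
    header_value
        .strip_prefix("Bearer")
        .map(str::trim)
        .filter(|token| !token.is_empty())
}

fn main() {
    assert_eq!(bearer_token("Bearer abc.def.ghi"), Some("abc.def.ghi"));
    assert_eq!(bearer_token("Basic dXNlcjpwYXNz"), None);
}
```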
+ #[must_use] pub fn total_size(&self) -> usize { let mut size: usize = 0; diff --git a/src/cache/image/manager.rs b/src/cache/image/manager.rs index 8a6960a1..9e8d814c 100644 --- a/src/cache/image/manager.rs +++ b/src/cache/image/manager.rs @@ -5,7 +5,7 @@ use std::time::{Duration, SystemTime}; use bytes::Bytes; use tokio::sync::RwLock; -use crate::cache::cache::BytesCache; +use crate::cache::BytesCache; use crate::config::Configuration; use crate::models::user::UserCompact; @@ -19,11 +19,12 @@ pub enum Error { } type UserQuotas = HashMap; +#[must_use] pub fn now_in_secs() -> u64 { - match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) { - Ok(n) => n.as_secs(), - Err(_) => panic!("SystemTime before UNIX EPOCH!"), - } + SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("SystemTime before UNIX EPOCH!") + .as_secs() } #[derive(Clone)] @@ -36,6 +37,7 @@ pub struct ImageCacheQuota { } impl ImageCacheQuota { + #[must_use] pub fn new(user_id: i64, max_usage: usize, period_secs: u64) -> Self { Self { user_id, @@ -46,14 +48,19 @@ impl ImageCacheQuota { } } - pub fn add_usage(&mut self, amount: usize) -> Result<(), ()> { + /// Add Usage Quota + /// + /// # Errors + /// + /// This function will return an `Error::UserQuotaMet` if the user quota has been met. + pub fn add_usage(&mut self, amount: usize) -> Result<(), Error> { // Check if quota needs to be reset. if now_in_secs() - self.date_start_secs > self.period_secs { self.reset(); } if self.is_reached() { - return Err(()); + return Err(Error::UserQuotaMet); } self.usage = self.usage.saturating_add(amount); @@ -66,6 +73,7 @@ impl ImageCacheQuota { self.date_start_secs = now_in_secs(); } + #[must_use] pub fn is_reached(&self) -> bool { self.usage >= self.max_usage } @@ -89,7 +97,7 @@ impl ImageCacheService { let reqwest_client = reqwest::Client::builder() .timeout(Duration::from_millis(settings.image_cache.max_request_timeout_ms)) .build() - .unwrap(); + .expect("unable to build client request"); drop(settings); @@ -103,33 +111,37 @@ impl ImageCacheService { /// Get an image from the url and insert it into the cache if it isn't cached already. /// Unauthenticated users can only get already cached images. + /// + /// # Errors + /// + /// Return an `Error::Unauthenticated` if the user has not been authenticated. pub async fn get_image_by_url(&self, url: &str, opt_user: Option) -> Result { if let Some(entry) = self.image_cache.read().await.get(url).await { return Ok(entry.bytes); } - if opt_user.is_none() { - return Err(Error::Unauthenticated); - } - - let user = opt_user.unwrap(); + match opt_user { + None => Err(Error::Unauthenticated), - self.check_user_quota(&user).await?; + Some(user) => { + self.check_user_quota(&user).await?; - let image_bytes = self.get_image_from_url_as_bytes(url).await?; + let image_bytes = self.get_image_from_url_as_bytes(url).await?; - self.check_image_size(&image_bytes).await?; + self.check_image_size(&image_bytes).await?; - // These two functions could be executed after returning the image to the client, - // but than we would need a dedicated task or thread that executes these functions. + // These two functions could be executed after returning the image to the client, + // but then we would need a dedicated task or thread that executes these functions.
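An aside on the `add_usage` change above: the method now reports `Error::UserQuotaMet` instead of the unit type `()`. A deterministic sketch of the same window-reset logic; this `Quota` type is a simplified stand-in that takes `now` as a parameter so the reset can be demonstrated without a real clock:

```rust
#[derive(Debug, PartialEq)]
enum Error {
    UserQuotaMet,
}

// Simplified stand-in for `ImageCacheQuota`: the window resets once
// `period_secs` have elapsed, and usage is rejected once the cap is hit.
struct Quota {
    usage: usize,
    max_usage: usize,
    date_start_secs: u64,
    period_secs: u64,
}

impl Quota {
    fn add_usage(&mut self, amount: usize, now: u64) -> Result<(), Error> {
        if now - self.date_start_secs > self.period_secs {
            self.usage = 0; // quota window elapsed: reset
            self.date_start_secs = now;
        }
        if self.usage >= self.max_usage {
            return Err(Error::UserQuotaMet);
        }
        self.usage = self.usage.saturating_add(amount);
        Ok(())
    }
}

fn main() {
    let mut quota = Quota { usage: 0, max_usage: 10, date_start_secs: 0, period_secs: 3600 };
    assert_eq!(quota.add_usage(10, 100), Ok(())); // fills the quota
    assert_eq!(quota.add_usage(1, 200), Err(Error::UserQuotaMet));
    assert_eq!(quota.add_usage(1, 4000), Ok(())); // window elapsed, quota was reset
}
```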
+ // This can be problematic if a task is spawned after every user request. + // Since these functions execute very fast, I don't see a reason to further optimize this. + // For now. + self.update_image_cache(url, &image_bytes).await?; - self.update_user_quota(&user, image_bytes.len()).await?; + self.update_user_quota(&user, image_bytes.len()).await?; - Ok(image_bytes) + Ok(image_bytes) + } + } } async fn get_image_from_url_as_bytes(&self, url: &str) -> Result { diff --git a/src/cache/mod.rs b/src/cache/mod.rs index 3afdefbc..1696cdb8 100644 --- a/src/cache/mod.rs +++ b/src/cache/mod.rs @@ -1,2 +1,222 @@ -pub mod cache; pub mod image; + +use bytes::Bytes; +use indexmap::IndexMap; + +#[derive(Debug)] +pub enum Error { + EntrySizeLimitExceedsTotalCapacity, + BytesExceedEntrySizeLimit, + CacheCapacityIsTooSmall, +} + +#[derive(Debug, Clone)] +pub struct BytesCacheEntry { + pub bytes: Bytes, +} + +// Individual entry destined for the byte cache. +impl BytesCacheEntry { + pub fn new(bytes: Bytes) -> Self { + Self { bytes } + } +} +#[allow(clippy::module_name_repetitions)] +pub struct BytesCache { + bytes_table: IndexMap, + total_capacity: usize, + entry_size_limit: usize, +} + +impl BytesCache { + #[must_use] + pub fn new() -> Self { + Self { + bytes_table: IndexMap::new(), + total_capacity: 0, + entry_size_limit: 0, + } + } + + // With a total capacity in bytes. + #[must_use] + pub fn with_capacity(capacity: usize) -> Self { + let mut new = Self::new(); + + new.total_capacity = capacity; + + new + } + + // With a limit for individual entry sizes. + #[must_use] + pub fn with_entry_size_limit(entry_size_limit: usize) -> Self { + let mut new = Self::new(); + + new.entry_size_limit = entry_size_limit; + + new + } + + /// Helper to create a new bytes cache with both an individual entry and size limit. + /// + /// # Errors + /// + /// This function will return `Error::EntrySizeLimitExceedsTotalCapacity` if the specified size is too large. + /// + pub fn with_capacity_and_entry_size_limit(capacity: usize, entry_size_limit: usize) -> Result { + if entry_size_limit > capacity { + return Err(Error::EntrySizeLimitExceedsTotalCapacity); + } + + let mut new = Self::new(); + + new.total_capacity = capacity; + new.entry_size_limit = entry_size_limit; + + Ok(new) + } + + #[allow(clippy::unused_async)] + pub async fn get(&self, key: &str) -> Option { + self.bytes_table.get(key).cloned() + } + + // Return the number of entries in the map. + #[allow(clippy::unused_async)] + pub async fn len(&self) -> usize { + self.bytes_table.len() + } + + #[allow(clippy::unused_async)] + pub async fn is_empty(&self) -> bool { + self.bytes_table.is_empty() + } + + // Size of all the entry bytes combined. + #[must_use] + pub fn total_size(&self) -> usize { + let mut size: usize = 0; + + for (_, entry) in self.bytes_table.iter() { + size += entry.bytes.len(); + } + + size + } + + /// Adds an image to the cache. + /// + /// # Errors + /// + /// This function will return an error if there is not enough free space. + /// + // Insert bytes using key. + // TODO: Freed space might need to be reserved. Hold and pass write lock between functions? + // For the TODO above: semaphore: Arc, might be a solution. + #[allow(clippy::unused_async)] + pub async fn set(&mut self, key: String, bytes: Bytes) -> Result, Error> { + if bytes.len() > self.entry_size_limit { + return Err(Error::BytesExceedEntrySizeLimit); + } + + // Remove the old entry so that a new entry will be added last in the queue.
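A note on the `shift_remove` call that follows: `IndexMap` keeps insertion order, so removing a key and re-inserting it moves the entry to the back of the queue, which is what lets `pop` (index 0) always evict the oldest entry. A small sketch of that behaviour, assuming the `indexmap` crate this patch adds to the spell-check word list:

```rust
use indexmap::IndexMap;

fn main() {
    let mut table: IndexMap<&str, u32> = IndexMap::new();
    table.insert("a", 1);
    table.insert("b", 2);

    // Re-insert "a": without `shift_remove` it would keep its old position.
    table.shift_remove("a");
    table.insert("a", 3);

    // Index 0 now holds "b", the least recently written entry.
    let (oldest_key, _) = table.shift_remove_index(0).unwrap();
    assert_eq!(oldest_key, "b");
}
```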
+ let _ = self.bytes_table.shift_remove(&key); + + let bytes_cache_entry = BytesCacheEntry::new(bytes); + + self.free_size(bytes_cache_entry.bytes.len())?; + + Ok(self.bytes_table.insert(key, bytes_cache_entry)) + } + + // Free space. Size amount in bytes. + fn free_size(&mut self, size: usize) -> Result<(), Error> { + // Size may not exceed the total capacity of the bytes cache. + if size > self.total_capacity { + return Err(Error::CacheCapacityIsTooSmall); + } + + let cache_size = self.total_size(); + let size_to_be_freed = size.saturating_sub(self.total_capacity - cache_size); + let mut size_freed: usize = 0; + + while size_freed < size_to_be_freed { + let oldest_entry = self + .pop() + .expect("bytes cache has no more entries, yet there isn't enough space."); + + size_freed += oldest_entry.bytes.len(); + } + + Ok(()) + } + + // Remove and return the oldest entry. + pub fn pop(&mut self) -> Option { + self.bytes_table.shift_remove_index(0).map(|(_, entry)| entry) + } +} + +impl Default for BytesCache { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use bytes::Bytes; + + use crate::cache::BytesCache; + + #[tokio::test] + async fn set_bytes_cache_with_capacity_and_entry_size_limit_should_succeed() { + let mut bytes_cache = BytesCache::with_capacity_and_entry_size_limit(6, 6).unwrap(); + let bytes: Bytes = Bytes::from("abcdef"); + + assert!(bytes_cache.set("1".to_string(), bytes).await.is_ok()); + } + + #[tokio::test] + async fn given_a_bytes_cache_with_a_capacity_and_entry_size_limit_it_should_allow_adding_new_entries_if_the_limit_is_not_exceeded( + ) { + let bytes: Bytes = Bytes::from("abcdef"); + + let mut bytes_cache = BytesCache::with_capacity_and_entry_size_limit(bytes.len() * 2, bytes.len()).unwrap(); + + // Add first entry (6 bytes) + assert!(bytes_cache.set("key1".to_string(), bytes.clone()).await.is_ok()); + + // Add second entry (6 bytes) + assert!(bytes_cache.set("key2".to_string(), bytes).await.is_ok()); + + // Both entries were added because we did not reach the limit + assert_eq!(bytes_cache.len().await, 2); + } + + #[tokio::test] + async fn given_a_bytes_cache_with_a_capacity_and_entry_size_limit_it_should_not_allow_adding_new_entries_if_the_capacity_is_exceeded( + ) { + let bytes: Bytes = Bytes::from("abcdef"); + + let mut bytes_cache = BytesCache::with_capacity_and_entry_size_limit(bytes.len() * 2 - 1, bytes.len()).unwrap(); + + // Add first entry (6 bytes) + assert!(bytes_cache.set("key1".to_string(), bytes.clone()).await.is_ok()); + + // Add second entry (6 bytes) + assert!(bytes_cache.set("key2".to_string(), bytes).await.is_ok()); + + // Only one entry is in the cache, because otherwise the total capacity would have been exceeded + assert_eq!(bytes_cache.len().await, 1); + } + + #[tokio::test] + async fn set_bytes_cache_with_capacity_and_entry_size_limit_should_fail() { + let mut bytes_cache = BytesCache::with_capacity_and_entry_size_limit(6, 5).unwrap(); + let bytes: Bytes = Bytes::from("abcdef"); + + assert!(bytes_cache.set("1".to_string(), bytes).await.is_err()); + } +} diff --git a/src/common.rs b/src/common.rs index 25759f71..51861fae 100644 --- a/src/common.rs +++ b/src/common.rs @@ -4,10 +4,8 @@ use crate::auth::AuthorizationService; use crate::cache::image::manager::ImageCacheService; use crate::config::Configuration; use crate::databases::database::Database; -use crate::mailer::MailerService; -use crate::tracker::service::Service; use crate::tracker::statistics_importer::StatisticsImporter; - +use crate::{mailer, tracker}; pub 
type Username = String; pub type WebAppData = actix_web::web::Data>; @@ -16,9 +14,9 @@ pub struct AppData { pub cfg: Arc, pub database: Arc>, pub auth: Arc, - pub tracker_service: Arc, + pub tracker_service: Arc, pub tracker_statistics_importer: Arc, - pub mailer: Arc, + pub mailer: Arc, pub image_cache_manager: Arc, } @@ -27,9 +25,9 @@ impl AppData { cfg: Arc, database: Arc>, auth: Arc, - tracker_service: Arc, + tracker_service: Arc, tracker_statistics_importer: Arc, - mailer: Arc, + mailer: Arc, image_cache_manager: Arc, ) -> AppData { AppData { diff --git a/src/config.rs b/src/config.rs index 24fa6201..5a95ff2f 100644 --- a/src/config.rs +++ b/src/config.rs @@ -11,6 +11,14 @@ pub struct Website { pub name: String, } +impl Default for Website { + fn default() -> Self { + Self { + name: "Torrust".to_string(), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub enum TrackerMode { // todo: use https://crates.io/crates/torrust-tracker-primitives @@ -20,6 +28,12 @@ pub enum TrackerMode { PrivateWhitelisted, } +impl Default for TrackerMode { + fn default() -> Self { + Self::Public + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Tracker { pub url: String, @@ -29,6 +43,18 @@ pub struct Tracker { pub token_valid_seconds: u64, } +impl Default for Tracker { + fn default() -> Self { + Self { + url: "udp://localhost:6969".to_string(), + mode: TrackerMode::default(), + api_url: "http://localhost:1212".to_string(), + token: "MyAccessToken".to_string(), + token_valid_seconds: 7_257_600, + } + } +} + /// Port 0 means that the OS will choose a random free port. pub const FREE_PORT: u16 = 0; @@ -38,6 +64,15 @@ pub struct Network { pub base_url: Option, } +impl Default for Network { + fn default() -> Self { + Self { + port: 3000, + base_url: None, + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub enum EmailOnSignup { Required, @@ -45,6 +80,12 @@ pub enum EmailOnSignup { None, } +impl Default for EmailOnSignup { + fn default() -> Self { + Self::Optional + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Auth { pub email_on_signup: EmailOnSignup, @@ -53,12 +94,32 @@ pub struct Auth { pub secret_key: String, } +impl Default for Auth { + fn default() -> Self { + Self { + email_on_signup: EmailOnSignup::default(), + min_password_length: 6, + max_password_length: 64, + secret_key: "MaxVerstappenWC2021".to_string(), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Database { pub connect_url: String, pub torrent_info_update_interval: u64, } +impl Default for Database { + fn default() -> Self { + Self { + connect_url: "sqlite://data.db?mode=rwc".to_string(), + torrent_info_update_interval: 3600, + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Mail { pub email_verification_enabled: bool, @@ -70,6 +131,21 @@ pub struct Mail { pub port: u16, } +impl Default for Mail { + fn default() -> Self { + Self { + email_verification_enabled: false, + from: "example@email.com".to_string(), + reply_to: "noreply@email.com".to_string(), + username: String::default(), + password: String::default(), + server: String::default(), + port: 25, + } + } +} + +#[allow(clippy::module_name_repetitions)] #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ImageCache { pub max_request_timeout_ms: u64, @@ -85,8 +161,29 @@ pub struct Api { pub max_torrent_page_size: u8, } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AppConfiguration { +impl Default for Api { + fn default() -> Self { + Self { + 
default_torrent_page_size: 10, + max_torrent_page_size: 30, + } + } +} + +impl Default for ImageCache { + fn default() -> Self { + Self { + max_request_timeout_ms: 1000, + capacity: 128_000_000, + entry_size_limit: 4_000_000, + user_quota_period_seconds: 3600, + user_quota_bytes: 64_000_000, + } + } +} + +#[derive(Debug, Default, Clone, Serialize, Deserialize)] +pub struct TorrustBackend { pub website: Website, pub tracker: Tracker, pub net: Network, @@ -97,67 +194,16 @@ pub struct AppConfiguration { pub api: Api, } -impl Default for AppConfiguration { - fn default() -> Self { - Self { - website: Website { - name: "Torrust".to_string(), - }, - tracker: Tracker { - url: "udp://localhost:6969".to_string(), - mode: TrackerMode::Public, - api_url: "http://localhost:1212".to_string(), - token: "MyAccessToken".to_string(), - token_valid_seconds: 7_257_600, - }, - net: Network { - port: 3000, - base_url: None, - }, - auth: Auth { - email_on_signup: EmailOnSignup::Optional, - min_password_length: 6, - max_password_length: 64, - secret_key: "MaxVerstappenWC2021".to_string(), - }, - database: Database { - connect_url: "sqlite://data.db?mode=rwc".to_string(), - torrent_info_update_interval: 3600, - }, - mail: Mail { - email_verification_enabled: false, - from: "example@email.com".to_string(), - reply_to: "noreply@email.com".to_string(), - username: String::new(), - password: String::new(), - server: String::new(), - port: 25, - }, - image_cache: ImageCache { - max_request_timeout_ms: 1000, - capacity: 128_000_000, - entry_size_limit: 4_000_000, - user_quota_period_seconds: 3600, - user_quota_bytes: 64_000_000, - }, - api: Api { - default_torrent_page_size: 10, - max_torrent_page_size: 30, - }, - } - } -} - #[derive(Debug)] pub struct Configuration { - pub settings: RwLock, + pub settings: RwLock, pub config_path: Option, } impl Default for Configuration { - fn default() -> Self { - Self { - settings: RwLock::new(AppConfiguration::default()), + fn default() -> Configuration { + Configuration { + settings: RwLock::new(TorrustBackend::default()), config_path: None, } } @@ -165,6 +211,11 @@ impl Default for Configuration { impl Configuration { /// Loads the configuration from the configuration file. + /// + /// # Errors + /// + /// This function will return an error if no configuration exists at the `CONFIG_PATH`, and a new file is created. + /// This function will return an error if the `config` is not a valid `TorrustBackend` document. pub async fn load_from_file(config_path: &str) -> Result { let config_builder = Config::builder(); @@ -183,7 +234,7 @@ impl Configuration { )); } - let torrust_config: AppConfiguration = match config.try_deserialize() { + let torrust_config: TorrustBackend = match config.try_deserialize() { Ok(data) => Ok(data), Err(e) => Err(ConfigError::Message(format!("Errors while processing config: {}.", e))), }?; @@ -207,7 +258,7 @@ impl Configuration { let config_builder = Config::builder() .add_source(File::from_str(&config_toml, FileFormat::Toml)) .build()?; - let torrust_config: AppConfiguration = config_builder.try_deserialize()?; + let torrust_config: TorrustBackend = config_builder.try_deserialize()?; Ok(Configuration { settings: RwLock::new(torrust_config), config_path: None, @@ -219,6 +270,7 @@ impl Configuration { } } + /// Saves the settings of this [`Configuration`] to the configuration file.
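The net effect of the refactor above: the single large `impl Default for AppConfiguration` is split into one `Default` impl per section, so the renamed top-level `TorrustBackend` struct can simply `#[derive(Default)]`. A reduced sketch of the pattern, with the struct set trimmed down for illustration:

```rust
// Each section supplies its own `Default`, so the top-level struct can
// derive `Default` instead of spelling out every field in one place.
#[derive(Debug, Default)]
struct Settings {
    website: Website,
    net: Network,
}

#[derive(Debug)]
struct Website {
    name: String,
}

impl Default for Website {
    fn default() -> Self {
        Self { name: "Torrust".to_string() }
    }
}

#[derive(Debug)]
struct Network {
    port: u16,
}

impl Default for Network {
    fn default() -> Self {
        Self { port: 3000 }
    }
}

fn main() {
    let settings = Settings::default();
    assert_eq!(settings.website.name, "Torrust");
    assert_eq!(settings.net.port, 3000);
}
```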
pub async fn save_to_file(&self, config_path: &str) { let settings = self.settings.read().await; @@ -229,13 +281,17 @@ impl Configuration { fs::write(config_path, toml_string).expect("Could not write to file!"); } - /// Updates the settings and saves them to the configuration file. + /// Update the settings file based upon a supplied `new_settings`. + /// + /// # Errors + /// + /// TODO: return an error if the save fails. /// /// # Panics /// /// Will panic if the configuration file path is not defined. That happens /// when the configuration was loaded from the environment variable. - pub async fn update_settings(&self, new_settings: AppConfiguration) { + pub async fn update_settings(&self, new_settings: TorrustBackend) -> Result<(), ()> { match &self.config_path { Some(config_path) => { let mut settings = self.settings.write().await; @@ -244,6 +300,8 @@ impl Configuration { drop(settings); let _ = self.save_to_file(config_path).await; + + Ok(()) } None => panic!( "Cannot update settings when the config file path is not defined. For example: when it's loaded from env var." diff --git a/src/console/commands/import_tracker_statistics.rs b/src/console/commands/import_tracker_statistics.rs index 61206449..a322eef2 100644 --- a/src/console/commands/import_tracker_statistics.rs +++ b/src/console/commands/import_tracker_statistics.rs @@ -4,11 +4,11 @@ use std::env; use std::sync::Arc; use derive_more::{Display, Error}; -use text_colorizer::*; +use text_colorizer::Colorize; use crate::bootstrap::config::init_configuration; use crate::bootstrap::logging; -use crate::databases::database::connect_database; +use crate::databases::database; use crate::tracker::service::Service; use crate::tracker::statistics_importer::StatisticsImporter; @@ -53,9 +53,14 @@ fn print_usage() { } pub async fn run_importer() { - import(&parse_args().unwrap()).await; + import(&parse_args().expect("unable to parse command arguments")).await; } +/// Import Tracker Statistics Command +/// +/// # Panics +/// +/// Panics if `Configuration::load_from_file` has any error.
pub async fn import(_args: &Arguments) { println!("Importing statistics from linked tracker ..."); @@ -72,14 +77,17 @@ pub async fn import(_args: &Arguments) { eprintln!("Tracker url: {}", tracker_url.green()); let database = Arc::new( - connect_database(&settings.database.connect_url) + database::connect(&settings.database.connect_url) .await - .expect("Database error."), + .expect("unable to connect to db"), ); let tracker_service = Arc::new(Service::new(cfg.clone(), database.clone()).await); let tracker_statistics_importer = Arc::new(StatisticsImporter::new(cfg.clone(), tracker_service.clone(), database.clone()).await); - tracker_statistics_importer.import_all_torrents_statistics().await.unwrap(); + tracker_statistics_importer + .import_all_torrents_statistics() + .await + .expect("variable `tracker_service` is unable to `update_torrents`"); } diff --git a/src/databases/database.rs b/src/databases/database.rs index 915b52ce..ccbd4bf6 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -2,8 +2,8 @@ use async_trait::async_trait; use chrono::NaiveDateTime; use serde::{Deserialize, Serialize}; -use crate::databases::mysql::MysqlDatabase; -use crate::databases::sqlite::SqliteDatabase; +use crate::databases::mysql::Mysql; +use crate::databases::sqlite::Sqlite; use crate::models::info_hash::InfoHash; use crate::models::response::TorrentsResponse; use crate::models::torrent::TorrentListing; @@ -13,7 +13,7 @@ use crate::models::user::{User, UserAuthentication, UserCompact, UserProfile}; /// Database drivers. #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] -pub enum DatabaseDriver { +pub enum Driver { Sqlite3, Mysql, } @@ -50,7 +50,7 @@ pub enum Sorting { /// Database errors. #[derive(Debug)] -pub enum DatabaseError { +pub enum Error { Error, UnrecognizedDatabaseDriver, // when the db path does not start with sqlite or mysql UsernameTaken, @@ -63,77 +63,93 @@ pub enum DatabaseError { TorrentTitleAlreadyExists, } -/// Connect to a database. -pub async fn connect_database(db_path: &str) -> Result, DatabaseError> { +/// Get the Driver of the Database from the Connection String +/// +/// # Errors +/// +/// This function will return an `Error::UnrecognizedDatabaseDriver` if unable to match database type. +pub fn get_driver(db_path: &str) -> Result { match &db_path.chars().collect::>() as &[char] { - ['s', 'q', 'l', 'i', 't', 'e', ..] => { - let db = SqliteDatabase::new(db_path).await; - Ok(Box::new(db)) - } - ['m', 'y', 's', 'q', 'l', ..] => { - let db = MysqlDatabase::new(db_path).await; - Ok(Box::new(db)) - } - _ => Err(DatabaseError::UnrecognizedDatabaseDriver), + ['s', 'q', 'l', 'i', 't', 'e', ..] => Ok(Driver::Sqlite3), + ['m', 'y', 's', 'q', 'l', ..] => Ok(Driver::Mysql), + _ => Err(Error::UnrecognizedDatabaseDriver), } } +/// Connect to a database. +/// +/// # Errors +/// +/// This function will return an `Error::UnrecognizedDatabaseDriver` if unable to match database type. +pub async fn connect(db_path: &str) -> Result, Error> { + let db_driver = self::get_driver(db_path)?; + + Ok(match db_driver { + self::Driver::Sqlite3 => Box::new(Sqlite::new(db_path).await), + self::Driver::Mysql => Box::new(Mysql::new(db_path).await), + }) +} + /// Trait for database implementations. #[async_trait] pub trait Database: Sync + Send { /// Return current database driver. 
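On `get_driver` above: matching on a collected `Vec<char>` amounts to a string prefix check. An equivalent sketch with `str::starts_with`, with the error type simplified to a `&str` for brevity:

```rust
#[derive(Debug, PartialEq)]
enum Driver {
    Sqlite3,
    Mysql,
}

// Same dispatch as `get_driver`, written as plain prefix checks on the
// connection string.
fn get_driver(db_path: &str) -> Result<Driver, &'static str> {
    if db_path.starts_with("sqlite") {
        Ok(Driver::Sqlite3)
    } else if db_path.starts_with("mysql") {
        Ok(Driver::Mysql)
    } else {
        Err("unrecognized database driver")
    }
}

fn main() {
    assert_eq!(get_driver("sqlite://data.db?mode=rwc"), Ok(Driver::Sqlite3));
    assert_eq!(get_driver("mysql://root@localhost/torrust"), Ok(Driver::Mysql));
    assert!(get_driver("postgres://localhost").is_err());
}
```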
- fn get_database_driver(&self) -> DatabaseDriver; + fn get_database_driver(&self) -> Driver; + + async fn new(db_path: &str) -> Self + where + Self: Sized; /// Add new user and return the newly inserted `user_id`. - async fn insert_user_and_get_id(&self, username: &str, email: &str, password: &str) -> Result; + async fn insert_user_and_get_id(&self, username: &str, email: &str, password: &str) -> Result; /// Get `User` from `user_id`. - async fn get_user_from_id(&self, user_id: i64) -> Result; + async fn get_user_from_id(&self, user_id: i64) -> Result; /// Get `UserAuthentication` from `user_id`. - async fn get_user_authentication_from_id(&self, user_id: i64) -> Result; + async fn get_user_authentication_from_id(&self, user_id: i64) -> Result; /// Get `UserProfile` from `username`. - async fn get_user_profile_from_username(&self, username: &str) -> Result; + async fn get_user_profile_from_username(&self, username: &str) -> Result; /// Get `UserCompact` from `user_id`. - async fn get_user_compact_from_id(&self, user_id: i64) -> Result; + async fn get_user_compact_from_id(&self, user_id: i64) -> Result; /// Get a user's `TrackerKey`. async fn get_user_tracker_key(&self, user_id: i64) -> Option; /// Get total user count. - async fn count_users(&self) -> Result; + async fn count_users(&self) -> Result; /// Ban user with `user_id`, `reason` and `date_expiry`. - async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), DatabaseError>; + async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), Error>; /// Grant a user the administrator role. - async fn grant_admin_role(&self, user_id: i64) -> Result<(), DatabaseError>; + async fn grant_admin_role(&self, user_id: i64) -> Result<(), Error>; /// Verify a user's email with `user_id`. - async fn verify_email(&self, user_id: i64) -> Result<(), DatabaseError>; + async fn verify_email(&self, user_id: i64) -> Result<(), Error>; /// Link a `TrackerKey` to a certain user with `user_id`. - async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), DatabaseError>; + async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), Error>; /// Delete user and all related user data with `user_id`. - async fn delete_user(&self, user_id: i64) -> Result<(), DatabaseError>; + async fn delete_user(&self, user_id: i64) -> Result<(), Error>; /// Add a new category and return `category_id`. - async fn insert_category_and_get_id(&self, category_name: &str) -> Result; + async fn insert_category_and_get_id(&self, category_name: &str) -> Result; /// Get `Category` from `category_id`. - async fn get_category_from_id(&self, category_id: i64) -> Result; + async fn get_category_from_id(&self, category_id: i64) -> Result; /// Get `Category` from `category_name`. - async fn get_category_from_name(&self, category_name: &str) -> Result; + async fn get_category_from_name(&self, category_name: &str) -> Result; /// Get all categories as `Vec`. - async fn get_categories(&self) -> Result, DatabaseError>; + async fn get_categories(&self) -> Result, Error>; /// Delete category with `category_name`. - async fn delete_category(&self, category_name: &str) -> Result<(), DatabaseError>; + async fn delete_category(&self, category_name: &str) -> Result<(), Error>; /// Get results of a torrent search in a paginated and sorted form as `TorrentsResponse` from `search`, `categories`, `sort`, `offset` and `page_size`. 
async fn get_torrents_search_sorted_paginated( @@ -143,7 +159,7 @@ pub trait Database: Sync + Send { sort: &Sorting, offset: u64, page_size: u8, - ) -> Result; + ) -> Result; /// Add new torrent and return the newly inserted `torrent_id` with `torrent`, `uploader_id`, `category_id`, `title` and `description`. async fn insert_torrent_and_get_id( @@ -153,10 +169,10 @@ pub trait Database: Sync + Send { category_id: i64, title: &str, description: &str, - ) -> Result; + ) -> Result; /// Get `Torrent` from `InfoHash`. - async fn get_torrent_from_infohash(&self, infohash: &InfoHash) -> Result { + async fn get_torrent_from_infohash(&self, infohash: &InfoHash) -> Result { let torrent_info = self.get_torrent_info_from_infohash(infohash).await?; let torrent_files = self.get_torrent_files_from_id(torrent_info.torrent_id).await?; @@ -171,7 +187,7 @@ pub trait Database: Sync + Send { } /// Get `Torrent` from `torrent_id`. - async fn get_torrent_from_id(&self, torrent_id: i64) -> Result { + async fn get_torrent_from_id(&self, torrent_id: i64) -> Result { let torrent_info = self.get_torrent_info_from_id(torrent_id).await?; let torrent_files = self.get_torrent_files_from_id(torrent_id).await?; @@ -186,44 +202,38 @@ pub trait Database: Sync + Send { } /// Get torrent's info as `DbTorrentInfo` from `torrent_id`. - async fn get_torrent_info_from_id(&self, torrent_id: i64) -> Result; + async fn get_torrent_info_from_id(&self, torrent_id: i64) -> Result; /// Get torrent's info as `DbTorrentInfo` from torrent `InfoHash`. - async fn get_torrent_info_from_infohash(&self, info_hash: &InfoHash) -> Result; + async fn get_torrent_info_from_infohash(&self, info_hash: &InfoHash) -> Result; /// Get all torrent's files as `Vec` from `torrent_id`. - async fn get_torrent_files_from_id(&self, torrent_id: i64) -> Result, DatabaseError>; + async fn get_torrent_files_from_id(&self, torrent_id: i64) -> Result, Error>; /// Get all torrent's announce urls as `Vec>` from `torrent_id`. - async fn get_torrent_announce_urls_from_id(&self, torrent_id: i64) -> Result>, DatabaseError>; + async fn get_torrent_announce_urls_from_id(&self, torrent_id: i64) -> Result>, Error>; /// Get `TorrentListing` from `torrent_id`. - async fn get_torrent_listing_from_id(&self, torrent_id: i64) -> Result; + async fn get_torrent_listing_from_id(&self, torrent_id: i64) -> Result; /// Get `TorrentListing` from `InfoHash`. - async fn get_torrent_listing_from_infohash(&self, infohash: &InfoHash) -> Result; + async fn get_torrent_listing_from_infohash(&self, infohash: &InfoHash) -> Result; /// Get all torrents as `Vec`. - async fn get_all_torrents_compact(&self) -> Result, DatabaseError>; + async fn get_all_torrents_compact(&self) -> Result, Error>; /// Update a torrent's title with `torrent_id` and `title`. - async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), DatabaseError>; + async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), Error>; /// Update a torrent's description with `torrent_id` and `description`. - async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), DatabaseError>; + async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), Error>; /// Update the seeders and leechers info for a torrent with `torrent_id`, `tracker_url`, `seeders` and `leechers`. 
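Most of the remaining hunks are the mechanical rename of `DatabaseError` to `database::Error`, following clippy's `module_name_repetitions` lint: callers qualify the type through its module instead of repeating the module name inside the type name. A minimal sketch of the convention:

```rust
mod database {
    // The enum is just `Error`; the module path supplies the context.
    #[derive(Debug)]
    pub enum Error {
        UserNotFound,
    }
}

fn find_user(user_id: i64) -> Result<String, database::Error> {
    if user_id == 1 {
        Ok("admin".to_string())
    } else {
        Err(database::Error::UserNotFound)
    }
}

fn main() {
    assert!(find_user(1).is_ok());
    assert!(matches!(find_user(2), Err(database::Error::UserNotFound)));
}
```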
- async fn update_tracker_info( - &self, - torrent_id: i64, - tracker_url: &str, - seeders: i64, - leechers: i64, - ) -> Result<(), DatabaseError>; + async fn update_tracker_info(&self, torrent_id: i64, tracker_url: &str, seeders: i64, leechers: i64) -> Result<(), Error>; /// Delete a torrent with `torrent_id`. - async fn delete_torrent(&self, torrent_id: i64) -> Result<(), DatabaseError>; + async fn delete_torrent(&self, torrent_id: i64) -> Result<(), Error>; /// DELETES ALL DATABASE ROWS, ONLY CALL THIS IF YOU KNOW WHAT YOU'RE DOING! - async fn delete_all_database_rows(&self) -> Result<(), DatabaseError>; + async fn delete_all_database_rows(&self) -> Result<(), Error>; } diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs index 97f699a7..029ec7c1 100644 --- a/src/databases/mysql.rs +++ b/src/databases/mysql.rs @@ -3,22 +3,28 @@ use chrono::NaiveDateTime; use sqlx::mysql::MySqlPoolOptions; use sqlx::{query, query_as, Acquire, MySqlPool}; -use crate::databases::database::{Category, Database, DatabaseDriver, DatabaseError, Sorting, TorrentCompact}; +use crate::databases::database; +use crate::databases::database::{Category, Database, Driver, Sorting, TorrentCompact}; use crate::models::info_hash::InfoHash; use crate::models::response::TorrentsResponse; use crate::models::torrent::TorrentListing; use crate::models::torrent_file::{DbTorrentAnnounceUrl, DbTorrentFile, DbTorrentInfo, Torrent, TorrentFile}; use crate::models::tracker_key::TrackerKey; use crate::models::user::{User, UserAuthentication, UserCompact, UserProfile}; -use crate::utils::clock::current_time; -use crate::utils::hex::bytes_to_hex; +use crate::utils::clock; +use crate::utils::hex::from_bytes; -pub struct MysqlDatabase { +pub struct Mysql { pub pool: MySqlPool, } -impl MysqlDatabase { - pub async fn new(database_url: &str) -> Self { +#[async_trait] +impl Database for Mysql { + fn get_database_driver(&self) -> Driver { + Driver::Mysql + } + + async fn new(database_url: &str) -> Self { let db = MySqlPoolOptions::new() .connect(database_url) .await @@ -31,27 +37,20 @@ impl MysqlDatabase { Self { pool: db } } -} -#[async_trait] -impl Database for MysqlDatabase { - fn get_database_driver(&self) -> DatabaseDriver { - DatabaseDriver::Mysql - } - - async fn insert_user_and_get_id(&self, username: &str, email: &str, password_hash: &str) -> Result { + async fn insert_user_and_get_id(&self, username: &str, email: &str, password_hash: &str) -> Result { // open pool connection - let mut conn = self.pool.acquire().await.map_err(|_| DatabaseError::Error)?; + let mut conn = self.pool.acquire().await.map_err(|_| database::Error::Error)?; // start db transaction - let mut tx = conn.begin().await.map_err(|_| DatabaseError::Error)?; + let mut tx = conn.begin().await.map_err(|_| database::Error::Error)?; // create the user account and get the user id let user_id = query("INSERT INTO torrust_users (date_registered) VALUES (UTC_TIMESTAMP())") .execute(&mut tx) .await .map(|v| v.last_insert_id()) - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; // add password hash for account let insert_user_auth_result = query("INSERT INTO torrust_user_authentication (user_id, password_hash) VALUES (?, ?)") @@ -59,7 +58,7 @@ impl Database for MysqlDatabase { .bind(password_hash) .execute(&mut tx) .await - .map_err(|_| DatabaseError::Error); + .map_err(|_| database::Error::Error); // rollback transaction on error if let Err(e) = insert_user_auth_result { @@ -77,21 +76,21 @@ impl Database for MysqlDatabase { .map_err(|e| 
match e { sqlx::Error::Database(err) => { if err.message().contains("username") { - DatabaseError::UsernameTaken + database::Error::UsernameTaken } else if err.message().contains("email") { - DatabaseError::EmailTaken + database::Error::EmailTaken } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error + _ => database::Error::Error }); // commit or rollback transaction and return user_id on success match insert_user_profile_result { Ok(_) => { let _ = tx.commit().await; - Ok(user_id as i64) + Ok(i64::overflowing_add_unsigned(0, user_id).0) } Err(e) => { let _ = tx.rollback().await; @@ -100,43 +99,48 @@ impl Database for MysqlDatabase { } } - async fn get_user_from_id(&self, user_id: i64) -> Result { + async fn get_user_from_id(&self, user_id: i64) -> Result { query_as::<_, User>("SELECT * FROM torrust_users WHERE user_id = ?") .bind(user_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } - async fn get_user_authentication_from_id(&self, user_id: i64) -> Result { + async fn get_user_authentication_from_id(&self, user_id: i64) -> Result { query_as::<_, UserAuthentication>("SELECT * FROM torrust_user_authentication WHERE user_id = ?") .bind(user_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } - async fn get_user_profile_from_username(&self, username: &str) -> Result { + async fn get_user_profile_from_username(&self, username: &str) -> Result { query_as::<_, UserProfile>(r#"SELECT user_id, username, COALESCE(email, "") as email, email_verified, COALESCE(bio, "") as bio, COALESCE(avatar, "") as avatar FROM torrust_user_profiles WHERE username = ?"#) .bind(username) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } - async fn get_user_compact_from_id(&self, user_id: i64) -> Result { + async fn get_user_compact_from_id(&self, user_id: i64) -> Result { query_as::<_, UserCompact>("SELECT tu.user_id, tp.username, tu.administrator FROM torrust_users tu INNER JOIN torrust_user_profiles tp ON tu.user_id = tp.user_id WHERE tu.user_id = ?") .bind(user_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } + /// Gets User Tracker Key + /// + /// # Panics + /// + /// Will panic if the current time in `u64` seconds overflows the `i64` type. + /// (this will naturally happen in 292.5 billion years) async fn get_user_tracker_key(&self, user_id: i64) -> Option { const HOUR_IN_SECONDS: i64 = 3600; - // casting current_time() to i64 will overflow in the year 2262 - let current_time_plus_hour = (current_time() as i64) + HOUR_IN_SECONDS; + let current_time_plus_hour = i64::try_from(clock::now()).unwrap().saturating_add(HOUR_IN_SECONDS); // get tracker key that is valid for at least one hour from now query_as::<_, TrackerKey>("SELECT tracker_key AS 'key', date_expiry AS valid_until FROM torrust_tracker_keys WHERE user_id = ? AND date_expiry > ?
ORDER BY date_expiry DESC") @@ -147,15 +151,15 @@ impl Database for MysqlDatabase { .ok() } - async fn count_users(&self) -> Result { + async fn count_users(&self) -> Result { query_as("SELECT COUNT(*) FROM torrust_users") .fetch_one(&self.pool) .await .map(|(v,)| v) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), DatabaseError> { + async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), database::Error> { // date needs to be in ISO 8601 format let date_expiry_string = date_expiry.format("%Y-%m-%d %H:%M:%S").to_string(); @@ -166,40 +170,40 @@ impl Database for MysqlDatabase { .execute(&self.pool) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn grant_admin_role(&self, user_id: i64) -> Result<(), DatabaseError> { + async fn grant_admin_role(&self, user_id: i64) -> Result<(), database::Error> { query("UPDATE torrust_users SET administrator = TRUE WHERE user_id = ?") .bind(user_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::UserNotFound) + Err(database::Error::UserNotFound) } }) } - async fn verify_email(&self, user_id: i64) -> Result<(), DatabaseError> { + async fn verify_email(&self, user_id: i64) -> Result<(), database::Error> { query("UPDATE torrust_user_profiles SET email_verified = TRUE WHERE user_id = ?") .bind(user_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::UserNotFound) + Err(database::Error::UserNotFound) } }) } - async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), DatabaseError> { + async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), database::Error> { let key = tracker_key.key.clone(); query("INSERT INTO torrust_tracker_keys (user_id, tracker_key, date_expiry) VALUES (?, ?, ?)") @@ -209,76 +213,76 @@ impl Database for MysqlDatabase { .execute(&self.pool) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn delete_user(&self, user_id: i64) -> Result<(), DatabaseError> { + async fn delete_user(&self, user_id: i64) -> Result<(), database::Error> { query("DELETE FROM torrust_users WHERE user_id = ?") .bind(user_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::UserNotFound) + Err(database::Error::UserNotFound) } }) } - async fn insert_category_and_get_id(&self, category_name: &str) -> Result { + async fn insert_category_and_get_id(&self, category_name: &str) -> Result { query("INSERT INTO torrust_categories (name) VALUES (?)") .bind(category_name) .execute(&self.pool) .await - .map(|v| v.last_insert_id() as i64) + .map(|v| i64::try_from(v.last_insert_id()).expect("last ID is larger than i64")) .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("UNIQUE") { - DatabaseError::CategoryAlreadyExists + database::Error::CategoryAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error, + _ => database::Error::Error, }) } - async fn get_category_from_id(&self, 
category_id: i64) -> Result { + async fn get_category_from_id(&self, category_id: i64) -> Result { query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE category_id = ?") .bind(category_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::CategoryNotFound) + .map_err(|_| database::Error::CategoryNotFound) } - async fn get_category_from_name(&self, category_name: &str) -> Result { + async fn get_category_from_name(&self, category_name: &str) -> Result { query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE name = ?") .bind(category_name) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::CategoryNotFound) + .map_err(|_| database::Error::CategoryNotFound) } - async fn get_categories(&self) -> Result, DatabaseError> { + async fn get_categories(&self) -> Result, database::Error> { query_as::<_, Category>("SELECT tc.category_id, tc.name, COUNT(tt.category_id) as num_torrents FROM torrust_categories tc LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id GROUP BY tc.name") .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn delete_category(&self, category_name: &str) -> Result<(), DatabaseError> { + async fn delete_category(&self, category_name: &str) -> Result<(), database::Error> { query("DELETE FROM torrust_categories WHERE name = ?") .bind(category_name) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::CategoryNotFound) + Err(database::Error::CategoryNotFound) } }) } @@ -291,7 +295,7 @@ impl Database for MysqlDatabase { sort: &Sorting, offset: u64, limit: u8, - ) -> Result { + ) -> Result { let title = match search { None => "%".to_string(), Some(v) => format!("%{}%", v), @@ -324,13 +328,13 @@ impl Database for MysqlDatabase { i += 1; } } - if !category_filters.is_empty() { + if category_filters.is_empty() { + String::new() + } else { format!( "INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({}) ", category_filters ) - } else { - String::new() } } else { String::new() @@ -351,12 +355,12 @@ impl Database for MysqlDatabase { let count_query = format!("SELECT COUNT(*) as count FROM ({}) AS count_table", query_string); - let count_result: Result = query_as(&count_query) + let count_result: Result = query_as(&count_query) .bind(title.clone()) .fetch_one(&self.pool) .await .map(|(v,)| v) - .map_err(|_| DatabaseError::Error); + .map_err(|_| database::Error::Error); let count = count_result?; @@ -364,18 +368,19 @@ impl Database for MysqlDatabase { let res: Vec = sqlx::query_as::<_, TorrentListing>(&query_string) .bind(title) - .bind(offset as i64) + .bind(i64::saturating_add_unsigned(0, offset)) .bind(limit) .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; Ok(TorrentsResponse { - total: count as u32, + total: u32::try_from(count).expect("variable `count` is larger than u32"), results: res, }) } + #[allow(clippy::too_many_lines)] async fn insert_torrent_and_get_id( &self, torrent: &Torrent, @@ -383,20 +388,20 @@ impl Database for MysqlDatabase { category_id: i64, title: &str, description: 
&str, - ) -> Result { + ) -> Result { let info_hash = torrent.info_hash(); // open pool connection - let mut conn = self.pool.acquire().await.map_err(|_| DatabaseError::Error)?; + let mut conn = self.pool.acquire().await.map_err(|_| database::Error::Error)?; // start db transaction - let mut tx = conn.begin().await.map_err(|_| DatabaseError::Error)?; + let mut tx = conn.begin().await.map_err(|_| database::Error::Error)?; // torrent file can only hold a pieces key or a root hash key: http://www.bittorrent.org/beps/bep_0030.html let (pieces, root_hash): (String, bool) = if let Some(pieces) = &torrent.info.pieces { - (bytes_to_hex(pieces.as_ref()), false) + (from_bytes(pieces.as_ref()), false) } else { - let root_hash = torrent.info.root_hash.as_ref().ok_or(DatabaseError::Error)?; + let root_hash = torrent.info.root_hash.as_ref().ok_or(database::Error::Error)?; (root_hash.to_string(), true) }; @@ -415,18 +420,18 @@ impl Database for MysqlDatabase { .bind(root_hash) .execute(&self.pool) .await - .map(|v| v.last_insert_id() as i64) + .map(|v| i64::try_from(v.last_insert_id()).expect("last ID is larger than i64")) .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("info_hash") { - DatabaseError::TorrentAlreadyExists + database::Error::TorrentAlreadyExists } else if err.message().contains("title") { - DatabaseError::TorrentTitleAlreadyExists + database::Error::TorrentTitleAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error + _ => database::Error::Error })?; let insert_torrent_files_result = if let Some(length) = torrent.info.length { @@ -437,7 +442,7 @@ impl Database for MysqlDatabase { .execute(&mut tx) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } else { let files = torrent.info.files.as_ref().unwrap(); @@ -451,7 +456,7 @@ impl Database for MysqlDatabase { .bind(path) .execute(&mut tx) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; } Ok(()) @@ -463,18 +468,19 @@ impl Database for MysqlDatabase { return Err(e); } - let insert_torrent_announce_urls_result: Result<(), DatabaseError> = if let Some(announce_urls) = &torrent.announce_list { + let insert_torrent_announce_urls_result: Result<(), database::Error> = if let Some(announce_urls) = &torrent.announce_list + { // flatten the nested vec (this will however remove the) let announce_urls = announce_urls.iter().flatten().collect::>(); - for tracker_url in announce_urls.iter() { + for tracker_url in &announce_urls { let _ = query("INSERT INTO torrust_torrent_announce_urls (torrent_id, tracker_url) VALUES (?, ?)") .bind(torrent_id) .bind(tracker_url) .execute(&mut tx) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; } Ok(()) @@ -487,7 +493,7 @@ impl Database for MysqlDatabase { .execute(&mut tx) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) }; // rollback transaction on error @@ -506,21 +512,21 @@ impl Database for MysqlDatabase { .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("info_hash") { - DatabaseError::TorrentAlreadyExists + database::Error::TorrentAlreadyExists } else if err.message().contains("title") { - DatabaseError::TorrentTitleAlreadyExists + database::Error::TorrentTitleAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error, + _ => database::Error::Error, }); // commit or rollback 
transaction and return user_id on success match insert_torrent_info_result { Ok(_) => { let _ = tx.commit().await; - Ok(torrent_id as i64) + Ok(torrent_id) } Err(e) => { let _ = tx.rollback().await; @@ -529,38 +535,43 @@ impl Database for MysqlDatabase { } } - async fn get_torrent_info_from_id(&self, torrent_id: i64) -> Result { + async fn get_torrent_info_from_id(&self, torrent_id: i64) -> Result { query_as::<_, DbTorrentInfo>( "SELECT torrent_id, info_hash, name, pieces, piece_length, private, root_hash FROM torrust_torrents WHERE torrent_id = ?", ) .bind(torrent_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_info_from_infohash(&self, infohash: &InfoHash) -> Result { + async fn get_torrent_info_from_infohash(&self, infohash: &InfoHash) -> Result { query_as::<_, DbTorrentInfo>( "SELECT torrent_id, info_hash, name, pieces, piece_length, private, root_hash FROM torrust_torrents WHERE info_hash = ?", ) .bind(infohash.to_hex_string().to_uppercase()) // `info_hash` is stored as uppercase hex string .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_files_from_id(&self, torrent_id: i64) -> Result, DatabaseError> { + async fn get_torrent_files_from_id(&self, torrent_id: i64) -> Result, database::Error> { let db_torrent_files = query_as::<_, DbTorrentFile>("SELECT md5sum, length, path FROM torrust_torrent_files WHERE torrent_id = ?") .bind(torrent_id) .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound)?; + .map_err(|_| database::Error::TorrentNotFound)?; let torrent_files: Vec = db_torrent_files .into_iter() .map(|tf| TorrentFile { - path: tf.path.unwrap_or_default().split('/').map(|v| v.to_string()).collect(), + path: tf + .path + .unwrap_or_default() + .split('/') + .map(std::string::ToString::to_string) + .collect(), length: tf.length, md5sum: tf.md5sum, }) @@ -569,16 +580,16 @@ impl Database for MysqlDatabase { Ok(torrent_files) } - async fn get_torrent_announce_urls_from_id(&self, torrent_id: i64) -> Result>, DatabaseError> { + async fn get_torrent_announce_urls_from_id(&self, torrent_id: i64) -> Result>, database::Error> { query_as::<_, DbTorrentAnnounceUrl>("SELECT tracker_url FROM torrust_torrent_announce_urls WHERE torrent_id = ?") .bind(torrent_id) .fetch_all(&self.pool) .await .map(|v| v.iter().map(|a| vec![a.tracker_url.to_string()]).collect()) - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_listing_from_id(&self, torrent_id: i64) -> Result { + async fn get_torrent_listing_from_id(&self, torrent_id: i64) -> Result { query_as::<_, TorrentListing>( "SELECT tt.torrent_id, tp.username AS uploader, tt.info_hash, ti.title, ti.description, tt.category_id, DATE_FORMAT(tt.date_uploaded, '%Y-%m-%d %H:%i:%s') AS date_uploaded, tt.size AS file_size, CAST(COALESCE(sum(ts.seeders),0) as signed) as seeders, @@ -593,10 +604,10 @@ impl Database for MysqlDatabase { .bind(torrent_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_listing_from_infohash(&self, infohash: &InfoHash) -> Result { + async fn get_torrent_listing_from_infohash(&self, infohash: &InfoHash) -> Result { query_as::<_, TorrentListing>( "SELECT tt.torrent_id, tp.username AS uploader, tt.info_hash, ti.title, ti.description, 
tt.category_id, DATE_FORMAT(tt.date_uploaded, '%Y-%m-%d %H:%i:%s') AS date_uploaded, tt.size AS file_size, CAST(COALESCE(sum(ts.seeders),0) as signed) as seeders, @@ -611,17 +622,17 @@ impl Database for MysqlDatabase { .bind(infohash.to_hex_string().to_uppercase()) // `info_hash` is stored as uppercase hex string .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_all_torrents_compact(&self) -> Result, DatabaseError> { + async fn get_all_torrents_compact(&self) -> Result, database::Error> { query_as::<_, TorrentCompact>("SELECT torrent_id, info_hash FROM torrust_torrents") .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), DatabaseError> { + async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), database::Error> { query("UPDATE torrust_torrent_info SET title = ? WHERE torrent_id = ?") .bind(title) .bind(torrent_id) @@ -630,34 +641,34 @@ impl Database for MysqlDatabase { .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("UNIQUE") { - DatabaseError::TorrentTitleAlreadyExists + database::Error::TorrentTitleAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error, + _ => database::Error::Error, }) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::TorrentNotFound) + Err(database::Error::TorrentNotFound) } }) } - async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), DatabaseError> { + async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), database::Error> { query("UPDATE torrust_torrent_info SET description = ? 
WHERE torrent_id = ?") .bind(description) .bind(torrent_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::TorrentNotFound) + Err(database::Error::TorrentNotFound) } }) } @@ -668,7 +679,7 @@ impl Database for MysqlDatabase { tracker_url: &str, seeders: i64, leechers: i64, - ) -> Result<(), DatabaseError> { + ) -> Result<(), database::Error> { query("REPLACE INTO torrust_torrent_tracker_stats (torrent_id, tracker_url, seeders, leechers) VALUES (?, ?, ?, ?)") .bind(torrent_id) .bind(tracker_url) @@ -677,74 +688,74 @@ impl Database for MysqlDatabase { .execute(&self.pool) .await .map(|_| ()) - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn delete_torrent(&self, torrent_id: i64) -> Result<(), DatabaseError> { + async fn delete_torrent(&self, torrent_id: i64) -> Result<(), database::Error> { query("DELETE FROM torrust_torrents WHERE torrent_id = ?") .bind(torrent_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::TorrentNotFound) + Err(database::Error::TorrentNotFound) } }) } - async fn delete_all_database_rows(&self) -> Result<(), DatabaseError> { + async fn delete_all_database_rows(&self) -> Result<(), database::Error> { query("DELETE FROM torrust_categories;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_torrents;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_tracker_keys;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_users;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_authentication;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_bans;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_invitations;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_profiles;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_torrents;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_public_keys;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; Ok(()) } diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index 3dc022e5..e7792b44 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -3,22 +3,28 @@ use chrono::NaiveDateTime; use sqlx::sqlite::SqlitePoolOptions; use sqlx::{query, query_as, Acquire, SqlitePool}; -use crate::databases::database::{Category, Database, DatabaseDriver, DatabaseError, Sorting, TorrentCompact}; +use crate::databases::database; +use crate::databases::database::{Category, Database, Driver, Sorting, TorrentCompact}; use crate::models::info_hash::InfoHash; use 
crate::models::response::TorrentsResponse; use crate::models::torrent::TorrentListing; use crate::models::torrent_file::{DbTorrentAnnounceUrl, DbTorrentFile, DbTorrentInfo, Torrent, TorrentFile}; use crate::models::tracker_key::TrackerKey; use crate::models::user::{User, UserAuthentication, UserCompact, UserProfile}; -use crate::utils::clock::current_time; -use crate::utils::hex::bytes_to_hex; +use crate::utils::clock; +use crate::utils::hex::from_bytes; -pub struct SqliteDatabase { +pub struct Sqlite { pub pool: SqlitePool, } -impl SqliteDatabase { - pub async fn new(database_url: &str) -> Self { +#[async_trait] +impl Database for Sqlite { + fn get_database_driver(&self) -> Driver { + Driver::Sqlite3 + } + + async fn new(database_url: &str) -> Self { let db = SqlitePoolOptions::new() .connect(database_url) .await @@ -31,20 +37,13 @@ impl SqliteDatabase { Self { pool: db } } -} - -#[async_trait] -impl Database for SqliteDatabase { - fn get_database_driver(&self) -> DatabaseDriver { - DatabaseDriver::Sqlite3 - } - async fn insert_user_and_get_id(&self, username: &str, email: &str, password_hash: &str) -> Result { + async fn insert_user_and_get_id(&self, username: &str, email: &str, password_hash: &str) -> Result { // open pool connection - let mut conn = self.pool.acquire().await.map_err(|_| DatabaseError::Error)?; + let mut conn = self.pool.acquire().await.map_err(|_| database::Error::Error)?; // start db transaction - let mut tx = conn.begin().await.map_err(|_| DatabaseError::Error)?; + let mut tx = conn.begin().await.map_err(|_| database::Error::Error)?; // create the user account and get the user id let user_id = @@ -52,7 +51,7 @@ impl Database for SqliteDatabase { .execute(&mut tx) .await .map(|v| v.last_insert_rowid()) - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; // add password hash for account let insert_user_auth_result = query("INSERT INTO torrust_user_authentication (user_id, password_hash) VALUES (?, ?)") @@ -60,7 +59,7 @@ impl Database for SqliteDatabase { .bind(password_hash) .execute(&mut tx) .await - .map_err(|_| DatabaseError::Error); + .map_err(|_| database::Error::Error); // rollback transaction on error if let Err(e) = insert_user_auth_result { @@ -78,21 +77,21 @@ impl Database for SqliteDatabase { .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("username") { - DatabaseError::UsernameTaken + database::Error::UsernameTaken } else if err.message().contains("email") { - DatabaseError::EmailTaken + database::Error::EmailTaken } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error + _ => database::Error::Error }); // commit or rollback transaction and return user_id on success match insert_user_profile_result { Ok(_) => { let _ = tx.commit().await; - Ok(user_id as i64) + Ok(user_id) } Err(e) => { let _ = tx.rollback().await; @@ -101,43 +100,43 @@ impl Database for SqliteDatabase { } } - async fn get_user_from_id(&self, user_id: i64) -> Result { + async fn get_user_from_id(&self, user_id: i64) -> Result { query_as::<_, User>("SELECT * FROM torrust_users WHERE user_id = ?") .bind(user_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } - async fn get_user_authentication_from_id(&self, user_id: i64) -> Result { + async fn get_user_authentication_from_id(&self, user_id: i64) -> Result { query_as::<_, UserAuthentication>("SELECT * FROM torrust_user_authentication WHERE user_id = ?") .bind(user_id) 
.fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } - async fn get_user_profile_from_username(&self, username: &str) -> Result { + async fn get_user_profile_from_username(&self, username: &str) -> Result { query_as::<_, UserProfile>("SELECT * FROM torrust_user_profiles WHERE username = ?") .bind(username) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } - async fn get_user_compact_from_id(&self, user_id: i64) -> Result { + async fn get_user_compact_from_id(&self, user_id: i64) -> Result { query_as::<_, UserCompact>("SELECT tu.user_id, tp.username, tu.administrator FROM torrust_users tu INNER JOIN torrust_user_profiles tp ON tu.user_id = tp.user_id WHERE tu.user_id = ?") .bind(user_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::UserNotFound) + .map_err(|_| database::Error::UserNotFound) } async fn get_user_tracker_key(&self, user_id: i64) -> Option { const HOUR_IN_SECONDS: i64 = 3600; // casting current_time() to i64 will overflow in the year 2262 - let current_time_plus_hour = (current_time() as i64) + HOUR_IN_SECONDS; + let current_time_plus_hour = i64::try_from(clock::now()).unwrap().saturating_add(HOUR_IN_SECONDS); // get tracker key that is valid for at least one hour from now query_as::<_, TrackerKey>("SELECT tracker_key AS key, date_expiry AS valid_until FROM torrust_tracker_keys WHERE user_id = $1 AND date_expiry > $2 ORDER BY date_expiry DESC") @@ -148,15 +147,15 @@ impl Database for SqliteDatabase { .ok() } - async fn count_users(&self) -> Result { + async fn count_users(&self) -> Result { query_as("SELECT COUNT(*) FROM torrust_users") .fetch_one(&self.pool) .await .map(|(v,)| v) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), DatabaseError> { + async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), database::Error> { // date needs to be in ISO 8601 format let date_expiry_string = date_expiry.format("%Y-%m-%d %H:%M:%S").to_string(); @@ -167,34 +166,34 @@ impl Database for SqliteDatabase { .execute(&self.pool) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn grant_admin_role(&self, user_id: i64) -> Result<(), DatabaseError> { + async fn grant_admin_role(&self, user_id: i64) -> Result<(), database::Error> { query("UPDATE torrust_users SET administrator = TRUE WHERE user_id = ?") .bind(user_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::UserNotFound) + Err(database::Error::UserNotFound) } }) } - async fn verify_email(&self, user_id: i64) -> Result<(), DatabaseError> { + async fn verify_email(&self, user_id: i64) -> Result<(), database::Error> { query("UPDATE torrust_user_profiles SET email_verified = TRUE WHERE user_id = ?") .bind(user_id) .execute(&self.pool) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), DatabaseError> { + async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), database::Error> { let key = tracker_key.key.clone(); query("INSERT INTO torrust_tracker_keys (user_id, tracker_key, 
date_expiry) VALUES ($1, $2, $3)") @@ -204,25 +203,25 @@ impl Database for SqliteDatabase { .execute(&self.pool) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn delete_user(&self, user_id: i64) -> Result<(), DatabaseError> { + async fn delete_user(&self, user_id: i64) -> Result<(), database::Error> { query("DELETE FROM torrust_users WHERE user_id = ?") .bind(user_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::UserNotFound) + Err(database::Error::UserNotFound) } }) } - async fn insert_category_and_get_id(&self, category_name: &str) -> Result { + async fn insert_category_and_get_id(&self, category_name: &str) -> Result { query("INSERT INTO torrust_categories (name) VALUES (?)") .bind(category_name) .execute(&self.pool) @@ -231,49 +230,49 @@ impl Database for SqliteDatabase { .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("UNIQUE") { - DatabaseError::CategoryAlreadyExists + database::Error::CategoryAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error, + _ => database::Error::Error, }) } - async fn get_category_from_id(&self, category_id: i64) -> Result { + async fn get_category_from_id(&self, category_id: i64) -> Result { query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE category_id = ?") .bind(category_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::CategoryNotFound) + .map_err(|_| database::Error::CategoryNotFound) } - async fn get_category_from_name(&self, category_name: &str) -> Result { + async fn get_category_from_name(&self, category_name: &str) -> Result { query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE name = ?") .bind(category_name) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::CategoryNotFound) + .map_err(|_| database::Error::CategoryNotFound) } - async fn get_categories(&self) -> Result, DatabaseError> { + async fn get_categories(&self) -> Result, database::Error> { query_as::<_, Category>("SELECT tc.category_id, tc.name, COUNT(tt.category_id) as num_torrents FROM torrust_categories tc LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id GROUP BY tc.name") .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn delete_category(&self, category_name: &str) -> Result<(), DatabaseError> { + async fn delete_category(&self, category_name: &str) -> Result<(), database::Error> { query("DELETE FROM torrust_categories WHERE name = ?") .bind(category_name) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::CategoryNotFound) + Err(database::Error::CategoryNotFound) } }) } @@ -286,7 +285,7 @@ impl Database for SqliteDatabase { sort: &Sorting, offset: u64, limit: u8, - ) -> Result { + ) -> Result { let title = match search { None => "%".to_string(), Some(v) => format!("%{}%", v), @@ -319,13 +318,13 @@ impl Database for SqliteDatabase { i += 1; } } - if 
!category_filters.is_empty() { + if category_filters.is_empty() { + String::new() + } else { format!( "INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({}) ", category_filters ) - } else { - String::new() } } else { String::new() @@ -346,12 +345,12 @@ impl Database for SqliteDatabase { let count_query = format!("SELECT COUNT(*) as count FROM ({}) AS count_table", query_string); - let count_result: Result = query_as(&count_query) + let count_result: Result = query_as(&count_query) .bind(title.clone()) .fetch_one(&self.pool) .await .map(|(v,)| v) - .map_err(|_| DatabaseError::Error); + .map_err(|_| database::Error::Error); let count = count_result?; @@ -359,18 +358,19 @@ impl Database for SqliteDatabase { let res: Vec = sqlx::query_as::<_, TorrentListing>(&query_string) .bind(title) - .bind(offset as i64) + .bind(i64::saturating_add_unsigned(0, offset)) .bind(limit) .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; Ok(TorrentsResponse { - total: count as u32, + total: u32::try_from(count).expect("variable `count` is larger than u32"), results: res, }) } + #[allow(clippy::too_many_lines)] async fn insert_torrent_and_get_id( &self, torrent: &Torrent, @@ -378,20 +378,20 @@ impl Database for SqliteDatabase { category_id: i64, title: &str, description: &str, - ) -> Result { + ) -> Result { let info_hash = torrent.info_hash(); // open pool connection - let mut conn = self.pool.acquire().await.map_err(|_| DatabaseError::Error)?; + let mut conn = self.pool.acquire().await.map_err(|_| database::Error::Error)?; // start db transaction - let mut tx = conn.begin().await.map_err(|_| DatabaseError::Error)?; + let mut tx = conn.begin().await.map_err(|_| database::Error::Error)?; // torrent file can only hold a pieces key or a root hash key: http://www.bittorrent.org/beps/bep_0030.html let (pieces, root_hash): (String, bool) = if let Some(pieces) = &torrent.info.pieces { - (bytes_to_hex(pieces.as_ref()), false) + (from_bytes(pieces.as_ref()), false) } else { - let root_hash = torrent.info.root_hash.as_ref().ok_or(DatabaseError::Error)?; + let root_hash = torrent.info.root_hash.as_ref().ok_or(database::Error::Error)?; (root_hash.to_string(), true) }; @@ -410,18 +410,18 @@ impl Database for SqliteDatabase { .bind(root_hash) .execute(&self.pool) .await - .map(|v| v.last_insert_rowid() as i64) + .map(|v| v.last_insert_rowid()) .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("info_hash") { - DatabaseError::TorrentAlreadyExists + database::Error::TorrentAlreadyExists } else if err.message().contains("title") { - DatabaseError::TorrentTitleAlreadyExists + database::Error::TorrentTitleAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error + _ => database::Error::Error })?; let insert_torrent_files_result = if let Some(length) = torrent.info.length { @@ -432,7 +432,7 @@ impl Database for SqliteDatabase { .execute(&mut tx) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } else { let files = torrent.info.files.as_ref().unwrap(); @@ -446,7 +446,7 @@ impl Database for SqliteDatabase { .bind(path) .execute(&mut tx) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; } Ok(()) @@ -458,18 +458,19 @@ impl Database for SqliteDatabase { return Err(e); } - let insert_torrent_announce_urls_result: Result<(), DatabaseError> = if let Some(announce_urls) = &torrent.announce_list { + let 
insert_torrent_announce_urls_result: Result<(), database::Error> = if let Some(announce_urls) = &torrent.announce_list + { // flatten the nested vec (this will however remove the) let announce_urls = announce_urls.iter().flatten().collect::>(); - for tracker_url in announce_urls.iter() { + for tracker_url in &announce_urls { let _ = query("INSERT INTO torrust_torrent_announce_urls (torrent_id, tracker_url) VALUES (?, ?)") .bind(torrent_id) .bind(tracker_url) .execute(&mut tx) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; } Ok(()) @@ -482,7 +483,7 @@ impl Database for SqliteDatabase { .execute(&mut tx) .await .map(|_| ()) - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) }; // rollback transaction on error @@ -501,21 +502,21 @@ impl Database for SqliteDatabase { .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("info_hash") { - DatabaseError::TorrentAlreadyExists + database::Error::TorrentAlreadyExists } else if err.message().contains("title") { - DatabaseError::TorrentTitleAlreadyExists + database::Error::TorrentTitleAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error, + _ => database::Error::Error, }); // commit or rollback transaction and return user_id on success match insert_torrent_info_result { Ok(_) => { let _ = tx.commit().await; - Ok(torrent_id as i64) + Ok(torrent_id) } Err(e) => { let _ = tx.rollback().await; @@ -524,38 +525,43 @@ impl Database for SqliteDatabase { } } - async fn get_torrent_info_from_id(&self, torrent_id: i64) -> Result { + async fn get_torrent_info_from_id(&self, torrent_id: i64) -> Result { query_as::<_, DbTorrentInfo>( "SELECT torrent_id, info_hash, name, pieces, piece_length, private, root_hash FROM torrust_torrents WHERE torrent_id = ?", ) .bind(torrent_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_info_from_infohash(&self, infohash: &InfoHash) -> Result { + async fn get_torrent_info_from_infohash(&self, infohash: &InfoHash) -> Result { query_as::<_, DbTorrentInfo>( "SELECT torrent_id, info_hash, name, pieces, piece_length, private, root_hash FROM torrust_torrents WHERE info_hash = ?", ) .bind(infohash.to_hex_string().to_uppercase()) // `info_hash` is stored as uppercase hex string .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_files_from_id(&self, torrent_id: i64) -> Result, DatabaseError> { + async fn get_torrent_files_from_id(&self, torrent_id: i64) -> Result, database::Error> { let db_torrent_files = query_as::<_, DbTorrentFile>("SELECT md5sum, length, path FROM torrust_torrent_files WHERE torrent_id = ?") .bind(torrent_id) .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound)?; + .map_err(|_| database::Error::TorrentNotFound)?; let torrent_files: Vec = db_torrent_files .into_iter() .map(|tf| TorrentFile { - path: tf.path.unwrap_or_default().split('/').map(|v| v.to_string()).collect(), + path: tf + .path + .unwrap_or_default() + .split('/') + .map(std::string::ToString::to_string) + .collect(), length: tf.length, md5sum: tf.md5sum, }) @@ -564,16 +570,16 @@ impl Database for SqliteDatabase { Ok(torrent_files) } - async fn get_torrent_announce_urls_from_id(&self, torrent_id: i64) -> Result>, DatabaseError> { + async fn get_torrent_announce_urls_from_id(&self, 
torrent_id: i64) -> Result>, database::Error> { query_as::<_, DbTorrentAnnounceUrl>("SELECT tracker_url FROM torrust_torrent_announce_urls WHERE torrent_id = ?") .bind(torrent_id) .fetch_all(&self.pool) .await .map(|v| v.iter().map(|a| vec![a.tracker_url.to_string()]).collect()) - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_listing_from_id(&self, torrent_id: i64) -> Result { + async fn get_torrent_listing_from_id(&self, torrent_id: i64) -> Result { query_as::<_, TorrentListing>( "SELECT tt.torrent_id, tp.username AS uploader, tt.info_hash, ti.title, ti.description, tt.category_id, tt.date_uploaded, tt.size AS file_size, CAST(COALESCE(sum(ts.seeders),0) as signed) as seeders, @@ -588,10 +594,10 @@ impl Database for SqliteDatabase { .bind(torrent_id) .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_torrent_listing_from_infohash(&self, infohash: &InfoHash) -> Result { + async fn get_torrent_listing_from_infohash(&self, infohash: &InfoHash) -> Result { query_as::<_, TorrentListing>( "SELECT tt.torrent_id, tp.username AS uploader, tt.info_hash, ti.title, ti.description, tt.category_id, tt.date_uploaded, tt.size AS file_size, CAST(COALESCE(sum(ts.seeders),0) as signed) as seeders, @@ -606,17 +612,17 @@ impl Database for SqliteDatabase { .bind(infohash.to_string().to_uppercase()) // `info_hash` is stored as uppercase .fetch_one(&self.pool) .await - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn get_all_torrents_compact(&self) -> Result, DatabaseError> { + async fn get_all_torrents_compact(&self) -> Result, database::Error> { query_as::<_, TorrentCompact>("SELECT torrent_id, info_hash FROM torrust_torrents") .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), DatabaseError> { + async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), database::Error> { query("UPDATE torrust_torrent_info SET title = $1 WHERE torrent_id = $2") .bind(title) .bind(torrent_id) @@ -625,34 +631,34 @@ impl Database for SqliteDatabase { .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("UNIQUE") { - DatabaseError::TorrentTitleAlreadyExists + database::Error::TorrentTitleAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error, + _ => database::Error::Error, }) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::TorrentNotFound) + Err(database::Error::TorrentNotFound) } }) } - async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), DatabaseError> { + async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), database::Error> { query("UPDATE torrust_torrent_info SET description = $1 WHERE torrent_id = $2") .bind(description) .bind(torrent_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::TorrentNotFound) + Err(database::Error::TorrentNotFound) } }) } @@ -663,7 +669,7 @@ impl Database for SqliteDatabase { tracker_url: &str, seeders: i64, leechers: i64, - ) -> Result<(), DatabaseError> { + ) -> Result<(), database::Error> { 
query("REPLACE INTO torrust_torrent_tracker_stats (torrent_id, tracker_url, seeders, leechers) VALUES ($1, $2, $3, $4)") .bind(torrent_id) .bind(tracker_url) @@ -672,74 +678,74 @@ impl Database for SqliteDatabase { .execute(&self.pool) .await .map(|_| ()) - .map_err(|_| DatabaseError::TorrentNotFound) + .map_err(|_| database::Error::TorrentNotFound) } - async fn delete_torrent(&self, torrent_id: i64) -> Result<(), DatabaseError> { + async fn delete_torrent(&self, torrent_id: i64) -> Result<(), database::Error> { query("DELETE FROM torrust_torrents WHERE torrent_id = ?") .bind(torrent_id) .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) .and_then(|v| { if v.rows_affected() > 0 { Ok(()) } else { - Err(DatabaseError::TorrentNotFound) + Err(database::Error::TorrentNotFound) } }) } - async fn delete_all_database_rows(&self) -> Result<(), DatabaseError> { + async fn delete_all_database_rows(&self) -> Result<(), database::Error> { query("DELETE FROM torrust_categories;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_torrents;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_tracker_keys;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_users;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_authentication;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_bans;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_invitations;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_profiles;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_torrents;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; query("DELETE FROM torrust_user_public_keys;") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error)?; + .map_err(|_| database::Error::Error)?; Ok(()) } diff --git a/src/errors.rs b/src/errors.rs index 571fd9fe..12601e3c 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -6,7 +6,7 @@ use actix_web::{HttpResponse, HttpResponseBuilder, ResponseError}; use derive_more::{Display, Error}; use serde::{Deserialize, Serialize}; -use crate::databases::database::DatabaseError; +use crate::databases::database; pub type ServiceResult = Result; @@ -131,6 +131,7 @@ pub struct ErrorToResponse { impl ResponseError for ServiceError { fn status_code(&self) -> StatusCode { + #[allow(clippy::match_same_arms)] match self { ServiceError::ClosedForRegistration => StatusCode::FORBIDDEN, ServiceError::EmailInvalid => StatusCode::BAD_REQUEST, @@ -139,47 +140,34 @@ impl ResponseError for ServiceError { ServiceError::UsernameNotFound => StatusCode::NOT_FOUND, ServiceError::UserNotFound => StatusCode::NOT_FOUND, ServiceError::AccountNotFound => StatusCode::NOT_FOUND, - ServiceError::ProfanityError => StatusCode::BAD_REQUEST, ServiceError::BlacklistError => StatusCode::BAD_REQUEST, 
ServiceError::UsernameCaseMappedError => StatusCode::BAD_REQUEST, - ServiceError::PasswordTooShort => StatusCode::BAD_REQUEST, ServiceError::PasswordTooLong => StatusCode::BAD_REQUEST, ServiceError::PasswordsDontMatch => StatusCode::BAD_REQUEST, - ServiceError::UsernameTaken => StatusCode::BAD_REQUEST, ServiceError::UsernameInvalid => StatusCode::BAD_REQUEST, ServiceError::EmailTaken => StatusCode::BAD_REQUEST, ServiceError::EmailNotVerified => StatusCode::FORBIDDEN, - ServiceError::TokenNotFound => StatusCode::UNAUTHORIZED, ServiceError::TokenExpired => StatusCode::UNAUTHORIZED, ServiceError::TokenInvalid => StatusCode::UNAUTHORIZED, - ServiceError::TorrentNotFound => StatusCode::BAD_REQUEST, - ServiceError::InvalidTorrentFile => StatusCode::BAD_REQUEST, ServiceError::InvalidTorrentPiecesLength => StatusCode::BAD_REQUEST, ServiceError::InvalidFileType => StatusCode::BAD_REQUEST, - ServiceError::BadRequest => StatusCode::BAD_REQUEST, - ServiceError::InvalidCategory => StatusCode::BAD_REQUEST, - ServiceError::Unauthorized => StatusCode::FORBIDDEN, - ServiceError::InfoHashAlreadyExists => StatusCode::BAD_REQUEST, - ServiceError::TorrentTitleAlreadyExists => StatusCode::BAD_REQUEST, - ServiceError::TrackerOffline => StatusCode::INTERNAL_SERVER_ERROR, - - ServiceError::WhitelistingError => StatusCode::INTERNAL_SERVER_ERROR, - ServiceError::CategoryExists => StatusCode::BAD_REQUEST, - - _ => StatusCode::INTERNAL_SERVER_ERROR, + ServiceError::InternalServerError => StatusCode::INTERNAL_SERVER_ERROR, + ServiceError::EmailMissing => StatusCode::NOT_FOUND, + ServiceError::FailedToSendVerificationEmail => StatusCode::INTERNAL_SERVER_ERROR, + ServiceError::WhitelistingError => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -192,7 +180,7 @@ impl ResponseError for ServiceError { impl From for ServiceError { fn from(e: sqlx::Error) -> Self { - eprintln!("{:?}", e); + eprintln!("{e:?}"); if let Some(err) = e.as_database_error() { return if err.code() == Some(Cow::from("2067")) { @@ -210,47 +198,48 @@ impl From for ServiceError { } } -impl From for ServiceError { - fn from(e: DatabaseError) -> Self { +impl From for ServiceError { + fn from(e: database::Error) -> Self { + #[allow(clippy::match_same_arms)] match e { - DatabaseError::Error => ServiceError::InternalServerError, - DatabaseError::UsernameTaken => ServiceError::UsernameTaken, - DatabaseError::EmailTaken => ServiceError::EmailTaken, - DatabaseError::UserNotFound => ServiceError::UserNotFound, - DatabaseError::CategoryAlreadyExists => ServiceError::CategoryExists, - DatabaseError::CategoryNotFound => ServiceError::InvalidCategory, - DatabaseError::TorrentNotFound => ServiceError::TorrentNotFound, - DatabaseError::TorrentAlreadyExists => ServiceError::InfoHashAlreadyExists, - DatabaseError::TorrentTitleAlreadyExists => ServiceError::TorrentTitleAlreadyExists, - DatabaseError::UnrecognizedDatabaseDriver => ServiceError::InternalServerError, + database::Error::Error => ServiceError::InternalServerError, + database::Error::UsernameTaken => ServiceError::UsernameTaken, + database::Error::EmailTaken => ServiceError::EmailTaken, + database::Error::UserNotFound => ServiceError::UserNotFound, + database::Error::CategoryAlreadyExists => ServiceError::CategoryExists, + database::Error::CategoryNotFound => ServiceError::InvalidCategory, + database::Error::TorrentNotFound => ServiceError::TorrentNotFound, + database::Error::TorrentAlreadyExists => ServiceError::InfoHashAlreadyExists, + database::Error::TorrentTitleAlreadyExists => 
ServiceError::TorrentTitleAlreadyExists, + database::Error::UnrecognizedDatabaseDriver => ServiceError::InternalServerError, } } } impl From for ServiceError { fn from(e: argon2::password_hash::Error) -> Self { - eprintln!("{}", e); + eprintln!("{e}"); ServiceError::InternalServerError } } impl From for ServiceError { fn from(e: std::io::Error) -> Self { - eprintln!("{}", e); + eprintln!("{e}"); ServiceError::InternalServerError } } impl From> for ServiceError { fn from(e: Box) -> Self { - eprintln!("{}", e); + eprintln!("{e}"); ServiceError::InternalServerError } } impl From for ServiceError { fn from(e: serde_json::Error) -> Self { - eprintln!("{}", e); + eprintln!("{e}"); ServiceError::InternalServerError } } diff --git a/src/lib.rs b/src/lib.rs index 6db3f410..a2b7d173 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -33,7 +33,7 @@ where let mut acc = vec![]; for s in s.as_ref().split(',') { let item = s.trim().parse::().map_err(|_| ())?; - acc.push(item) + acc.push(item); } if acc.is_empty() { Ok(None) diff --git a/src/mailer.rs b/src/mailer.rs index 258106d2..64c2826e 100644 --- a/src/mailer.rs +++ b/src/mailer.rs @@ -9,9 +9,9 @@ use serde::{Deserialize, Serialize}; use crate::config::Configuration; use crate::errors::ServiceError; -use crate::utils::clock::current_time; +use crate::utils::clock; -pub struct MailerService { +pub struct Service { cfg: Arc, mailer: Arc, } @@ -30,8 +30,8 @@ struct VerifyTemplate { verification_url: String, } -impl MailerService { - pub async fn new(cfg: Arc) -> MailerService { +impl Service { + pub async fn new(cfg: Arc) -> Service { let mailer = Arc::new(Self::get_mailer(&cfg).await); Self { cfg, mailer } @@ -57,6 +57,11 @@ impl MailerService { } } + /// Send Verification Email + /// + /// # Errors + /// + /// This function will return an error if unable to send an email. 
pub async fn send_verification_mail( &self, to: &str, @@ -96,10 +101,13 @@ impl MailerService { .singlepart( SinglePart::builder() .header(lettre::message::header::ContentType::TEXT_HTML) - .body(ctx.render_once().unwrap()), + .body( + ctx.render_once() + .expect("value `ctx` must have some internal error passed into it"), + ), ), ) - .unwrap(); + .expect("the `multipart` builder had an error"); match self.mailer.send(mail).await { Ok(_res) => Ok(()), @@ -129,7 +137,7 @@ impl MailerService { let claims = VerifyClaims { iss: String::from("email-verification"), sub: user_id, - exp: current_time() + 315_569_260, // 10 years from now + exp: clock::now() + 315_569_260, // 10 years from now }; let token = encode(&Header::default(), &claims, &EncodingKey::from_secret(key)).unwrap(); diff --git a/src/models/response.rs b/src/models/response.rs index 059e1c3b..8340c4f0 100644 --- a/src/models/response.rs +++ b/src/models/response.rs @@ -8,16 +8,19 @@ pub enum OkResponses { TokenResponse(TokenResponse), } +#[allow(clippy::module_name_repetitions)] #[derive(Serialize, Deserialize, Debug)] pub struct OkResponse { pub data: T, } +#[allow(clippy::module_name_repetitions)] #[derive(Serialize, Deserialize, Debug)] pub struct ErrorResponse { pub errors: Vec, } +#[allow(clippy::module_name_repetitions)] #[derive(Serialize, Deserialize, Debug)] pub struct TokenResponse { pub token: String, @@ -25,11 +28,13 @@ pub struct TokenResponse { pub admin: bool, } +#[allow(clippy::module_name_repetitions)] #[derive(Serialize, Deserialize, Debug)] pub struct NewTorrentResponse { pub torrent_id: i64, } +#[allow(clippy::module_name_repetitions)] #[derive(Debug, Serialize, Deserialize, sqlx::FromRow)] pub struct TorrentResponse { pub torrent_id: i64, @@ -48,6 +53,7 @@ pub struct TorrentResponse { } impl TorrentResponse { + #[must_use] pub fn from_listing(torrent_listing: TorrentListing) -> TorrentResponse { TorrentResponse { torrent_id: torrent_listing.torrent_id, @@ -57,7 +63,7 @@ impl TorrentResponse { description: torrent_listing.description, category: Category { category_id: 0, - name: "".to_string(), + name: String::new(), num_torrents: 0, }, upload_date: torrent_listing.date_uploaded, @@ -66,11 +72,12 @@ impl TorrentResponse { leechers: torrent_listing.leechers, files: vec![], trackers: vec![], - magnet_link: "".to_string(), + magnet_link: String::new(), } } } +#[allow(clippy::module_name_repetitions)] #[derive(Serialize, Deserialize, Debug, sqlx::FromRow)] pub struct TorrentsResponse { pub total: u32, diff --git a/src/models/torrent.rs b/src/models/torrent.rs index 9063c7f3..2ecbf984 100644 --- a/src/models/torrent.rs +++ b/src/models/torrent.rs @@ -1,8 +1,9 @@ use serde::{Deserialize, Serialize}; use crate::models::torrent_file::Torrent; -use crate::routes::torrent::CreateTorrent; +use crate::routes::torrent::Create; +#[allow(clippy::module_name_repetitions)] #[allow(dead_code)] #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, sqlx::FromRow)] pub struct TorrentListing { @@ -18,8 +19,9 @@ pub struct TorrentListing { pub leechers: i64, } +#[allow(clippy::module_name_repetitions)] #[derive(Debug)] pub struct TorrentRequest { - pub fields: CreateTorrent, + pub fields: Create, pub torrent: Torrent, } diff --git a/src/models/torrent_file.rs b/src/models/torrent_file.rs index be3b6101..e3c0a49f 100644 --- a/src/models/torrent_file.rs +++ b/src/models/torrent_file.rs @@ -4,7 +4,7 @@ use serde_bytes::ByteBuf; use sha1::{Digest, Sha1}; use crate::config::Configuration; -use crate::utils::hex::{bytes_to_hex, 
hex_to_bytes}; +use crate::utils::hex::{from_bytes, into_bytes}; #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] pub struct TorrentNode(String, i64); @@ -41,25 +41,31 @@ pub struct TorrentInfo { impl TorrentInfo { /// torrent file can only hold a pieces key or a root hash key: - /// http://www.bittorrent.org/beps/bep_0030.html + /// [BEP 30](http://www.bittorrent.org/beps/bep_0030.html) + #[must_use] pub fn get_pieces_as_string(&self) -> String { match &self.pieces { - None => "".to_string(), - Some(byte_buf) => bytes_to_hex(byte_buf.as_ref()), + None => String::new(), + Some(byte_buf) => from_bytes(byte_buf.as_ref()), } } + #[must_use] pub fn get_root_hash_as_i64(&self) -> i64 { match &self.root_hash { None => 0i64, - Some(root_hash) => root_hash.parse::().unwrap(), + Some(root_hash) => root_hash + .parse::() + .expect("variable `root_hash` cannot be converted into a `i64`"), } } + #[must_use] pub fn is_a_single_file_torrent(&self) -> bool { self.length.is_some() } + #[must_use] pub fn is_a_multiple_file_torrent(&self) -> bool { self.files.is_some() } @@ -90,18 +96,13 @@ pub struct Torrent { } impl Torrent { + #[must_use] pub fn from_db_info_files_and_announce_urls( torrent_info: DbTorrentInfo, torrent_files: Vec, torrent_announce_urls: Vec>, ) -> Self { - let private = if let Some(private_i64) = torrent_info.private { - // must fit in a byte - let private = if (0..256).contains(&private_i64) { private_i64 } else { 0 }; - Some(private as u8) - } else { - None - }; + let private = u8::try_from(torrent_info.private.unwrap_or(0)).ok(); // the info part of the torrent file let mut info = TorrentInfo { @@ -120,20 +121,27 @@ impl Torrent { if torrent_info.root_hash > 0 { info.root_hash = Some(torrent_info.pieces); } else { - let pieces = hex_to_bytes(&torrent_info.pieces).unwrap(); + let pieces = into_bytes(&torrent_info.pieces).expect("variable `torrent_info.pieces` is not a valid hex string"); info.pieces = Some(ByteBuf::from(pieces)); } // either set the single file or the multiple files information if torrent_files.len() == 1 { - // can safely unwrap because we know there is 1 element - let torrent_file = torrent_files.first().unwrap(); + let torrent_file = torrent_files + .first() + .expect("vector `torrent_files` should have at least one element"); info.md5sum = torrent_file.md5sum.clone(); info.length = Some(torrent_file.length); - let path = if torrent_file.path.first().as_ref().unwrap().is_empty() { + let path = if torrent_file + .path + .first() + .as_ref() + .expect("the vector for the `path` should have at least one element") + .is_empty() + { None } else { Some(torrent_file.path.clone()) @@ -170,8 +178,9 @@ impl Torrent { } } + #[must_use] pub fn calculate_info_hash_as_bytes(&self) -> [u8; 20] { - let info_bencoded = ser::to_bytes(&self.info).unwrap(); + let info_bencoded = ser::to_bytes(&self.info).expect("variable `info` was not able to be serialized."); let mut hasher = Sha1::new(); hasher.update(info_bencoded); let sum_hex = hasher.finalize(); @@ -180,15 +189,16 @@ impl Torrent { sum_bytes } + #[must_use] pub fn info_hash(&self) -> String { - bytes_to_hex(&self.calculate_info_hash_as_bytes()) + from_bytes(&self.calculate_info_hash_as_bytes()) } + #[must_use] pub fn file_size(&self) -> i64 { - if self.info.length.is_some() { - self.info.length.unwrap() - } else { - match &self.info.files { + match self.info.length { + Some(length) => length, + None => match &self.info.files { None => 0, Some(files) => { let mut file_size = 0; @@ -197,32 +207,30 @@ impl Torrent { }
file_size } - } + }, } } + #[must_use] pub fn announce_urls(&self) -> Vec { - if self.announce_list.is_none() { - return vec![self.announce.clone().unwrap()]; + match &self.announce_list { + Some(list) => list.clone().into_iter().flatten().collect::>(), + None => vec![self.announce.clone().expect("variable `announce` should not be None")], } - - self.announce_list - .clone() - .unwrap() - .into_iter() - .flatten() - .collect::>() } + #[must_use] pub fn is_a_single_file_torrent(&self) -> bool { self.info.is_a_single_file_torrent() } + #[must_use] pub fn is_a_multiple_file_torrent(&self) -> bool { self.info.is_a_multiple_file_torrent() } } +#[allow(clippy::module_name_repetitions)] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, sqlx::FromRow)] pub struct DbTorrentFile { pub path: Option, diff --git a/src/models/user.rs b/src/models/user.rs index 9a500d4d..f808c87a 100644 --- a/src/models/user.rs +++ b/src/models/user.rs @@ -8,12 +8,14 @@ pub struct User { pub administrator: bool, } +#[allow(clippy::module_name_repetitions)] #[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] pub struct UserAuthentication { pub user_id: i64, pub password_hash: String, } +#[allow(clippy::module_name_repetitions)] #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, sqlx::FromRow)] pub struct UserProfile { pub user_id: i64, @@ -24,6 +26,7 @@ pub struct UserProfile { pub avatar: String, } +#[allow(clippy::module_name_repetitions)] #[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] pub struct UserCompact { pub user_id: i64, @@ -31,6 +34,7 @@ pub struct UserCompact { pub administrator: bool, } +#[allow(clippy::module_name_repetitions)] #[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] pub struct UserFull { pub user_id: i64, @@ -44,6 +48,7 @@ pub struct UserFull { pub avatar: String, } +#[allow(clippy::module_name_repetitions)] #[derive(Debug, Serialize, Deserialize, Clone)] pub struct UserClaims { pub user: UserCompact, diff --git a/src/routes/about.rs b/src/routes/about.rs index 2a632c85..c5b81d2d 100644 --- a/src/routes/about.rs +++ b/src/routes/about.rs @@ -3,11 +3,11 @@ use actix_web::{web, HttpResponse, Responder}; use crate::errors::ServiceResult; -pub fn init_routes(cfg: &mut web::ServiceConfig) { +pub fn init(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("/about") - .service(web::resource("").route(web::get().to(get_about))) - .service(web::resource("/license").route(web::get().to(get_license))), + .service(web::resource("").route(web::get().to(get))) + .service(web::resource("/license").route(web::get().to(license))), ); } @@ -29,7 +29,13 @@ const ABOUT: &str = r#" "#; -pub async fn get_about() -> ServiceResult { +/// Get About Section HTML +/// +/// # Errors +/// +/// This function will not return an error. +#[allow(clippy::unused_async)] +pub async fn get() -> ServiceResult { Ok(HttpResponse::build(StatusCode::OK) .content_type("text/html; charset=utf-8") .body(ABOUT)) @@ -63,7 +69,13 @@ const LICENSE: &str = r#" "#; -pub async fn get_license() -> ServiceResult { +/// Get the License in HTML +/// +/// # Errors +/// +/// This function will not return an error. 
+#[allow(clippy::unused_async)] +pub async fn license() -> ServiceResult { Ok(HttpResponse::build(StatusCode::OK) .content_type("text/html; charset=utf-8") .body(LICENSE)) diff --git a/src/routes/category.rs b/src/routes/category.rs index 823c267e..865d233d 100644 --- a/src/routes/category.rs +++ b/src/routes/category.rs @@ -5,18 +5,23 @@ use crate::common::WebAppData; use crate::errors::{ServiceError, ServiceResult}; use crate::models::response::OkResponse; -pub fn init_routes(cfg: &mut web::ServiceConfig) { +pub fn init(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("/category").service( web::resource("") - .route(web::get().to(get_categories)) - .route(web::post().to(add_category)) - .route(web::delete().to(delete_category)), + .route(web::get().to(get)) + .route(web::post().to(add)) + .route(web::delete().to(delete)), ), ); } -pub async fn get_categories(app_data: WebAppData) -> ServiceResult { +/// Gets the Categories +/// +/// # Errors +/// +/// This function will return an error if there is a database error. +pub async fn get(app_data: WebAppData) -> ServiceResult { let categories = app_data.database.get_categories().await?; Ok(HttpResponse::Ok().json(OkResponse { data: categories })) @@ -28,7 +33,13 @@ pub struct Category { pub icon: Option, } -pub async fn add_category(req: HttpRequest, payload: web::Json, app_data: WebAppData) -> ServiceResult { +/// Adds a New Category +/// +/// # Errors +/// +/// This function will return an error if unable to get user. +/// This function will return an error if unable to insert into the database the new category. +pub async fn add(req: HttpRequest, payload: web::Json, app_data: WebAppData) -> ServiceResult { // check for user let user = app_data.auth.get_user_compact_from_request(&req).await?; @@ -44,11 +55,13 @@ pub async fn add_category(req: HttpRequest, payload: web::Json, app_da })) } -pub async fn delete_category( - req: HttpRequest, - payload: web::Json, - app_data: WebAppData, -) -> ServiceResult { +/// Deletes a Category +/// +/// # Errors +/// +/// This function will return an error if unable to get user. +/// This function will return an error if unable to delete the category from the database. +pub async fn delete(req: HttpRequest, payload: web::Json, app_data: WebAppData) -> ServiceResult { // code-review: why do we need to send the whole category object to delete it? // And we should use the ID instead of the name, because the name could change // or we could add support for multiple languages. 
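The route renames above all follow one convention: each routes module exposes a short `init` function that registers its own scope, and handler names drop the module prefix (`get_categories` becomes `category::get`). A minimal, self-contained sketch of that pattern; the `example` module and its handlers are illustrative only, not code from this changeset:

use actix_web::{web, HttpResponse, Responder};

mod example {
    use super::*;

    // Handler names drop the module prefix; the path `example::get` carries
    // the context, the same way `category::get` replaces `get_categories`.
    async fn get() -> impl Responder {
        HttpResponse::Ok().body("list")
    }

    async fn add() -> impl Responder {
        HttpResponse::Ok().body("created")
    }

    pub fn init(cfg: &mut web::ServiceConfig) {
        cfg.service(
            web::scope("/example").service(
                web::resource("")
                    .route(web::get().to(get))
                    .route(web::post().to(add)),
            ),
        );
    }
}

// Mirrors `routes::init` in `src/routes/mod.rs`: the top-level entry point
// shrinks to a list of one-word per-module calls.
pub fn init(cfg: &mut web::ServiceConfig) {
    example::init(cfg);
}

Call sites then read as `example::init(cfg)`, which is what lets the `src/routes/mod.rs` diff below reduce to one-line calls per module.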
diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 946e776f..ce833698 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -8,12 +8,12 @@ pub mod settings; pub mod torrent; pub mod user; -pub fn init_routes(cfg: &mut web::ServiceConfig) { - user::init_routes(cfg); - torrent::init_routes(cfg); - category::init_routes(cfg); - settings::init_routes(cfg); - about::init_routes(cfg); - proxy::init_routes(cfg); - root::init_routes(cfg); +pub fn init(cfg: &mut web::ServiceConfig) { + user::init(cfg); + torrent::init(cfg); + category::init(cfg); + settings::init(cfg); + about::init(cfg); + proxy::init(cfg); + root::init(cfg); } diff --git a/src/routes/proxy.rs b/src/routes/proxy.rs index 443900df..c61b9326 100644 --- a/src/routes/proxy.rs +++ b/src/routes/proxy.rs @@ -26,7 +26,7 @@ const ERROR_IMAGE_TOO_BIG_TEXT: &str = "Image is too big."; const ERROR_IMAGE_USER_QUOTA_MET_TEXT: &str = "Image proxy quota met."; const ERROR_IMAGE_UNAUTHENTICATED_TEXT: &str = "Sign in to see image."; -pub fn init_routes(cfg: &mut web::ServiceConfig) { +pub fn init(cfg: &mut web::ServiceConfig) { cfg.service(web::scope("/proxy").service(web::resource("/image/{url}").route(web::get().to(get_proxy_image)))); load_error_images(); @@ -53,6 +53,11 @@ fn load_error_images() { }); } +/// Get the proxy image. +/// +/// # Errors +/// +/// This function will return `Ok` only for now. pub async fn get_proxy_image(req: HttpRequest, app_data: WebAppData, path: web::Path) -> ServiceResult { // Check for optional user. let opt_user = app_data.auth.get_user_compact_from_request(&req).await.ok(); diff --git a/src/routes/root.rs b/src/routes/root.rs index 69f11fd6..ffeb1ed4 100644 --- a/src/routes/root.rs +++ b/src/routes/root.rs @@ -2,6 +2,6 @@ use actix_web::web; use crate::routes::about; -pub fn init_routes(cfg: &mut web::ServiceConfig) { - cfg.service(web::scope("/").service(web::resource("").route(web::get().to(about::get_about)))); +pub fn init(cfg: &mut web::ServiceConfig) { + cfg.service(web::scope("/").service(web::resource("").route(web::get().to(about::get)))); } diff --git a/src/routes/settings.rs b/src/routes/settings.rs index e2b6849f..a5c34e04 100644 --- a/src/routes/settings.rs +++ b/src/routes/settings.rs @@ -1,24 +1,25 @@ use actix_web::{web, HttpRequest, HttpResponse, Responder}; use crate::common::WebAppData; -use crate::config::AppConfiguration; +use crate::config; use crate::errors::{ServiceError, ServiceResult}; use crate::models::response::OkResponse; -pub fn init_routes(cfg: &mut web::ServiceConfig) { +pub fn init(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("/settings") - .service( - web::resource("") - .route(web::get().to(get_settings)) - .route(web::post().to(update_settings_handler)), - ) - .service(web::resource("/name").route(web::get().to(get_site_name))) - .service(web::resource("/public").route(web::get().to(get_public_settings))), + .service(web::resource("").route(web::get().to(get)).route(web::post().to(update))) + .service(web::resource("/name").route(web::get().to(site_name))) + .service(web::resource("/public").route(web::get().to(get_public))), ); } -pub async fn get_settings(req: HttpRequest, app_data: WebAppData) -> ServiceResult { +/// Get Settings +/// +/// # Errors +/// +/// This function will return an error if unable to get user from database. 
+pub async fn get(req: HttpRequest, app_data: WebAppData) -> ServiceResult { // check for user let user = app_data.auth.get_user_compact_from_request(&req).await?; @@ -27,18 +28,28 @@ pub async fn get_settings(req: HttpRequest, app_data: WebAppData) -> ServiceResult return Err(ServiceError::Unauthorized); } - let settings: tokio::sync::RwLockReadGuard = app_data.cfg.settings.read().await; + let settings: tokio::sync::RwLockReadGuard = app_data.cfg.settings.read().await; Ok(HttpResponse::Ok().json(OkResponse { data: &*settings })) } -pub async fn get_public_settings(app_data: WebAppData) -> ServiceResult { +/// Get Public Settings +/// +/// # Errors +/// +/// This function should not return an error. +pub async fn get_public(app_data: WebAppData) -> ServiceResult { let public_settings = app_data.cfg.get_public().await; Ok(HttpResponse::Ok().json(OkResponse { data: public_settings })) } -pub async fn get_site_name(app_data: WebAppData) -> ServiceResult { +/// Get Name of Website +/// +/// # Errors +/// +/// This function should not return an error. +pub async fn site_name(app_data: WebAppData) -> ServiceResult { let settings = app_data.cfg.settings.read().await; Ok(HttpResponse::Ok().json(OkResponse { @@ -55,9 +66,9 @@ pub async fn get_site_name(app_data: WebAppData) -> ServiceResult, + payload: web::Json, app_data: WebAppData, ) -> ServiceResult { // check for user diff --git a/src/routes/torrent.rs b/src/routes/torrent.rs index 293e31b5..0f121fd4 100644 --- a/src/routes/torrent.rs +++ b/src/routes/torrent.rs @@ -17,45 +17,50 @@ use crate::models::torrent::TorrentRequest; use crate::utils::parse_torrent; use crate::AsCSV; -pub fn init_routes(cfg: &mut web::ServiceConfig) { +pub fn init(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("/torrent") - .service(web::resource("/upload").route(web::post().to(upload_torrent))) + .service(web::resource("/upload").route(web::post().to(upload))) .service(web::resource("/download/{info_hash}").route(web::get().to(download_torrent_handler))) .service( web::resource("/{info_hash}") - .route(web::get().to(get_torrent_handler)) - .route(web::put().to(update_torrent_handler)) - .route(web::delete().to(delete_torrent_handler)), + .route(web::get().to(get)) + .route(web::put().to(update)) + .route(web::delete().to(delete)), ), ); cfg.service(web::scope("/torrents").service(web::resource("").route(web::get().to(get_torrents_handler)))); } #[derive(FromRow)] -pub struct TorrentCount { +pub struct Count { pub count: i32, } #[derive(Debug, Deserialize)] -pub struct CreateTorrent { +pub struct Create { pub title: String, pub description: String, pub category: String, } -impl CreateTorrent { +impl Create { + /// Verifies this [`Create`] request. + /// + /// # Errors + /// + /// This function will return a `BadRequest` error if the `title` or the `category` is empty.
pub fn verify(&self) -> Result<(), ServiceError> { - if !self.title.is_empty() && !self.category.is_empty() { - return Ok(()); + if self.title.is_empty() || self.category.is_empty() { + Err(ServiceError::BadRequest) + } else { + Ok(()) } - - Err(ServiceError::BadRequest) } } #[derive(Debug, Deserialize)] -pub struct TorrentSearch { +pub struct Search { page_size: Option, page: Option, sort: Option, @@ -65,12 +70,21 @@ pub struct TorrentSearch { } #[derive(Debug, Deserialize)] -pub struct TorrentUpdate { +pub struct Update { title: Option, description: Option, } -pub async fn upload_torrent(req: HttpRequest, payload: Multipart, app_data: WebAppData) -> ServiceResult { +/// Upload a Torrent to the Index +/// +/// # Errors +/// +/// This function will return an error if unable to get the user from the database. +/// This function will return an error if unable to get torrent request from payload. +/// This function will return an error if unable to get the category from the database. +/// This function will return an error if unable to insert the torrent into the database. +/// This function will return an error if unable to add the torrent to the whitelist. +pub async fn upload(req: HttpRequest, payload: Multipart, app_data: WebAppData) -> ServiceResult { let user = app_data.auth.get_user_compact_from_request(&req).await?; // get torrent and fields from request @@ -166,7 +180,17 @@ pub async fn download_torrent_handler(req: HttpRequest, app_data: WebAppData) -> ServiceResult Ok(HttpResponse::Ok().content_type("application/x-bittorrent").body(buffer)) } -pub async fn get_torrent_handler(req: HttpRequest, app_data: WebAppData) -> ServiceResult { +/// Get Torrent from the Index +/// +/// # Errors +/// +/// This function will return an error if unable to get torrent ID. +/// This function will return an error if unable to get torrent listing from id. +/// This function will return an error if unable to get torrent category from id. +/// This function will return an error if unable to get torrent files from id. +/// This function will return an error if unable to get torrent info from id. +/// This function will return an error if unable to get torrent announce url(s) from id. +pub async fn get(req: HttpRequest, app_data: WebAppData) -> ServiceResult { // optional let user = app_data.auth.get_user_compact_from_request(&req).await; @@ -251,11 +275,16 @@ pub async fn get_torrent_handler(req: HttpRequest, app_data: WebAppData) -> ServiceResult Ok(HttpResponse::Ok().json(OkResponse { data: torrent_response })) } -pub async fn update_torrent_handler( - req: HttpRequest, - payload: web::Json, - app_data: WebAppData, -) -> ServiceResult { +/// Update a Torrent in the Index +/// +/// # Errors +/// +/// This function will return an error if unable to get user. +/// This function will return an error if unable to get torrent id from request. +/// This function will return an error if unable to get listing from id. +/// This function will return a `ServiceError::Unauthorized` if the user is not the owner or an administrator. +/// This function will return an error if unable to update the torrent title or description.
+pub async fn update(req: HttpRequest, payload: web::Json, app_data: WebAppData) -> ServiceResult { let user = app_data.auth.get_user_compact_from_request(&req).await?; let infohash = get_torrent_infohash_from_request(&req)?; @@ -293,7 +322,15 @@ pub async fn update_torrent_handler( Ok(HttpResponse::Ok().json(OkResponse { data: torrent_response })) } -pub async fn delete_torrent_handler(req: HttpRequest, app_data: WebAppData) -> ServiceResult { +/// Delete a Torrent from the Index +/// +/// # Errors +/// +/// This function will return an error if unable to get the user. +/// This function will return a `ServiceError::Unauthorized` if the user is not an administrator. +/// This function will return an error if unable to get the torrent listing from its ID. +/// This function will return an error if unable to delete the torrent from the database. +pub async fn delete(req: HttpRequest, app_data: WebAppData) -> ServiceResult { let user = app_data.auth.get_user_compact_from_request(&req).await?; // check if user is administrator @@ -327,7 +364,7 @@ pub async fn delete_torrent_handler(req: HttpRequest, app_data: WebAppData) -> ServiceResult /// # Errors /// /// Returns a `ServiceError::DatabaseError` if the database query fails. -pub async fn get_torrents_handler(params: Query, app_data: WebAppData) -> ServiceResult { +pub async fn get_torrents_handler(params: Query, app_data: WebAppData) -> ServiceResult { let settings = app_data.cfg.settings.read().await; let sort = params.sort.unwrap_or(Sorting::UploadedDesc); @@ -370,9 +407,9 @@ async fn get_torrent_request_from_payload(mut payload: Multipart) -> Result Result Result, app_data: WebAppData) -> ServiceResult { info!("registering user: {}", payload.username); @@ -60,7 +73,7 @@ pub async fn register(req: HttpRequest, mut payload: web::Json, app_da } } EmailOnSignup::None => payload.email = None, - _ => {} + EmailOnSignup::Optional => {} } if let Some(email) = &payload.email { @@ -96,7 +109,7 @@ pub async fn register(req: HttpRequest, mut payload: web::Json, app_da return Err(ServiceError::UsernameInvalid); } - let email = payload.email.as_ref().unwrap_or(&"".to_string()).to_string(); + let email = payload.email.as_ref().unwrap_or(&String::new()).to_string(); let user_id = app_data .database @@ -108,13 +121,13 @@ pub async fn register(req: HttpRequest, mut payload: web::Json, app_da let _ = app_data.database.grant_admin_role(user_id).await; } - let conn_info = req.connection_info(); + let conn_info = req.connection_info().clone(); if settings.mail.email_verification_enabled && payload.email.is_some() { let mail_res = app_data .mailer .send_verification_mail( - payload.email.as_ref().unwrap(), + payload.email.as_ref().expect("variable `email` is checked above"), &payload.username, user_id, format!("{}://{}", conn_info.scheme(), conn_info.host()).as_str(), @@ -130,6 +143,15 @@ pub async fn register(req: HttpRequest, mut payload: web::Json, app_da Ok(HttpResponse::Ok()) } +/// Login user to Index +/// +/// # Errors +/// +/// This function will return a `ServiceError::WrongPasswordOrUsername` if unable to get user profile. +/// This function will return a `ServiceError::InternalServerError` if unable to get user authentication data from the user id. +/// This function will return an error if unable to verify the password. +/// This function will return a `ServiceError::EmailNotVerified` if the email should be, but is not verified. +/// This function will return an error if unable to get the user data from the database.
pub async fn login(payload: web::Json, app_data: WebAppData) -> ServiceResult { // get the user profile from database let user_profile = app_data .database @@ -172,6 +194,11 @@ pub async fn login(payload: web::Json, app_data: WebAppData) -> ServiceRe } /// Verify if the user supplied and the database supplied passwords match +/// +/// # Errors +/// +/// This function will return an error if unable to parse password hash from the stored user authentication value. +/// This function will return a `ServiceError::WrongPasswordOrUsername` if unable to match the password with either `argon2id` or `pbkdf2-sha256`. pub fn verify_password(password: &[u8], user_authentication: &UserAuthentication) -> Result<(), ServiceError> { // wrap string of the hashed password into a PasswordHash struct for verification let parsed_hash = PasswordHash::new(&user_authentication.password_hash)?; @@ -195,6 +222,11 @@ pub fn verify_password(password: &[u8], user_authentication: &UserAuthentication } } +/// Verify a supplied JWT. +/// +/// # Errors +/// +/// This function will return an error if unable to verify the supplied payload as a valid JWT. pub async fn verify_token(payload: web::Json, app_data: WebAppData) -> ServiceResult { // verify if token is valid let _claims = app_data.auth.verify_jwt(&payload.token).await?; @@ -204,16 +236,22 @@ pub async fn verify_token(payload: web::Json, app_data: WebAppData) -> Se })) } +/// Renew a supplied JWT. +/// +/// # Errors +/// +/// This function will return an error if unable to verify the supplied payload as a valid JWT. +/// This function will return an error if unable to get user data from the database. pub async fn renew_token(payload: web::Json, app_data: WebAppData) -> ServiceResult { + const ONE_WEEK_IN_SECONDS: u64 = 604_800; + // verify if token is valid let claims = app_data.auth.verify_jwt(&payload.token).await?; let user_compact = app_data.database.get_user_compact_from_id(claims.user.user_id).await?; - const ONE_WEEK_IN_SECONDS: u64 = 604_800; - // renew token if it is valid for less than one week - let token = match claims.exp - current_time() { + let token = match claims.exp - clock::now() { x if x < ONE_WEEK_IN_SECONDS => app_data.auth.sign_jwt(user_compact.clone()).await, _ => payload.token.clone(), }; @@ -229,7 +267,10 @@ pub async fn renew_token(payload: web::Json, app_data: WebAppData) -> Ser pub async fn verify_email(req: HttpRequest, app_data: WebAppData) -> String { let settings = app_data.cfg.settings.read().await; - let token = req.match_info().get("token").unwrap(); + let token = match req.match_info().get("token").ok_or(ServiceError::InternalServerError) { + Ok(token) => token, + Err(err) => return err.to_string(), + }; let token_data = match decode::( token, @@ -255,8 +296,16 @@ pub async fn verify_email(req: HttpRequest, app_data: WebAppData) -> String { String::from("Email verified, you can close this page.") } -// TODO: add reason and date_expiry parameters to request -pub async fn ban_user(req: HttpRequest, app_data: WebAppData) -> ServiceResult { +/// Ban a user from the Index +/// +/// TODO: add reason and `date_expiry` parameters to request +/// +/// # Errors +/// +/// This function will return a `ServiceError::InternalServerError` if unable to get the user from the request. +/// This function will return an error if unable to get user profile from supplied username. +/// This function will return an error if unable to set the ban of the user in the database.
+pub async fn ban(req: HttpRequest, app_data: WebAppData) -> ServiceResult { debug!("banning user"); let user = app_data.auth.get_user_compact_from_request(&req).await?; @@ -266,7 +315,7 @@ pub async fn ban_user(req: HttpRequest, app_data: WebAppData) -> ServiceResult Result<(), DatabaseError> { + pub async fn import_all_torrents_statistics(&self) -> Result<(), database::Error> { info!("Importing torrents statistics from tracker ..."); let torrents = self.database.get_all_torrents_compact().await?; diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v1_0_0.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v1_0_0.rs index 1f4987c6..ae15a037 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v1_0_0.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v1_0_0.rs @@ -1,8 +1,10 @@ +#![allow(clippy::missing_errors_doc)] + use serde::{Deserialize, Serialize}; use sqlx::sqlite::SqlitePoolOptions; use sqlx::{query_as, SqlitePool}; -use crate::databases::database::DatabaseError; +use crate::databases::database; #[derive(Debug, Serialize, Deserialize, sqlx::FromRow)] pub struct CategoryRecordV1 { @@ -64,11 +66,11 @@ impl SqliteDatabaseV1_0_0 { Self { pool: db } } - pub async fn get_categories_order_by_id(&self) -> Result, DatabaseError> { + pub async fn get_categories_order_by_id(&self) -> Result, database::Error> { query_as::<_, CategoryRecordV1>("SELECT category_id, name FROM torrust_categories ORDER BY category_id ASC") .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } pub async fn get_users(&self) -> Result, sqlx::Error> { diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v2_0_0.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v2_0_0.rs index 9107356b..d054ca1c 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v2_0_0.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/databases/sqlite_v2_0_0.rs @@ -1,10 +1,12 @@ +#![allow(clippy::missing_errors_doc)] + use chrono::{DateTime, NaiveDateTime, Utc}; use serde::{Deserialize, Serialize}; use sqlx::sqlite::{SqlitePoolOptions, SqliteQueryResult}; use sqlx::{query, query_as, SqlitePool}; use super::sqlite_v1_0_0::{TorrentRecordV1, UserRecordV1}; -use crate::databases::database::DatabaseError; +use crate::databases::database; use crate::models::torrent_file::{TorrentFile, TorrentInfo}; #[derive(Debug, Serialize, Deserialize, sqlx::FromRow)] @@ -79,21 +81,21 @@ impl SqliteDatabaseV2_0_0 { .expect("Could not run database migrations."); } - pub async fn reset_categories_sequence(&self) -> Result { + pub async fn reset_categories_sequence(&self) -> Result { query("DELETE FROM `sqlite_sequence` WHERE `name` = 'torrust_categories'") .execute(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - pub async fn get_categories(&self) -> Result, DatabaseError> { + pub async fn get_categories(&self) -> Result, database::Error> { query_as::<_, CategoryRecordV2>("SELECT tc.category_id, tc.name, COUNT(tt.category_id) as num_torrents FROM torrust_categories tc LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id GROUP BY tc.name") .fetch_all(&self.pool) .await - .map_err(|_| DatabaseError::Error) + .map_err(|_| database::Error::Error) } - pub async fn insert_category_and_get_id(&self, category_name: &str) -> Result { + pub async fn insert_category_and_get_id(&self, category_name: &str) -> Result { query("INSERT INTO torrust_categories (name) VALUES (?)") .bind(category_name) 
.execute(&self.pool) @@ -102,12 +104,12 @@ impl SqliteDatabaseV2_0_0 { .map_err(|e| match e { sqlx::Error::Database(err) => { if err.message().contains("UNIQUE") { - DatabaseError::CategoryAlreadyExists + database::Error::CategoryAlreadyExists } else { - DatabaseError::Error + database::Error::Error } } - _ => DatabaseError::Error, + _ => database::Error::Error, }) } @@ -257,7 +259,8 @@ impl SqliteDatabaseV2_0_0 { .map(|v| v.last_insert_rowid()) } - pub async fn delete_all_database_rows(&self) -> Result<(), DatabaseError> { + #[allow(clippy::missing_panics_doc)] + pub async fn delete_all_database_rows(&self) -> Result<(), database::Error> { query("DELETE FROM torrust_categories").execute(&self.pool).await.unwrap(); query("DELETE FROM torrust_torrents").execute(&self.pool).await.unwrap(); diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs index f3d83d9b..4226a944 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0; use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::{CategoryRecordV2, SqliteDatabaseV2_0_0}; +#[allow(clippy::missing_panics_doc)] pub async fn transfer_categories(source_database: Arc, target_database: Arc) { println!("Transferring categories ..."); @@ -22,12 +23,11 @@ pub async fn transfer_categories(source_database: Arc, tar .await .unwrap(); - if id != cat.category_id { - panic!( - "Error copying category {:?} from source DB to the target DB", - &cat.category_id - ); - } + assert!( + id == cat.category_id, + "Error copying category {:?} from source DB to the target DB", + &cat.category_id + ); println!("[v2] category: {:?} {:?} added.", id, &cat.name); } diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs index 88a681f0..5e6f9656 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs @@ -1,3 +1,5 @@ +#![allow(clippy::missing_errors_doc)] + use std::sync::Arc; use std::{error, fs}; @@ -6,6 +8,8 @@ use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteData use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::{SqliteDatabaseV2_0_0, TorrentRecordV2}; use crate::utils::parse_torrent::decode_torrent; +#[allow(clippy::missing_panics_doc)] +#[allow(clippy::too_many_lines)] pub async fn transfer_torrents( source_database: Arc, target_database: Arc, @@ -29,21 +33,22 @@ pub async fn transfer_torrents( let uploader = source_database.get_user_by_username(&torrent.uploader).await.unwrap(); - if uploader.username != torrent.uploader { - panic!( - "Error copying torrent with id {:?}. + assert!( + uploader.username == torrent.uploader, + "Error copying torrent with id {:?}. 
Username (`uploader`) in `torrust_torrents` table does not match `username` in `torrust_users` table", - &torrent.torrent_id - ); - } + &torrent.torrent_id + ); let filepath = format!("{}/{}.torrent", upload_path, &torrent.torrent_id); let torrent_from_file_result = read_torrent_from_file(&filepath); - if torrent_from_file_result.is_err() { - panic!("Error torrent file not found: {:?}", &filepath); - } + assert!( + torrent_from_file_result.is_ok(), + "Error torrent file not found: {:?}", + &filepath + ); let torrent_from_file = torrent_from_file_result.unwrap(); @@ -52,12 +57,11 @@ pub async fn transfer_torrents( .await .unwrap(); - if id != torrent.torrent_id { - panic!( - "Error copying torrent {:?} from source DB to the target DB", - &torrent.torrent_id - ); - } + assert!( + id == torrent.torrent_id, + "Error copying torrent {:?} from source DB to the target DB", + &torrent.torrent_id + ); println!("[v2][torrust_torrents] torrent with id {:?} added.", &torrent.torrent_id); @@ -144,7 +148,7 @@ pub async fn transfer_torrents( .flatten() .collect::>(); - for tracker_url in announce_urls.iter() { + for tracker_url in &announce_urls { println!( "[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs index 51c451b0..88e8a1a2 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0; use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0; +#[allow(clippy::missing_panics_doc)] pub async fn transfer_tracker_keys(source_database: Arc, target_database: Arc) { println!("Transferring tracker keys ..."); @@ -28,12 +29,11 @@ pub async fn transfer_tracker_keys(source_database: Arc, t .await .unwrap(); - if id != tracker_key.key_id { - panic!( - "Error copying tracker key {:?} from source DB to the target DB", - &tracker_key.key_id - ); - } + assert!( + id == tracker_key.key_id, + "Error copying tracker key {:?} from source DB to the target DB", + &tracker_key.key_id + ); println!( "[v2][torrust_tracker_keys] tracker key with id {:?} added.", diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs index 76f5ff44..ca127f5a 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0; use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0; +#[allow(clippy::missing_panics_doc)] pub async fn transfer_users( source_database: Arc, target_database: Arc, @@ -27,9 +28,11 @@ pub async fn transfer_users( .await .unwrap(); - if id != user.user_id { - panic!("Error copying user {:?} from source DB to the target DB", &user.user_id); - } + assert!( + id == user.user_id, + "Error copying user {:?} from source DB to the target DB", + &user.user_id + ); println!("[v2][torrust_users] user: {:?} {:?} added.", &user.user_id, &user.username); diff --git 
a/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs index 0cc0ea53..d724ffb7 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs @@ -15,7 +15,7 @@ use std::env; use std::time::SystemTime; use chrono::prelude::{DateTime, Utc}; -use text_colorizer::*; +use text_colorizer::Colorize; use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::{current_db, migrate_target_database, new_db, reset_target_database}; use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::category_transferrer::transfer_categories; @@ -67,7 +67,7 @@ fn parse_args() -> Arguments { } } -pub async fn run_upgrader() { +pub async fn run() { let now = datetime_iso_8601(); upgrade(&parse_args(), &now).await; } @@ -102,6 +102,7 @@ pub async fn upgrade(args: &Arguments, date_imported: &str) { /// Current datetime in ISO8601 without time zone. /// For example: 2022-11-10 10:35:15 +#[must_use] pub fn datetime_iso_8601() -> String { let dt: DateTime = SystemTime::now().into(); format!("{}", dt.format("%Y-%m-%d %H:%M:%S")) } diff --git a/src/utils/clock.rs b/src/utils/clock.rs index 6ba681a5..4c4f0bf0 100644 --- a/src/utils/clock.rs +++ b/src/utils/clock.rs @@ -1,4 +1,4 @@ #[must_use] -pub fn current_time() -> u64 { +pub fn now() -> u64 { u64::try_from(chrono::prelude::Utc::now().timestamp()).expect("timestamp should be positive") } diff --git a/src/utils/hex.rs b/src/utils/hex.rs index 7903c741..be8e82f5 100644 --- a/src/utils/hex.rs +++ b/src/utils/hex.rs @@ -1,17 +1,23 @@ use std::fmt::Write; use std::num::ParseIntError; -pub fn bytes_to_hex(bytes: &[u8]) -> String { +#[must_use] +pub fn from_bytes(bytes: &[u8]) -> String { let mut s = String::with_capacity(2 * bytes.len()); for byte in bytes { - write!(s, "{:02X}", byte).unwrap(); + write!(s, "{byte:02X}").unwrap(); } s } -pub fn hex_to_bytes(s: &str) -> Result, ParseIntError> { +/// Decodes a Hex String into Bytes +/// +/// # Errors +/// +/// This function will return an error if unable to parse the String as Hex. +pub fn into_bytes(s: &str) -> Result, ParseIntError> { (0..s.len()) .step_by(2) .map(|i| u8::from_str_radix(&s[i..i + 2], 16)) .collect() } diff --git a/src/utils/parse_torrent.rs b/src/utils/parse_torrent.rs index e272ede8..9ac4b44f 100644 --- a/src/utils/parse_torrent.rs +++ b/src/utils/parse_torrent.rs @@ -4,21 +4,31 @@ use serde_bencode::{de, Error}; use crate::models::torrent_file::Torrent; +/// Decode a Torrent from Bencoded Bytes +/// +/// # Errors +/// +/// This function will return an error if unable to parse bytes into torrent. pub fn decode_torrent(bytes: &[u8]) -> Result> { match de::from_bytes::(bytes) { Ok(torrent) => Ok(torrent), Err(e) => { - println!("{:?}", e); + println!("{e:?}"); Err(e.into()) } } } +/// Encode a Torrent into Bencoded Bytes +/// +/// # Errors +/// +/// This function will return an error if unable to bencode torrent.
pub fn encode_torrent(torrent: &Torrent) -> Result, Error> { match serde_bencode::to_bytes(torrent) { Ok(bencode_bytes) => Ok(bencode_bytes), Err(e) => { - eprintln!("{:?}", e); + eprintln!("{e:?}"); Err(e) } } diff --git a/src/utils/regex.rs b/src/utils/regex.rs index 4c5b55ff..f423fdaf 100644 --- a/src/utils/regex.rs +++ b/src/utils/regex.rs @@ -1,7 +1,9 @@ use regex::Regex; +#[must_use] pub fn validate_email_address(email_address_to_be_checked: &str) -> bool { - let email_regex = Regex::new(r"^([a-z\d_+]([a-z\d_+.]*[a-z\d_+])?)@([a-z\d]+([\-.][a-z\d]+)*\.[a-z]{2,6})").unwrap(); + let email_regex = Regex::new(r"^([a-z\d_+]([a-z\d_+.]*[a-z\d_+])?)@([a-z\d]+([\-.][a-z\d]+)*\.[a-z]{2,6})") + .expect("regex failed to compile"); email_regex.is_match(email_address_to_be_checked) } @@ -26,6 +28,6 @@ mod tests { assert!(validate_email_address("test@torrust.com")); - assert!(validate_email_address("t@torrust.org")) + assert!(validate_email_address("t@torrust.org")); } } diff --git a/tests/common/contexts/settings/mod.rs b/tests/common/contexts/settings/mod.rs index 4e4f1643..604297f4 100644 --- a/tests/common/contexts/settings/mod.rs +++ b/tests/common/contexts/settings/mod.rs @@ -3,9 +3,8 @@ pub mod responses; use serde::{Deserialize, Serialize}; use torrust_index_backend::config::{ - Api as DomainApi, AppConfiguration as DomainSettings, Auth as DomainAuth, Database as DomainDatabase, - ImageCache as DomainImageCache, Mail as DomainMail, Network as DomainNetwork, Tracker as DomainTracker, - Website as DomainWebsite, + Api as DomainApi, Auth as DomainAuth, Database as DomainDatabase, ImageCache as DomainImageCache, Mail as DomainMail, + Network as DomainNetwork, TorrustBackend as DomainSettings, Tracker as DomainTracker, Website as DomainWebsite, }; #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] diff --git a/tests/databases/mod.rs b/tests/databases/mod.rs index c5a03519..22d83c5e 100644 --- a/tests/databases/mod.rs +++ b/tests/databases/mod.rs @@ -1,15 +1,16 @@ use std::future::Future; -use torrust_index_backend::databases::database::{connect_database, Database}; +use torrust_index_backend::databases::database; +use torrust_index_backend::databases::database::Database; mod mysql; mod sqlite; mod tests; // used to run tests with a clean database -async fn run_test<'a, T, F>(db_fn: T, db: &'a Box) +async fn run_test<'a, T, F, DB: Database + ?Sized>(db_fn: T, db: &'a DB) where - T: FnOnce(&'a Box) -> F + 'a, + T: FnOnce(&'a DB) -> F + 'a, F: Future, { // cleanup database before testing @@ -21,13 +22,15 @@ where // runs all tests pub async fn run_tests(db_path: &str) { - let db_res = connect_database(db_path).await; + let db_res = database::connect(db_path).await; assert!(db_res.is_ok()); - let db = db_res.unwrap(); + let db_boxed = db_res.unwrap(); - run_test(tests::it_can_add_a_user, &db).await; - run_test(tests::it_can_add_a_torrent_category, &db).await; - run_test(tests::it_can_add_a_torrent_and_tracker_stats_to_that_torrent, &db).await; + let db: &dyn Database = db_boxed.as_ref(); + + run_test(tests::it_can_add_a_user, db).await; + run_test(tests::it_can_add_a_torrent_category, db).await; + run_test(tests::it_can_add_a_torrent_and_tracker_stats_to_that_torrent, db).await; } diff --git a/tests/databases/tests.rs b/tests/databases/tests.rs index d74088ff..98c24d60 100644 --- a/tests/databases/tests.rs +++ b/tests/databases/tests.rs @@ -1,5 +1,6 @@ use serde_bytes::ByteBuf; -use torrust_index_backend::databases::database::{Database, DatabaseError}; +use 
torrust_index_backend::databases::database; +use torrust_index_backend::databases::database::Database; use torrust_index_backend::models::torrent::TorrentListing; use torrust_index_backend::models::torrent_file::{Torrent, TorrentInfo}; use torrust_index_backend::models::user::UserProfile; @@ -19,16 +20,16 @@ const TEST_TORRENT_FILE_SIZE: i64 = 128_000; const TEST_TORRENT_SEEDERS: i64 = 437; const TEST_TORRENT_LEECHERS: i64 = 1289; -async fn add_test_user(db: &Box) -> Result { +async fn add_test_user(db: &T) -> Result { db.insert_user_and_get_id(TEST_USER_USERNAME, TEST_USER_EMAIL, TEST_USER_PASSWORD) .await } -async fn add_test_torrent_category(db: &Box) -> Result { +async fn add_test_torrent_category(db: &T) -> Result { db.insert_category_and_get_id(TEST_CATEGORY_NAME).await } -pub async fn it_can_add_a_user(db: &Box) { +pub async fn it_can_add_a_user(db: &T) { let add_test_user_result = add_test_user(db).await; assert!(add_test_user_result.is_ok()); @@ -56,7 +57,7 @@ pub async fn it_can_add_a_user(db: &Box) { ); } -pub async fn it_can_add_a_torrent_category(db: &Box) { +pub async fn it_can_add_a_torrent_category(db: &T) { let add_test_torrent_category_result = add_test_torrent_category(db).await; assert!(add_test_torrent_category_result.is_ok()); @@ -70,7 +71,7 @@ pub async fn it_can_add_a_torrent_category(db: &Box) { assert_eq!(category.name, TEST_CATEGORY_NAME.to_string()); } -pub async fn it_can_add_a_torrent_and_tracker_stats_to_that_torrent(db: &Box) { +pub async fn it_can_add_a_torrent_and_tracker_stats_to_that_torrent(db: &T) { // set pre-conditions let user_id = add_test_user(db).await.expect("add_test_user failed."); let torrent_category_id = add_test_torrent_category(db) @@ -81,7 +82,7 @@ pub async fn it_can_add_a_torrent_and_tracker_stats_to_that_torrent(db: &Box LoggedInUserData { let user = new_logged_in_user(env).await; let database = Arc::new( - connect_database(&env.database_connect_url().unwrap()) + database::connect(&env.database_connect_url().unwrap()) .await .expect("Database error."), ); diff --git a/tests/environments/app_starter.rs b/tests/environments/app_starter.rs index 55cbb355..251f0481 100644 --- a/tests/environments/app_starter.rs +++ b/tests/environments/app_starter.rs @@ -2,12 +2,12 @@ use std::net::SocketAddr; use log::info; use tokio::sync::{oneshot, RwLock}; -use torrust_index_backend::app; -use torrust_index_backend::config::{AppConfiguration, Configuration}; +use torrust_index_backend::config::Configuration; +use torrust_index_backend::{app, config}; /// It launches the app and provides a way to stop it. 
pub struct AppStarter { - configuration: AppConfiguration, + configuration: config::TorrustBackend, config_path: Option, /// The application binary state (started or not): /// - `None`: if the app is not started, @@ -17,7 +17,7 @@ pub struct AppStarter { impl AppStarter { #[must_use] - pub fn with_custom_configuration(configuration: AppConfiguration, config_path: Option) -> Self { + pub fn with_custom_configuration(configuration: config::TorrustBackend, config_path: Option) -> Self { Self { configuration, config_path, @@ -75,7 +75,7 @@ impl AppStarter { } #[must_use] - pub fn server_configuration(&self) -> AppConfiguration { + pub fn server_configuration(&self) -> config::TorrustBackend { self.configuration.clone() } diff --git a/tests/environments/isolated.rs b/tests/environments/isolated.rs index 943497ee..e619e191 100644 --- a/tests/environments/isolated.rs +++ b/tests/environments/isolated.rs @@ -1,5 +1,6 @@ use tempfile::TempDir; -use torrust_index_backend::config::{AppConfiguration, FREE_PORT}; +use torrust_index_backend::config; +use torrust_index_backend::config::FREE_PORT; use super::app_starter::AppStarter; use crate::common::random; @@ -44,7 +45,7 @@ impl TestEnv { /// Provides the whole server configuration. #[must_use] - pub fn server_configuration(&self) -> AppConfiguration { + pub fn server_configuration(&self) -> config::TorrustBackend { self.app_starter.server_configuration() } @@ -67,8 +68,8 @@ impl Default for TestEnv { } /// Provides a configuration with ephemeral data for testing. -fn ephemeral(temp_dir: &TempDir) -> AppConfiguration { - let mut configuration = AppConfiguration::default(); +fn ephemeral(temp_dir: &TempDir) -> config::TorrustBackend { + let mut configuration = config::TorrustBackend::default(); // Ephemeral API port configuration.net.port = FREE_PORT; diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs index fa1adc92..2b8dd1c4 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs @@ -1,3 +1,5 @@ +#![allow(clippy::missing_errors_doc)] + use std::fs; use sqlx::sqlite::SqlitePoolOptions; @@ -63,6 +65,7 @@ impl SqliteDatabaseV1_0_0 { .map(|v| v.last_insert_rowid()) } + #[allow(clippy::missing_panics_doc)] pub async fn delete_all_categories(&self) -> Result<(), sqlx::Error> { query("DELETE FROM torrust_categories").execute(&self.pool).await.unwrap(); Ok(()) diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs index 8d863c10..eff4187e 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs @@ -1,3 +1,5 @@ +#![allow(clippy::missing_errors_doc)] + use serde::{Deserialize, Serialize}; use sqlx::sqlite::SqlitePoolOptions; use sqlx::{query_as, SqlitePool}; diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/category_transferrer_tester.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/category_transferrer_tester.rs index c10f93b8..86a9464c 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/category_transferrer_tester.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/category_transferrer_tester.rs @@ -39,6 +39,7 @@ impl CategoryTester { self.test_data.categories[0].category_id } + #[allow(clippy::missing_panics_doc)] /// Table `torrust_categories` pub async fn load_data_into_source_db(&self) { // Delete categories added by 
migrations @@ -46,10 +47,11 @@ impl CategoryTester { // Add test categories for categories in &self.test_data.categories { - self.source_database.insert_category(&categories).await.unwrap(); + self.source_database.insert_category(categories).await.unwrap(); } } + #[allow(clippy::missing_panics_doc)] /// Table `torrust_categories` pub async fn assert_data_in_target_db(&self) { for categories in &self.test_data.categories { diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/torrent_transferrer_tester.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/torrent_transferrer_tester.rs index 86bd1e52..ecc3511c 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/torrent_transferrer_tester.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/torrent_transferrer_tester.rs @@ -41,8 +41,8 @@ impl TorrentTester { " .to_string(), ), - upload_date: 1667546358, // 2022-11-04 07:19:18 - file_size: 9219566, + upload_date: 1_667_546_358, // 2022-11-04 07:19:18 + file_size: 9_219_566, seeders: 0, leechers: 0, }; @@ -61,8 +61,8 @@ impl TorrentTester { " .to_string(), ), - upload_date: 1667546358, // 2022-11-04 07:19:18 - file_size: 9219566, + upload_date: 1_667_546_358, // 2022-11-04 07:19:18 + file_size: 9_219_566, seeders: 0, leechers: 0, }; @@ -77,26 +77,28 @@ impl TorrentTester { } } + #[allow(clippy::missing_panics_doc)] pub async fn load_data_into_source_db(&self) { for torrent in &self.test_data.torrents { - self.source_database.insert_torrent(&torrent).await.unwrap(); + self.source_database.insert_torrent(torrent).await.unwrap(); } } + #[allow(clippy::missing_panics_doc)] pub async fn assert_data_in_target_db(&self, upload_path: &str) { for torrent in &self.test_data.torrents { - let filepath = self.torrent_file_path(upload_path, torrent.torrent_id); + let filepath = Self::torrent_file_path(upload_path, torrent.torrent_id); let torrent_file = read_torrent_from_file(&filepath).unwrap(); - self.assert_torrent(&torrent, &torrent_file).await; - self.assert_torrent_info(&torrent).await; - self.assert_torrent_announce_urls(&torrent, &torrent_file).await; - self.assert_torrent_files(&torrent, &torrent_file).await; + self.assert_torrent(torrent, &torrent_file).await; + self.assert_torrent_info(torrent).await; + self.assert_torrent_announce_urls(torrent, &torrent_file).await; + self.assert_torrent_files(torrent, &torrent_file).await; } } - pub fn torrent_file_path(&self, upload_path: &str, torrent_id: i64) -> String { + pub fn torrent_file_path(upload_path: &str, torrent_id: i64) -> String { format!("{}/{}.torrent", &upload_path, &torrent_id) } diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/tracker_key_transferrer_tester.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/tracker_key_transferrer_tester.rs index e50ac861..0c212720 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/tracker_key_transferrer_tester.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/tracker_key_transferrer_tester.rs @@ -21,7 +21,7 @@ impl TrackerKeyTester { key_id: 1, user_id, key: "rRstSTM5rx0sgxjLkRSJf3rXODcRBI5T".to_string(), - valid_until: 2456956800, // 11-10-2047 00:00:00 UTC + valid_until: 2_456_956_800, // 11-10-2047 00:00:00 UTC }; Self { @@ -31,6 +31,7 @@ impl TrackerKeyTester { } } + #[allow(clippy::missing_panics_doc)] pub async fn load_data_into_source_db(&self) { self.source_database .insert_tracker_key(&self.test_data.tracker_key) @@ -38,6 +39,7 @@ impl TrackerKeyTester { .unwrap(); } + 
#[allow(clippy::missing_panics_doc)] /// Table `torrust_tracker_keys` pub async fn assert_data_in_target_db(&self) { let imported_key = self diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/user_transferrer_tester.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/user_transferrer_tester.rs index 2d52a683..6ba97ef8 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/user_transferrer_tester.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/transferrer_testers/user_transferrer_tester.rs @@ -42,6 +42,7 @@ impl UserTester { } } + #[allow(clippy::missing_panics_doc)] pub async fn load_data_into_source_db(&self) { self.source_database.insert_user(&self.test_data.user).await.unwrap(); } diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs index 9e207b22..750a19e1 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs @@ -1,6 +1,6 @@ //! You can run this test with: //! -//! //! ```text +//! ```text //! cargo test upgrades_data_from_version_v1_0_0_to_v2_0_0 //! ``` //! @@ -38,8 +38,8 @@ impl Default for TestConfig { let fixtures_dir = "./tests/upgrades/from_v1_0_0_to_v2_0_0/fixtures/".to_string(); let upload_path = format!("{}uploads/", &fixtures_dir); let output_dir = "./tests/upgrades/from_v1_0_0_to_v2_0_0/output/".to_string(); - let source_database_file = format!("{}source.db", output_dir); - let target_database_file = format!("{}target.db", output_dir); + let source_database_file = format!("{output_dir}source.db"); + let target_database_file = format!("{output_dir}target.db"); Self { fixtures_dir, upload_path, @@ -105,20 +105,20 @@ async fn setup_databases(config: &TestConfig) -> (Arc, Arc } async fn source_db_connection(source_database_file: &str) -> Arc { - Arc::new(SqliteDatabaseV1_0_0::db_connection(&source_database_file).await) + Arc::new(SqliteDatabaseV1_0_0::db_connection(source_database_file).await) } async fn target_db_connection(target_database_file: &str) -> Arc { - Arc::new(SqliteDatabaseV2_0_0::db_connection(&target_database_file).await) + Arc::new(SqliteDatabaseV2_0_0::db_connection(target_database_file).await) } /// Reset databases from previous executions fn reset_databases(source_database_file: &str, target_database_file: &str) { if Path::new(source_database_file).exists() { - fs::remove_file(&source_database_file).expect("Can't remove the source DB file."); + fs::remove_file(source_database_file).expect("Can't remove the source DB file."); } if Path::new(target_database_file).exists() { - fs::remove_file(&target_database_file).expect("Can't remove the target DB file."); + fs::remove_file(target_database_file).expect("Can't remove the target DB file."); } }
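
Two of the utility refactors in this change, the renamed `utils/hex.rs` helpers and the `renew_token` renewal window, are easy to sanity-check in isolation. Below is a minimal round-trip sketch for the hex helpers; the module path `torrust_index_backend::utils::hex` is assumed from the diff rather than confirmed against the crate layout:

```rust
use torrust_index_backend::utils::hex;

fn main() {
    let bytes = [0xDE_u8, 0xAD, 0xBE, 0xEF];

    // `from_bytes` renders uppercase hex, two characters per byte.
    let encoded = hex::from_bytes(&bytes);
    assert_eq!(encoded, "DEADBEEF");

    // `into_bytes` parses the string back; it returns a `ParseIntError`
    // on non-hex characters. An odd-length input would panic on the
    // `&s[i..i + 2]` slice, so callers should validate the length first.
    let decoded = hex::into_bytes(&encoded).expect("valid hex");
    assert_eq!(decoded, bytes);
}
```

And a pure-function sketch of the one-week window that `renew_token` uses. The `should_renew` helper is hypothetical, introduced only to isolate the arithmetic; it uses `saturating_sub` where the handler subtracts directly, which is safe in the handler because `verify_jwt` has already rejected expired tokens:

```rust
const ONE_WEEK_IN_SECONDS: u64 = 604_800;

/// Hypothetical helper: renew when the token expires in less than one week.
fn should_renew(exp: u64, now: u64) -> bool {
    exp.saturating_sub(now) < ONE_WEEK_IN_SECONDS
}

fn main() {
    let now = 1_700_000_000_u64;
    assert!(should_renew(now + 3 * 24 * 3_600, now)); // three days left: renew
    assert!(!should_renew(now + 14 * 24 * 3_600, now)); // two weeks left: keep as-is
}
```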