From e4e86ba39a577231f6b9ff3aa321504d6952c564 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Thu, 5 Oct 2023 17:53:35 -0700 Subject: [PATCH] revs --- Cargo.toml | 6 +- src/auth/pats.rs | 4 +- src/auth/validate.rs | 4 +- src/database/models/collection_item.rs | 5 +- src/database/models/organization_item.rs | 3 +- src/database/models/pat_item.rs | 18 +- src/database/models/project_item.rs | 29 +-- src/database/models/session_item.rs | 18 +- src/database/models/thread_item.rs | 6 +- src/database/models/user_item.rs | 5 +- src/database/redis.rs | 22 +- src/lib.rs | 120 ++++++++++- src/main.rs | 122 +----------- src/models/pats.rs | 52 ++--- src/routes/v2/collections.rs | 17 +- src/routes/v2/threads.rs | 1 - src/routes/v2/version_file.rs | 7 +- tests/common/database.rs | 33 ++- tests/common/dummy_data.rs | 229 +++++++++++++++++++++ tests/common/environment.rs | 165 ++------------- tests/common/mod.rs | 104 +--------- tests/common/pats.rs | 30 +++ tests/common/scopes.rs | 124 ++++++++++++ tests/files/dummy-project-alpha.jar | Bin 0 -> 680 bytes tests/files/dummy-project-beta.jar | Bin 0 -> 678 bytes tests/files/dummy_data.sql | 80 ++++---- tests/pats.rs | 16 +- tests/project.rs | 68 +++---- tests/scopes.rs | 243 ++++++++++++----------- 29 files changed, 845 insertions(+), 686 deletions(-) create mode 100644 tests/common/dummy_data.rs create mode 100644 tests/common/pats.rs create mode 100644 tests/common/scopes.rs create mode 100644 tests/files/dummy-project-alpha.jar create mode 100644 tests/files/dummy-project-beta.jar diff --git a/Cargo.toml b/Cargo.toml index 55dff9a0..8e464ad5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,7 +18,6 @@ actix-multipart = "0.6.0" actix-cors = "0.6.4" actix-ws = "0.2.5" actix-files = "0.6.2" -actix-http = "3.4.0" tokio = { version = "1.29.1", features = ["sync"] } tokio-stream = "0.1.14" @@ -92,4 +91,7 @@ color-thief = "0.2.2" woothee = "0.13.0" -lettre = "0.10.4" \ No newline at end of file +lettre = "0.10.4" + +[dev-dependencies] +actix-http = "3.4.0" diff --git a/src/auth/pats.rs b/src/auth/pats.rs index 15ff23dc..b8b2d918 100644 --- a/src/auth/pats.rs +++ b/src/auth/pats.rs @@ -81,7 +81,7 @@ pub async fn create_pat( .validate() .map_err(|err| ApiError::InvalidInput(validation_errors_to_string(err, None)))?; - if info.scopes.restricted() { + if info.scopes.is_restricted() { return Err(ApiError::InvalidInput( "Invalid scopes requested!".to_string(), )); @@ -181,7 +181,7 @@ pub async fn edit_pat( let mut transaction = pool.begin().await?; if let Some(scopes) = &info.scopes { - if scopes.restricted() { + if scopes.is_restricted() { return Err(ApiError::InvalidInput( "Invalid scopes requested!".to_string(), )); diff --git a/src/auth/validate.rs b/src/auth/validate.rs index e37d1415..34a0d128 100644 --- a/src/auth/validate.rs +++ b/src/auth/validate.rs @@ -141,7 +141,7 @@ where session_queue.add_session(session.id, metadata).await; } - user.map(|x| (Scopes::ALL, x)) + user.map(|x| (Scopes::all(), x)) } Some(("github", _)) | Some(("gho", _)) | Some(("ghp", _)) => { let user = AuthProvider::GitHub.get_user(token).await?; @@ -154,7 +154,7 @@ where ) .await?; - user.map(|x| (Scopes::NOT_RESTRICTED, x)) + user.map(|x| ((Scopes::all() ^ Scopes::restricted()), x)) } _ => return Err(AuthenticationError::InvalidAuthMethod), }; diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index 4aacfe1c..12ff7838 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -166,10 +166,7 @@ impl 
Collection {
         if !collection_ids.is_empty() {
             let collections = redis
-                .multi_get::<String, _>(
-                    COLLECTIONS_NAMESPACE,
-                    collection_ids.iter().map(|x| x.0).collect(),
-                )
+                .multi_get::<String, _>(COLLECTIONS_NAMESPACE, collection_ids.iter().map(|x| x.0))
                 .await?;
             for collection in collections {
diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs
index 9f435d8f..5e910bf6 100644
--- a/src/database/models/organization_item.rs
+++ b/src/database/models/organization_item.rs
@@ -124,8 +124,7 @@ impl Organization {
                     ORGANIZATIONS_TITLES_NAMESPACE,
                     organization_strings
                         .iter()
-                        .map(|x| x.to_string().to_lowercase())
-                        .collect(),
+                        .map(|x| x.to_string().to_lowercase()),
                 )
                 .await?
                 .into_iter()
diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs
index cf458ea0..bfb881a9 100644
--- a/src/database/models/pat_item.rs
+++ b/src/database/models/pat_item.rs
@@ -108,7 +108,7 @@ impl PersonalAccessToken {
                 &mut redis
                     .multi_get::<i64, _>(
                         PATS_TOKENS_NAMESPACE,
-                        pat_strings.iter().map(|x| x.to_string()).collect(),
+                        pat_strings.iter().map(|x| x.to_string()),
                     )
                     .await?
                     .into_iter()
@@ -238,15 +238,13 @@ impl PersonalAccessToken {
         }
 
         for (id, token, user_id) in clear_pats {
-            if let Some(id) = id {
-                redis.delete(PATS_NAMESPACE, id.0).await?;
-            }
-            if let Some(token) = token {
-                redis.delete(PATS_TOKENS_NAMESPACE, token).await?;
-            }
-            if let Some(user_id) = user_id {
-                redis.delete(PATS_USERS_NAMESPACE, user_id.0).await?;
-            }
+            redis
+                .delete_many([
+                    (PATS_NAMESPACE, id.map(|i| i.0.to_string())),
+                    (PATS_TOKENS_NAMESPACE, token),
+                    (PATS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())),
+                ])
+                .await?;
         }
 
         Ok(())
diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs
index bd59f124..f841f934 100644
--- a/src/database/models/project_item.rs
+++ b/src/database/models/project_item.rs
@@ -499,10 +499,7 @@ impl Project {
                 &mut redis
                     .multi_get::<i64, _>(
                         PROJECTS_SLUGS_NAMESPACE,
-                        project_strings
-                            .iter()
-                            .map(|x| x.to_string().to_lowercase())
-                            .collect(),
+                        project_strings.iter().map(|x| x.to_string().to_lowercase()),
                     )
                     .await?
                     .into_iter()
@@ -795,16 +792,20 @@ impl Project {
         clear_dependencies: Option<bool>,
         redis: &RedisPool,
     ) -> Result<(), DatabaseError> {
-        redis.delete(PROJECTS_NAMESPACE, id.0).await?;
-        if let Some(slug) = slug {
-            redis
-                .delete(PROJECTS_SLUGS_NAMESPACE, slug.to_lowercase())
-                .await?;
-        }
-        if clear_dependencies.unwrap_or(false) {
-            redis.delete(PROJECTS_DEPENDENCIES_NAMESPACE, id.0).await?;
-        }
-
+        redis
+            .delete_many([
+                (PROJECTS_NAMESPACE, Some(id.0.to_string())),
+                (PROJECTS_SLUGS_NAMESPACE, slug.map(|x| x.to_lowercase())),
+                (
+                    PROJECTS_DEPENDENCIES_NAMESPACE,
+                    if clear_dependencies.unwrap_or(false) {
+                        Some(id.0.to_string())
+                    } else {
+                        None
+                    },
+                ),
+            ])
+            .await?;
         Ok(())
     }
 }
diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs
index aeb2c849..b14e2dba 100644
--- a/src/database/models/session_item.rs
+++ b/src/database/models/session_item.rs
@@ -149,7 +149,7 @@ impl Session {
                 &mut redis
                     .multi_get::<i64, _>(
                         SESSIONS_IDS_NAMESPACE,
-                        session_strings.iter().map(|x| x.to_string()).collect(),
+                        session_strings.iter().map(|x| x.to_string()),
                     )
                     .await?
                    .into_iter()
@@ -288,15 +288,13 @@ impl Session {
         }
 
         for (id, session, user_id) in clear_sessions {
-            if let Some(id) = id {
-                redis.delete(SESSIONS_NAMESPACE, id.0).await?;
-            }
-            if let Some(session) = session {
-                redis.delete(SESSIONS_IDS_NAMESPACE, session).await?;
-            }
-            if let Some(user_id) = user_id {
-                redis.delete(SESSIONS_USERS_NAMESPACE, user_id.0).await?;
-            }
+            redis
+                .delete_many([
+                    (SESSIONS_NAMESPACE, id.map(|i| i.0.to_string())),
+                    (SESSIONS_IDS_NAMESPACE, session),
+                    (SESSIONS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())),
+                ])
+                .await?;
         }
 
         Ok(())
diff --git a/src/database/models/thread_item.rs b/src/database/models/thread_item.rs
index 091eece3..c81b2db4 100644
--- a/src/database/models/thread_item.rs
+++ b/src/database/models/thread_item.rs
@@ -2,7 +2,7 @@ use super::ids::*;
 use crate::database::models::DatabaseError;
 use crate::models::threads::{MessageBody, ThreadType};
 use chrono::{DateTime, Utc};
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 
 pub struct ThreadBuilder {
     pub type_: ThreadType,
@@ -11,7 +11,7 @@ pub struct ThreadBuilder {
     pub report_id: Option<ReportId>,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Serialize)]
 pub struct Thread {
     pub id: ThreadId,
 
@@ -30,7 +30,7 @@ pub struct ThreadMessageBuilder {
     pub thread_id: ThreadId,
 }
 
-#[derive(Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone)]
 pub struct ThreadMessage {
     pub id: ThreadMessageId,
     pub thread_id: ThreadId,
diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs
index 42de0ea0..04cac010 100644
--- a/src/database/models/user_item.rs
+++ b/src/database/models/user_item.rs
@@ -153,10 +153,7 @@ impl User {
                 &mut redis
                     .multi_get::<i64, _>(
                         USER_USERNAMES_NAMESPACE,
-                        users_strings
-                            .iter()
-                            .map(|x| x.to_string().to_lowercase())
-                            .collect(),
+                        users_strings.iter().map(|x| x.to_string().to_lowercase()),
                     )
                     .await?
                    .into_iter()
diff --git a/src/database/redis.rs b/src/database/redis.rs
index 941cdcd7..35a17c5f 100644
--- a/src/database/redis.rs
+++ b/src/database/redis.rs
@@ -76,7 +76,7 @@ impl RedisPool {
    pub async fn multi_get<R, T1>(
         &self,
         namespace: &str,
-        ids: Vec<T1>,
+        ids: impl IntoIterator<Item = T1>,
     ) -> Result<Vec<Option<R>>, DatabaseError>
     where
         T1: Display,
@@ -85,7 +85,7 @@ where
     {
         let mut redis_connection = self.pool.get().await?;
         let res = cmd("MGET")
             .arg(
-                ids.iter()
+                ids.into_iter()
                     .map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x))
                     .collect::<Vec<String>>(),
             )
@@ -107,4 +107,22 @@
 
         Ok(())
     }
+
+    pub async fn delete_many(
+        &self,
+        iter: impl IntoIterator<Item = (&str, Option<String>)>,
+    ) -> Result<(), DatabaseError>
+where {
+        let mut redis_connection = self.pool.get().await?;
+
+        let mut cmd = cmd("DEL");
+        for (namespace, id) in iter {
+            if let Some(id) = id {
+                cmd.arg(format!("{}_{}:{}", self.meta_namespace, namespace, id));
+            }
+        }
+        cmd.query_async::<_, ()>(&mut redis_connection).await?;
+
+        Ok(())
+    }
 }
diff --git a/src/lib.rs b/src/lib.rs
index 26ef471c..01ff0bcd 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -16,7 +16,9 @@ use clickhouse_crate::Client;
 use util::cors::default_cors;
 
 use crate::{
-    queue::payouts::process_payout, search::indexing::index_projects, util::env::parse_var,
+    queue::payouts::process_payout,
+    search::indexing::index_projects,
+    util::env::{parse_strings_from_var, parse_var},
 };
 
 pub mod auth;
@@ -293,3 +295,119 @@ pub fn app_config(cfg: &mut web::ServiceConfig, labrinth_config: LabrinthConfig)
         .configure(routes::root_config)
         .default_service(web::get().wrap(default_cors()).to(routes::not_found));
 }
+
+// This is so that env vars not used immediately don't panic at runtime
+pub fn check_env_vars() -> bool {
+    let mut failed = false;
+
+    fn check_var<T: std::str::FromStr>(var: &'static str) -> bool {
+        let check = parse_var::<T>(var).is_none();
+        if check {
+            warn!(
+                "Variable `{}` missing in dotenv or not of type `{}`",
+                var,
+                std::any::type_name::<T>()
+            );
+        }
+        check
+    }
+
+    failed |= check_var::<String>("SITE_URL");
+    failed |= check_var::<String>("CDN_URL");
+    failed |= check_var::<String>("LABRINTH_ADMIN_KEY");
+    failed |= check_var::<String>("RATE_LIMIT_IGNORE_KEY");
+    failed |= check_var::<String>("DATABASE_URL");
+    failed |= check_var::<String>("MEILISEARCH_ADDR");
+    failed |= check_var::<String>("MEILISEARCH_KEY");
+    failed |= check_var::<String>("REDIS_URL");
+    failed |= check_var::<String>("BIND_ADDR");
+    failed |= check_var::<String>("SELF_ADDR");
+
+    failed |= check_var::<String>("STORAGE_BACKEND");
+
+    let storage_backend = dotenvy::var("STORAGE_BACKEND").ok();
+    match storage_backend.as_deref() {
+        Some("backblaze") => {
+            failed |= check_var::<String>("BACKBLAZE_KEY_ID");
+            failed |= check_var::<String>("BACKBLAZE_KEY");
+            failed |= check_var::<String>("BACKBLAZE_BUCKET_ID");
+        }
+        Some("s3") => {
+            failed |= check_var::<String>("S3_ACCESS_TOKEN");
+            failed |= check_var::<String>("S3_SECRET");
+            failed |= check_var::<String>("S3_URL");
+            failed |= check_var::<String>("S3_REGION");
+            failed |= check_var::<String>("S3_BUCKET_NAME");
+        }
+        Some("local") => {
+            failed |= check_var::<String>("MOCK_FILE_PATH");
+        }
+        Some(backend) => {
+            warn!("Variable `STORAGE_BACKEND` contains an invalid value: {}. Expected \"backblaze\", \"s3\", or \"local\".", backend);
+            failed |= true;
+        }
+        _ => {
+            warn!("Variable `STORAGE_BACKEND` is not set!");
+            failed |= true;
+        }
+    }
+
+    failed |= check_var::<u64>("LOCAL_INDEX_INTERVAL");
+    failed |= check_var::<u64>("VERSION_INDEX_INTERVAL");
+
+    if parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").is_none() {
+        warn!("Variable `WHITELISTED_MODPACK_DOMAINS` missing in dotenv or not a json array of strings");
+        failed |= true;
+    }
+
+    if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() {
+        warn!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings");
+        failed |= true;
+    }
+
+    failed |= check_var::<String>("PAYPAL_API_URL");
+    failed |= check_var::<String>("PAYPAL_CLIENT_ID");
+    failed |= check_var::<String>("PAYPAL_CLIENT_SECRET");
+
+    failed |= check_var::<String>("GITHUB_CLIENT_ID");
+    failed |= check_var::<String>("GITHUB_CLIENT_SECRET");
+    failed |= check_var::<String>("GITLAB_CLIENT_ID");
+    failed |= check_var::<String>("GITLAB_CLIENT_SECRET");
+    failed |= check_var::<String>("DISCORD_CLIENT_ID");
+    failed |= check_var::<String>("DISCORD_CLIENT_SECRET");
+    failed |= check_var::<String>("MICROSOFT_CLIENT_ID");
+    failed |= check_var::<String>("MICROSOFT_CLIENT_SECRET");
+    failed |= check_var::<String>("GOOGLE_CLIENT_ID");
+    failed |= check_var::<String>("GOOGLE_CLIENT_SECRET");
+    failed |= check_var::<String>("STEAM_API_KEY");
+
+    failed |= check_var::<String>("TURNSTILE_SECRET");
+
+    failed |= check_var::<String>("SMTP_USERNAME");
+    failed |= check_var::<String>("SMTP_PASSWORD");
+    failed |= check_var::<String>("SMTP_HOST");
+
+    failed |= check_var::<String>("SITE_VERIFY_EMAIL_PATH");
+    failed |= check_var::<String>("SITE_RESET_PASSWORD_PATH");
+
+    failed |= check_var::<String>("BEEHIIV_PUBLICATION_ID");
+    failed |= check_var::<String>("BEEHIIV_API_KEY");
+
+    if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() {
+        warn!(
+            "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings"
+        );
+        failed |= true;
+    }
+
+    failed |= check_var::<String>("CLICKHOUSE_URL");
+    failed |= check_var::<String>("CLICKHOUSE_USER");
+    failed |= check_var::<String>("CLICKHOUSE_PASSWORD");
+    failed |= check_var::<String>("CLICKHOUSE_DATABASE");
+
+    failed |= check_var::<String>("MAXMIND_LICENSE_KEY");
+
+    failed |= check_var::<u64>("PAYOUTS_BUDGET");
+
+    failed
+}
diff --git a/src/main.rs b/src/main.rs
index f25f7c2a..e0d0e0ff 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -5,9 +5,9 @@ use labrinth::file_hosting::S3Host;
 use labrinth::ratelimit::errors::ARError;
 use labrinth::ratelimit::memory::{MemoryStore, MemoryStoreActor};
 use labrinth::ratelimit::middleware::RateLimiter;
-use labrinth::util::env::{parse_strings_from_var, parse_var};
-use labrinth::{clickhouse, database, file_hosting, queue};
-use log::{error, info, warn};
+use labrinth::util::env::parse_var;
+use labrinth::{check_env_vars, clickhouse, database, file_hosting, queue};
+use log::{error, info};
 
 use std::sync::Arc;
 
@@ -130,119 +130,3 @@
         .run()
         .await
 }
-
-// This is so that env vars not used immediately don't panic at runtime
-fn check_env_vars() -> bool {
-    let mut failed = false;
-
-    fn check_var<T: std::str::FromStr>(var: &'static str) -> bool {
-        let check = parse_var::<T>(var).is_none();
-        if check {
-            warn!(
-                "Variable `{}` missing in dotenv or not of type `{}`",
-                var,
-                std::any::type_name::<T>()
-            );
-        }
-        check
-    }
-
-    failed |= check_var::<String>("SITE_URL");
-    failed |= check_var::<String>("CDN_URL");
-    failed |= check_var::<String>("LABRINTH_ADMIN_KEY");
-    failed |= check_var::<String>("RATE_LIMIT_IGNORE_KEY");
-    failed |= check_var::<String>("DATABASE_URL");
-    failed |= check_var::<String>("MEILISEARCH_ADDR");
-    failed |= check_var::<String>("MEILISEARCH_KEY");
-    failed |= check_var::<String>("REDIS_URL");
-    failed |= check_var::<String>("BIND_ADDR");
-    failed |= check_var::<String>("SELF_ADDR");
-
-    failed |= check_var::<String>("STORAGE_BACKEND");
-
-    let storage_backend = dotenvy::var("STORAGE_BACKEND").ok();
-    match storage_backend.as_deref() {
-        Some("backblaze") => {
-            failed |= check_var::<String>("BACKBLAZE_KEY_ID");
-            failed |= check_var::<String>("BACKBLAZE_KEY");
-            failed |= check_var::<String>("BACKBLAZE_BUCKET_ID");
-        }
-        Some("s3") => {
-            failed |= check_var::<String>("S3_ACCESS_TOKEN");
-            failed |= check_var::<String>("S3_SECRET");
-            failed |= check_var::<String>("S3_URL");
-            failed |= check_var::<String>("S3_REGION");
-            failed |= check_var::<String>("S3_BUCKET_NAME");
-        }
-        Some("local") => {
-            failed |= check_var::<String>("MOCK_FILE_PATH");
-        }
-        Some(backend) => {
-            warn!("Variable `STORAGE_BACKEND` contains an invalid value: {}. Expected \"backblaze\", \"s3\", or \"local\".", backend);
-            failed |= true;
-        }
-        _ => {
-            warn!("Variable `STORAGE_BACKEND` is not set!");
-            failed |= true;
-        }
-    }
-
-    failed |= check_var::<u64>("LOCAL_INDEX_INTERVAL");
-    failed |= check_var::<u64>("VERSION_INDEX_INTERVAL");
-
-    if parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").is_none() {
-        warn!("Variable `WHITELISTED_MODPACK_DOMAINS` missing in dotenv or not a json array of strings");
-        failed |= true;
-    }
-
-    if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() {
-        warn!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings");
-        failed |= true;
-    }
-
-    failed |= check_var::<String>("PAYPAL_API_URL");
-    failed |= check_var::<String>("PAYPAL_CLIENT_ID");
-    failed |= check_var::<String>("PAYPAL_CLIENT_SECRET");
-
-    failed |= check_var::<String>("GITHUB_CLIENT_ID");
-    failed |= check_var::<String>("GITHUB_CLIENT_SECRET");
-    failed |= check_var::<String>("GITLAB_CLIENT_ID");
-    failed |= check_var::<String>("GITLAB_CLIENT_SECRET");
-    failed |= check_var::<String>("DISCORD_CLIENT_ID");
-    failed |= check_var::<String>("DISCORD_CLIENT_SECRET");
-    failed |= check_var::<String>("MICROSOFT_CLIENT_ID");
-    failed |= check_var::<String>("MICROSOFT_CLIENT_SECRET");
-    failed |= check_var::<String>("GOOGLE_CLIENT_ID");
-    failed |= check_var::<String>("GOOGLE_CLIENT_SECRET");
-    failed |= check_var::<String>("STEAM_API_KEY");
-
-    failed |= check_var::<String>("TURNSTILE_SECRET");
-
-    failed |= check_var::<String>("SMTP_USERNAME");
-    failed |= check_var::<String>("SMTP_PASSWORD");
-    failed |= check_var::<String>("SMTP_HOST");
-
-    failed |= check_var::<String>("SITE_VERIFY_EMAIL_PATH");
-    failed |= check_var::<String>("SITE_RESET_PASSWORD_PATH");
-
-    failed |= check_var::<String>("BEEHIIV_PUBLICATION_ID");
-    failed |= check_var::<String>("BEEHIIV_API_KEY");
-
-    if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() {
-        warn!(
-            "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings"
-        );
-        failed |= true;
-    }
-
-    failed |= check_var::<String>("CLICKHOUSE_URL");
-    failed |= check_var::<String>("CLICKHOUSE_USER");
-    failed |= check_var::<String>("CLICKHOUSE_PASSWORD");
-    failed |= check_var::<String>("CLICKHOUSE_DATABASE");
-
-    failed |= check_var::<String>("MAXMIND_LICENSE_KEY");
-
-    failed |= check_var::<u64>("PAYOUTS_BUDGET");
-
-    failed
-}
diff --git a/src/models/pats.rs b/src/models/pats.rs
index 44b9ee9c..5d3f65ca 100644
--- a/src/models/pats.rs
+++ b/src/models/pats.rs
@@ -103,26 +103,26 @@ bitflags::bitflags!
{ // delete an organization const ORGANIZATION_DELETE = 1 << 38; - const ALL = 0b111111111111111111111111111111111111111; - const NOT_RESTRICTED = 0b1111111110000000111111111111111111100111; const NONE = 0b0; } } impl Scopes { // these scopes cannot be specified in a personal access token - pub fn restricted(&self) -> bool { - self.intersects( - Scopes::PAT_CREATE - | Scopes::PAT_READ - | Scopes::PAT_WRITE - | Scopes::PAT_DELETE - | Scopes::SESSION_READ - | Scopes::SESSION_DELETE - | Scopes::USER_AUTH_WRITE - | Scopes::USER_DELETE - | Scopes::PERFORM_ANALYTICS, - ) + pub fn restricted() -> Scopes { + Scopes::PAT_CREATE + | Scopes::PAT_READ + | Scopes::PAT_WRITE + | Scopes::PAT_DELETE + | Scopes::SESSION_READ + | Scopes::SESSION_DELETE + | Scopes::USER_AUTH_WRITE + | Scopes::USER_DELETE + | Scopes::PERFORM_ANALYTICS + } + + pub fn is_restricted(&self) -> bool { + self.intersects(Self::restricted()) } } @@ -159,27 +159,3 @@ impl PersonalAccessToken { } } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - pub fn pat_sanity() { - assert_eq!(Scopes::NONE, Scopes::empty()); - - // Ensure PATs add up and match - // (Such as NOT_RESTRICTED lining up with is_restricted()) - let mut calculated_not_restricted = Scopes::NONE; - let mut calculated_all = Scopes::NONE; - for i in 0..64 { - let scope = Scopes::from_bits_truncate(1 << i); - if !scope.restricted() { - calculated_not_restricted |= scope; - } - calculated_all |= scope; - } - assert_eq!(Scopes::ALL | Scopes::NOT_RESTRICTED, calculated_all); - assert_eq!(Scopes::NOT_RESTRICTED, calculated_not_restricted); - } -} diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs index 56b658c1..01372b0e 100644 --- a/src/routes/v2/collections.rs +++ b/src/routes/v2/collections.rs @@ -1,6 +1,5 @@ use crate::auth::checks::{filter_authorized_collections, is_authorized_collection}; use crate::auth::get_user_from_headers; -use crate::database; use crate::database::models::{collection_item, generate_collection_id, project_item}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; @@ -12,6 +11,7 @@ use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::util::routes::read_from_payload; use crate::util::validate::validation_errors_to_string; +use crate::{database, models}; use actix_web::web::Data; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use chrono::Utc; @@ -231,7 +231,7 @@ pub async fn collection_edit( let result = database::models::Collection::get(id, &**pool, &redis).await?; if let Some(collection_item) = result { - if collection_item.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection_item, &user) { return Ok(HttpResponse::Unauthorized().body("")); } @@ -371,7 +371,7 @@ pub async fn collection_icon_edit( ApiError::InvalidInput("The specified collection does not exist!".to_string()) })?; - if collection_item.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection_item, &user) { return Ok(HttpResponse::Unauthorized().body("")); } @@ -452,7 +452,7 @@ pub async fn delete_collection_icon( .ok_or_else(|| { ApiError::InvalidInput("The specified collection does not exist!".to_string()) })?; - if collection_item.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection_item, &user) { return Ok(HttpResponse::Unauthorized().body("")); } @@ -510,7 +510,7 @@ pub async fn collection_delete( .ok_or_else(|| { ApiError::InvalidInput("The specified collection does not 
exist!".to_string()) })?; - if collection.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection, &user) { return Ok(HttpResponse::Unauthorized().body("")); } let mut transaction = pool.begin().await?; @@ -527,3 +527,10 @@ pub async fn collection_delete( Ok(HttpResponse::NotFound().body("")) } } + +fn can_modify_collection( + collection: &database::models::Collection, + user: &models::users::User, +) -> bool { + collection.user_id == user.id.into() || user.role.is_mod() +} diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs index 79930a0b..af2a5782 100644 --- a/src/routes/v2/threads.rs +++ b/src/routes/v2/threads.rs @@ -535,7 +535,6 @@ pub async fn moderation_inbox( let threads_data = database::models::Thread::get_many(&ids, &**pool).await?; let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?; - Ok(HttpResponse::Ok().json(threads)) } diff --git a/src/routes/v2/version_file.rs b/src/routes/v2/version_file.rs index 392d5fe3..171788b1 100644 --- a/src/routes/v2/version_file.rs +++ b/src/routes/v2/version_file.rs @@ -23,7 +23,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { .service(get_version_from_hash) .service(download_version) .service(get_update_from_hash) - .service(get_projects_from_hashes), // TODO: confirm this should be added + .service(get_projects_from_hashes), ); cfg.service( @@ -34,7 +34,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { ); } -#[derive(Deserialize)] +#[derive(Serialize, Deserialize)] pub struct HashQuery { #[serde(default = "default_algorithm")] pub algorithm: String, @@ -65,7 +65,6 @@ pub async fn get_version_from_hash( .await .map(|x| x.1) .ok(); - let hash = info.into_inner().0.to_lowercase(); let file = database::models::Version::get_file_from_hash( hash_query.algorithm.clone(), @@ -75,10 +74,8 @@ pub async fn get_version_from_hash( &redis, ) .await?; - if let Some(file) = file { let version = database::models::Version::get(file.version_id, &**pool, &redis).await?; - if let Some(version) = version { if !is_authorized_version(&version.inner, &user_option, &pool).await? { return Ok(HttpResponse::NotFound().body("")); diff --git a/tests/common/database.rs b/tests/common/database.rs index 5236b076..c1208d51 100644 --- a/tests/common/database.rs +++ b/tests/common/database.rs @@ -31,26 +31,26 @@ pub const ENEMY_USER_PAT: &str = "mrp_patenemy"; // There are two test projects. They are both created by user 3 (USER_USER_ID). // They differ only in that 'ALPHA' is a public, approved project, and 'BETA' is a private, project in queue. // The same goes for their corresponding versions- one listed, one draft. -pub const PROJECT_ALPHA_TEAM_ID: &str = "1c"; -pub const PROJECT_BETA_TEAM_ID: &str = "1d"; +// pub const PROJECT_ALPHA_TEAM_ID: &str = "1c"; +// pub const PROJECT_BETA_TEAM_ID: &str = "1d"; -pub const PROJECT_ALPHA_PROJECT_ID: &str = "G8"; -pub const PROJECT_BETA_PROJECT_ID: &str = "G9"; +// pub const PROJECT_ALPHA_PROJECT_ID: &str = "G8"; +// pub const PROJECT_BETA_PROJECT_ID: &str = "G9"; -pub const PROJECT_ALPHA_PROJECT_SLUG: &str = "testslug"; -pub const PROJECT_BETA_PROJECT_SLUG: &str = "testslug2"; +// pub const PROJECT_ALPHA_PROJECT_SLUG: &str = "testslug"; +// pub const PROJECT_BETA_PROJECT_SLUG: &str = "testslug2"; -pub const PROJECT_ALPHA_VERSION_ID: &str = "Hk"; -pub const PROJECT_BETA_VERSION_ID: &str = "Hl"; +// pub const alpha_version_id: &str = "Hk"; +// pub const beta_version_id: &str = "Hl"; // These are threads created alongside the projects. 
-pub const PROJECT_ALPHA_THREAD_ID: &str = "U"; -pub const PROJECT_BETA_THREAD_ID: &str = "V"; +// pub const alpha_thread_id: &str = "U"; +// pub const PROJECT_BETA_THREAD_ID: &str = "V"; // These are the hashes of the files attached to their versions: they do not reflect a 'real' hash of data. // This can be used for /version_file/ type endpoints which get a project's data from its hash. -pub const PROJECT_ALPHA_THREAD_FILE_HASH: &str = "000000000"; -pub const PROJECT_BETA_THREAD_FILE_HASH: &str = "111111111"; +// pub const alpha_file_hash: &str = "000000000"; +// pub const beta_file_hash: &str = "111111111"; pub struct TemporaryDatabase { pub pool: PgPool, @@ -112,12 +112,6 @@ impl TemporaryDatabase { } } - pub async fn create_with_dummy() -> Self { - let db = Self::create().await; - db.add_dummy_data().await; - db - } - // Deletes the temporary database // If a temporary db is created, it must be cleaned up with cleanup. // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. @@ -149,10 +143,13 @@ impl TemporaryDatabase { } pub async fn add_dummy_data(&self) { + // Adds basic dummy data to the database directly with sql (user, pats) let pool = &self.pool.clone(); pool.execute(include_str!("../files/dummy_data.sql")) .await .unwrap(); + + // Adds dummy data to the database with sqlx (projects, versions, threads) } } diff --git a/tests/common/dummy_data.rs b/tests/common/dummy_data.rs new file mode 100644 index 00000000..d3cd9667 --- /dev/null +++ b/tests/common/dummy_data.rs @@ -0,0 +1,229 @@ +use actix_web::test::{self, TestRequest}; +use labrinth::{models::projects::Project, models::projects::Version}; +use serde_json::json; +use sqlx::Executor; + +use crate::common::{ + actix::AppendsMultipart, + database::{MOD_USER_PAT, USER_USER_PAT}, +}; + +use super::{ + actix::{MultipartSegment, MultipartSegmentData}, + environment::TestEnvironment, +}; + +pub struct DummyData { + pub alpha_team_id: String, + pub beta_team_id: String, + + pub alpha_project_id: String, + pub beta_project_id: String, + + pub alpha_project_slug: String, + pub beta_project_slug: String, + + pub alpha_version_id: String, + pub beta_version_id: String, + + pub alpha_thread_id: String, + pub beta_thread_id: String, + + pub alpha_file_hash: String, + pub beta_file_hash: String, +} + +pub async fn add_dummy_data(test_env: &TestEnvironment) -> DummyData { + // Adds basic dummy data to the database directly with sql (user, pats) + let pool = &test_env.db.pool.clone(); + pool.execute(include_str!("../files/dummy_data.sql")) + .await + .unwrap(); + + let (alpha_project, alpha_version) = add_project_alpha(test_env).await; + let (beta_project, beta_version) = add_project_beta(test_env).await; + + DummyData { + alpha_team_id: alpha_project.team.to_string(), + beta_team_id: beta_project.team.to_string(), + + alpha_project_id: alpha_project.id.to_string(), + beta_project_id: beta_project.id.to_string(), + + alpha_project_slug: alpha_project.slug.unwrap(), + beta_project_slug: beta_project.slug.unwrap(), + + alpha_version_id: alpha_version.id.to_string(), + beta_version_id: beta_version.id.to_string(), + + alpha_thread_id: alpha_project.thread_id.to_string(), + beta_thread_id: beta_project.thread_id.to_string(), + + alpha_file_hash: alpha_version.files[0].hashes["sha1"].clone(), + beta_file_hash: beta_version.files[0].hashes["sha1"].clone(), + } +} + +pub async fn add_project_alpha(test_env: &TestEnvironment) -> (Project, Version) { + // Adds dummy data to the 
database with sqlx (projects, versions, threads)
+    // Generate test project data.
+    let json_data = json!(
+        {
+            "title": "Test Project Alpha",
+            "slug": "alpha",
+            "description": "A dummy project for testing with.",
+            "body": "This project is approved, and versions are listed.",
+            "client_side": "required",
+            "server_side": "optional",
+            "initial_versions": [{
+                "file_parts": ["dummy-project-alpha.jar"],
+                "version_number": "1.2.3",
+                "version_title": "start",
+                "dependencies": [],
+                "game_versions": ["1.20.1"] ,
+                "release_channel": "release",
+                "loaders": ["fabric"],
+                "featured": true
+            }],
+            "categories": [],
+            "license_id": "MIT"
+        }
+    );
+
+    // Basic json
+    let json_segment = MultipartSegment {
+        name: "data".to_string(),
+        filename: None,
+        content_type: Some("application/json".to_string()),
+        data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
+    };
+
+    // Basic file
+    let file_segment = MultipartSegment {
+        name: "dummy-project-alpha.jar".to_string(),
+        filename: Some("dummy-project-alpha.jar".to_string()),
+        content_type: Some("application/java-archive".to_string()),
+        data: MultipartSegmentData::Binary(
+            include_bytes!("../../tests/files/dummy-project-alpha.jar").to_vec(),
+        ),
+    };
+
+    // Add a project.
+    let req = TestRequest::post()
+        .uri("/v2/project")
+        .append_header(("Authorization", USER_USER_PAT))
+        .set_multipart(vec![json_segment.clone(), file_segment.clone()])
+        .to_request();
+    let resp = test_env.call(req).await;
+    assert_eq!(resp.status(), 200);
+
+    // Approve as a moderator.
+    let req = TestRequest::patch()
+        .uri("/v2/project/alpha")
+        .append_header(("Authorization", MOD_USER_PAT))
+        .set_json(json!(
+            {
+                "status": "approved"
+            }
+        ))
+        .to_request();
+    let resp = test_env.call(req).await;
+    assert_eq!(resp.status(), 204);
+
+    // Get project
+    let req = TestRequest::get()
+        .uri("/v2/project/alpha")
+        .append_header(("Authorization", USER_USER_PAT))
+        .to_request();
+    let resp = test_env.call(req).await;
+    let project: Project = test::read_body_json(resp).await;
+
+    // Get project's versions
+    let req = TestRequest::get()
+        .uri("/v2/project/alpha/version")
+        .append_header(("Authorization", USER_USER_PAT))
+        .to_request();
+    let resp = test_env.call(req).await;
+    let versions: Vec<Version> = test::read_body_json(resp).await;
+    let version = versions.into_iter().next().unwrap();
+
+    (project, version)
+}
+
+pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version) {
+    // Adds dummy data to the database with sqlx (projects, versions, threads)
+    // Generate test project data.
+    let json_data = json!(
+        {
+            "title": "Test Project Beta",
+            "slug": "beta",
+            "description": "A dummy project for testing with.",
+            "body": "This project is not-yet-approved, and versions are draft.",
+            "client_side": "required",
+            "server_side": "optional",
+            "initial_versions": [{
+                "file_parts": ["dummy-project-beta.jar"],
+                "version_number": "1.2.3",
+                "version_title": "start",
+                "status": "unlisted",
+                "requested_status": "unlisted",
+                "dependencies": [],
+                "game_versions": ["1.20.1"] ,
+                "release_channel": "release",
+                "loaders": ["fabric"],
+                "featured": true
+            }],
+            "status": "private",
+            "requested_status": "private",
+            "categories": [],
+            "license_id": "MIT"
+        }
+    );
+
+    // Basic json
+    let json_segment = MultipartSegment {
+        name: "data".to_string(),
+        filename: None,
+        content_type: Some("application/json".to_string()),
+        data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
+    };
+
+    // Basic file
+    let file_segment = MultipartSegment {
+        name: "dummy-project-beta.jar".to_string(),
+        filename: Some("dummy-project-beta.jar".to_string()),
+        content_type: Some("application/java-archive".to_string()),
+        data: MultipartSegmentData::Binary(
+            include_bytes!("../../tests/files/dummy-project-beta.jar").to_vec(),
+        ),
+    };
+
+    // Add a project.
+    let req = TestRequest::post()
+        .uri("/v2/project")
+        .append_header(("Authorization", USER_USER_PAT))
+        .set_multipart(vec![json_segment.clone(), file_segment.clone()])
+        .to_request();
+    let resp = test_env.call(req).await;
+
+    assert_eq!(resp.status(), 200);
+
+    // Get project
+    let req = TestRequest::get()
+        .uri("/v2/project/beta")
+        .append_header(("Authorization", USER_USER_PAT))
+        .to_request();
+    let resp = test_env.call(req).await;
+    let project: Project = test::read_body_json(resp).await;
+
+    // Get project's versions
+    let req = TestRequest::get()
+        .uri("/v2/project/beta/version")
+        .append_header(("Authorization", USER_USER_PAT))
+        .to_request();
+    let resp = test_env.call(req).await;
+    let versions: Vec<Version> = test::read_body_json(resp).await;
+    let version = versions.into_iter().next().unwrap();
+
+    (project, version)
+}
diff --git a/tests/common/environment.rs b/tests/common/environment.rs
index fb26f373..bcf5c686 100644
--- a/tests/common/environment.rs
+++ b/tests/common/environment.rs
@@ -1,17 +1,8 @@
 #![allow(dead_code)]
-use super::database::{TemporaryDatabase, USER_USER_ID_PARSED};
+use super::{database::TemporaryDatabase, dummy_data};
 use crate::common::setup;
-use actix_web::{
-    dev::ServiceResponse,
-    test::{self, TestRequest},
-    App,
-};
-use chrono::Utc;
-use labrinth::{
-    database::{self, models::generate_pat_id},
-    models::pats::Scopes,
-};
+use actix_web::{dev::ServiceResponse, test, App};
 
 // A complete test environment, with a test actix app and a database.
 // Must be called in an #[actix_rt::test] context. It also simulates a
@@ -20,17 +11,27 @@ pub struct TestEnvironment {
     test_app: Box<dyn LocalService>,
     pub db: TemporaryDatabase,
+
+    pub dummy: Option<dummy_data::DummyData>,
 }
 
 impl TestEnvironment {
-    pub async fn new() -> Self {
-        let db = TemporaryDatabase::create_with_dummy().await;
+    pub async fn build_with_dummy() -> Self {
+        let mut test_env = Self::build().await;
+        let dummy = dummy_data::add_dummy_data(&test_env).await;
+        test_env.dummy = Some(dummy);
+        test_env
+    }
+
+    pub async fn build() -> Self {
+        let db = TemporaryDatabase::create().await;
         let labrinth_config = setup(&db).await;
         let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
         let test_app = test::init_service(app).await;
         Self {
             test_app: Box::new(test_app),
             db,
+            dummy: None,
         }
     }
     pub async fn cleanup(self) {
@@ -68,141 +69,3 @@ where
     {
         Box::pin(self.call(req))
     }
 }
-
-// A reusable test type that works for any scope test testing an endpoint that:
-// - returns a known 'expected_failure_code' if the scope is not present (defaults to 401)
-// - returns a 200-299 if the scope is present
-// - returns failure and success JSON bodies for requests that are 200 (for performing non-simple follow-up tests on)
-// This uses a builder format, so you can chain methods to set the parameters to non-defaults (most will probably be not need to be set).
-pub struct ScopeTest<'a> {
-    test_env: &'a TestEnvironment,
-    // Scopes expected to fail on this test. By default, this is all scopes except the success scopes.
-    // (To ensure we have isolated the scope we are testing)
-    failure_scopes: Option<Scopes>,
-    // User ID to use for the PATs. By default, this is the USER_USER_ID_PARSED constant.
-    user_id: i64,
-    // The code that is expected to be returned if the scope is not present. By default, this is 401 (Unauthorized)
-    expected_failure_code: u16,
-}
-
-impl<'a> ScopeTest<'a> {
-    pub fn new(test_env: &'a TestEnvironment) -> Self {
-        Self {
-            test_env,
-            failure_scopes: None,
-            user_id: USER_USER_ID_PARSED,
-            expected_failure_code: 401,
-        }
-    }
-
-    // Set non-standard failure scopes
-    // If not set, it will be set to all scopes except the success scopes
-    // (eg: if a combination of scopes is needed, but you want to make sure that the endpoint does not work with all-but-one of them)
-    pub fn with_failure_scopes(mut self, scopes: Scopes) -> Self {
-        self.failure_scopes = Some(scopes);
-        self
-    }
-
-    // Set the user ID to use
-    // (eg: a moderator, or friend)
-    pub fn with_user_id(mut self, user_id: i64) -> Self {
-        self.user_id = user_id;
-        self
-    }
-
-    // If a non-401 code is expected.
-    // (eg: a 404 for a hidden resource, or 200 for a resource with hidden values deeper in)
-    pub fn with_failure_code(mut self, code: u16) -> Self {
-        self.expected_failure_code = code;
-        self
-    }
-
-    // Call the endpoint generated by req_gen twice, once with a PAT with the failure scopes, and once with the success scopes.
-    // success_scopes : the scopes that we are testing that should succeed
-    // returns a tuple of (failure_body, success_body)
-    // Should return a String error if on unexpected status code, allowing unwrapping in tests.
-    pub async fn test<T>(
-        &self,
-        req_gen: T,
-        success_scopes: Scopes,
-    ) -> Result<(serde_json::Value, serde_json::Value), String>
-    where
-        T: Fn() -> TestRequest,
-    {
-        // First, create a PAT with failure scopes
-        let failure_scopes = self.failure_scopes.unwrap_or(Scopes::ALL ^ success_scopes);
-        let access_token_all_others =
-            create_test_pat(failure_scopes, self.user_id, &self.test_env.db).await;
-
-        // Create a PAT with the success scopes
-        let access_token = create_test_pat(success_scopes, self.user_id, &self.test_env.db).await;
-
-        // Perform test twice, once with each PAT
-        // the first time, we expect a 401 (or known failure code)
-        let req = req_gen()
-            .append_header(("Authorization", access_token_all_others.as_str()))
-            .to_request();
-        let resp = self.test_env.test_app.call(req).await.unwrap();
-
-        if resp.status().as_u16() != self.expected_failure_code {
-            return Err(format!(
-                "Expected failure code {}, got {}",
-                self.expected_failure_code,
-                resp.status().as_u16()
-            ));
-        }
-
-        let failure_body = if resp.status() == 200
-            && resp.headers().contains_key("Content-Type")
-            && resp.headers().get("Content-Type").unwrap() == "application/json"
-        {
-            test::read_body_json(resp).await
-        } else {
-            serde_json::Value::Null
-        };
-
-        // The second time, we expect a success code
-        let req = req_gen()
-            .append_header(("Authorization", access_token.as_str()))
-            .to_request();
-        let resp = self.test_env.test_app.call(req).await.unwrap();
-
-        if !(resp.status().is_success() || resp.status().is_redirection()) {
-            return Err(format!(
-                "Expected success code, got {}",
-                resp.status().as_u16()
-            ));
-        }
-
-        let success_body = if resp.status() == 200
-            && resp.headers().contains_key("Content-Type")
-            && resp.headers().get("Content-Type").unwrap() == "application/json"
-        {
-            test::read_body_json(resp).await
-        } else {
-            serde_json::Value::Null
-        };
-        Ok((failure_body, success_body))
-    }
-}
-
-// Creates a PAT with the given scopes, and returns the access token
-// Interfacing with the db directly, rather than using a ourte,
-// allows us to test with scopes that are not allowed to be created by PATs
-async fn create_test_pat(scopes: Scopes, user_id: i64, db: &TemporaryDatabase) -> String {
-    let mut transaction = db.pool.begin().await.unwrap();
-    let id = generate_pat_id(&mut transaction).await.unwrap();
-    let pat = database::models::pat_item::PersonalAccessToken {
-        id,
-        name: format!("test_pat_{}", scopes.bits()),
-        access_token: format!("mrp_{}", id.0),
-        scopes,
-        user_id: database::models::ids::UserId(user_id),
-        created: Utc::now(),
-        expires: Utc::now() + chrono::Duration::days(1),
-        last_used: None,
-    };
-    pat.insert(&mut transaction).await.unwrap();
-    transaction.commit().await.unwrap();
-    pat.access_token
-}
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
index 122849ea..cde6fc8d 100644
--- a/tests/common/mod.rs
+++ b/tests/common/mod.rs
@@ -1,25 +1,24 @@
-use labrinth::clickhouse;
-use labrinth::{
-    file_hosting, queue,
-    util::env::{parse_strings_from_var, parse_var},
-    LabrinthConfig,
-};
+use labrinth::{check_env_vars, clickhouse};
+use labrinth::{file_hosting, queue, LabrinthConfig};
 use std::sync::Arc;
 
 use self::database::TemporaryDatabase;
 
 pub mod actix;
 pub mod database;
+pub mod dummy_data;
 pub mod environment;
+pub mod pats;
+pub mod scopes;
 
 // Testing equivalent to 'setup' function, producing a LabrinthConfig
-// If making a test, you should probably use environment::TestEnvironment::new() (which calls this)
+// If making a test, you should probably use
environment::TestEnvironment::build_with_dummy() (which calls this)
 pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig {
     println!("Setting up labrinth config");
 
     dotenvy::dotenv().ok();
 
-    if check_test_vars() {
+    if check_env_vars() {
         println!("Some environment variables are missing!");
     }
 
@@ -39,92 +38,3 @@ pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig {
         maxmind_reader.clone(),
     )
 }
-
-// This is so that env vars not used immediately don't panic at runtime
-// Currently, these are the same as main.rs ones.
-// TODO: go through after all tests are created and remove any that are not used
-// Low priority as .env file should include all of these anyway
-fn check_test_vars() -> bool {
-    let mut failed = false;
-
-    fn check_var<T: std::str::FromStr>(var: &'static str) -> bool {
-        let check = parse_var::<T>(var).is_none();
-        if check {
-            println!(
-                "Variable `{}` missing in dotenv or not of type `{}`",
-                var,
-                std::any::type_name::<T>()
-            );
-        }
-        check
-    }
-
-    failed |= check_var::<String>("DATABASE_URL");
-    failed |= check_var::<String>("MEILISEARCH_ADDR");
-    failed |= check_var::<String>("MEILISEARCH_KEY");
-    failed |= check_var::<String>("BIND_ADDR");
-    failed |= check_var::<String>("SELF_ADDR");
-
-    failed |= check_var::<String>("MOCK_FILE_PATH");
-
-    failed |= check_var::<u64>("LOCAL_INDEX_INTERVAL");
-    failed |= check_var::<u64>("VERSION_INDEX_INTERVAL");
-
-    if parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").is_none() {
-        println!("Variable `WHITELISTED_MODPACK_DOMAINS` missing in dotenv or not a json array of strings");
-        failed |= true;
-    }
-
-    if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() {
-        println!(
-            "Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings"
-        );
-        failed |= true;
-    }
-
-    failed |= check_var::<String>("PAYPAL_API_URL");
-    failed |= check_var::<String>("PAYPAL_CLIENT_ID");
-    failed |= check_var::<String>("PAYPAL_CLIENT_SECRET");
-
-    failed |= check_var::<String>("GITHUB_CLIENT_ID");
-    failed |= check_var::<String>("GITHUB_CLIENT_SECRET");
-    failed |= check_var::<String>("GITLAB_CLIENT_ID");
-    failed |= check_var::<String>("GITLAB_CLIENT_SECRET");
-    failed |= check_var::<String>("DISCORD_CLIENT_ID");
-    failed |= check_var::<String>("DISCORD_CLIENT_SECRET");
-    failed |= check_var::<String>("MICROSOFT_CLIENT_ID");
-    failed |= check_var::<String>("MICROSOFT_CLIENT_SECRET");
-    failed |= check_var::<String>("GOOGLE_CLIENT_ID");
-    failed |= check_var::<String>("GOOGLE_CLIENT_SECRET");
-    failed |= check_var::<String>("STEAM_API_KEY");
-
-    failed |= check_var::<String>("TURNSTILE_SECRET");
-
-    failed |= check_var::<String>("SMTP_USERNAME");
-    failed |= check_var::<String>("SMTP_PASSWORD");
-    failed |= check_var::<String>("SMTP_HOST");
-
-    failed |= check_var::<String>("SITE_VERIFY_EMAIL_PATH");
-    failed |= check_var::<String>("SITE_RESET_PASSWORD_PATH");
-
-    failed |= check_var::<String>("BEEHIIV_PUBLICATION_ID");
-    failed |= check_var::<String>("BEEHIIV_API_KEY");
-
-    if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() {
-        println!(
-            "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings"
-        );
-        failed |= true;
-    }
-
-    failed |= check_var::<String>("CLICKHOUSE_URL");
-    failed |= check_var::<String>("CLICKHOUSE_USER");
-    failed |= check_var::<String>("CLICKHOUSE_PASSWORD");
-    failed |= check_var::<String>("CLICKHOUSE_DATABASE");
-
-    failed |= check_var::<String>("MAXMIND_LICENSE_KEY");
-
-    failed |= check_var::<u64>("PAYOUTS_BUDGET");
-
-    failed
-}
diff --git a/tests/common/pats.rs b/tests/common/pats.rs
new file mode 100644
index 00000000..d63517cf
--- /dev/null
+++ b/tests/common/pats.rs
@@ -0,0 +1,30 @@
+#![allow(dead_code)]
+
+use chrono::Utc;
+use labrinth::{
+    database::{self, models::generate_pat_id},
+    models::pats::Scopes,
+};
+
+use
super::database::TemporaryDatabase;
+
+// Creates a PAT with the given scopes, and returns the access token
+// Interfacing with the db directly, rather than using a route,
+// allows us to test with scopes that are not allowed to be created by PATs
+pub async fn create_test_pat(scopes: Scopes, user_id: i64, db: &TemporaryDatabase) -> String {
+    let mut transaction = db.pool.begin().await.unwrap();
+    let id = generate_pat_id(&mut transaction).await.unwrap();
+    let pat = database::models::pat_item::PersonalAccessToken {
+        id,
+        name: format!("test_pat_{}", scopes.bits()),
+        access_token: format!("mrp_{}", id.0),
+        scopes,
+        user_id: database::models::ids::UserId(user_id),
+        created: Utc::now(),
+        expires: Utc::now() + chrono::Duration::days(1),
+        last_used: None,
+    };
+    pat.insert(&mut transaction).await.unwrap();
+    transaction.commit().await.unwrap();
+    pat.access_token
+}
diff --git a/tests/common/scopes.rs b/tests/common/scopes.rs
new file mode 100644
index 00000000..44a4b7df
--- /dev/null
+++ b/tests/common/scopes.rs
@@ -0,0 +1,124 @@
+#![allow(dead_code)]
+use actix_web::test::{self, TestRequest};
+use labrinth::models::pats::Scopes;
+
+use super::{database::USER_USER_ID_PARSED, environment::TestEnvironment, pats::create_test_pat};
+
+// A reusable test type that works for any scope test testing an endpoint that:
+// - returns a known 'expected_failure_code' if the scope is not present (defaults to 401)
+// - returns a 200-299 if the scope is present
+// - returns failure and success JSON bodies for requests that are 200 (for performing non-simple follow-up tests on)
+// This uses a builder format, so you can chain methods to set the parameters to non-defaults (most will probably not need to be set).
+pub struct ScopeTest<'a> {
+    test_env: &'a TestEnvironment,
+    // Scopes expected to fail on this test. By default, this is all scopes except the success scopes.
+    // (To ensure we have isolated the scope we are testing)
+    failure_scopes: Option<Scopes>,
+    // User ID to use for the PATs. By default, this is the USER_USER_ID_PARSED constant.
+    user_id: i64,
+    // The code that is expected to be returned if the scope is not present. By default, this is 401 (Unauthorized)
+    expected_failure_code: u16,
+}
+
+impl<'a> ScopeTest<'a> {
+    pub fn new(test_env: &'a TestEnvironment) -> Self {
+        Self {
+            test_env,
+            failure_scopes: None,
+            user_id: USER_USER_ID_PARSED,
+            expected_failure_code: 401,
+        }
+    }
+
+    // Set non-standard failure scopes
+    // If not set, it will be set to all scopes except the success scopes
+    // (eg: if a combination of scopes is needed, but you want to make sure that the endpoint does not work with all-but-one of them)
+    pub fn with_failure_scopes(mut self, scopes: Scopes) -> Self {
+        self.failure_scopes = Some(scopes);
+        self
+    }
+
+    // Set the user ID to use
+    // (eg: a moderator, or friend)
+    pub fn with_user_id(mut self, user_id: i64) -> Self {
+        self.user_id = user_id;
+        self
+    }
+
+    // If a non-401 code is expected.
+    // (eg: a 404 for a hidden resource, or 200 for a resource with hidden values deeper in)
+    pub fn with_failure_code(mut self, code: u16) -> Self {
+        self.expected_failure_code = code;
+        self
+    }
+
+    // Call the endpoint generated by req_gen twice, once with a PAT with the failure scopes, and once with the success scopes.
+    // success_scopes : the scopes that we are testing that should succeed
+    // returns a tuple of (failure_body, success_body)
+    // Should return a String error if on unexpected status code, allowing unwrapping in tests.
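+    //
+    // An illustrative sketch of a call (hypothetical, not taken verbatim from the
+    // suite), assuming a dummy environment and an endpoint guarded by USER_READ:
+    //
+    //     let test_env = TestEnvironment::build_with_dummy().await;
+    //     let (failure_body, success_body) = ScopeTest::new(&test_env)
+    //         .test(|| TestRequest::get().uri("/v2/user"), Scopes::USER_READ)
+    //         .await
+    //         .unwrap();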
+    pub async fn test<T>(
+        &self,
+        req_gen: T,
+        success_scopes: Scopes,
+    ) -> Result<(serde_json::Value, serde_json::Value), String>
+    where
+        T: Fn() -> TestRequest,
+    {
+        // First, create a PAT with failure scopes
+        let failure_scopes = self
+            .failure_scopes
+            .unwrap_or(Scopes::all() ^ success_scopes);
+        let access_token_all_others =
+            create_test_pat(failure_scopes, self.user_id, &self.test_env.db).await;
+
+        // Create a PAT with the success scopes
+        let access_token = create_test_pat(success_scopes, self.user_id, &self.test_env.db).await;
+
+        // Perform test twice, once with each PAT
+        // the first time, we expect a 401 (or known failure code)
+        let req = req_gen()
+            .append_header(("Authorization", access_token_all_others.as_str()))
+            .to_request();
+        let resp = self.test_env.call(req).await;
+
+        if resp.status().as_u16() != self.expected_failure_code {
+            return Err(format!(
+                "Expected failure code {}, got {}",
+                self.expected_failure_code,
+                resp.status().as_u16()
+            ));
+        }
+
+        let failure_body = if resp.status() == 200
+            && resp.headers().contains_key("Content-Type")
+            && resp.headers().get("Content-Type").unwrap() == "application/json"
+        {
+            test::read_body_json(resp).await
+        } else {
+            serde_json::Value::Null
+        };
+
+        // The second time, we expect a success code
+        let req = req_gen()
+            .append_header(("Authorization", access_token.as_str()))
+            .to_request();
+        let resp = self.test_env.call(req).await;
+
+        if !(resp.status().is_success() || resp.status().is_redirection()) {
+            return Err(format!(
+                "Expected success code, got {}",
+                resp.status().as_u16()
+            ));
+        }
+
+        let success_body = if resp.status() == 200
+            && resp.headers().contains_key("Content-Type")
+            && resp.headers().get("Content-Type").unwrap() == "application/json"
+        {
+            test::read_body_json(resp).await
+        } else {
+            serde_json::Value::Null
+        };
+        Ok((failure_body, success_body))
+    }
+}
diff --git a/tests/files/dummy-project-alpha.jar b/tests/files/dummy-project-alpha.jar
new file mode 100644
index 0000000000000000000000000000000000000000..61f82078c78f2bd825f2f329f4b588a7102a6f0c
GIT binary patch
[base85 payload omitted: 680-byte dummy jar, not recoverable from this copy]
literal 0
HcmV?d00001

diff --git a/tests/files/dummy-project-beta.jar b/tests/files/dummy-project-beta.jar
new file mode 100644
index 0000000000000000000000000000000000000000..1b072b2076b1e97727e1716a99ff6949a7a2ab6c
GIT binary patch
[base85 payload omitted: 678-byte dummy jar, not recoverable from this copy]
literal 0
HcmV?d00001

[The hunks for tests/files/dummy_data.sql and tests/pats.rs, and the head of the tests/project.rs diff, were garbled beyond recovery in this copy and are omitted.]

diff --git a/tests/project.rs b/tests/project.rs
--- a/tests/project.rs
+++ b/tests/project.rs
-        .get::<String, _>(PROJECTS_NAMESPACE, 1000)
-        .await
-        .unwrap()
-        .is_none());
+    let test_env = TestEnvironment::build_with_dummy().await;
+    let alpha_project_id = &test_env.dummy.as_ref().unwrap().alpha_project_id;
+    let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id;
+    let alpha_project_slug = &test_env.dummy.as_ref().unwrap().alpha_project_slug;
+    let alpha_version_id = &test_env.dummy.as_ref().unwrap().alpha_version_id;
 
     // Perform request on dummy data
     let req = test::TestRequest::get()
-        .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}"))
+        .uri(&format!("/v2/project/{alpha_project_id}"))
         .append_header(("Authorization", USER_USER_PAT))
         .to_request();
     let resp = test_env.call(req).await;
@@ -33,36 +29,36 @@ async fn test_get_project() {
     let body: serde_json::Value = test::read_body_json(resp).await;
     assert_eq!(status, 200);
-    assert_eq!(body["id"], json!(PROJECT_ALPHA_PROJECT_ID));
-    assert_eq!(body["slug"], json!("testslug"));
+    assert_eq!(body["id"], json!(alpha_project_id));
+    assert_eq!(body["slug"], json!(alpha_project_slug));
 
     let versions = body["versions"].as_array().unwrap();
     assert!(!versions.is_empty());
-    assert_eq!(versions[0], json!(PROJECT_ALPHA_VERSION_ID));
+    assert_eq!(versions[0], json!(alpha_version_id));
 
     // Confirm that the request was cached
     assert_eq!(
         test_env
             .db
            .redis_pool
-            .get::<i64, _>(PROJECTS_SLUGS_NAMESPACE, "testslug")
+            .get::<i64, _>(PROJECTS_SLUGS_NAMESPACE, alpha_project_slug)
             .await
             .unwrap(),
-        Some(1000)
+        Some(parse_base62(alpha_project_id).unwrap() as i64)
     );
 
     let cached_project = test_env
         .db
         .redis_pool
-        .get::<String, _>(PROJECTS_NAMESPACE, 1000)
+        .get::<String, _>(PROJECTS_NAMESPACE, parse_base62(alpha_project_id).unwrap())
         .await
         .unwrap()
         .unwrap();
     let cached_project: serde_json::Value = serde_json::from_str(&cached_project).unwrap();
-    assert_eq!(cached_project["inner"]["slug"], json!("testslug"));
+    assert_eq!(cached_project["inner"]["slug"], json!(alpha_project_slug));
 
     // Make the request again, this time it should be cached
     let req = test::TestRequest::get()
-        .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}"))
+        .uri(&format!("/v2/project/{alpha_project_id}"))
         .append_header(("Authorization", USER_USER_PAT))
         .to_request();
     let resp = test_env.call(req).await;
@@ -70,8 +66,8 @@ async fn test_get_project() {
     assert_eq!(status, 200);
 
     let body: serde_json::Value = test::read_body_json(resp).await;
-    assert_eq!(body["id"], json!(PROJECT_ALPHA_PROJECT_ID));
-    assert_eq!(body["slug"], json!("testslug"));
+    assert_eq!(body["id"], json!(alpha_project_id));
+    assert_eq!(body["slug"], json!(alpha_project_slug));
 
     // Request should fail on non-existent project
     let req = test::TestRequest::get()
@@ -84,7 +80,7 @@ async fn test_get_project() {
 
     // Similarly, request should fail on non-authorized user, on a yet-to-be-approved or hidden project, with a 404 (hiding the existence of the project)
     let req =
test::TestRequest::get() - .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + .uri(&format!("/v2/project/{beta_project_id}")) .append_header(("Authorization", ENEMY_USER_PAT)) .to_request(); @@ -98,7 +94,7 @@ async fn test_get_project() { #[actix_rt::test] async fn test_add_remove_project() { // Test setup and dummy data - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // Generate test project data. let mut json_data = json!( @@ -310,12 +306,14 @@ async fn test_add_remove_project() { #[actix_rt::test] pub async fn test_patch_project() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_project_slug = &test_env.dummy.as_ref().unwrap().alpha_project_slug; + let beta_project_slug = &test_env.dummy.as_ref().unwrap().beta_project_slug; // First, we do some patch requests that should fail. // Failure because the user is not authorized. let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", ENEMY_USER_PAT)) .set_json(json!({ "title": "Test_Add_Project project - test 1", @@ -327,7 +325,7 @@ pub async fn test_patch_project() { // Failure because we are setting URL fields to invalid urls. for url_type in ["issues_url", "source_url", "wiki_url", "discord_url"] { let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ url_type: "w.fake.url", @@ -340,7 +338,7 @@ pub async fn test_patch_project() { // Failure because these are illegal requested statuses for a normal user. for req in ["unknown", "processing", "withheld", "scheduled"] { let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "requested_status": req, @@ -353,7 +351,7 @@ pub async fn test_patch_project() { // Failure because these should not be able to be set by a non-mod for key in ["moderation_message", "moderation_message_body"] { let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ key: "test", @@ -364,7 +362,7 @@ pub async fn test_patch_project() { // (should work for a mod, though) let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", MOD_USER_PAT)) .set_json(json!({ key: "test", @@ -376,18 +374,18 @@ pub async fn test_patch_project() { // Failure because the slug is already taken. let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "slug": "testslug2", // the other dummy project has this slug + "slug": beta_project_slug, // the other dummy project has this slug })) .to_request(); let resp = test_env.call(req).await; assert_eq!(resp.status(), 400); - // Not allowed to directly set status, as 'testslug2' (the other project) is "processing" and cannot have its status changed like this. + // Not allowed to directly set status, as 'beta_project_slug' (the other project) is "processing" and cannot have its status changed like this. 
let req = test::TestRequest::patch() - .uri("/v2/project/testslug2") + .uri(&format!("/v2/project/{beta_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "status": "private" @@ -398,7 +396,7 @@ pub async fn test_patch_project() { // Successful request to patch many fields. let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "slug": "newslug", @@ -424,7 +422,7 @@ pub async fn test_patch_project() { // Old slug no longer works let req = test::TestRequest::get() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = test_env.call(req).await; diff --git a/tests/scopes.rs b/tests/scopes.rs index 9145461a..806905ab 100644 --- a/tests/scopes.rs +++ b/tests/scopes.rs @@ -1,14 +1,11 @@ use actix_web::test::{self, TestRequest}; use bytes::Bytes; use chrono::{Duration, Utc}; -use common::{actix::AppendsMultipart, database::PROJECT_ALPHA_THREAD_ID}; +use common::actix::AppendsMultipart; use labrinth::models::pats::Scopes; use serde_json::json; -use crate::common::{ - database::*, - environment::{ScopeTest, TestEnvironment}, -}; +use crate::common::{database::*, environment::TestEnvironment, scopes::ScopeTest}; // importing common module. mod common; @@ -23,7 +20,7 @@ mod common; #[actix_rt::test] async fn user_scopes() { // Test setup and dummy data - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // User reading let read_user = Scopes::USER_READ; @@ -74,7 +71,7 @@ async fn user_scopes() { .unwrap(); // User payout info writing - let failure_write_user_payout = Scopes::ALL ^ Scopes::PAYOUTS_WRITE; // Failure case should include USER_WRITE + let failure_write_user_payout = Scopes::all() ^ Scopes::PAYOUTS_WRITE; // Failure case should include USER_WRITE let write_user_payout = Scopes::USER_WRITE | Scopes::PAYOUTS_WRITE; let req_gen = || { TestRequest::patch().uri("/v2/user/user").set_json(json!( { @@ -108,12 +105,13 @@ async fn user_scopes() { // Notifications #[actix_rt::test] pub async fn notifications_scopes() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id.clone(); // We will invite user 'friend' to project team, and use that as a notification // Get notifications let req = TestRequest::post() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")) + .uri(&format!("/v2/team/{alpha_team_id}/members")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!( { "user_id": FRIEND_USER_ID // friend @@ -185,7 +183,7 @@ pub async fn notifications_scopes() { // Mass notification delete // We invite mod, get the notification ID, and do mass delete using that let req = test::TestRequest::post() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")) + .uri(&format!("/v2/team/{alpha_team_id}/members")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!( { "user_id": MOD_USER_ID // mod @@ -221,7 +219,7 @@ pub async fn notifications_scopes() { // Project version creation scopes #[actix_rt::test] pub async fn project_version_create_scopes() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // Create project let create_project = Scopes::PROJECT_CREATE; @@ -320,24 +318,24
@@ pub async fn project_version_create_scopes() { // Project management scopes #[actix_rt::test] pub async fn project_version_reads_scopes() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); + let beta_version_id = &test_env.dummy.as_ref().unwrap().beta_version_id.clone(); + let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id.clone(); + let beta_file_hash = &test_env.dummy.as_ref().unwrap().beta_file_hash.clone(); // Project reading // Uses 404 as the expected failure code (or 200 and an empty list for mass reads) let read_project = Scopes::PROJECT_READ; - let req_gen = - || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")); + let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project) .await .unwrap(); - let req_gen = || { - test::TestRequest::get().uri(&format!( - "/v2/project/{PROJECT_BETA_PROJECT_ID}/dependencies" - )) - }; + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}/dependencies")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project) @@ -347,7 +345,7 @@ pub async fn project_version_reads_scopes() { let req_gen = || { test::TestRequest::get().uri(&format!( "/v2/projects?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{PROJECT_BETA_PROJECT_ID}\"")) + uri = urlencoding::encode(&format!("\"{beta_project_id}\"")) )) }; let (failure, success) = ScopeTest::new(&test_env) @@ -360,7 +358,7 @@ pub async fn project_version_reads_scopes() { // Team project reading let req_gen = - || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/members")); + || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}/members")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project) @@ -370,8 +368,7 @@ pub async fn project_version_reads_scopes() { // Get team members // In this case, as these are public endpoints, logging in only is relevant to showing permissions // So for our test project (with 1 user, 'user') we will check the permissions before and after having the scope. 
- let req_gen = - || test::TestRequest::get().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")); + let req_gen = || test::TestRequest::get().uri(&format!("/v2/team/{alpha_team_id}/members")); let (failure, success) = ScopeTest::new(&test_env) .with_failure_code(200) .test(req_gen, read_project) @@ -383,7 +380,7 @@ pub async fn project_version_reads_scopes() { let req_gen = || { test::TestRequest::get().uri(&format!( "/v2/teams?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{PROJECT_ALPHA_TEAM_ID}\"")) + uri = urlencoding::encode(&format!("\"{alpha_team_id}\"")) )) }; let (failure, success) = ScopeTest::new(&test_env) @@ -422,7 +419,7 @@ pub async fn project_version_reads_scopes() { // Project metadata reading let req_gen = || { test::TestRequest::get().uri(&format!( - "/maven/maven/modrinth/{PROJECT_BETA_PROJECT_ID}/maven-metadata.xml" + "/maven/maven/modrinth/{beta_project_id}/maven-metadata.xml" )) }; ScopeTest::new(&test_env) @@ -435,7 +432,7 @@ pub async fn project_version_reads_scopes() { // First, set version to hidden (which is when the scope is required to read it) let read_version = Scopes::VERSION_READ; let req = test::TestRequest::patch() - .uri(&format!("/v2/version/{PROJECT_BETA_VERSION_ID}")) + .uri(&format!("/v2/version/{beta_version_id}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "status": "draft" @@ -444,20 +441,15 @@ pub async fn project_version_reads_scopes() { let resp = test_env.call(req).await; assert_eq!(resp.status(), 204); - let req_gen = || { - test::TestRequest::get().uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}")) - }; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/version_file/{beta_file_hash}")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_version) .await .unwrap(); - let req_gen = || { - test::TestRequest::get().uri(&format!( - "/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/download" - )) - }; + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/version_file/{beta_file_hash}/download")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_version) @@ -468,7 +460,7 @@ pub async fn project_version_reads_scopes() { // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope // let req_gen = || { // test::TestRequest::post() - // .uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/update")) + // .uri(&format!("/v2/version_file/{beta_file_hash}/update")) // .set_json(json!({})) // }; // ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_version).await.unwrap(); @@ -478,7 +470,7 @@ pub async fn project_version_reads_scopes() { test::TestRequest::post() .uri("/v2/version_files") .set_json(json!({ - "hashes": [PROJECT_BETA_THREAD_FILE_HASH] + "hashes": [beta_file_hash] })) }; let (failure, success) = ScopeTest::new(&test_env) @@ -486,14 +478,8 @@ pub async fn project_version_reads_scopes() { .test(req_gen, read_version) .await .unwrap(); - assert!(!failure - .as_object() - .unwrap() - .contains_key(PROJECT_BETA_THREAD_FILE_HASH)); - assert!(success - .as_object() - .unwrap() - .contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + assert!(!failure.as_object().unwrap().contains_key(beta_file_hash)); + assert!(success.as_object().unwrap().contains_key(beta_file_hash)); // Update version file // TODO: Should this be /POST? 
Looks like /GET @@ -504,13 +490,13 @@ pub async fn project_version_reads_scopes() { // .uri(&format!("/v2/version_files/update_individual")) // .set_json(json!({ // "hashes": [{ - // "hash": PROJECT_BETA_THREAD_FILE_HASH, + // "hash": beta_file_hash, // }] // })) // }; // let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); - // assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); - // assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + // assert!(!failure.as_object().unwrap().contains_key(beta_file_hash)); + // assert!(success.as_object().unwrap().contains_key(beta_file_hash)); // Update version file // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope @@ -518,17 +504,17 @@ pub async fn project_version_reads_scopes() { // test::TestRequest::post() // .uri(&format!("/v2/version_files/update")) // .set_json(json!({ - // "hashes": [PROJECT_BETA_THREAD_FILE_HASH] + // "hashes": [beta_file_hash] // })) // }; // let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); - // assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); - // assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + // assert!(!failure.as_object().unwrap().contains_key(beta_file_hash)); + // assert!(success.as_object().unwrap().contains_key(beta_file_hash)); // Both project and version reading let read_project_and_version = Scopes::PROJECT_READ | Scopes::VERSION_READ; let req_gen = - || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version")); + || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}/version")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project_and_version) @@ -538,7 +524,7 @@ pub async fn project_version_reads_scopes() { // TODO: fails for the same reason as above // let req_gen = || { // test::TestRequest::get() - // .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version/{PROJECT_BETA_VERSION_ID}")) + // .uri(&format!("/v2/project/{beta_project_id}/version/{beta_version_id}")) // }; // ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project_and_version).await.unwrap(); @@ -549,13 +535,16 @@ pub async fn project_version_reads_scopes() { // Project writing #[actix_rt::test] pub async fn project_write_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); + let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id.clone(); // Projects writing let write_project = Scopes::PROJECT_WRITE; let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + .uri(&format!("/v2/project/{beta_project_id}")) .set_json(json!( { "title": "test_project_version_write_scopes Title", @@ -571,7 +560,7 @@ pub async fn project_write_scopes() { test::TestRequest::patch() .uri(&format!( "/v2/projects?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{PROJECT_BETA_PROJECT_ID}\"")) + uri = urlencoding::encode(&format!("\"{beta_project_id}\"")) )) .set_json(json!( { @@ -584,9 +573,20 @@ pub async fn project_write_scopes() { .await .unwrap(); + // Approve beta 
as private so we can schedule it + let req = test::TestRequest::patch() + .uri(&format!("/v2/project/{beta_project_id}")) + .append_header(("Authorization", MOD_USER_PAT)) + .set_json(json!({ + "status": "private" + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status(), 204); + let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}/schedule")) // PROJECT_ALPHA_PROJECT_ID is an *approved* project, so we can schedule it + .uri(&format!("/v2/project/{beta_project_id}/schedule")) // beta_project_id was just approved as private above, so we can schedule it .set_json(json!( { "requested_status": "private", @@ -602,9 +602,7 @@ pub async fn project_write_scopes() { // Icons and gallery images let req_gen = || { test::TestRequest::patch() - .uri(&format!( - "/v2/project/{PROJECT_BETA_PROJECT_ID}/icon?ext=png" - )) + .uri(&format!("/v2/project/{beta_project_id}/icon?ext=png")) .set_payload(Bytes::from( include_bytes!("../tests/files/200x200.png") as &[u8] )) }; ScopeTest::new(&test_env) .test(req_gen, write_project) .await .unwrap(); let req_gen = - || test::TestRequest::delete().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/icon")); + || test::TestRequest::delete().uri(&format!("/v2/project/{beta_project_id}/icon")); ScopeTest::new(&test_env) .test(req_gen, write_project) .await .unwrap(); let req_gen = || { test::TestRequest::post() .uri(&format!( - "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?ext=png&featured=true" + "/v2/project/{beta_project_id}/gallery?ext=png&featured=true" )) .set_payload(Bytes::from( include_bytes!("../tests/files/200x200.png") as &[u8] )) }; ScopeTest::new(&test_env) .test(req_gen, write_project) .await .unwrap(); // Get project, as we need the gallery image url let req_gen = test::TestRequest::get() - .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + .uri(&format!("/v2/project/{beta_project_id}")) .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = test_env.call(req_gen).await; @@ -646,7 +644,7 @@ pub async fn project_write_scopes() { let req_gen = || { test::TestRequest::patch().uri(&format!( - "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}" + "/v2/project/{beta_project_id}/gallery?url={gallery_url}" )) }; ScopeTest::new(&test_env) .test(req_gen, write_project) .await .unwrap(); let req_gen = || { test::TestRequest::delete().uri(&format!( - "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}" + "/v2/project/{beta_project_id}/gallery?url={gallery_url}" )) }; ScopeTest::new(&test_env) .test(req_gen, write_project) .await .unwrap(); // Team scopes - add user 'friend' let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")) + .uri(&format!("/v2/team/{alpha_team_id}/members")) .set_json(json!({ "user_id": FRIEND_USER_ID })) }; ScopeTest::new(&test_env) .test(req_gen, write_project) .await .unwrap(); // Accept team invite as 'friend' - let req_gen = - || test::TestRequest::post().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/join")); + let req_gen = || test::TestRequest::post().uri(&format!("/v2/team/{alpha_team_id}/join")); ScopeTest::new(&test_env) .with_user_id(FRIEND_USER_ID_PARSED) .test(req_gen, write_project) .await .unwrap(); // Patch 'friend' permissions let req_gen = || { test::TestRequest::patch() .uri(&format!( - "/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{FRIEND_USER_ID}" + "/v2/team/{alpha_team_id}/members/{FRIEND_USER_ID}" )) .set_json(json!({
"permissions": 1 @@ -704,7 +701,7 @@ pub async fn project_write_scopes() { // Transfer ownership to 'friend' let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/owner")) + .uri(&format!("/v2/team/{alpha_team_id}/owner")) .set_json(json!({ "user_id": FRIEND_USER_ID })) @@ -716,9 +713,7 @@ pub async fn project_write_scopes() { // Now as 'friend', delete 'user' let req_gen = || { - test::TestRequest::delete().uri(&format!( - "/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{USER_USER_ID}" - )) + test::TestRequest::delete().uri(&format!("/v2/team/{alpha_team_id}/members/{USER_USER_ID}")) }; ScopeTest::new(&test_env) .with_user_id(FRIEND_USER_ID_PARSED) @@ -733,7 +728,7 @@ pub async fn project_write_scopes() { // let delete_version = Scopes::PROJECT_DELETE; // let req_gen = || { // test::TestRequest::delete() - // .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + // .uri(&format!("/v2/project/{beta_project_id}")) // }; // ScopeTest::new(&test_env).test(req_gen, delete_version).await.unwrap(); @@ -744,14 +739,29 @@ pub async fn project_write_scopes() { // Version write #[actix_rt::test] pub async fn version_write_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_version_id = &test_env.dummy.as_ref().unwrap().beta_version_id.clone(); + let beta_version_id = &test_env.dummy.as_ref().unwrap().beta_version_id.clone(); + let alpha_file_hash = &test_env.dummy.as_ref().unwrap().beta_file_hash.clone(); let write_version = Scopes::VERSION_WRITE; + // Approve beta version as private so we can schedule it + let req = test::TestRequest::patch() + .uri(&format!("/v2/version/{beta_version_id}")) + .append_header(("Authorization", MOD_USER_PAT)) + .set_json(json!({ + "status": "unlisted" + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status(), 204); + // Schedule version let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}/schedule")) // PROJECT_ALPHA_VERSION_ID is an *approved* version, so we can schedule it + .uri(&format!("/v2/version/{beta_version_id}/schedule")) // beta_version_id is an *approved* version, so we can schedule it .set_json(json!( { "requested_status": "archived", @@ -767,7 +777,7 @@ pub async fn version_write_scopes() { // Patch version let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")) + .uri(&format!("/v2/version/{alpha_version_id}")) .set_json(json!( { "version_title": "test_version_write_scopes Title", @@ -810,7 +820,7 @@ pub async fn version_write_scopes() { // Upload version file let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}/file")) + .uri(&format!("/v2/version/{alpha_version_id}/file")) .set_multipart(vec![json_segment.clone(), content_segment.clone()]) }; ScopeTest::new(&test_env) @@ -821,9 +831,8 @@ pub async fn version_write_scopes() { // Delete version file // TODO: Should this scope be VERSION_DELETE? 
let req_gen = || { - test::TestRequest::delete().uri(&format!( - "/v2/version_file/{PROJECT_ALPHA_THREAD_FILE_HASH}" - )) // Delete from PROJECT_ALPHA_VERSION_ID, as we uploaded to PROJECT_ALPHA_VERSION_ID and it needs another file + test::TestRequest::delete().uri(&format!("/v2/version_file/{alpha_file_hash}")) + // Delete the original file from alpha_version_id; this is allowed because we just uploaded a second file, so the version still keeps at least one }; ScopeTest::new(&test_env) .test(req_gen, write_version) .await .unwrap(); // Delete version let delete_version = Scopes::VERSION_DELETE; - let req_gen = - || test::TestRequest::delete().uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")); + let req_gen = || test::TestRequest::delete().uri(&format!("/v2/version/{alpha_version_id}")); ScopeTest::new(&test_env) .test(req_gen, delete_version) .await .unwrap(); @@ -846,14 +854,16 @@ pub async fn version_write_scopes() { // Report scopes #[actix_rt::test] pub async fn report_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); // Create report let report_create = Scopes::REPORT_CREATE; let req_gen = || { test::TestRequest::post().uri("/v2/report").set_json(json!({ "report_type": "copyright", - "item_id": PROJECT_BETA_PROJECT_ID, + "item_id": beta_project_id, "item_type": "project", "body": "This is a reupload of my mod, ", })) }; @@ -920,11 +930,14 @@ pub async fn report_scopes() { // Thread scopes #[actix_rt::test] pub async fn thread_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_thread_id = &test_env.dummy.as_ref().unwrap().alpha_thread_id.clone(); + let beta_thread_id = &test_env.dummy.as_ref().unwrap().beta_thread_id.clone(); // Thread read let thread_read = Scopes::THREAD_READ; - let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/{PROJECT_ALPHA_THREAD_ID}")); + let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/{alpha_thread_id}")); ScopeTest::new(&test_env) .test(req_gen, thread_read) .await .unwrap(); @@ -941,6 +954,24 @@ pub async fn thread_scopes() { .await .unwrap(); + // Thread write (to also push to moderator inbox) + let thread_write = Scopes::THREAD_WRITE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/thread/{beta_thread_id}")) + .set_json(json!({ + "body": { + "type": "text", + "body": "test_thread_scopes Body" + } + })) + }; + ScopeTest::new(&test_env) + .with_user_id(USER_USER_ID_PARSED) + .test(req_gen, thread_write) + .await + .unwrap(); + // Check moderation inbox // Uses moderator PAT, as only moderators can see the moderation inbox let req_gen = || test::TestRequest::get().uri("/v2/thread/inbox"); @@ -961,24 +992,6 @@ pub async fn thread_scopes() { .await .unwrap(); - // Thread write - let thread_write = Scopes::THREAD_WRITE; - let req_gen = || { - test::TestRequest::post() - .uri(&format!("/v2/thread/{thread_id}")) - .set_json(json!({ - "body": { - "type": "text", - "body": "test_thread_scopes Body" - } - })) - }; - ScopeTest::new(&test_env) - .with_user_id(MOD_USER_ID_PARSED) - .test(req_gen, thread_write) - .await - .unwrap(); - // Delete that message // First, get message id let req_gen = test::TestRequest::get() @@ -1005,7 +1018,7 @@ pub async fn thread_scopes() { // Pat scopes #[actix_rt::test] pub async fn pat_scopes() { - let test_env =
TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // Pat create let pat_create = Scopes::PAT_CREATE; @@ -1057,7 +1070,9 @@ pub async fn pat_scopes() { // Collection scopes #[actix_rt::test] pub async fn collections_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_project_id = &test_env.dummy.as_ref().unwrap().alpha_project_id.clone(); // Create collection let collection_create = Scopes::COLLECTION_CREATE; @@ -1067,7 +1082,7 @@ pub async fn collections_scopes() { .set_json(json!({ "title": "Test Collection", "description": "Test Collection Description", - "projects": [PROJECT_ALPHA_PROJECT_ID] + "projects": [alpha_project_id] })) }; let (_, success) = ScopeTest::new(&test_env) @@ -1150,7 +1165,9 @@ pub async fn collections_scopes() { // Organization scopes (and a couple PROJECT_WRITE scopes that are only allowed for orgs) #[actix_rt::test] pub async fn organization_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); // Create organization let organization_create = Scopes::ORGANIZATION_CREATE; @@ -1207,11 +1224,11 @@ pub async fn organization_scopes() { test::TestRequest::post() .uri(&format!("/v2/organization/{organization_id}/projects")) .set_json(json!({ - "project_id": PROJECT_BETA_PROJECT_ID + "project_id": beta_project_id })) }; ScopeTest::new(&test_env) - .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE) + .with_failure_scopes(Scopes::all() ^ Scopes::ORGANIZATION_WRITE) .test(req_gen, organization_project_edit) .await .unwrap(); @@ -1271,7 +1288,7 @@ pub async fn organization_scopes() { || test::TestRequest::get().uri(&format!("/v2/organization/{organization_id}/projects")); let (failure, success) = ScopeTest::new(&test_env) .with_failure_code(200) - .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_READ) + .with_failure_scopes(Scopes::all() ^ Scopes::ORGANIZATION_READ) .test(req_gen, organization_project_read) .await .unwrap(); @@ -1281,11 +1298,11 @@ pub async fn organization_scopes() { // remove project (now that we've checked) let req_gen = || { test::TestRequest::delete().uri(&format!( - "/v2/organization/{organization_id}/projects/{PROJECT_BETA_PROJECT_ID}" + "/v2/organization/{organization_id}/projects/{beta_project_id}" )) }; ScopeTest::new(&test_env) - .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE) + .with_failure_scopes(Scopes::all() ^ Scopes::ORGANIZATION_WRITE) .test(req_gen, organization_project_edit) .await .unwrap();
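Reviewer note on the harness used throughout these tests: ScopeTest is only visible in this patch through its call sites (it is imported above from crate::common::scopes). A minimal usage sketch, assuming the builder behaves the way those call sites suggest; the defaults (failure scopes of Scopes::all() ^ scope, and a 401 failure code unless overridden with with_failure_code) are inferred rather than confirmed, and "placeholder" stands in for a real project id:

    use actix_web::test;
    use labrinth::models::pats::Scopes;
    // Assumes the `mod common;` declaration present in tests/scopes.rs.
    use crate::common::{environment::TestEnvironment, scopes::ScopeTest};

    #[actix_rt::test]
    async fn scope_test_usage_sketch() {
        let test_env = TestEnvironment::build_with_dummy().await;

        // The harness takes a generator rather than a request, presumably because
        // a TestRequest is consumed when built, so each attempt needs a fresh one:
        // first under a PAT missing PROJECT_READ (expected to 404 here), then under
        // a PAT that carries it (expected to succeed).
        let req_gen = || test::TestRequest::get().uri("/v2/project/placeholder");
        let (_failure, _success) = ScopeTest::new(&test_env)
            .with_failure_code(404)
            .test(req_gen, Scopes::PROJECT_READ)
            .await
            .unwrap();
        // Both deserialized JSON bodies are returned, which is what lets the tests
        // above assert on fields such as success["projects"].
    }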