From 2bd7b388a59872ed471d825369dc0267ccc80c14 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Sat, 23 Sep 2023 18:05:07 -0700 Subject: [PATCH 01/16] computer switch --- src/auth/flows.rs | 43 +-- src/auth/pats.rs | 9 +- src/auth/session.rs | 9 +- src/auth/validate.rs | 7 +- src/database/mod.rs | 1 + src/database/models/categories.rs | 122 ++------- src/database/models/collection_item.rs | 38 +-- src/database/models/flow_item.rs | 31 +-- src/database/models/image_item.rs | 37 +-- src/database/models/pat_item.rs | 79 ++---- src/database/models/project_item.rs | 121 +++----- src/database/models/session_item.rs | 78 ++---- src/database/models/team_item.rs | 36 +-- src/database/models/user_item.rs | 77 ++---- src/database/models/version_item.rs | 63 ++--- src/database/redis.rs | 101 +++++++ src/lib.rs | 365 +++++++++++++++++++++++++ src/main.rs | 344 ++--------------------- src/models/pack.rs | 3 +- src/models/users.rs | 8 +- src/queue/payouts.rs | 4 +- src/queue/session.rs | 3 +- src/ratelimit/memory.rs | 2 +- src/routes/analytics.rs | 7 +- src/routes/maven.rs | 11 +- src/routes/updates.rs | 3 +- src/routes/v2/admin.rs | 5 +- src/routes/v2/analytics_get.rs | 14 +- src/routes/v2/collections.rs | 15 +- src/routes/v2/images.rs | 3 +- src/routes/v2/moderation.rs | 3 +- src/routes/v2/notifications.rs | 13 +- src/routes/v2/project_creation.rs | 6 +- src/routes/v2/projects.rs | 38 +-- src/routes/v2/reports.rs | 13 +- src/routes/v2/tags.rs | 15 +- src/routes/v2/teams.rs | 17 +- src/routes/v2/threads.rs | 15 +- src/routes/v2/users.rs | 25 +- src/routes/v2/version_creation.rs | 9 +- src/routes/v2/version_file.rs | 17 +- src/routes/v2/versions.rs | 15 +- src/util/img.rs | 4 +- src/util/webhook.rs | 3 +- tests/common/actix.rs | 51 ++++ tests/common/database.rs | 126 +++++++++ tests/common/mod.rs | 120 ++++++++ tests/files/basic-mod.jar | Bin 0 -> 678 bytes tests/files/dummy_data.sql | 67 +++++ tests/project.rs | 183 +++++++++++++ 50 files changed, 1376 insertions(+), 1003 deletions(-) create mode 100644 src/database/redis.rs create mode 100644 src/lib.rs create mode 100644 tests/common/actix.rs create mode 100644 tests/common/database.rs create mode 100644 tests/common/mod.rs create mode 100644 tests/files/basic-mod.jar create mode 100644 tests/files/dummy_data.sql create mode 100644 tests/project.rs diff --git a/src/auth/flows.rs b/src/auth/flows.rs index 8b13524b..03771312 100644 --- a/src/auth/flows.rs +++ b/src/auth/flows.rs @@ -3,16 +3,17 @@ use crate::auth::session::issue_session; use crate::auth::validate::get_user_record_from_bearer_token; use crate::auth::{get_user_from_headers, AuthenticationError}; use crate::database::models::flow_item::Flow; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::ids::random_base62_rng; use crate::models::pats::Scopes; use crate::models::users::{Badges, Role}; -use crate::parse_strings_from_var; use crate::queue::session::AuthQueue; use crate::queue::socket::ActiveSockets; use crate::routes::ApiError; use crate::util::captcha::check_turnstile_captcha; +use crate::util::env::parse_strings_from_var; use crate::util::ext::{get_image_content_type, get_image_ext}; use crate::util::validate::{validation_errors_to_string, RE_URL_SAFE}; use actix_web::web::{scope, Data, Payload, Query, ServiceConfig}; @@ -54,7 +55,7 @@ pub fn config(cfg: &mut ServiceConfig) { ); } -#[derive(Serialize, Deserialize, Default, Eq, PartialEq, Clone, Copy)] +#[derive(Serialize, 
Deserialize, Default, Eq, PartialEq, Clone, Copy, Debug)] #[serde(rename_all = "lowercase")] pub enum AuthProvider { #[default] @@ -84,7 +85,7 @@ impl TempUser { transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, client: &PgPool, file_host: &Arc, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result { if let Some(email) = &self.email { if crate::database::models::User::get_email(email, client) @@ -907,7 +908,7 @@ pub async fn init( req: HttpRequest, Query(info): Query, // callback url client: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let url = url::Url::parse(&info.url).map_err(|_| AuthenticationError::Url)?; @@ -959,7 +960,7 @@ pub async fn ws_init( Query(info): Query, body: Payload, db: Data>, - redis: Data, + redis: Data, ) -> Result { let (res, session, _msg_stream) = actix_ws::handle(&req, body)?; @@ -967,7 +968,7 @@ pub async fn ws_init( mut ws_stream: actix_ws::Session, info: WsInit, db: Data>, - redis: Data, + redis: Data, ) -> Result<(), Closed> { let flow = Flow::OAuth { user_id: None, @@ -1003,7 +1004,7 @@ pub async fn auth_callback( active_sockets: Data>, client: Data, file_host: Data>, - redis: Data, + redis: Data, ) -> Result { let state_string = query .get("state") @@ -1210,7 +1211,7 @@ pub struct DeleteAuthProvider { pub async fn delete_auth_provider( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, delete_provider: web::Json, session_queue: Data, ) -> Result { @@ -1297,7 +1298,7 @@ pub struct NewAccount { pub async fn create_account_with_password( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, new_account: web::Json, ) -> Result { new_account @@ -1414,7 +1415,7 @@ pub struct Login { pub async fn login_password( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, login: web::Json, ) -> Result { if !check_turnstile_captcha(&req, &login.challenge).await? { @@ -1478,7 +1479,7 @@ async fn validate_2fa_code( secret: String, allow_backup: bool, user_id: crate::database::models::UserId, - redis: &deadpool_redis::Pool, + redis: &RedisPool, pool: &PgPool, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, ) -> Result { @@ -1530,7 +1531,7 @@ async fn validate_2fa_code( pub async fn login_2fa( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, login: web::Json, ) -> Result { let flow = Flow::get(&login.flow, &redis) @@ -1577,7 +1578,7 @@ pub async fn login_2fa( pub async fn begin_2fa_flow( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let user = get_user_from_headers( @@ -1616,7 +1617,7 @@ pub async fn begin_2fa_flow( pub async fn finish_2fa_flow( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, login: web::Json, session_queue: Data, ) -> Result { @@ -1739,7 +1740,7 @@ pub struct Remove2FA { pub async fn remove_2fa( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, login: web::Json, session_queue: Data, ) -> Result { @@ -1821,7 +1822,7 @@ pub struct ResetPassword { pub async fn reset_password_begin( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, reset_password: web::Json, ) -> Result { if !check_turnstile_captcha(&req, &reset_password.challenge).await? 
{ @@ -1866,7 +1867,7 @@ pub struct ChangePassword { pub async fn change_password( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, change_password: web::Json, session_queue: Data, ) -> Result { @@ -2007,7 +2008,7 @@ pub struct SetEmail { pub async fn set_email( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, email: web::Json, session_queue: Data, ) -> Result { @@ -2073,7 +2074,7 @@ pub async fn set_email( pub async fn resend_verify_email( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let user = get_user_from_headers( @@ -2118,7 +2119,7 @@ pub struct VerifyEmail { #[post("email/verify")] pub async fn verify_email( pool: Data, - redis: Data, + redis: Data, email: web::Json, ) -> Result { let flow = Flow::get(&email.flow, &redis).await?; @@ -2168,7 +2169,7 @@ pub async fn verify_email( pub async fn subscribe_newsletter( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let user = get_user_from_headers( diff --git a/src/auth/pats.rs b/src/auth/pats.rs index c38f428b..bc56088d 100644 --- a/src/auth/pats.rs +++ b/src/auth/pats.rs @@ -11,6 +11,7 @@ use rand::distributions::Alphanumeric; use rand::Rng; use rand_chacha::rand_core::SeedableRng; use rand_chacha::ChaCha20Rng; +use crate::database::redis::RedisPool; use crate::models::pats::{PersonalAccessToken, Scopes}; use crate::queue::session::AuthQueue; @@ -30,7 +31,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { pub async fn get_pats( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let user = get_user_from_headers( @@ -73,7 +74,7 @@ pub async fn create_pat( req: HttpRequest, info: web::Json, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { info.0 @@ -159,7 +160,7 @@ pub async fn edit_pat( id: web::Path<(String,)>, info: web::Json, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let user = get_user_from_headers( @@ -248,7 +249,7 @@ pub async fn delete_pat( req: HttpRequest, id: web::Path<(String,)>, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let user = get_user_from_headers( diff --git a/src/auth/session.rs b/src/auth/session.rs index 43931aa9..a51e5e5a 100644 --- a/src/auth/session.rs +++ b/src/auth/session.rs @@ -10,6 +10,7 @@ use crate::util::env::parse_var; use actix_web::http::header::AUTHORIZATION; use actix_web::web::{scope, Data, ServiceConfig}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; +use crate::database::redis::RedisPool; use chrono::Utc; use rand::distributions::Alphanumeric; use rand::{Rng, SeedableRng}; @@ -86,7 +87,7 @@ pub async fn issue_session( req: HttpRequest, user_id: UserId, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result { let metadata = get_session_metadata(&req).await?; @@ -132,7 +133,7 @@ pub async fn issue_session( pub async fn list( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let current_user = get_user_from_headers( @@ -167,7 +168,7 @@ pub async fn delete( info: web::Path<(String,)>, req: HttpRequest, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let current_user = get_user_from_headers( @@ -206,7 +207,7 @@ pub async fn delete( pub async fn refresh( req: HttpRequest, pool: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let current_user = 
get_user_from_headers(&req, &**pool, &redis, &session_queue, None) diff --git a/src/auth/validate.rs b/src/auth/validate.rs index 8589e176..b2599e3c 100644 --- a/src/auth/validate.rs +++ b/src/auth/validate.rs @@ -2,6 +2,7 @@ use crate::auth::flows::AuthProvider; use crate::auth::session::get_session_metadata; use crate::auth::AuthenticationError; use crate::database::models::user_item; +use crate::database::redis::RedisPool; use crate::models::pats::Scopes; use crate::models::users::{Role, User, UserId, UserPayoutData}; use crate::queue::session::AuthQueue; @@ -12,7 +13,7 @@ use reqwest::header::{HeaderValue, AUTHORIZATION}; pub async fn get_user_from_headers<'a, E>( req: &HttpRequest, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, session_queue: &AuthQueue, required_scopes: Option<&[Scopes]>, ) -> Result<(Scopes, User), AuthenticationError> @@ -82,7 +83,7 @@ pub async fn get_user_record_from_bearer_token<'a, 'b, E>( req: &HttpRequest, token: Option<&str>, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, session_queue: &AuthQueue, ) -> Result, AuthenticationError> where @@ -163,7 +164,7 @@ where pub async fn check_is_moderator_from_headers<'a, 'b, E>( req: &HttpRequest, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, session_queue: &AuthQueue, ) -> Result where diff --git a/src/database/mod.rs b/src/database/mod.rs index 9c51cd17..b03354bf 100644 --- a/src/database/mod.rs +++ b/src/database/mod.rs @@ -1,4 +1,5 @@ pub mod models; +pub mod redis; mod postgres_database; pub use models::Image; pub use models::Project; diff --git a/src/database/models/categories.rs b/src/database/models/categories.rs index 4a99a750..2d08a6c4 100644 --- a/src/database/models/categories.rs +++ b/src/database/models/categories.rs @@ -1,9 +1,10 @@ +use crate::database::redis::RedisPool; + use super::ids::*; use super::DatabaseError; use chrono::DateTime; use chrono::Utc; use futures::TryStreamExt; -use redis::cmd; use serde::{Deserialize, Serialize}; const TAGS_NAMESPACE: &str = "tags"; @@ -100,17 +101,13 @@ impl Category { pub async fn list<'a, E>( exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:category", TAGS_NAMESPACE)) - .query_async::<_, Option>(&mut redis) - .await? - .and_then(|x| serde_json::from_str::>(&x).ok()); + let res = redis.get::(TAGS_NAMESPACE, "category").await? + .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { return Ok(res); @@ -137,13 +134,7 @@ impl Category { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:category", TAGS_NAMESPACE)) - .arg(serde_json::to_string(&result)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(TAGS_NAMESPACE, "category", serde_json::to_string(&result)?, None).await?; Ok(result) } @@ -169,17 +160,13 @@ impl Loader { pub async fn list<'a, E>( exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:loader", TAGS_NAMESPACE)) - .query_async::<_, Option>(&mut redis) - .await? - .and_then(|x| serde_json::from_str::>(&x).ok()); + let res = redis.get::(TAGS_NAMESPACE, "loader").await? 
+ .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { return Ok(res); @@ -212,13 +199,7 @@ impl Loader { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:loader", TAGS_NAMESPACE)) - .arg(serde_json::to_string(&result)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(TAGS_NAMESPACE, "loader", serde_json::to_string(&result)?, None).await?; Ok(result) } @@ -258,16 +239,12 @@ impl GameVersion { pub async fn list<'a, E>( exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:game_version", TAGS_NAMESPACE)) - .query_async::<_, Option>(&mut redis) - .await? + let res = redis.get::(TAGS_NAMESPACE, "game_version").await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -291,14 +268,7 @@ impl GameVersion { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:game_version", TAGS_NAMESPACE)) - .arg(serde_json::to_string(&result)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.set(TAGS_NAMESPACE, "game_version", serde_json::to_string(&result)?, None).await?; Ok(result) } @@ -306,7 +276,7 @@ impl GameVersion { version_type_option: Option<&str>, major_option: Option, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -408,16 +378,12 @@ impl DonationPlatform { pub async fn list<'a, E>( exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:donation_platform", TAGS_NAMESPACE)) - .query_async::<_, Option>(&mut redis) - .await? + let res = redis.get::(TAGS_NAMESPACE, "donation_platform").await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -440,13 +406,7 @@ impl DonationPlatform { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:donation_platform", TAGS_NAMESPACE)) - .arg(serde_json::to_string(&result)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(TAGS_NAMESPACE, "donation_platform", serde_json::to_string(&result)?, None).await?; Ok(result) } @@ -472,16 +432,12 @@ impl ReportType { pub async fn list<'a, E>( exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:report_type", TAGS_NAMESPACE)) - .query_async::<_, Option>(&mut redis) - .await? + let res = redis.get::(TAGS_NAMESPACE, "report_type").await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -498,13 +454,7 @@ impl ReportType { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:report_type", TAGS_NAMESPACE)) - .arg(serde_json::to_string(&result)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(TAGS_NAMESPACE, "report_type", serde_json::to_string(&result)?, None).await?; Ok(result) } @@ -530,16 +480,12 @@ impl ProjectType { pub async fn list<'a, E>( exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:project_type", TAGS_NAMESPACE)) - .query_async::<_, Option>(&mut redis) - .await? + let res = redis.get::(TAGS_NAMESPACE, "project_type").await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -556,13 +502,7 @@ impl ProjectType { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:project_type", TAGS_NAMESPACE)) - .arg(serde_json::to_string(&result)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(TAGS_NAMESPACE, "project_type", serde_json::to_string(&result)?, None).await?; Ok(result) } @@ -588,16 +528,12 @@ impl SideType { pub async fn list<'a, E>( exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:side_type", TAGS_NAMESPACE)) - .query_async::<_, Option>(&mut redis) - .await? + let res = redis.get::(TAGS_NAMESPACE, "side_type").await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -614,13 +550,7 @@ impl SideType { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:side_type", TAGS_NAMESPACE)) - .arg(serde_json::to_string(&result)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(TAGS_NAMESPACE, "side_type", serde_json::to_string(&result)?, None).await?; Ok(result) } diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index 0500ee81..98ef1574 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -2,6 +2,7 @@ use super::ids::*; use crate::database::models; use crate::database::models::DatabaseError; use crate::models::collections::CollectionStatus; +use crate::database::redis::RedisPool; use chrono::{DateTime, Utc}; use redis::cmd; use serde::{Deserialize, Serialize}; @@ -102,7 +103,7 @@ impl Collection { pub async fn remove( id: CollectionId, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> { let collection = Self::get(id, &mut *transaction, redis).await?; @@ -138,7 +139,7 @@ impl Collection { pub async fn get<'a, 'b, E>( id: CollectionId, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -151,7 +152,7 @@ impl Collection { pub async fn get_many<'a, E>( collection_ids: &[CollectionId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -162,21 +163,12 @@ impl Collection { return Ok(Vec::new()); } - let mut redis = redis.get().await?; - let mut found_collections = Vec::new(); let mut remaining_collections: Vec = collection_ids.to_vec(); if !collection_ids.is_empty() { - let collections = cmd("MGET") - .arg( - collection_ids - .iter() - .map(|x| format!("{}:{}", COLLECTIONS_NAMESPACE, x.0)) - .collect::>(), - ) 
- .query_async::<_, Vec>>(&mut redis) - .await?; + let collections = redis.multi_get::( + COLLECTIONS_NAMESPACE, collection_ids.iter().map(|x| x.0).collect()).await?; for collection in collections { if let Some(collection) = @@ -233,14 +225,7 @@ impl Collection { .await?; for collection in db_collections { - cmd("SET") - .arg(format!("{}:{}", COLLECTIONS_NAMESPACE, collection.id.0)) - .arg(serde_json::to_string(&collection)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.set(COLLECTIONS_NAMESPACE, collection.id.0, serde_json::to_string(&collection)?, None).await?; found_collections.push(collection); } } @@ -250,14 +235,9 @@ impl Collection { pub async fn clear_cache( id: CollectionId, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { - let mut redis = redis.get().await?; - let mut cmd = cmd("DEL"); - - cmd.arg(format!("{}:{}", COLLECTIONS_NAMESPACE, id.0)); - cmd.query_async::<_, ()>(&mut redis).await?; - + redis.delete(COLLECTIONS_NAMESPACE, id.0).await?; Ok(()) } } diff --git a/src/database/models/flow_item.rs b/src/database/models/flow_item.rs index f55fa9b0..4625f0f1 100644 --- a/src/database/models/flow_item.rs +++ b/src/database/models/flow_item.rs @@ -2,6 +2,7 @@ use super::ids::*; use crate::auth::flows::AuthProvider; use crate::database::models::DatabaseError; use chrono::Duration; +use crate::database::redis::RedisPool; use rand::distributions::Alphanumeric; use rand::Rng; use rand_chacha::rand_core::SeedableRng; @@ -40,9 +41,8 @@ impl Flow { pub async fn insert( &self, expires: Duration, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result { - let mut redis = redis.get().await?; let flow = ChaCha20Rng::from_entropy() .sample_iter(&Alphanumeric) @@ -50,40 +50,23 @@ impl Flow { .map(char::from) .collect::(); - cmd("SET") - .arg(format!("{}:{}", FLOWS_NAMESPACE, flow)) - .arg(serde_json::to_string(&self)?) 
- .arg("EX") - .arg(expires.num_seconds()) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.set(FLOWS_NAMESPACE, &flow, serde_json::to_string(&self)?, Some(expires.num_seconds())).await?; Ok(flow) } pub async fn get( id: &str, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> { - let mut redis = redis.get().await?; - - let res = cmd("GET") - .arg(format!("{}:{}", FLOWS_NAMESPACE, id)) - .query_async::<_, Option>(&mut redis) - .await?; - + let res = redis.get::(FLOWS_NAMESPACE, id).await?; Ok(res.and_then(|x| serde_json::from_str(&x).ok())) } pub async fn remove( id: &str, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> { - let mut redis = redis.get().await?; - let mut cmd = cmd("DEL"); - cmd.arg(format!("{}:{}", FLOWS_NAMESPACE, id)); - cmd.query_async::<_, ()>(&mut redis).await?; - + redis.delete(FLOWS_NAMESPACE, id).await?; Ok(Some(())) } } diff --git a/src/database/models/image_item.rs b/src/database/models/image_item.rs index fd6d0abb..17236221 100644 --- a/src/database/models/image_item.rs +++ b/src/database/models/image_item.rs @@ -1,4 +1,5 @@ use super::ids::*; +use crate::database::redis::RedisPool; use crate::{database::models::DatabaseError, models::images::ImageContext}; use chrono::{DateTime, Utc}; use redis::cmd; @@ -58,7 +59,7 @@ impl Image { pub async fn remove( id: ImageId, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> { let image = Self::get(id, &mut *transaction, redis).await?; @@ -161,7 +162,7 @@ impl Image { pub async fn get<'a, 'b, E>( id: ImageId, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -174,7 +175,7 @@ impl Image { pub async fn get_many<'a, E>( image_ids: &[ImageId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -185,7 +186,6 @@ impl Image { return Ok(Vec::new()); } - let mut redis = redis.get().await?; let mut found_images = Vec::new(); let mut remaining_ids = image_ids.to_vec(); @@ -193,16 +193,7 @@ impl Image { let image_ids = image_ids.iter().map(|x| x.0).collect::>(); if !image_ids.is_empty() { - let images = cmd("MGET") - .arg( - image_ids - .iter() - .map(|x| format!("{}:{}", IMAGES_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; - + let images = redis.multi_get::(IMAGES_NAMESPACE, image_ids).await?; for image in images { if let Some(image) = image.and_then(|x| serde_json::from_str::(&x).ok()) { remaining_ids.retain(|x| image.id.0 != x.0); @@ -245,14 +236,7 @@ impl Image { .await?; for image in db_images { - cmd("SET") - .arg(format!("{}:{}", IMAGES_NAMESPACE, image.id.0)) - .arg(serde_json::to_string(&image)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.set(IMAGES_NAMESPACE, image.id.0, serde_json::to_string(&image)?, None).await?; found_images.push(image); } } @@ -262,14 +246,9 @@ impl Image { pub async fn clear_cache( id: ImageId, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { - let mut redis = redis.get().await?; - let mut cmd = cmd("DEL"); - - cmd.arg(format!("{}:{}", IMAGES_NAMESPACE, id.0)); - cmd.query_async::<_, ()>(&mut redis).await?; - + redis.delete(IMAGES_NAMESPACE, id.0).await?; Ok(()) } } diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index f8ff23d1..2d82d582 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -1,5 +1,6 @@ use super::ids::*; use crate::database::models::DatabaseError; +use crate::database::redis::RedisPool; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::pats::Scopes; use chrono::{DateTime, Utc}; @@ -11,7 +12,7 @@ const PATS_TOKENS_NAMESPACE: &str = "pats_tokens"; const PATS_USERS_NAMESPACE: &str = "pats_users"; const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes -#[derive(Deserialize, Serialize)] +#[derive(Deserialize, Serialize, Clone)] pub struct PersonalAccessToken { pub id: PatId, pub name: String, @@ -55,7 +56,7 @@ impl PersonalAccessToken { pub async fn get<'a, E, T: ToString>( id: T, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -68,7 +69,7 @@ impl PersonalAccessToken { pub async fn get_many_ids<'a, E>( pat_ids: &[PatId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -83,7 +84,7 @@ impl PersonalAccessToken { pub async fn get_many<'a, E, T: ToString>( pat_strings: &[T], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -94,7 +95,6 @@ impl PersonalAccessToken { return Ok(Vec::new()); } - let mut redis = redis.get().await?; let mut found_pats = Vec::new(); let mut remaining_strings = pat_strings @@ -108,31 +108,14 @@ impl PersonalAccessToken { .collect::>(); pat_ids.append( - &mut cmd("MGET") - .arg( - pat_strings - .iter() - .map(|x| format!("{}:{}", PATS_TOKENS_NAMESPACE, x.to_string())) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await? + &mut redis.multi_get::(PATS_TOKENS_NAMESPACE, pat_strings.iter().map(|x| x.to_string()).collect()).await? .into_iter() .flatten() - .collect(), + .collect() ); if !pat_ids.is_empty() { - let pats = cmd("MGET") - .arg( - pat_ids - .iter() - .map(|x| format!("{}:{}", PATS_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; - + let pats = redis.multi_get::(PATS_NAMESPACE, pat_ids).await?; for pat in pats { if let Some(pat) = pat.and_then(|x| serde_json::from_str::(&x).ok()) @@ -181,21 +164,8 @@ impl PersonalAccessToken { .await?; for pat in db_pats { - cmd("SET") - .arg(format!("{}:{}", PATS_NAMESPACE, pat.id.0)) - .arg(serde_json::to_string(&pat)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - - cmd("SET") - .arg(format!("{}:{}", PATS_TOKENS_NAMESPACE, pat.access_token)) - .arg(pat.id.0) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(PATS_NAMESPACE, pat.id.0, serde_json::to_string(&pat)?, None); + redis.set(PATS_TOKENS_NAMESPACE, pat.access_token.clone(), pat.id.0, None); found_pats.push(pat); } } @@ -206,15 +176,12 @@ impl PersonalAccessToken { pub async fn get_user_pats<'a, E>( user_id: UserId, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:{}", PATS_USERS_NAMESPACE, user_id.0)) - .query_async::<_, Option>(&mut redis) + let res = redis.get::(PATS_USERS_NAMESPACE, user_id.0) .await? .and_then(|x| serde_json::from_str::>(&x).ok()); @@ -237,42 +204,30 @@ impl PersonalAccessToken { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:{}", PATS_USERS_NAMESPACE, user_id.0)) - .arg(serde_json::to_string(&db_pats)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.set(PATS_USERS_NAMESPACE, user_id.0, serde_json::to_string(&db_pats)?, None).await?; Ok(db_pats) } pub async fn clear_cache( clear_pats: Vec<(Option, Option, Option)>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { if clear_pats.is_empty() { return Ok(()); } - let mut redis = redis.get().await?; - let mut cmd = cmd("DEL"); - for (id, token, user_id) in clear_pats { if let Some(id) = id { - cmd.arg(format!("{}:{}", PATS_NAMESPACE, id.0)); + redis.delete(PATS_NAMESPACE, id.0).await?; } if let Some(token) = token { - cmd.arg(format!("{}:{}", PATS_TOKENS_NAMESPACE, token)); + redis.delete(PATS_TOKENS_NAMESPACE, token).await?; } if let Some(user_id) = user_id { - cmd.arg(format!("{}:{}", PATS_USERS_NAMESPACE, user_id.0)); + redis.delete(PATS_USERS_NAMESPACE, user_id.0).await?; } } - cmd.query_async::<_, ()>(&mut redis).await?; - Ok(()) } diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 81db0e81..3c27d0c4 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -1,14 +1,15 @@ use super::ids::*; use crate::database::models; use crate::database::models::DatabaseError; +use crate::database::redis::RedisPool; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::projects::{MonetizationStatus, ProjectStatus}; use chrono::{DateTime, Utc}; use redis::cmd; use serde::{Deserialize, Serialize}; -const PROJECTS_NAMESPACE: &str = "projects"; -const PROJECTS_SLUGS_NAMESPACE: &str = "projects_slugs"; +pub const PROJECTS_NAMESPACE: &str = "projects"; +pub const PROJECTS_SLUGS_NAMESPACE: &str = "projects_slugs"; const PROJECTS_DEPENDENCIES_NAMESPACE: &str = "projects_dependencies"; const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes @@ -294,7 +295,7 @@ impl Project { pub async fn remove( id: ProjectId, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> { let project = Self::get_id(id, &mut *transaction, redis).await?; @@ -428,7 +429,7 @@ impl Project { pub async fn get<'a, 'b, E>( string: &str, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -441,7 +442,7 @@ 
impl Project { pub async fn get_id<'a, 'b, E>( id: ProjectId, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -454,7 +455,7 @@ impl Project { pub async fn get_many_ids<'a, E>( project_ids: &[ProjectId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -469,19 +470,19 @@ impl Project { pub async fn get_many<'a, E, T: ToString>( project_strings: &[T], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { + let debug_time_0 = std::time::Instant::now(); + use futures::TryStreamExt; if project_strings.is_empty() { return Ok(Vec::new()); } - let mut redis = redis.get().await?; - let mut found_projects = Vec::new(); let mut remaining_strings = project_strings .iter() @@ -494,37 +495,17 @@ impl Project { .collect::>(); project_ids.append( - &mut cmd("MGET") - .arg( - project_strings - .iter() - .map(|x| { - format!( - "{}:{}", - PROJECTS_SLUGS_NAMESPACE, - x.to_string().to_lowercase() - ) - }) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await? + &mut redis.multi_get::(PROJECTS_SLUGS_NAMESPACE, project_strings.iter().map(|x| x.to_string().to_lowercase()).collect()).await? .into_iter() .flatten() .collect(), ); - if !project_ids.is_empty() { - let projects = cmd("MGET") - .arg( - project_ids - .iter() - .map(|x| format!("{}:{}", PROJECTS_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; + let debug_time_1 = std::time::Instant::now(); + println!("Redis time: {:?}", debug_time_1 - debug_time_0); + if !project_ids.is_empty() { + let projects = redis.multi_get::(PROJECTS_NAMESPACE, project_ids).await?; for project in projects { if let Some(project) = project.and_then(|x| serde_json::from_str::(&x).ok()) @@ -540,13 +521,15 @@ impl Project { } } + let debug_time_2 = std::time::Instant::now(); + println!("Redis time: {:?}", debug_time_2 - debug_time_1); + if !remaining_strings.is_empty() { let project_ids_parsed: Vec = remaining_strings .iter() .flat_map(|x| parse_base62(&x.to_string()).ok()) .map(|x| x as i64) .collect(); - let db_projects: Vec = sqlx::query!( " SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows, @@ -583,7 +566,9 @@ impl Project { .try_filter_map(|e| async { Ok(e.right().map(|m| { let id = m.id; - + let debug_time_3 = std::time::Instant::now(); + println!("inner SQL time: {:?}", debug_time_3 - debug_time_2); + QueryProject { inner: Project { id: ProjectId(id), @@ -665,30 +650,20 @@ impl Project { .try_collect::>() .await?; - for project in db_projects { - cmd("SET") - .arg(format!("{}:{}", PROJECTS_NAMESPACE, project.inner.id.0)) - .arg(serde_json::to_string(&project)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + let debug_time_3 = std::time::Instant::now(); + println!("total SQL time: {:?}", debug_time_3 - debug_time_2); + for project in db_projects { + redis.set(PROJECTS_NAMESPACE, project.inner.id.0, serde_json::to_string(&project)?, None).await?; if let Some(slug) = &project.inner.slug { - cmd("SET") - .arg(format!( - "{}:{}", - PROJECTS_SLUGS_NAMESPACE, - slug.to_lowercase() - )) - .arg(project.inner.id.0) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(PROJECTS_SLUGS_NAMESPACE, + slug.to_lowercase(), project.inner.id.0, None).await?; } found_projects.push(project); } + + let debug_time_4 = std::time::Instant::now(); + println!("Redis time: {:?}", debug_time_4 - debug_time_3); } Ok(found_projects) @@ -697,7 +672,7 @@ impl Project { pub async fn get_dependencies<'a, E>( id: ProjectId, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, Option, Option)>, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -706,13 +681,7 @@ impl Project { use futures::stream::TryStreamExt; - let mut redis = redis.get().await?; - - let dependencies = cmd("GET") - .arg(format!("{}:{}", PROJECTS_DEPENDENCIES_NAMESPACE, id.0)) - .query_async::<_, Option>(&mut redis) - .await?; - + let dependencies = redis.get::(PROJECTS_DEPENDENCIES_NAMESPACE, id.0).await?; if let Some(dependencies) = dependencies.and_then(|x| serde_json::from_str::(&x).ok()) { @@ -746,14 +715,7 @@ impl Project { .try_collect::() .await?; - cmd("SET") - .arg(format!("{}:{}", PROJECTS_DEPENDENCIES_NAMESPACE, id.0)) - .arg(serde_json::to_string(&dependencies)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.set(PROJECTS_DEPENDENCIES_NAMESPACE, id.0, serde_json::to_string(&dependencies)?, None).await?; Ok(dependencies) } @@ -811,25 +773,16 @@ impl Project { id: ProjectId, slug: Option, clear_dependencies: Option, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { - let mut redis = redis.get().await?; - let mut cmd = cmd("DEL"); - - cmd.arg(format!("{}:{}", PROJECTS_NAMESPACE, id.0)); + redis.delete(PROJECTS_NAMESPACE, id.0).await?; if let Some(slug) = slug { - cmd.arg(format!( - "{}:{}", - PROJECTS_SLUGS_NAMESPACE, - slug.to_lowercase() - )); + redis.delete(PROJECTS_SLUGS_NAMESPACE, slug.to_lowercase()).await?; } if clear_dependencies.unwrap_or(false) { - cmd.arg(format!("{}:{}", PROJECTS_DEPENDENCIES_NAMESPACE, id.0)); + redis.delete(PROJECTS_DEPENDENCIES_NAMESPACE, id.0).await?; } - cmd.query_async::<_, ()>(&mut redis).await?; - Ok(()) } } diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index e1f1843c..5b9373f2 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -1,5 +1,6 @@ use super::ids::*; use crate::database::models::DatabaseError; +use crate::database::redis::RedisPool; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use chrono::{DateTime, Utc}; use redis::cmd; @@ -83,7 +84,7 @@ impl Session { pub async fn get<'a, E, T: ToString>( id: T, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -96,7 +97,7 @@ impl Session { pub async fn get_id<'a, 'b, E>( id: SessionId, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = 
sqlx::Postgres>, @@ -109,7 +110,7 @@ impl Session { pub async fn get_many_ids<'a, E>( session_ids: &[SessionId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -124,7 +125,7 @@ impl Session { pub async fn get_many<'a, E, T: ToString>( session_strings: &[T], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -135,8 +136,6 @@ impl Session { return Ok(Vec::new()); } - let mut redis = redis.get().await?; - let mut found_sessions = Vec::new(); let mut remaining_strings = session_strings .iter() @@ -149,31 +148,14 @@ impl Session { .collect::>(); session_ids.append( - &mut cmd("MGET") - .arg( - session_strings - .iter() - .map(|x| format!("{}:{}", SESSIONS_IDS_NAMESPACE, x.to_string())) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await? + &mut redis.multi_get::(SESSIONS_IDS_NAMESPACE, session_strings.iter().map(|x| x.to_string()).collect()).await? .into_iter() .flatten() .collect(), ); if !session_ids.is_empty() { - let sessions = cmd("MGET") - .arg( - session_ids - .iter() - .map(|x| format!("{}:{}", SESSIONS_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; - + let sessions = redis.multi_get::(SESSIONS_NAMESPACE, session_ids).await?; for session in sessions { if let Some(session) = session.and_then(|x| serde_json::from_str::(&x).ok()) @@ -225,21 +207,8 @@ impl Session { .await?; for session in db_sessions { - cmd("SET") - .arg(format!("{}:{}", SESSIONS_NAMESPACE, session.id.0)) - .arg(serde_json::to_string(&session)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - - cmd("SET") - .arg(format!("{}:{}", SESSIONS_IDS_NAMESPACE, session.session)) - .arg(session.id.0) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(SESSIONS_NAMESPACE, session.id.0, serde_json::to_string(&session)?, None).await?; + redis.set(SESSIONS_IDS_NAMESPACE, session.session.clone(), session.id.0, None).await?; found_sessions.push(session); } } @@ -250,16 +219,12 @@ impl Session { pub async fn get_user_sessions<'a, E>( user_id: UserId, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.get().await?; - let res = cmd("GET") - .arg(format!("{}:{}", SESSIONS_USERS_NAMESPACE, user_id.0)) - .query_async::<_, Option>(&mut redis) - .await? + let res = redis.get::(SESSIONS_USERS_NAMESPACE, user_id.0).await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -281,42 +246,31 @@ impl Session { .try_collect::>() .await?; - cmd("SET") - .arg(format!("{}:{}", SESSIONS_USERS_NAMESPACE, user_id.0)) - .arg(serde_json::to_string(&db_sessions)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(SESSIONS_USERS_NAMESPACE, user_id.0, serde_json::to_string(&db_sessions)?, None).await?; Ok(db_sessions) } pub async fn clear_cache( clear_sessions: Vec<(Option, Option, Option)>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { if clear_sessions.is_empty() { return Ok(()); } - let mut redis = redis.get().await?; - let mut cmd = cmd("DEL"); - for (id, session, user_id) in clear_sessions { if let Some(id) = id { - cmd.arg(format!("{}:{}", SESSIONS_NAMESPACE, id.0)); + redis.delete(SESSIONS_NAMESPACE, id.0).await?; } if let Some(session) = session { - cmd.arg(format!("{}:{}", SESSIONS_IDS_NAMESPACE, session)); + redis.delete(SESSIONS_IDS_NAMESPACE, session).await?; } if let Some(user_id) = user_id { - cmd.arg(format!("{}:{}", SESSIONS_USERS_NAMESPACE, user_id.0)); + redis.delete(SESSIONS_USERS_NAMESPACE, user_id.0).await?; } } - cmd.query_async::<_, ()>(&mut redis).await?; - Ok(()) } diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index c3ad0021..8d43818d 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -1,5 +1,5 @@ use super::ids::*; -use crate::models::teams::Permissions; +use crate::{models::teams::Permissions, database::redis::RedisPool}; use itertools::Itertools; use redis::cmd; use rust_decimal::Decimal; @@ -99,7 +99,7 @@ impl TeamMember { pub async fn get_from_team_full<'a, 'b, E>( id: TeamId, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, super::DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, @@ -110,7 +110,7 @@ impl TeamMember { pub async fn get_from_team_full_many<'a, E>( team_ids: &[TeamId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, super::DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, @@ -123,19 +123,9 @@ impl TeamMember { let mut team_ids_parsed: Vec = team_ids.iter().map(|x| x.0).collect(); - let mut redis = redis.get().await?; - let mut found_teams = Vec::new(); - let teams = cmd("MGET") - .arg( - team_ids_parsed - .iter() - .map(|x| format!("{}:{}", TEAMS_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; + let teams = redis.multi_get::(TEAMS_NAMESPACE, team_ids_parsed.clone()).await?; for team_raw in teams { if let Some(mut team) = team_raw @@ -183,14 +173,7 @@ impl TeamMember { for (id, members) in &teams.into_iter().group_by(|x| x.team_id) { let mut members = members.collect::>(); - cmd("SET") - .arg(format!("{}:{}", TEAMS_NAMESPACE, id.0)) - .arg(serde_json::to_string(&members)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.set(TEAMS_NAMESPACE, id.0, serde_json::to_string(&members)?, None).await?; found_teams.append(&mut members); } } @@ -200,14 +183,9 @@ impl TeamMember { pub async fn clear_cache( id: TeamId, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), super::DatabaseError> { - let mut redis = redis.get().await?; - cmd("DEL") - .arg(format!("{}:{}", TEAMS_NAMESPACE, id.0)) - .query_async::<_, ()>(&mut redis) - .await?; - + redis.delete(TEAMS_NAMESPACE, id.0).await?; Ok(()) } diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 86ffb7af..b0ae48e6 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -2,6 +2,7 @@ use super::ids::{ProjectId, UserId}; use super::CollectionId; use crate::database::models::DatabaseError; use crate::models::ids::base62_impl::{parse_base62, to_base62}; +use crate::database::redis::RedisPool; use crate::models::users::{Badges, RecipientType, RecipientWallet}; use chrono::{DateTime, Utc}; use redis::cmd; @@ -87,7 +88,7 @@ impl User { pub async fn get<'a, 'b, E>( string: &str, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -100,7 +101,7 @@ impl User { pub async fn get_id<'a, 'b, E>( id: UserId, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -113,7 +114,7 @@ impl User { pub async fn get_many_ids<'a, E>( user_ids: &[UserId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -128,7 +129,7 @@ impl User { pub async fn get_many<'a, E, T: ToString>( users_strings: &[T], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -139,8 +140,6 @@ impl User { return Ok(Vec::new()); } - let mut redis = redis.get().await?; - let mut found_users = Vec::new(); let mut remaining_strings = users_strings .iter() @@ -153,37 +152,14 @@ impl User { .collect::>(); user_ids.append( - &mut cmd("MGET") - .arg( - users_strings - .iter() - .map(|x| { - format!( - "{}:{}", - USER_USERNAMES_NAMESPACE, - x.to_string().to_lowercase() - ) - }) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await? + &mut redis.multi_get::(USER_USERNAMES_NAMESPACE, users_strings.iter().map(|x| x.to_string().to_lowercase()).collect()).await? .into_iter() .flatten() .collect(), ); if !user_ids.is_empty() { - let users = cmd("MGET") - .arg( - user_ids - .iter() - .map(|x| format!("{}:{}", USERS_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; - + let users = redis.multi_get::(USERS_NAMESPACE, user_ids).await?; for user in users { if let Some(user) = user.and_then(|x| serde_json::from_str::(&x).ok()) { remaining_strings.retain(|x| { @@ -252,25 +228,8 @@ impl User { .await?; for user in db_users { - cmd("SET") - .arg(format!("{}:{}", USERS_NAMESPACE, user.id.0)) - .arg(serde_json::to_string(&user)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - - cmd("SET") - .arg(format!( - "{}:{}", - USER_USERNAMES_NAMESPACE, - user.username.to_lowercase() - )) - .arg(user.id.0) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(USERS_NAMESPACE, user.id.0, serde_json::to_string(&user)?, None).await?; + redis.set(USER_USERNAMES_NAMESPACE, user.username.to_lowercase(), user.id.0, None).await?; found_users.push(user); } } @@ -371,24 +330,20 @@ impl User { pub async fn clear_caches( user_ids: &[(UserId, Option)], - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { - let mut redis = redis.get().await?; + let mut cmd = cmd("DEL"); for (id, username) in user_ids { - cmd.arg(format!("{}:{}", USERS_NAMESPACE, id.0)); + redis.delete(USERS_NAMESPACE, id.0).await?; + if let Some(username) = username { - cmd.arg(format!( - "{}:{}", - USER_USERNAMES_NAMESPACE, - username.to_lowercase() - )); + redis.delete(USER_USERNAMES_NAMESPACE, + username.to_lowercase()).await?; } } - cmd.query_async::<_, ()>(&mut redis).await?; - Ok(()) } @@ -396,7 +351,7 @@ impl User { id: UserId, full: bool, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> { let user = Self::get_id(id, &mut *transaction, redis).await?; diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index 9dcdeb3d..564f28f2 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -5,6 +5,7 @@ use chrono::{DateTime, Utc}; use itertools::Itertools; use redis::cmd; use serde::{Deserialize, Serialize}; +use crate::database::redis::RedisPool; use std::cmp::Ordering; use std::collections::HashMap; @@ -263,7 +264,7 @@ impl Version { pub async fn remove_full( id: VersionId, - redis: &deadpool_redis::Pool, + redis: &RedisPool, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, ) -> Result, DatabaseError> { let result = Self::get(id, &mut *transaction, redis).await?; @@ -398,7 +399,7 @@ impl Version { pub async fn get<'a, 'b, E>( id: VersionId, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -411,7 +412,7 @@ impl Version { pub async fn get_many<'a, E>( version_ids: &[VersionId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -424,19 +425,10 @@ impl Version { let mut version_ids_parsed: Vec = version_ids.iter().map(|x| x.0).collect(); - let mut redis = redis.get().await?; let mut found_versions = Vec::new(); - let versions = cmd("MGET") - .arg( - version_ids_parsed - .iter() - .map(|x| format!("{}:{}", VERSIONS_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; + let versions = redis.multi_get::(VERSIONS_NAMESPACE, version_ids_parsed.clone()).await?; for version in versions { if let Some(version) = @@ -588,13 +580,7 @@ impl Version { .await?; for version in db_versions { - cmd("SET") - .arg(format!("{}:{}", VERSIONS_NAMESPACE, version.inner.id.0)) - .arg(serde_json::to_string(&version)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(VERSIONS_NAMESPACE, version.inner.id.0,serde_json::to_string(&version)?,None).await?; found_versions.push(version); } @@ -608,7 +594,7 @@ impl Version { hash: String, version_id: Option, executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, @@ -625,7 +611,7 @@ impl Version { algorithm: String, hashes: &[String], executor: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, @@ -638,19 +624,13 @@ impl Version { let mut file_ids_parsed = hashes.to_vec(); - let mut redis = redis.get().await?; let mut found_files = Vec::new(); - let files = cmd("MGET") - .arg( - file_ids_parsed - .iter() - .map(|hash| format!("{}:{}_{}", VERSION_FILES_NAMESPACE, algorithm, hash)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; + let files = redis.multi_get::(VERSION_FILES_NAMESPACE, file_ids_parsed + .iter() + .map(|hash| format!("{}_{}", algorithm, hash)) + .collect::>()).await?; for file in files { if let Some(mut file) = @@ -726,13 +706,7 @@ impl Version { } for (key, mut files) in save_files { - cmd("SET") - .arg(format!("{}:{}", VERSION_FILES_NAMESPACE, key)) - .arg(serde_json::to_string(&files)?) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; + redis.set(VERSION_FILES_NAMESPACE, key, serde_json::to_string(&files)?, None).await?; found_files.append(&mut files); } @@ -743,22 +717,17 @@ impl Version { pub async fn clear_cache( version: &QueryVersion, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { - let mut redis = redis.get().await?; - - let mut cmd = cmd("DEL"); - cmd.arg(format!("{}:{}", VERSIONS_NAMESPACE, version.inner.id.0)); + redis.delete(VERSIONS_NAMESPACE, version.inner.id.0).await?; for file in &version.files { for (algo, hash) in &file.hashes { - cmd.arg(format!("{}:{}_{}", VERSION_FILES_NAMESPACE, algo, hash)); + redis.delete(VERSION_FILES_NAMESPACE, format!("{}_{}", algo, hash)).await?; } } - cmd.query_async::<_, ()>(&mut redis).await?; - Ok(()) } } diff --git a/src/database/redis.rs b/src/database/redis.rs new file mode 100644 index 00000000..8910e032 --- /dev/null +++ b/src/database/redis.rs @@ -0,0 +1,101 @@ +use std::fmt::Display; +use deadpool_redis::{Runtime, Config}; +use redis::{cmd, ToRedisArgs, FromRedisValue}; +use super::models::DatabaseError; + +const DEFAULT_EXPIRY : i64 = 1800; // 30 minutes + +#[derive(Clone)] +pub struct RedisPool { + pool : deadpool_redis::Pool, + meta_namespace : String +} + +impl RedisPool { + + // initiate a new redis pool + // testing pool uses a hashmap to mimic redis behaviour for very small data sizes (ie: tests) + // PANICS: production pool will panic if redis url is not set + pub fn new(meta_namespace : Option) -> Self { + let redis_pool = Config::from_url(dotenvy::var("REDIS_URL").expect("Redis URL not set")) + .builder() + .expect("Error building Redis pool") + .max_size( + dotenvy::var("DATABASE_MAX_CONNECTIONS") + .ok() + .and_then(|x| x.parse().ok()) + .unwrap_or(10000), + ) + .runtime(Runtime::Tokio1) + .build() + .expect("Redis connection failed"); + + RedisPool { + pool : redis_pool, + meta_namespace: meta_namespace.unwrap_or("".to_string()) + } +} + + pub async fn set(&self, namespace : &str, id : T1, data : T2, expiry : Option) -> Result<(), DatabaseError> + 
where T1 : Display, + T2 : ToRedisArgs + { + let mut redis_connection = self.pool.get().await?; + + cmd("SET") + .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) + .arg(data) + .arg("EX") + .arg(expiry.unwrap_or(DEFAULT_EXPIRY)) + .query_async::<_, ()>(&mut redis_connection) + .await?; + + Ok(()) + } + + pub async fn get(&self, namespace : &str, id : T1) -> Result, DatabaseError> + where T1 : Display, + R: FromRedisValue + { + let mut redis_connection = self.pool.get().await?; + + let res = cmd("GET") + .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) + .query_async::<_, Option>(&mut redis_connection) + .await?; + Ok(res) + } + + pub async fn multi_get(&self, namespace : &str, ids : Vec) -> Result>, DatabaseError> + where T1 : Display, + R: FromRedisValue + { + let mut redis_connection = self.pool.get().await?; + + let res = cmd("MGET") + .arg( + ids + .iter() + .map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x)) + .collect::>(), + ) + .query_async::<_, Vec>>(&mut redis_connection) + .await?; + Ok(res) + } + + pub async fn delete(&self, namespace : &str, id : T1) -> Result<(), DatabaseError> + where T1 : Display + { + let mut redis_connection = self.pool.get().await?; + + cmd("DEL") + .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) + .query_async::<_, ()>(&mut redis_connection) + .await?; + + Ok(()) + } + +} + diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 00000000..94ed45e5 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,365 @@ + + +use std::sync::Arc; + +use actix_web::web; +use chrono::{Utc, DateTime}; +use database::redis::RedisPool; +use log::{warn, info}; +use queue::{session::AuthQueue, socket::ActiveSockets, payouts::PayoutsQueue, analytics::AnalyticsQueue, download::DownloadQueue}; +use scheduler::Scheduler; +use sqlx::Postgres; +use tokio::sync::{Mutex, RwLock}; + +extern crate clickhouse as clickhouse_crate; +use clickhouse_crate::Client; +use util::cors::default_cors; + +use crate::{queue::payouts::process_payout, search::indexing::index_projects, util::env::parse_var}; + +pub mod auth; +pub mod clickhouse; +pub mod database; +pub mod file_hosting; +pub mod models; +pub mod queue; +pub mod ratelimit; +pub mod routes; +pub mod scheduler; +pub mod search; +pub mod util; +pub mod validate; + +#[derive(Clone)] +pub struct Pepper { + pub pepper: String, +} + +#[derive(Clone)] +pub struct LabrinthConfig { + pub pool: sqlx::Pool, + pub redis_pool: RedisPool, + pub clickhouse: Client, + pub file_host: Arc, + pub maxmind: Arc, + pub scheduler: Arc, + pub ip_salt: Pepper, + pub search_config: search::SearchConfig, + pub download_queue: web::Data, + pub session_queue: web::Data, + pub payouts_queue: web::Data>, + pub analytics_queue: Arc, + pub active_sockets: web::Data>, +} + +pub fn app_setup(pool: sqlx::Pool, redis_pool: RedisPool, clickhouse : &mut Client, file_host : Arc, maxmind : Arc) -> LabrinthConfig { + + info!( + "Starting Labrinth on {}", + dotenvy::var("BIND_ADDR").unwrap() + ); + + let search_config = search::SearchConfig { + address: dotenvy::var("MEILISEARCH_ADDR").unwrap(), + key: dotenvy::var("MEILISEARCH_KEY").unwrap(), + }; + + let mut scheduler = scheduler::Scheduler::new(); + + // The interval in seconds at which the local database is indexed + // for searching. Defaults to 1 hour if unset. 
+
+pub fn app_setup(
+    pool: sqlx::Pool<Postgres>,
+    redis_pool: RedisPool,
+    clickhouse: &mut Client,
+    file_host: Arc<dyn file_hosting::FileHost + Send + Sync>,
+    maxmind: Arc<queue::maxmind::MaxMindIndexer>,
+) -> LabrinthConfig {
+    info!(
+        "Starting Labrinth on {}",
+        dotenvy::var("BIND_ADDR").unwrap()
+    );
+
+    let search_config = search::SearchConfig {
+        address: dotenvy::var("MEILISEARCH_ADDR").unwrap(),
+        key: dotenvy::var("MEILISEARCH_KEY").unwrap(),
+    };
+
+    let mut scheduler = scheduler::Scheduler::new();
+
+    // The interval in seconds at which the local database is indexed
+    // for searching. Defaults to 1 hour if unset.
+    let local_index_interval =
+        std::time::Duration::from_secs(parse_var("LOCAL_INDEX_INTERVAL").unwrap_or(3600));
+
+    let pool_ref = pool.clone();
+    let search_config_ref = search_config.clone();
+    scheduler.run(local_index_interval, move || {
+        let pool_ref = pool_ref.clone();
+        let search_config_ref = search_config_ref.clone();
+        async move {
+            info!("Indexing local database");
+            let result = index_projects(pool_ref, &search_config_ref).await;
+            if let Err(e) = result {
+                warn!("Local project indexing failed: {:?}", e);
+            }
+            info!("Done indexing local database");
+        }
+    });
+
+    // Changes statuses of scheduled projects/versions
+    let pool_ref = pool.clone();
+    // TODO: Clear cache when these are run
+    scheduler.run(std::time::Duration::from_secs(60 * 5), move || {
+        let pool_ref = pool_ref.clone();
+        info!("Releasing scheduled versions/projects!");
+
+        async move {
+            let projects_results = sqlx::query!(
+                "
+                UPDATE mods
+                SET status = requested_status
+                WHERE status = $1 AND approved < CURRENT_DATE AND requested_status IS NOT NULL
+                ",
+                crate::models::projects::ProjectStatus::Scheduled.as_str(),
+            )
+            .execute(&pool_ref)
+            .await;
+
+            if let Err(e) = projects_results {
+                warn!("Syncing scheduled releases for projects failed: {:?}", e);
+            }
+
+            let versions_results = sqlx::query!(
+                "
+                UPDATE versions
+                SET status = requested_status
+                WHERE status = $1 AND date_published < CURRENT_DATE AND requested_status IS NOT NULL
+                ",
+                crate::models::projects::VersionStatus::Scheduled.as_str(),
+            )
+            .execute(&pool_ref)
+            .await;
+
+            if let Err(e) = versions_results {
+                warn!("Syncing scheduled releases for versions failed: {:?}", e);
+            }
+
+            info!("Finished releasing scheduled versions/projects");
+        }
+    });
+
+    // Reminding moderators to review projects which have been in the queue longer than 40hr
+    let pool_ref = pool.clone();
+    let redis_ref = redis_pool.clone();
+    let webhook_message_sent = Arc::new(Mutex::new(Vec::<(
+        database::models::ProjectId,
+        DateTime<Utc>,
+    )>::new()));
+
+    scheduler.run(std::time::Duration::from_secs(10 * 60), move || {
+        let pool_ref = pool_ref.clone();
+        let redis_ref = redis_ref.clone();
+        let webhook_message_sent_ref = webhook_message_sent.clone();
+        info!("Checking reviewed projects submitted more than 40hrs ago");
+
+        async move {
+            let do_steps = async {
+                use futures::TryStreamExt;
+
+                let project_ids = sqlx::query!(
+                    "
+                    SELECT id FROM mods
+                    WHERE status = $1 AND queued < NOW() - INTERVAL '40 hours'
+                    ORDER BY updated ASC
+                    ",
+                    crate::models::projects::ProjectStatus::Processing.as_str(),
+                )
+                .fetch_many(&pool_ref)
+                .try_filter_map(|e| async {
+                    Ok(e.right().map(|m| database::models::ProjectId(m.id)))
+                })
+                .try_collect::<Vec<_>>()
+                .await?;
+
+                let mut webhook_message_sent_ref = webhook_message_sent_ref.lock().await;
+
+                webhook_message_sent_ref.retain(|x| Utc::now() - x.1 < chrono::Duration::hours(12));
+
+                for project in project_ids {
+                    if webhook_message_sent_ref.iter().any(|x| x.0 == project) { continue; }
+
+                    if let Ok(webhook_url) =
+                        dotenvy::var("MODERATION_DISCORD_WEBHOOK")
+                    {
+                        util::webhook::send_discord_webhook(
+                            project.into(),
+                            &pool_ref,
+                            &redis_ref,
+                            webhook_url,
+                            Some("<@&783155186491195394> This project has been in the queue for over 40 hours!".to_string()),
+                        )
+                        .await
+                        .ok();
+
+                        webhook_message_sent_ref.push((project, Utc::now()));
+                    }
+                }
+
+                Ok::<(), routes::ApiError>(())
+            };
+
+            if let Err(e) = do_steps.await {
+                warn!(
+                    "Checking reviewed projects submitted more than 40hrs ago failed: {:?}",
+                    e
+                );
+            }
+
+            info!("Finished 
checking reviewed projects submitted more than 40hrs ago"); + } + }); + + scheduler::schedule_versions(&mut scheduler, pool.clone()); + + let download_queue = web::Data::new(DownloadQueue::new()); + + let pool_ref = pool.clone(); + let download_queue_ref = download_queue.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 5), move || { + let pool_ref = pool_ref.clone(); + let download_queue_ref = download_queue_ref.clone(); + + async move { + info!("Indexing download queue"); + let result = download_queue_ref.index(&pool_ref).await; + if let Err(e) = result { + warn!("Indexing download queue failed: {:?}", e); + } + info!("Done indexing download queue"); + } + }); + + let session_queue = web::Data::new(AuthQueue::new()); + + let pool_ref = pool.clone(); + let redis_ref = redis_pool.clone(); + let session_queue_ref = session_queue.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 30), move || { + let pool_ref = pool_ref.clone(); + let redis_ref = redis_ref.clone(); + let session_queue_ref = session_queue_ref.clone(); + + async move { + info!("Indexing sessions queue"); + let result = session_queue_ref.index(&pool_ref, &redis_ref).await; + if let Err(e) = result { + warn!("Indexing sessions queue failed: {:?}", e); + } + info!("Done indexing sessions queue"); + } + }); + + + let reader = maxmind.clone(); + { + let reader_ref = reader.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 60 * 24), move || { + let reader_ref = reader_ref.clone(); + + async move { + info!("Downloading MaxMind GeoLite2 country database"); + let result = reader_ref.index().await; + if let Err(e) = result { + warn!( + "Downloading MaxMind GeoLite2 country database failed: {:?}", + e + ); + } + info!("Done downloading MaxMind GeoLite2 country database"); + } + }); + } + info!("Downloading MaxMind GeoLite2 country database"); + + let analytics_queue = Arc::new(AnalyticsQueue::new()); + { + let client_ref = clickhouse.clone(); + let analytics_queue_ref = analytics_queue.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 5), move || { + let client_ref = client_ref.clone(); + let analytics_queue_ref = analytics_queue_ref.clone(); + + async move { + info!("Indexing analytics queue"); + let result = analytics_queue_ref.index(client_ref).await; + if let Err(e) = result { + warn!("Indexing analytics queue failed: {:?}", e); + } + info!("Done indexing analytics queue"); + } + }); + } + + { + let pool_ref = pool.clone(); + let redis_ref = redis_pool.clone(); + let client_ref = clickhouse.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 60 * 6), move || { + let pool_ref = pool_ref.clone(); + let redis_ref = redis_ref.clone(); + let client_ref = client_ref.clone(); + + async move { + info!("Started running payouts"); + let result = process_payout(&pool_ref, &redis_ref, &client_ref).await; + if let Err(e) = result { + warn!("Payouts run failed: {:?}", e); + } + info!("Done running payouts"); + } + }); + } + + let ip_salt = Pepper { + pepper: models::ids::Base62Id(models::ids::random_base62(11)).to_string(), + }; + + let payouts_queue = web::Data::new(Mutex::new(PayoutsQueue::new())); + let active_sockets = web::Data::new(RwLock::new(ActiveSockets::default())); + + LabrinthConfig { + pool, + redis_pool, + clickhouse: clickhouse.clone(), + file_host, + maxmind, + scheduler: Arc::new(scheduler), + ip_salt, + download_queue, + search_config, + session_queue, + payouts_queue, + analytics_queue, + active_sockets, + } + +} + +pub fn app_config(cfg: &mut web::ServiceConfig, 
labrinth_config: LabrinthConfig) { + cfg.app_data( + web::FormConfig::default().error_handler(|err, _req| { + routes::ApiError::Validation(err.to_string()).into() + }), + ) + .app_data( + web::PathConfig::default().error_handler(|err, _req| { + routes::ApiError::Validation(err.to_string()).into() + }), + ) + .app_data( + web::QueryConfig::default().error_handler(|err, _req| { + routes::ApiError::Validation(err.to_string()).into() + }), + ) + .app_data( + web::JsonConfig::default().error_handler(|err, _req| { + routes::ApiError::Validation(err.to_string()).into() + }), + ) +.app_data(web::Data::new(labrinth_config.redis_pool.clone())) + .app_data(web::Data::new(labrinth_config.pool.clone())) + .app_data(web::Data::new(labrinth_config.file_host.clone())) + .app_data(web::Data::new(labrinth_config.search_config.clone())) + .app_data(labrinth_config.download_queue.clone()) + .app_data(labrinth_config.session_queue.clone()) + .app_data(labrinth_config.payouts_queue.clone()) + .app_data(web::Data::new(labrinth_config.ip_salt.clone())) + .app_data(web::Data::new(labrinth_config.analytics_queue.clone())) + .app_data(web::Data::new(labrinth_config.clickhouse.clone())) + .app_data(web::Data::new(labrinth_config.maxmind.clone())) + .app_data(labrinth_config.active_sockets.clone()) + .configure(routes::v2::config) + .configure(routes::v3::config) + .configure(routes::root_config) + .default_service(web::get().wrap(default_cors()).to(routes::not_found)); +} diff --git a/src/main.rs b/src/main.rs index 427311b4..ca43e952 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,35 +1,15 @@ -use crate::file_hosting::S3Host; -use crate::queue::analytics::AnalyticsQueue; -use crate::queue::download::DownloadQueue; -use crate::queue::payouts::{process_payout, PayoutsQueue}; -use crate::queue::session::AuthQueue; -use crate::queue::socket::ActiveSockets; -use crate::ratelimit::errors::ARError; -use crate::ratelimit::memory::{MemoryStore, MemoryStoreActor}; -use crate::ratelimit::middleware::RateLimiter; -use crate::util::cors::default_cors; -use crate::util::env::{parse_strings_from_var, parse_var}; -use actix_web::{web, App, HttpServer}; -use chrono::{DateTime, Utc}; -use deadpool_redis::{Config, Runtime}; +use labrinth::file_hosting::S3Host; +use labrinth::ratelimit::errors::ARError; +use labrinth::ratelimit::memory::{MemoryStore, MemoryStoreActor}; +use labrinth::ratelimit::middleware::RateLimiter; +use labrinth::{clickhouse, database, file_hosting, queue}; +use labrinth::util::env::{parse_strings_from_var, parse_var}; +use labrinth::database::redis::RedisPool; +use actix_web::{App, HttpServer}; use env_logger::Env; use log::{error, info, warn}; -use search::indexing::index_projects; + use std::sync::Arc; -use tokio::sync::{Mutex, RwLock}; - -mod auth; -mod clickhouse; -mod database; -mod file_hosting; -mod models; -mod queue; -mod ratelimit; -mod routes; -mod scheduler; -mod search; -mod util; -mod validate; #[derive(Clone)] pub struct Pepper { @@ -64,11 +44,6 @@ async fn main() -> std::io::Result<()> { dotenvy::var("BIND_ADDR").unwrap() ); - let search_config = search::SearchConfig { - address: dotenvy::var("MEILISEARCH_ADDR").unwrap(), - key: dotenvy::var("MEILISEARCH_KEY").unwrap(), - }; - database::check_for_migrations() .await .expect("An error occurred while running migrations."); @@ -79,18 +54,7 @@ async fn main() -> std::io::Result<()> { .expect("Database connection failed"); // Redis connector - let redis_pool = Config::from_url(dotenvy::var("REDIS_URL").expect("Redis URL not set")) - .builder() - 
.expect("Error building Redis pool") - .max_size( - dotenvy::var("DATABASE_MAX_CONNECTIONS") - .ok() - .and_then(|x| x.parse().ok()) - .unwrap_or(10000), - ) - .runtime(Runtime::Tokio1) - .build() - .expect("Redis connection failed"); + let redis_pool = RedisPool::new(None); let storage_backend = dotenvy::var("STORAGE_BACKEND").unwrap_or_else(|_| "local".to_string()); @@ -117,255 +81,24 @@ async fn main() -> std::io::Result<()> { _ => panic!("Invalid storage backend specified. Aborting startup!"), }; - let mut scheduler = scheduler::Scheduler::new(); - - // The interval in seconds at which the local database is indexed - // for searching. Defaults to 1 hour if unset. - let local_index_interval = - std::time::Duration::from_secs(parse_var("LOCAL_INDEX_INTERVAL").unwrap_or(3600)); - - let pool_ref = pool.clone(); - let search_config_ref = search_config.clone(); - scheduler.run(local_index_interval, move || { - let pool_ref = pool_ref.clone(); - let search_config_ref = search_config_ref.clone(); - async move { - info!("Indexing local database"); - let result = index_projects(pool_ref, &search_config_ref).await; - if let Err(e) = result { - warn!("Local project indexing failed: {:?}", e); - } - info!("Done indexing local database"); - } - }); - - // Changes statuses of scheduled projects/versions - let pool_ref = pool.clone(); - // TODO: Clear cache when these are run - scheduler.run(std::time::Duration::from_secs(60 * 5), move || { - let pool_ref = pool_ref.clone(); - info!("Releasing scheduled versions/projects!"); - - async move { - let projects_results = sqlx::query!( - " - UPDATE mods - SET status = requested_status - WHERE status = $1 AND approved < CURRENT_DATE AND requested_status IS NOT NULL - ", - crate::models::projects::ProjectStatus::Scheduled.as_str(), - ) - .execute(&pool_ref) - .await; - - if let Err(e) = projects_results { - warn!("Syncing scheduled releases for projects failed: {:?}", e); - } - - let versions_results = sqlx::query!( - " - UPDATE versions - SET status = requested_status - WHERE status = $1 AND date_published < CURRENT_DATE AND requested_status IS NOT NULL - ", - crate::models::projects::VersionStatus::Scheduled.as_str(), - ) - .execute(&pool_ref) - .await; - - if let Err(e) = versions_results { - warn!("Syncing scheduled releases for versions failed: {:?}", e); - } - - info!("Finished releasing scheduled versions/projects"); - } - }); - - // Reminding moderators to review projects which have been in the queue longer than 40hr - let pool_ref = pool.clone(); - let redis_ref = redis_pool.clone(); - let webhook_message_sent = Arc::new(Mutex::new(Vec::<( - database::models::ProjectId, - DateTime, - )>::new())); - - scheduler.run(std::time::Duration::from_secs(10 * 60), move || { - let pool_ref = pool_ref.clone(); - let redis_ref = redis_ref.clone(); - let webhook_message_sent_ref = webhook_message_sent.clone(); - info!("Checking reviewed projects submitted more than 40hrs ago"); - - async move { - let do_steps = async { - use futures::TryStreamExt; - - let project_ids = sqlx::query!( - " - SELECT id FROM mods - WHERE status = $1 AND queued < NOW() - INTERVAL '40 hours' - ORDER BY updated ASC - ", - crate::models::projects::ProjectStatus::Processing.as_str(), - ) - .fetch_many(&pool_ref) - .try_filter_map(|e| async { - Ok(e.right().map(|m| database::models::ProjectId(m.id))) - }) - .try_collect::>() - .await?; - - let mut webhook_message_sent_ref = webhook_message_sent_ref.lock().await; - - webhook_message_sent_ref.retain(|x| Utc::now() - x.1 < 
chrono::Duration::hours(12)); - - for project in project_ids { - if webhook_message_sent_ref.iter().any(|x| x.0 == project) { continue; } - - if let Ok(webhook_url) = - dotenvy::var("MODERATION_DISCORD_WEBHOOK") - { - util::webhook::send_discord_webhook( - project.into(), - &pool_ref, - &redis_ref, - webhook_url, - Some("<@&783155186491195394> This project has been in the queue for over 40 hours!".to_string()), - ) - .await - .ok(); - - webhook_message_sent_ref.push((project, Utc::now())); - } - } - - Ok::<(), routes::ApiError>(()) - }; - - if let Err(e) = do_steps.await { - warn!( - "Checking reviewed projects submitted more than 40hrs ago failed: {:?}", - e - ); - } - - info!("Finished checking reviewed projects submitted more than 40hrs ago"); - } - }); - - scheduler::schedule_versions(&mut scheduler, pool.clone()); - - let download_queue = web::Data::new(DownloadQueue::new()); - - let pool_ref = pool.clone(); - let download_queue_ref = download_queue.clone(); - scheduler.run(std::time::Duration::from_secs(60 * 5), move || { - let pool_ref = pool_ref.clone(); - let download_queue_ref = download_queue_ref.clone(); - - async move { - info!("Indexing download queue"); - let result = download_queue_ref.index(&pool_ref).await; - if let Err(e) = result { - warn!("Indexing download queue failed: {:?}", e); - } - info!("Done indexing download queue"); - } - }); - - let session_queue = web::Data::new(AuthQueue::new()); - - let pool_ref = pool.clone(); - let redis_ref = redis_pool.clone(); - let session_queue_ref = session_queue.clone(); - scheduler.run(std::time::Duration::from_secs(60 * 30), move || { - let pool_ref = pool_ref.clone(); - let redis_ref = redis_ref.clone(); - let session_queue_ref = session_queue_ref.clone(); - - async move { - info!("Indexing sessions queue"); - let result = session_queue_ref.index(&pool_ref, &redis_ref).await; - if let Err(e) = result { - warn!("Indexing sessions queue failed: {:?}", e); - } - info!("Done indexing sessions queue"); - } - }); info!("Initializing clickhouse connection"); - let clickhouse = clickhouse::init_client().await.unwrap(); - - let reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap()); - { - let reader_ref = reader.clone(); - scheduler.run(std::time::Duration::from_secs(60 * 60 * 24), move || { - let reader_ref = reader_ref.clone(); - - async move { - info!("Downloading MaxMind GeoLite2 country database"); - let result = reader_ref.index().await; - if let Err(e) = result { - warn!( - "Downloading MaxMind GeoLite2 country database failed: {:?}", - e - ); - } - info!("Done downloading MaxMind GeoLite2 country database"); - } - }); - } - info!("Downloading MaxMind GeoLite2 country database"); - - let analytics_queue = Arc::new(AnalyticsQueue::new()); - { - let client_ref = clickhouse.clone(); - let analytics_queue_ref = analytics_queue.clone(); - scheduler.run(std::time::Duration::from_secs(60 * 5), move || { - let client_ref = client_ref.clone(); - let analytics_queue_ref = analytics_queue_ref.clone(); - - async move { - info!("Indexing analytics queue"); - let result = analytics_queue_ref.index(client_ref).await; - if let Err(e) = result { - warn!("Indexing analytics queue failed: {:?}", e); - } - info!("Done indexing analytics queue"); - } - }); - } - - { - let pool_ref = pool.clone(); - let redis_ref = redis_pool.clone(); - let client_ref = clickhouse.clone(); - scheduler.run(std::time::Duration::from_secs(60 * 60 * 6), move || { - let pool_ref = pool_ref.clone(); - let redis_ref = redis_ref.clone(); - let client_ref = 
client_ref.clone(); - - async move { - info!("Started running payouts"); - let result = process_payout(&pool_ref, &redis_ref, &client_ref).await; - if let Err(e) = result { - warn!("Payouts run failed: {:?}", e); - } - info!("Done running payouts"); - } - }); - } - - let ip_salt = Pepper { - pepper: models::ids::Base62Id(models::ids::random_base62(11)).to_string(), - }; + let mut clickhouse = clickhouse::init_client().await.unwrap(); - let payouts_queue = web::Data::new(Mutex::new(PayoutsQueue::new())); - let active_sockets = web::Data::new(RwLock::new(ActiveSockets::default())); + let maxmind_reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap()); let store = MemoryStore::new(); info!("Starting Actix HTTP server!"); + let labrinth_config = labrinth::app_setup( + pool.clone(), + redis_pool.clone(), + &mut clickhouse, + file_host.clone(), + maxmind_reader.clone(), + ); + // Init App HttpServer::new(move || { App::new() @@ -392,42 +125,7 @@ async fn main() -> std::io::Result<()> { .with_ignore_key(dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok()), ) .wrap(sentry_actix::Sentry::new()) - .app_data( - web::FormConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), - ) - .app_data( - web::PathConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), - ) - .app_data( - web::QueryConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), - ) - .app_data( - web::JsonConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), - ) - .app_data(web::Data::new(redis_pool.clone())) - .app_data(web::Data::new(pool.clone())) - .app_data(web::Data::new(file_host.clone())) - .app_data(web::Data::new(search_config.clone())) - .app_data(download_queue.clone()) - .app_data(session_queue.clone()) - .app_data(payouts_queue.clone()) - .app_data(web::Data::new(ip_salt.clone())) - .app_data(web::Data::new(analytics_queue.clone())) - .app_data(web::Data::new(clickhouse.clone())) - .app_data(web::Data::new(reader.clone())) - .app_data(active_sockets.clone()) - .configure(routes::v2::config) - .configure(routes::v3::config) - .configure(routes::root_config) - .default_service(web::get().wrap(default_cors()).to(routes::not_found)) + .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())) }) .bind(dotenvy::var("BIND_ADDR").unwrap())? 
.run() diff --git a/src/models/pack.rs b/src/models/pack.rs index c6feac60..fb756dfd 100644 --- a/src/models/pack.rs +++ b/src/models/pack.rs @@ -1,5 +1,4 @@ -use crate::models::projects::SideType; -use crate::parse_strings_from_var; +use crate::{models::projects::SideType, util::env::parse_strings_from_var}; use serde::{Deserialize, Serialize}; use validator::Validate; diff --git a/src/models/users.rs b/src/models/users.rs index 7b1a2a98..4b2a0e90 100644 --- a/src/models/users.rs +++ b/src/models/users.rs @@ -4,7 +4,7 @@ use chrono::{DateTime, Utc}; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] #[serde(from = "Base62Id")] #[serde(into = "Base62Id")] pub struct UserId(pub u64); @@ -35,7 +35,7 @@ impl Default for Badges { } } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct User { pub id: UserId, pub username: String, @@ -57,7 +57,7 @@ pub struct User { pub github_id: Option, } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct UserPayoutData { pub balance: Decimal, pub payout_wallet: Option, @@ -156,7 +156,7 @@ impl From for User { } } -#[derive(Serialize, Deserialize, PartialEq, Eq, Clone)] +#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug)] #[serde(rename_all = "lowercase")] pub enum Role { Developer, diff --git a/src/queue/payouts.rs b/src/queue/payouts.rs index 57df7054..d77d7fa4 100644 --- a/src/queue/payouts.rs +++ b/src/queue/payouts.rs @@ -1,4 +1,4 @@ -use crate::models::projects::MonetizationStatus; +use crate::{models::projects::MonetizationStatus, database::redis::RedisPool}; use crate::routes::ApiError; use crate::util::env::parse_var; use base64::Engine; @@ -203,7 +203,7 @@ impl PayoutsQueue { pub async fn process_payout( pool: &PgPool, - redis: &deadpool_redis::Pool, + redis: &RedisPool, client: &clickhouse::Client, ) -> Result<(), ApiError> { let start: DateTime = DateTime::from_utc( diff --git a/src/queue/session.rs b/src/queue/session.rs index eb76ec39..c5728cb9 100644 --- a/src/queue/session.rs +++ b/src/queue/session.rs @@ -2,6 +2,7 @@ use crate::auth::session::SessionMetadata; use crate::database::models::pat_item::PersonalAccessToken; use crate::database::models::session_item::Session; use crate::database::models::{DatabaseError, PatId, SessionId, UserId}; +use crate::database::redis::RedisPool; use chrono::Utc; use sqlx::PgPool; use std::collections::{HashMap, HashSet}; @@ -45,7 +46,7 @@ impl AuthQueue { pub async fn index( &self, pool: &PgPool, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), DatabaseError> { let session_queue = self.take_sessions().await; let pat_queue = self.take_pats().await; diff --git a/src/ratelimit/memory.rs b/src/ratelimit/memory.rs index 60c4abf0..2e786835 100644 --- a/src/ratelimit/memory.rs +++ b/src/ratelimit/memory.rs @@ -20,7 +20,7 @@ impl MemoryStore { /// /// # Example /// ```rust - /// use actix_ratelimit::MemoryStore; + /// use labrinth::ratelimit::memory::MemoryStore; /// /// let store = MemoryStore::new(); /// ``` diff --git a/src/routes/analytics.rs b/src/routes/analytics.rs index 04203291..1272154e 100644 --- a/src/routes/analytics.rs +++ b/src/routes/analytics.rs @@ -5,12 +5,13 @@ use crate::queue::maxmind::MaxMindIndexer; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::util::env::parse_strings_from_var; -use 
crate::AnalyticsQueue; +use crate::queue::analytics::AnalyticsQueue; use actix_web::{post, web}; use actix_web::{HttpRequest, HttpResponse}; use chrono::Utc; use serde::Deserialize; use sqlx::PgPool; +use crate::database::redis::RedisPool; use std::collections::HashMap; use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr}; use std::sync::Arc; @@ -63,7 +64,7 @@ pub async fn page_view_ingest( session_queue: web::Data, url_input: web::Json, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, None) .await @@ -169,7 +170,7 @@ pub async fn playtime_ingest( session_queue: web::Data, playtime_input: web::Json>, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let (_, user) = get_user_from_headers( &req, diff --git a/src/routes/maven.rs b/src/routes/maven.rs index f8d0927e..f0cae083 100644 --- a/src/routes/maven.rs +++ b/src/routes/maven.rs @@ -5,6 +5,7 @@ use crate::models::pats::Scopes; use crate::models::projects::{ProjectId, VersionId}; use crate::queue::session::AuthQueue; use crate::routes::ApiError; +use crate::database::redis::RedisPool; use crate::{ auth::{get_user_from_headers, is_authorized, is_authorized_version}, database, @@ -71,7 +72,7 @@ pub async fn maven_metadata( req: HttpRequest, params: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let project_id = params.into_inner().0; @@ -156,7 +157,7 @@ async fn find_version( project: &QueryProject, vcoords: &String, pool: &PgPool, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, ApiError> { let id_option = crate::models::ids::base62_impl::parse_base62(vcoords) .ok() @@ -245,7 +246,7 @@ pub async fn version_file( req: HttpRequest, params: web::Path<(String, String, String)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let (project_id, vnum, file) = params.into_inner(); @@ -306,7 +307,7 @@ pub async fn version_file_sha1( req: HttpRequest, params: web::Path<(String, String, String)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let (project_id, vnum, file) = params.into_inner(); @@ -348,7 +349,7 @@ pub async fn version_file_sha512( req: HttpRequest, params: web::Path<(String, String, String)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let (project_id, vnum, file) = params.into_inner(); diff --git a/src/routes/updates.rs b/src/routes/updates.rs index 13c8e397..2960a13f 100644 --- a/src/routes/updates.rs +++ b/src/routes/updates.rs @@ -6,6 +6,7 @@ use sqlx::PgPool; use crate::auth::{filter_authorized_versions, get_user_from_headers, is_authorized}; use crate::database; +use crate::database::redis::RedisPool; use crate::models::pats::Scopes; use crate::models::projects::VersionType; use crate::queue::session::AuthQueue; @@ -21,7 +22,7 @@ pub async fn forge_updates( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { const ERROR: &str = "The specified project does not exist!"; diff --git a/src/routes/v2/admin.rs b/src/routes/v2/admin.rs index 4b0f193f..b388cc77 100644 --- a/src/routes/v2/admin.rs +++ b/src/routes/v2/admin.rs @@ -6,8 +6,9 @@ use crate::queue::analytics::AnalyticsQueue; use crate::queue::maxmind::MaxMindIndexer; use crate::queue::session::AuthQueue; use crate::routes::ApiError; 
+use crate::database::redis::RedisPool; use crate::util::guards::admin_key_guard; -use crate::DownloadQueue; +use crate::queue::download::DownloadQueue; use actix_web::{patch, web, HttpRequest, HttpResponse}; use chrono::Utc; use serde::Deserialize; @@ -37,7 +38,7 @@ pub struct DownloadBody { pub async fn count_download( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, maxmind: web::Data>, analytics_queue: web::Data>, session_queue: web::Data, diff --git a/src/routes/v2/analytics_get.rs b/src/routes/v2/analytics_get.rs index 833aa584..5365e4bd 100644 --- a/src/routes/v2/analytics_get.rs +++ b/src/routes/v2/analytics_get.rs @@ -3,7 +3,7 @@ use chrono::{Duration, NaiveDate, Utc}; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use std::collections::HashMap; - +use crate::database::redis::RedisPool; use crate::{ auth::{filter_authorized_projects, filter_authorized_versions, get_user_from_headers}, database::models::{project_item, user_item, version_item}, @@ -71,7 +71,7 @@ pub async fn playtimes_get( data: web::Json, session_queue: web::Data, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user_option = get_user_from_headers( &req, @@ -146,7 +146,7 @@ pub async fn views_get( data: web::Json, session_queue: web::Data, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user_option = get_user_from_headers( &req, @@ -221,7 +221,7 @@ pub async fn downloads_get( data: web::Json, session_queue: web::Data, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user_option = get_user_from_headers( &req, @@ -299,7 +299,7 @@ pub async fn countries_downloads_get( data: web::Json, session_queue: web::Data, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user_option = get_user_from_headers( &req, @@ -375,7 +375,7 @@ pub async fn countries_views_get( data: web::Json, session_queue: web::Data, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user_option = get_user_from_headers( &req, @@ -437,7 +437,7 @@ async fn filter_allowed_ids( version_ids: Option>, user_option: Option, pool: &web::Data, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(Option>, Option>), ApiError> { if project_ids.is_some() && version_ids.is_some() { return Err(ApiError::InvalidInput( diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs index b4920588..d957fab8 100644 --- a/src/routes/v2/collections.rs +++ b/src/routes/v2/collections.rs @@ -8,6 +8,7 @@ use crate::models::ids::base62_impl::parse_base62; use crate::models::ids::{CollectionId, ProjectId}; use crate::models::pats::Scopes; use crate::queue::session::AuthQueue; +use crate::database::redis::RedisPool; use crate::routes::ApiError; use crate::util::routes::read_from_payload; use crate::util::validate::validation_errors_to_string; @@ -56,7 +57,7 @@ pub async fn collection_create( req: HttpRequest, collection_create_data: web::Json, client: Data, - redis: Data, + redis: Data, session_queue: Data, ) -> Result { let collection_create_data = collection_create_data.into_inner(); @@ -130,7 +131,7 @@ pub async fn collections_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let ids = serde_json::from_str::>(&ids.ids)?; @@ -162,7 +163,7 @@ pub async fn collection_get( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> 
Result { let string = info.into_inner().0; @@ -208,7 +209,7 @@ pub async fn collection_edit( info: web::Path<(String,)>, pool: web::Data, new_collection: web::Json, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user_option = get_user_from_headers( @@ -348,7 +349,7 @@ pub async fn collection_icon_edit( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, mut payload: web::Payload, session_queue: web::Data, @@ -434,7 +435,7 @@ pub async fn delete_collection_icon( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, session_queue: web::Data, ) -> Result { @@ -493,7 +494,7 @@ pub async fn collection_delete( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user_option = get_user_from_headers( diff --git a/src/routes/v2/images.rs b/src/routes/v2/images.rs index a945d1e7..abd57a31 100644 --- a/src/routes/v2/images.rs +++ b/src/routes/v2/images.rs @@ -12,6 +12,7 @@ use crate::routes::v2::threads::is_authorized_thread; use crate::routes::ApiError; use crate::util::routes::read_from_payload; use actix_web::{post, web, HttpRequest, HttpResponse}; +use crate::database::redis::RedisPool; use serde::{Deserialize, Serialize}; use sqlx::PgPool; @@ -41,7 +42,7 @@ pub async fn images_add( file_host: web::Data>, mut payload: web::Payload, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { if let Some(content_type) = crate::util::ext::get_image_content_type(&data.ext) { diff --git a/src/routes/v2/moderation.rs b/src/routes/v2/moderation.rs index e1d6e995..f1d56dd1 100644 --- a/src/routes/v2/moderation.rs +++ b/src/routes/v2/moderation.rs @@ -1,6 +1,7 @@ use super::ApiError; use crate::auth::check_is_moderator_from_headers; use crate::database; +use crate::database::redis::RedisPool; use crate::models::projects::ProjectStatus; use crate::queue::session::AuthQueue; use actix_web::{get, web, HttpRequest, HttpResponse}; @@ -25,7 +26,7 @@ fn default_count() -> i16 { pub async fn get_projects( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, count: web::Query, session_queue: web::Data, ) -> Result { diff --git a/src/routes/v2/notifications.rs b/src/routes/v2/notifications.rs index b0a0940d..80b432fd 100644 --- a/src/routes/v2/notifications.rs +++ b/src/routes/v2/notifications.rs @@ -6,6 +6,7 @@ use crate::models::pats::Scopes; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse}; +use crate::database::redis::RedisPool; use serde::{Deserialize, Serialize}; use sqlx::PgPool; @@ -32,7 +33,7 @@ pub async fn notifications_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -72,7 +73,7 @@ pub async fn notification_get( req: HttpRequest, info: web::Path<(NotificationId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -106,7 +107,7 @@ pub async fn notification_read( req: HttpRequest, info: web::Path<(NotificationId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -149,7 +150,7 @@ pub async 
fn notification_delete( req: HttpRequest, info: web::Path<(NotificationId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -192,7 +193,7 @@ pub async fn notifications_read( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -237,7 +238,7 @@ pub async fn notifications_delete( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( diff --git a/src/routes/v2/project_creation.rs b/src/routes/v2/project_creation.rs index 906f210a..a222040c 100644 --- a/src/routes/v2/project_creation.rs +++ b/src/routes/v2/project_creation.rs @@ -28,6 +28,7 @@ use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; use sqlx::postgres::PgPool; use std::sync::Arc; +use crate::database::redis::RedisPool; use thiserror::Error; use validator::Validate; @@ -279,7 +280,7 @@ pub async fn project_create( req: HttpRequest, mut payload: Multipart, client: Data, - redis: Data, + redis: Data, file_host: Data>, session_queue: Data, ) -> Result { @@ -350,7 +351,7 @@ async fn project_create_inner( file_host: &dyn FileHost, uploaded_files: &mut Vec, pool: &PgPool, - redis: &deadpool_redis::Pool, + redis: &RedisPool, session_queue: &AuthQueue, ) -> Result { // The base URL for files uploaded to backblaze @@ -401,7 +402,6 @@ async fn project_create_inner( "`data` field must come before file fields", ))); } - let mut data = Vec::new(); while let Some(chunk) = field.next().await { data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?); diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs index 73f166b8..ace4f9e1 100644 --- a/src/routes/v2/projects.rs +++ b/src/routes/v2/projects.rs @@ -6,6 +6,7 @@ use crate::database::models::thread_item::ThreadMessageBuilder; use crate::file_hosting::FileHost; use crate::models; use crate::models::ids::base62_impl::parse_base62; +use crate::database::redis::RedisPool; use crate::models::images::ImageContext; use crate::models::notifications::NotificationBody; use crate::models::pats::Scopes; @@ -79,7 +80,7 @@ pub struct RandomProjects { pub async fn random_projects_get( web::Query(count): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { count .validate() @@ -119,7 +120,7 @@ pub async fn projects_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let ids = serde_json::from_str::>(&ids.ids)?; @@ -146,13 +147,14 @@ pub async fn project_get( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let string = info.into_inner().0; + println!("Getting project {}", &string); let project_data = database::models::Project::get(&string, &**pool, &redis).await?; - + println!("Got project data {:?}", &project_data); let user_option = get_user_from_headers( &req, &**pool, @@ -163,8 +165,10 @@ pub async fn project_get( .await .map(|x| x.1) .ok(); + println!("Got user option {:?}", &user_option); if let Some(data) = project_data { + println!("Got project data {:?}", &data); if is_authorized(&data.inner, &user_option, &pool).await? 
{ return Ok(HttpResponse::Ok().json(Project::from(data))); } @@ -177,7 +181,7 @@ pub async fn project_get( pub async fn project_get_check( info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let slug = info.into_inner().0; @@ -203,7 +207,7 @@ pub async fn dependency_list( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let string = info.into_inner().0; @@ -381,7 +385,7 @@ pub async fn project_edit( pool: web::Data, config: web::Data, new_project: web::Json, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -1216,7 +1220,7 @@ pub async fn projects_edit( web::Query(ids): web::Query, pool: web::Data, bulk_edit_project: web::Json, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -1555,7 +1559,7 @@ pub async fn project_schedule( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, scheduling_data: web::Json, ) -> Result { @@ -1653,7 +1657,7 @@ pub async fn project_icon_edit( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, mut payload: web::Payload, session_queue: web::Data, @@ -1758,7 +1762,7 @@ pub async fn delete_project_icon( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, session_queue: web::Data, ) -> Result { @@ -1851,7 +1855,7 @@ pub async fn add_gallery_item( web::Query(item): web::Query, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, mut payload: web::Payload, session_queue: web::Data, @@ -2003,7 +2007,7 @@ pub async fn edit_gallery_item( web::Query(item): web::Query, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -2156,7 +2160,7 @@ pub async fn delete_gallery_item( web::Query(item): web::Query, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, session_queue: web::Data, ) -> Result { @@ -2251,7 +2255,7 @@ pub async fn project_delete( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, config: web::Data, session_queue: web::Data, ) -> Result { @@ -2333,7 +2337,7 @@ pub async fn project_follow( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -2412,7 +2416,7 @@ pub async fn project_unfollow( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( diff --git a/src/routes/v2/reports.rs b/src/routes/v2/reports.rs index 90960e30..fa9673e0 100644 --- a/src/routes/v2/reports.rs +++ b/src/routes/v2/reports.rs @@ -10,6 +10,7 @@ use crate::models::reports::{ItemType, Report}; use crate::models::threads::{MessageBody, ThreadType}; use crate::queue::session::AuthQueue; use crate::routes::ApiError; +use crate::database::redis::RedisPool; use crate::util::img; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use chrono::Utc; @@ -44,7 +45,7 @@ pub 
async fn report_create( req: HttpRequest, pool: web::Data, mut body: web::Payload, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let mut transaction = pool.begin().await?; @@ -235,7 +236,7 @@ fn default_all() -> bool { pub async fn reports( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, count: web::Query, session_queue: web::Data, ) -> Result { @@ -310,7 +311,7 @@ pub async fn reports_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let report_ids: Vec = @@ -345,7 +346,7 @@ pub async fn reports_get( pub async fn report_get( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, info: web::Path<(crate::models::reports::ReportId,)>, session_queue: web::Data, ) -> Result { @@ -385,7 +386,7 @@ pub struct EditReport { pub async fn report_edit( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, info: web::Path<(crate::models::reports::ReportId,)>, session_queue: web::Data, edit_report: web::Json, @@ -492,7 +493,7 @@ pub async fn report_delete( req: HttpRequest, pool: web::Data, info: web::Path<(crate::models::reports::ReportId,)>, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { check_is_moderator_from_headers(&req, &**pool, &redis, &session_queue).await?; diff --git a/src/routes/v2/tags.rs b/src/routes/v2/tags.rs index 9307ae3e..1e1e1265 100644 --- a/src/routes/v2/tags.rs +++ b/src/routes/v2/tags.rs @@ -3,6 +3,7 @@ use crate::database::models; use crate::database::models::categories::{DonationPlatform, ProjectType, ReportType, SideType}; use actix_web::{get, web, HttpResponse}; use chrono::{DateTime, Utc}; +use crate::database::redis::RedisPool; use models::categories::{Category, GameVersion, Loader}; use sqlx::PgPool; @@ -32,7 +33,7 @@ pub struct CategoryData { #[get("category")] pub async fn category_list( pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let results = Category::list(&**pool, &redis) .await? @@ -58,7 +59,7 @@ pub struct LoaderData { #[get("loader")] pub async fn loader_list( pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let mut results = Loader::list(&**pool, &redis) .await? @@ -94,7 +95,7 @@ pub struct GameVersionQuery { pub async fn game_version_list( pool: web::Data, query: web::Query, - redis: web::Data, + redis: web::Data, ) -> Result { let results: Vec = if query.type_.is_some() || query.major.is_some() { GameVersion::list_filter(query.type_.as_deref(), query.major, &**pool, &redis).await? @@ -172,7 +173,7 @@ pub struct DonationPlatformQueryData { #[get("donation_platform")] pub async fn donation_platform_list( pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let results: Vec = DonationPlatform::list(&**pool, &redis) .await? 
@@ -188,7 +189,7 @@ pub async fn donation_platform_list( #[get("report_type")] pub async fn report_type_list( pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let results = ReportType::list(&**pool, &redis).await?; Ok(HttpResponse::Ok().json(results)) @@ -197,7 +198,7 @@ pub async fn report_type_list( #[get("project_type")] pub async fn project_type_list( pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let results = ProjectType::list(&**pool, &redis).await?; Ok(HttpResponse::Ok().json(results)) @@ -206,7 +207,7 @@ pub async fn project_type_list( #[get("side_type")] pub async fn side_type_list( pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let results = SideType::list(&**pool, &redis).await?; Ok(HttpResponse::Ok().json(results)) diff --git a/src/routes/v2/teams.rs b/src/routes/v2/teams.rs index bc1f8cf6..0dd234df 100644 --- a/src/routes/v2/teams.rs +++ b/src/routes/v2/teams.rs @@ -11,6 +11,7 @@ use crate::routes::ApiError; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; +use crate::database::redis::RedisPool; use sqlx::PgPool; pub fn config(cfg: &mut web::ServiceConfig) { @@ -32,7 +33,7 @@ pub async fn team_members_get_project( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let string = info.into_inner().0; @@ -100,7 +101,7 @@ pub async fn team_members_get( req: HttpRequest, info: web::Path<(TeamId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let id = info.into_inner().0; @@ -162,7 +163,7 @@ pub async fn teams_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { use itertools::Itertools; @@ -227,7 +228,7 @@ pub async fn join_team( req: HttpRequest, info: web::Path<(TeamId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let team_id = info.into_inner().0.into(); @@ -304,7 +305,7 @@ pub async fn add_team_member( info: web::Path<(TeamId,)>, pool: web::Data, new_member: web::Json, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let team_id = info.into_inner().0.into(); @@ -427,7 +428,7 @@ pub async fn edit_team_member( info: web::Path<(TeamId, UserId)>, pool: web::Data, edit_member: web::Json, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let ids = info.into_inner(); @@ -526,7 +527,7 @@ pub async fn transfer_ownership( info: web::Path<(TeamId,)>, pool: web::Data, new_owner: web::Json, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let id = info.into_inner().0; @@ -607,7 +608,7 @@ pub async fn remove_team_member( req: HttpRequest, info: web::Path<(TeamId, UserId)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let ids = info.into_inner(); diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs index c2e6c096..a1990101 100644 --- a/src/routes/v2/threads.rs +++ b/src/routes/v2/threads.rs @@ -9,6 +9,7 @@ use crate::file_hosting::FileHost; use crate::models::ids::ThreadMessageId; use crate::models::images::{Image, ImageContext}; use crate::models::notifications::NotificationBody; +use crate::database::redis::RedisPool; use crate::models::pats::Scopes; use 
crate::models::projects::ProjectStatus; use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType}; @@ -83,7 +84,7 @@ pub async fn filter_authorized_threads( threads: Vec, user: &User, pool: &web::Data, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, ApiError> { let user_id: database::models::UserId = user.id.into(); @@ -225,7 +226,7 @@ pub async fn thread_get( req: HttpRequest, info: web::Path<(ThreadId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let string = info.into_inner().0.into(); @@ -276,7 +277,7 @@ pub async fn threads_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -313,7 +314,7 @@ pub async fn thread_send_message( info: web::Path<(ThreadId,)>, pool: web::Data, new_message: web::Json, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -508,7 +509,7 @@ pub async fn thread_send_message( pub async fn moderation_inbox( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = check_is_moderator_from_headers(&req, &**pool, &redis, &session_queue).await?; @@ -536,7 +537,7 @@ pub async fn thread_read( req: HttpRequest, info: web::Path<(ThreadId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { check_is_moderator_from_headers(&req, &**pool, &redis, &session_queue).await?; @@ -565,7 +566,7 @@ pub async fn message_delete( req: HttpRequest, info: web::Path<(ThreadMessageId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, file_host: web::Data>, ) -> Result { diff --git a/src/routes/v2/users.rs b/src/routes/v2/users.rs index 6adfe6a8..bee91fb1 100644 --- a/src/routes/v2/users.rs +++ b/src/routes/v2/users.rs @@ -6,6 +6,7 @@ use crate::models::notifications::Notification; use crate::models::pats::Scopes; use crate::models::projects::Project; use crate::models::users::{Badges, RecipientType, RecipientWallet, Role, UserId}; +use crate::database::redis::RedisPool; use crate::queue::payouts::{PayoutAmount, PayoutItem, PayoutsQueue}; use crate::queue::session::AuthQueue; use crate::routes::ApiError; @@ -46,7 +47,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { pub async fn user_auth_get( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let (scopes, mut user) = get_user_from_headers( @@ -88,7 +89,7 @@ pub struct UserIds { pub async fn users_get( web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user_ids = serde_json::from_str::>(&ids.ids)?; @@ -103,7 +104,7 @@ pub async fn users_get( pub async fn user_get( info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, ) -> Result { let user_data = User::get(&info.into_inner().0, &**pool, &redis).await?; @@ -120,7 +121,7 @@ pub async fn projects_list( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -164,7 +165,7 @@ pub async fn collections_list( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -250,7 +251,7 @@ 
pub async fn user_edit( info: web::Path<(String,)>, new_user: web::Json, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let (scopes, user) = get_user_from_headers( @@ -471,7 +472,7 @@ pub async fn user_icon_edit( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, mut payload: web::Payload, session_queue: web::Data, @@ -560,7 +561,7 @@ pub async fn user_delete( info: web::Path<(String,)>, pool: web::Data, removal_type: web::Query, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -608,7 +609,7 @@ pub async fn user_follows( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -664,7 +665,7 @@ pub async fn user_notifications( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -712,7 +713,7 @@ pub async fn user_payouts( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -797,7 +798,7 @@ pub async fn user_payouts_request( pool: web::Data, data: web::Json, payouts_queue: web::Data>, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let mut payouts_queue = payouts_queue.lock().await; diff --git a/src/routes/v2/version_creation.rs b/src/routes/v2/version_creation.rs index 7a398e2f..dc6af30b 100644 --- a/src/routes/v2/version_creation.rs +++ b/src/routes/v2/version_creation.rs @@ -5,6 +5,7 @@ use crate::database::models::version_item::{ DependencyBuilder, VersionBuilder, VersionFileBuilder, }; use crate::database::models::{self, image_item}; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::images::{Image, ImageContext, ImageId}; use crate::models::notifications::NotificationBody; @@ -89,7 +90,7 @@ pub async fn version_create( req: HttpRequest, mut payload: Multipart, client: Data, - redis: Data, + redis: Data, file_host: Data>, session_queue: Data, ) -> Result { @@ -129,7 +130,7 @@ async fn version_create_inner( req: HttpRequest, payload: &mut Multipart, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, file_host: &dyn FileHost, uploaded_files: &mut Vec, pool: &PgPool, @@ -490,7 +491,7 @@ pub async fn upload_file_to_version( url_data: web::Path<(VersionId,)>, mut payload: Multipart, client: Data, - redis: Data, + redis: Data, file_host: Data>, session_queue: web::Data, ) -> Result { @@ -534,7 +535,7 @@ async fn upload_file_to_version_inner( payload: &mut Multipart, client: Data, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: Data, + redis: Data, file_host: &dyn FileHost, uploaded_files: &mut Vec, version_id: models::VersionId, diff --git a/src/routes/v2/version_file.rs b/src/routes/v2/version_file.rs index 569f8935..63a3575b 100644 --- a/src/routes/v2/version_file.rs +++ b/src/routes/v2/version_file.rs @@ -6,6 +6,7 @@ use crate::auth::{ use crate::models::ids::VersionId; use crate::models::pats::Scopes; use crate::models::projects::VersionType; +use crate::database::redis::RedisPool; use crate::models::teams::Permissions; use crate::queue::session::AuthQueue; use crate::{database, 
models}; @@ -49,7 +50,7 @@ pub async fn get_version_from_hash( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, hash_query: web::Query, session_queue: web::Data, ) -> Result { @@ -102,7 +103,7 @@ pub async fn download_version( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, hash_query: web::Query, session_queue: web::Data, ) -> Result { @@ -152,7 +153,7 @@ pub async fn delete_file( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, hash_query: web::Query, session_queue: web::Data, ) -> Result { @@ -255,7 +256,7 @@ pub async fn get_update_from_hash( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, hash_query: web::Query, update_data: web::Json, session_queue: web::Data, @@ -333,7 +334,7 @@ pub struct FileHashes { pub async fn get_versions_from_hashes( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, file_data: web::Json, session_queue: web::Data, ) -> Result { @@ -381,7 +382,7 @@ pub async fn get_versions_from_hashes( pub async fn get_projects_from_hashes( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, file_data: web::Json, session_queue: web::Data, ) -> Result { @@ -439,7 +440,7 @@ pub struct ManyUpdateData { pub async fn update_files( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, update_data: web::Json, session_queue: web::Data, ) -> Result { @@ -539,7 +540,7 @@ pub struct ManyFileUpdateData { pub async fn update_individual_files( req: HttpRequest, pool: web::Data, - redis: web::Data, + redis: web::Data, update_data: web::Json, session_queue: web::Data, ) -> Result { diff --git a/src/routes/v2/versions.rs b/src/routes/v2/versions.rs index ee949a6c..ada51517 100644 --- a/src/routes/v2/versions.rs +++ b/src/routes/v2/versions.rs @@ -3,6 +3,7 @@ use crate::auth::{ filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version, }; use crate::database; +use crate::database::redis::RedisPool; use crate::database::models::image_item; use crate::models; use crate::models::ids::base62_impl::parse_base62; @@ -49,7 +50,7 @@ pub async fn version_list( info: web::Path<(String,)>, web::Query(filters): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let string = info.into_inner().0; @@ -170,7 +171,7 @@ pub async fn version_project_get( req: HttpRequest, info: web::Path<(String, String)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let id = info.into_inner(); @@ -221,7 +222,7 @@ pub async fn versions_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let version_ids = serde_json::from_str::>(&ids.ids)? 
@@ -251,7 +252,7 @@ pub async fn version_get( req: HttpRequest, info: web::Path<(models::ids::VersionId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let id = info.into_inner().0; @@ -318,7 +319,7 @@ pub async fn version_edit( req: HttpRequest, info: web::Path<(models::ids::VersionId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, new_version: web::Json, session_queue: web::Data, ) -> Result { @@ -717,7 +718,7 @@ pub async fn version_schedule( req: HttpRequest, info: web::Path<(models::ids::VersionId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, scheduling_data: web::Json, session_queue: web::Data, ) -> Result { @@ -792,7 +793,7 @@ pub async fn version_delete( req: HttpRequest, info: web::Path<(models::ids::VersionId,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( diff --git a/src/util/img.rs b/src/util/img.rs index 99574e22..b8db684c 100644 --- a/src/util/img.rs +++ b/src/util/img.rs @@ -1,7 +1,7 @@ use color_thief::ColorFormat; use image::imageops::FilterType; use image::{EncodableLayout, ImageError}; - +use crate::database::redis::RedisPool; use crate::database; use crate::database::models::image_item; use crate::models::images::ImageContext; @@ -26,7 +26,7 @@ pub async fn delete_unused_images( context: ImageContext, reference_strings: Vec<&str>, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), ApiError> { let uploaded_images = database::models::Image::get_many_contexted(context, transaction).await?; diff --git a/src/util/webhook.rs b/src/util/webhook.rs index 040b2eb0..84e46242 100644 --- a/src/util/webhook.rs +++ b/src/util/webhook.rs @@ -3,6 +3,7 @@ use crate::models::projects::ProjectId; use crate::routes::ApiError; use chrono::{DateTime, Utc}; use serde::Serialize; +use crate::database::redis::RedisPool; use sqlx::PgPool; use std::usize; @@ -72,7 +73,7 @@ const PLUGIN_LOADERS: &[&str] = &[ pub async fn send_discord_webhook( project_id: ProjectId, pool: &PgPool, - redis: &deadpool_redis::Pool, + redis: &RedisPool, webhook_url: String, message: Option, ) -> Result<(), ApiError> { diff --git a/tests/common/actix.rs b/tests/common/actix.rs new file mode 100644 index 00000000..d9c9930e --- /dev/null +++ b/tests/common/actix.rs @@ -0,0 +1,51 @@ +use bytes::{BytesMut, Bytes}; + +pub struct MultipartSegment { + pub name : String, + pub filename : Option, + pub content_type : Option, + pub data : MultipartSegmentData +} + +pub enum MultipartSegmentData { + Text(String), + Binary(Vec) +} + +pub fn generate_multipart(data: Vec) -> (String, Bytes) { + let mut boundary = String::from("----WebKitFormBoundary"); + boundary.push_str(&rand::random::().to_string()); + boundary.push_str(&rand::random::().to_string()); + boundary.push_str(&rand::random::().to_string()); + + let mut payload = BytesMut::new(); + + for segment in data { + payload.extend_from_slice(format!( + "--{boundary}\r\nContent-Disposition: form-data; name=\"{name}\"", + boundary = boundary, + name = segment.name + ).as_bytes()); + + if let Some(filename) = &segment.filename { + payload.extend_from_slice(format!("; filename=\"{filename}\"", filename = filename).as_bytes()); + } + if let Some(content_type) = &segment.content_type { + payload.extend_from_slice(format!("\r\nContent-Type: {content_type}", content_type = content_type).as_bytes()); + } + payload.extend_from_slice(b"\r\n\r\n"); + 
+ match &segment.data { + MultipartSegmentData::Text(text) => { + payload.extend_from_slice(text.as_bytes()); + }, + MultipartSegmentData::Binary(binary) => { + payload.extend_from_slice(binary); + } + } + payload.extend_from_slice(b"\r\n"); + } + payload.extend_from_slice(format!("--{boundary}--\r\n", boundary = boundary).as_bytes()); + + (boundary, Bytes::from(payload)) +} diff --git a/tests/common/database.rs b/tests/common/database.rs new file mode 100644 index 00000000..76eccb5b --- /dev/null +++ b/tests/common/database.rs @@ -0,0 +1,126 @@ + +use labrinth::database::{redis::RedisPool, models::project_item::PROJECTS_NAMESPACE}; +use sqlx::{PgPool, postgres::PgPoolOptions, Executor}; +use url::Url; +use std::time::Duration; + +pub struct TemporaryDatabase { + pub pool: PgPool, + pub redis_pool : RedisPool, + pub database_name: String, +} + +impl TemporaryDatabase { + + pub async fn create() -> Self { + let temp_database_name = generate_random_database_name(); + println!("Creating temporary database: {}", &temp_database_name); + + let database_url = dotenvy::var("DATABASE_URL").expect("No database URL"); + let mut url = Url::parse(&database_url).expect("Invalid database URL"); + let pool = PgPool::connect(&database_url).await + .expect("Connection to database failed"); + + // Create the temporary database + let create_db_query = format!("CREATE DATABASE {}", &temp_database_name); + + sqlx::query(&create_db_query) + .execute(&pool) + .await + .expect("Database creation failed"); + + pool.close().await; + + + // Modify the URL to switch to the temporary database + url.set_path(&format!("/{}", &temp_database_name)); + let temp_db_url = url.to_string(); + + let pool = PgPoolOptions::new() + .min_connections(0) + .max_connections(4) + .max_lifetime(Some(Duration::from_secs(60 * 60))).connect(&temp_db_url).await + .expect("Connection to temporary database failed"); + + // Performs migrations + let migrations = sqlx::migrate!("./migrations"); + migrations.run(&pool).await.expect("Migrations failed"); + + // Gets new Redis pool + let redis_pool = RedisPool::new(Some(temp_database_name.clone())); + + Self { + pool, + database_name: temp_database_name, + redis_pool + } + } + + pub async fn create_with_dummy() -> Self { + let db = Self::create().await; + db.add_dummy_data().await; + db + } + + pub async fn cleanup(mut self) { + let database_url = dotenvy::var("DATABASE_URL").expect("No database URL"); + self.pool.close().await; + + self.pool = PgPool::connect(&database_url) + .await + .expect("Connection to main database failed"); + + // Forcibly terminate all existing connections to this version of the temporary database + // We are done and deleting it, so we don't need them anymore + let terminate_query = format!( + "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE datname = '{}' AND pid <> pg_backend_pid()", + &self.database_name + ); + sqlx::query(&terminate_query).execute(&self.pool).await.unwrap(); + + // Execute the deletion query asynchronously + let drop_db_query = format!("DROP DATABASE IF EXISTS {}", &self.database_name); + sqlx::query(&drop_db_query) + .execute(&self.pool) + .await + .expect("Database deletion failed"); + } + + /* + Adds the following dummy data to the database: + - 5 users (admin, mod, user, friend, enemy) + - Admin and mod have special powers, the others do not + - User is our mock user. 
Friend and enemy can be used to simulate a collaborator who may be granted permissions on user's project,
+        whereas enemy might be banned or otherwise denied permission. (These roles are arbitrary and depend on the test.)
+    - PATs for each of the five users, with full privileges (for testing purposes).
+        - 'mrp_patadmin' for admin, 'mrp_patuser' for user, etc.
+    - 1 game version (1.20.1)
+    - 1 dummy project called 'testslug' with the following properties:
+        - 1 team (id 100)
+        - 1 team member (id 200: 'user', the owner)
+        - 1 mod (id 1000: 'testslug')
+        - 1 version (id 1100)
+        - 1 thread (id 30)
+
+    This is a test function, so it panics on error.
+    */
+    pub async fn add_dummy_data(&self) {
+        let pool = &self.pool.clone();
+        pool.execute(include_str!("../files/dummy_data.sql")).await.unwrap();
+    }
+}
+
+
+fn generate_random_database_name() -> String {
+    // Generate a unique name for the temporary test database:
+    // "labrinth_tests_db_" followed by a random 6-digit suffix, so
+    // concurrently running tests never collide.
+    let mut database_name = String::from("labrinth_tests_db_");
+    database_name.push_str(&rand::random::().to_string()[..6]);
+    database_name
+}
+
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
new file mode 100644
index 00000000..4b1e24f1
--- /dev/null
+++ b/tests/common/mod.rs
@@ -0,0 +1,120 @@
+
+use std::sync::Arc;
+use labrinth::{LabrinthConfig,
+    util::env::{parse_strings_from_var, parse_var},
+    file_hosting, queue};
+use labrinth::clickhouse;
+
+use self::database::TemporaryDatabase;
+
+pub mod actix;
+pub mod database;
+
+pub async fn setup(db : &TemporaryDatabase) -> LabrinthConfig {
+    println!("Setting up labrinth config");
+
+    dotenvy::dotenv().ok();
+
+    if check_test_vars() {
+        println!("Some environment variables are missing!");
+    }
+
+    let pool = db.pool.clone();
+    let redis_pool = db.redis_pool.clone();
+    let file_host: Arc = Arc::new(file_hosting::MockHost::new());
+    let mut clickhouse = clickhouse::init_client().await.unwrap();
+
+    let maxmind_reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap());
+
+    labrinth::app_setup(
+        pool.clone(),
+        redis_pool.clone(),
+        &mut clickhouse,
+        file_host.clone(),
+        maxmind_reader.clone(),
+    )
+}
+
+// This is so that env vars not used immediately don't panic at runtime
+fn check_test_vars() -> bool {
+    let mut failed = false;
+
+    fn check_var(var: &'static str) -> bool {
+        let check = parse_var::(var).is_none();
+        if check {
+            println!(
+                "Variable `{}` missing in dotenv or not of type `{}`",
+                var,
+                std::any::type_name::()
+            );
+        }
+        check
+    }
+
+    failed |= check_var::("DATABASE_URL");
+    failed |= check_var::("MEILISEARCH_ADDR");
+    failed |= check_var::("MEILISEARCH_KEY");
+    failed |= check_var::("BIND_ADDR");
+    failed |= check_var::("SELF_ADDR");
+
+    failed |= check_var::("MOCK_FILE_PATH");
+
+    failed |= check_var::("LOCAL_INDEX_INTERVAL");
+    failed |= check_var::("VERSION_INDEX_INTERVAL");
+
+    if parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").is_none() {
+        println!("Variable `WHITELISTED_MODPACK_DOMAINS` missing in dotenv or not a json array of strings");
+        failed |= true;
+    }
+
+    if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() {
+        println!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings");
+        failed |= true;
+    }
+
+    failed |= check_var::("PAYPAL_API_URL");
+    failed |= check_var::("PAYPAL_CLIENT_ID");
+    failed |= check_var::("PAYPAL_CLIENT_SECRET");
+
+    failed |=
check_var::("GITHUB_CLIENT_ID"); + failed |= check_var::("GITHUB_CLIENT_SECRET"); + failed |= check_var::("GITLAB_CLIENT_ID"); + failed |= check_var::("GITLAB_CLIENT_SECRET"); + failed |= check_var::("DISCORD_CLIENT_ID"); + failed |= check_var::("DISCORD_CLIENT_SECRET"); + failed |= check_var::("MICROSOFT_CLIENT_ID"); + failed |= check_var::("MICROSOFT_CLIENT_SECRET"); + failed |= check_var::("GOOGLE_CLIENT_ID"); + failed |= check_var::("GOOGLE_CLIENT_SECRET"); + failed |= check_var::("STEAM_API_KEY"); + + failed |= check_var::("TURNSTILE_SECRET"); + + failed |= check_var::("SMTP_USERNAME"); + failed |= check_var::("SMTP_PASSWORD"); + failed |= check_var::("SMTP_HOST"); + + failed |= check_var::("SITE_VERIFY_EMAIL_PATH"); + failed |= check_var::("SITE_RESET_PASSWORD_PATH"); + + failed |= check_var::("BEEHIIV_PUBLICATION_ID"); + failed |= check_var::("BEEHIIV_API_KEY"); + + if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() { + println!( + "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings" + ); + failed |= true; + } + + failed |= check_var::("CLICKHOUSE_URL"); + failed |= check_var::("CLICKHOUSE_USER"); + failed |= check_var::("CLICKHOUSE_PASSWORD"); + failed |= check_var::("CLICKHOUSE_DATABASE"); + + failed |= check_var::("MAXMIND_LICENSE_KEY"); + + failed |= check_var::("PAYOUTS_BUDGET"); + + failed +} diff --git a/tests/files/basic-mod.jar b/tests/files/basic-mod.jar new file mode 100644 index 0000000000000000000000000000000000000000..0987832e94683635ec167c6d5e2960b55aa02e4b GIT binary patch literal 678 zcmWIWW@Zs#U|`^2aF}KqzV5!Tz%L;04I={sKafsKOe)Gu*2~RL(aS2%&+84?op;!P z=gwczwr3%o)@&Ke^<+1(#&+?nIupgB6eRFe!gWIZdO62Ses9b6Sj$w}l-I65V)fw} z?;?fCx4s=-wZS57g^)*3somWY(-*yRM$s8Ya&^+~0@8*{R{v~uta|ih!m>1j;vyN< z?)Q~vj_3%4wzX?TFH6Ze^=aqHK3f*!tEPHO6>fj6iWGZW+2e6C^YlKo>3TNq+@IvN zyUx^GZ2ovDOYS0*r)}c*UHP^bwzVB$vAR;$%4U_A^2uRo^CowljH^pT_A@=N*?Ijl z+skj=LZ_<^J$csuTX(m1z}4c6^sC|Q3qGcc3EjxBd;RSBa!30-xvcJYmui^<;K8)r zVr|%D3zN?~fWeap3?@+E`MQQU>U#RQ>H9kRdAhjjMt3PgSwl!@VEB80o`aR_zV;N_Qul{)~>DxWld0hMQm`}#OPPcDA|04LTQ*Y9CiJ&KH z8fW?YbrxGrI4t1tc$sdE%jylWLA%*@S4WFIX1bj7y_%q9JIOg9_q zCR)ypFS){=v*JtyTadf%;=asDYX1Yg8JR@DzQUd)fZk Date: Sun, 24 Sep 2023 12:25:41 -0700 Subject: [PATCH 02/16] some fixes; github action --- .github/workflows/tests.yml | 22 +++- src/database/models/categories.rs | 1 - src/database/redis.rs | 6 + tests/common/database.rs | 2 +- tests/files/basic-mod-different.jar | Bin 0 -> 1318 bytes tests/files/dummy_data.sql | 4 + tests/project.rs | 189 +++++++++++++++++++--------- 7 files changed, 157 insertions(+), 67 deletions(-) create mode 100644 tests/files/basic-mod-different.jar diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3757ded0..311a0bd0 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -21,23 +21,34 @@ jobs: steps: - uses: actions/checkout@v2 + + # Start Docker Compose + - name: Start Docker Compose + run: docker-compose up -d + - uses: actions-rs/toolchain@v1 name: Install toolchain with: profile: minimal toolchain: ${{ matrix.rust }} override: true - - name: Cache build artifacts - id: cache-build + + # Cache dependencies and build artifacts + - name: Cache build artifacts and dependencies uses: actions/cache@v2 with: - path: target/** - key: ${{ runner.os }}-build-cache-${{ matrix.rust }} + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + - uses: actions-rs/cargo@v1 with: command: build env: SQLX_OFFLINE: true + 
- uses: actions-rs/cargo@v1 with: command: test @@ -50,4 +61,5 @@ jobs: S3_URL: ${{ secrets.S3_URL }} S3_REGION: ${{ secrets.S3_REGION }} S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }} - SQLX_OFFLINE: true \ No newline at end of file + SQLX_OFFLINE: false + DATABASE_URL: postgresql://labrinth:labrinth@localhost/labrinth diff --git a/src/database/models/categories.rs b/src/database/models/categories.rs index 2d08a6c4..bb470046 100644 --- a/src/database/models/categories.rs +++ b/src/database/models/categories.rs @@ -8,7 +8,6 @@ use futures::TryStreamExt; use serde::{Deserialize, Serialize}; const TAGS_NAMESPACE: &str = "tags"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes pub struct ProjectType { pub id: ProjectTypeId, diff --git a/src/database/redis.rs b/src/database/redis.rs index 8910e032..f79bca64 100644 --- a/src/database/redis.rs +++ b/src/database/redis.rs @@ -42,6 +42,7 @@ impl RedisPool { { let mut redis_connection = self.pool.get().await?; + println!("SET: {}_{}:{}", self.meta_namespace, namespace, id); cmd("SET") .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) .arg(data) @@ -59,6 +60,7 @@ impl RedisPool { { let mut redis_connection = self.pool.get().await?; + println!("GET: {}_{}:{}", self.meta_namespace, namespace, id); let res = cmd("GET") .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) .query_async::<_, Option>(&mut redis_connection) @@ -72,6 +74,10 @@ impl RedisPool { { let mut redis_connection = self.pool.get().await?; + for id in &ids { + println!("MGET: {}_{}:{}", self.meta_namespace, namespace, id); + } + let res = cmd("MGET") .arg( ids diff --git a/tests/common/database.rs b/tests/common/database.rs index 76eccb5b..989563fa 100644 --- a/tests/common/database.rs +++ b/tests/common/database.rs @@ -1,5 +1,5 @@ -use labrinth::database::{redis::RedisPool, models::project_item::PROJECTS_NAMESPACE}; +use labrinth::database::redis::RedisPool; use sqlx::{PgPool, postgres::PgPoolOptions, Executor}; use url::Url; use std::time::Duration; diff --git a/tests/files/basic-mod-different.jar b/tests/files/basic-mod-different.jar new file mode 100644 index 0000000000000000000000000000000000000000..616131ae8ffbb1c411b35cf36fcfe4c1c913fb2b GIT binary patch literal 1318 zcmWIWW@Zs#-~hs>E6u|hpn#o;fq|bvfgvq1sVFm9FE>9$FRM5|FEoUgf&JWe{!|z) zt>9*0WckX-zyLIafuT2Wciv$Go;#mKo1VFJ8nb0I_Q`G%jqT!D*ew;t-af~N4eBJHr(k%hK|KwKE*}ql>%q2$~XE37`aSaYInc#=8CylLFU^%t$zsS zeG9$iWWwkk_e5Wb??$wo-lC)Hni-Bf@@BvIQ{j_L(E3+P=H2)uaQTY6R&a8cKuXhc zCu#pRyz0NAGGEK~?9%7c`7FVI`>oCSQ$NiVHijPCy~*h z&sl!Hefndjl#6p#t^LHV9PqnZTxiCE-Jd>v4qp6ix3A~MU2FF-26(e`ybt7mEC383 zP>=>-4eSlTkdp!ic6_|AqqBc-guY%pzHq+*41ct6Pfid`PESZm_~7dk_JKdFBcOq4 zf&{a=fHY&FtU&V*X^9S|K1D^x05y-}ObTKS!G}5orhA<`f7M$@l)&y z>*?pF4^Aqpflgfm#2^~Y$%vrXZm~8Dq#XnjfdM6gY>cm?pQoE^aEP9-8`wmvneizg z)3BHr+vk|yL}A=sd~%)T{pVSV-5~*0gP`+}~X5_mqE(Wt=U(`scBvZ}(W|aqY`v zJ{kKu-M;<&i{P_Py-C|8f}W^poaOJ=S!_Asuz<(oWx6#kt2e|3?Pl9u9WC~l>6&)A zf8xzACfZ-G2c$VMm-OE;-E63vXgNE+9g z;LXS+!i>n1$O#CPCt+YqBZ!4BrGRoNMzEtAjvTX~GzbG*8gm&LC`geoPk_wBmQG;y wymd4L<_i*14Z``Lbb}mjpmYNRTN*2njU^!!1$eWvflOfm!a86OOkxJ{0NXjF?*IS* literal 0 HcmV?d00001 diff --git a/tests/files/dummy_data.sql b/tests/files/dummy_data.sql index 87effd90..8fe596dc 100644 --- a/tests/files/dummy_data.sql +++ b/tests/files/dummy_data.sql @@ -8,12 +8,14 @@ ALTER TABLE game_versions_versions DISABLE TRIGGER ALL; ALTER TABLE files DISABLE TRIGGER ALL; ALTER TABLE hashes DISABLE TRIGGER ALL; +-- IDs 1-5, 1-5 INSERT INTO users (id, username, name, email, role) VALUES (1, 'admin', 'Administrator Test', 
'admin@modrinth.com', 'admin'); INSERT INTO users (id, username, name, email, role) VALUES (2, 'moderator', 'Moderator Test', 'moderator@modrinth.com', 'mod'); INSERT INTO users (id, username, name, email, role) VALUES (3, 'user', 'User Test', 'user@modrinth.com', 'developer'); INSERT INTO users (id, username, name, email, role) VALUES (4, 'friend', 'Friend Test', 'friend@modrinth.com', 'developer'); INSERT INTO users (id, username, name, email, role) VALUES (5, 'enemy', 'Enemy Test', 'enemy@modrinth.com', 'developer'); +-- IDs: 50-54, o p q r s INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (50, 1, 'admin-pat', 'mrp_patadmin', B'11111111111111111111111111111111111'::BIGINT, '2030-08-18 15:48:58.435729+00'); INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (51, 2, 'moderator-pat', 'mrp_patmoderator', B'11111111111111111111111111111111111'::BIGINT, '2030-08-18 15:48:58.435729+00'); INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (52, 3, 'user-pat', 'mrp_patuser', B'11111111111111111111111111111111111'::BIGINT, '2030-08-18 15:48:58.435729+00'); @@ -30,6 +32,7 @@ INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) V INSERT INTO teams (id) VALUES (100); INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (200, 100, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); +-- ID: 1000, G8 INSERT INTO mods ( id, team_id, title, description, body, published, downloads, @@ -45,6 +48,7 @@ VALUES ( 'testslug', 1, 'monetized' ); +-- ID: 1100, Hk INSERT INTO versions ( id, mod_id, author_id, name, version_number, changelog, date_published, downloads, diff --git a/tests/project.rs b/tests/project.rs index 3dc526a5..789f9a7e 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -1,5 +1,6 @@ -use actix_web::{App, test::{self, init_service, TestRequest}, HttpResponse, web, dev::{ServiceResponse, Service}}; +use actix_web::{App, test}; use common::database::TemporaryDatabase; +use labrinth::database::models::project_item::{PROJECTS_NAMESPACE, PROJECTS_SLUGS_NAMESPACE}; use serde_json::json; use crate::common::{setup, actix::generate_multipart}; @@ -9,67 +10,57 @@ mod common; #[actix_rt::test] async fn test_get_project() { - let debug_time_start_0 = std::time::Instant::now(); // Test setup and dummy data let db = TemporaryDatabase::create_with_dummy().await; - let debug_time_start_1 = std::time::Instant::now(); let labrinth_config = setup(&db).await; - let debug_time_start_2 = std::time::Instant::now(); - println!("Setup time: {:?}", debug_time_start_2 - debug_time_start_1); - let app = App::new() .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); let test_app = test::init_service(app).await; - let debug_time_start_3 = std::time::Instant::now(); - println!("Init time: {:?}", debug_time_start_3 - debug_time_start_2); + // Cache should default to unpopulated + assert!(db.redis_pool.get::(PROJECTS_NAMESPACE, 1000).await.unwrap().is_none()); - /////////////////////////////////////////////// - // Perform request on dumy data + // Perform request on dummy data println!("Sending request"); let req = test::TestRequest::get() .uri("/v2/project/G8") .append_header(("Authorization","mrp_patuser")) .to_request(); - - let debug_time_start_3_1 = std::time::Instant::now(); let resp = test::call_service(&test_app, req).await; - let debug_time_start_3_2 = std::time::Instant::now(); - println!("RESPONSE TIME: {:?}", debug_time_start_3_2 - 
debug_time_start_3_1); - println!("Response: {:?}", resp.response().body()); let status = resp.status(); - assert_eq!(status, 200); let body : serde_json::Value = test::read_body_json(resp).await; + + assert_eq!(status, 200); assert!(body.get("id").is_some()); assert_eq!(body.get("slug").unwrap(), &json!("testslug")); - - let debug_time_start_4 = std::time::Instant::now(); - println!("Request time: {:?}", debug_time_start_4 - debug_time_start_3); - - /////////////////////////////////////////////// - // Perform request on dumy data - println!("///////////////////////////////////////////////////////////////"); - println!("Sending request"); + let versions = body.get("versions").unwrap().as_array().unwrap(); + assert!(versions.len() > 0); + assert_eq!(versions[0], json!("Hk")); + + // Confirm that the request was cached + println!("Confirming cache"); + assert_eq!(db.redis_pool.get::(PROJECTS_SLUGS_NAMESPACE, "testslug").await.unwrap(), Some(1000)); + + let cached_project = db.redis_pool.get::(PROJECTS_NAMESPACE, 1000).await.unwrap().unwrap(); + let cached_project : serde_json::Value = serde_json::from_str(&cached_project).unwrap(); + println!("Cached project: {:?}", cached_project); + println!("Cached project: {:?}", cached_project.to_string()); + println!("{:?}",cached_project.as_object().unwrap()); + assert_eq!(cached_project.get("inner").unwrap().get("slug").unwrap(), &json!("testslug")); + + // Make the request again, this time it should be cached let req = test::TestRequest::get() .uri("/v2/project/G8") .append_header(("Authorization","mrp_patuser")) .to_request(); - - let debug_time_start_3_1 = std::time::Instant::now(); let resp = test::call_service(&test_app, req).await; - let debug_time_start_3_2 = std::time::Instant::now(); - println!("RESPONSE TIME: {:?}", debug_time_start_3_2 - debug_time_start_3_1); - println!("Response: {:?}", resp.response().body()); let status = resp.status(); assert_eq!(status, 200); + let body : serde_json::Value = test::read_body_json(resp).await; assert!(body.get("id").is_some()); assert_eq!(body.get("slug").unwrap(), &json!("testslug")); - let debug_time_start_4 = std::time::Instant::now(); - println!("Request time: {:?}", debug_time_start_4 - debug_time_start_3); - - ///////////////////////////////////// // Request should fail on non-existent project println!("Requesting non-existent project"); let req = test::TestRequest::get() @@ -81,31 +72,19 @@ async fn test_get_project() { println!("Response: {:?}", resp.response().body()); assert_eq!(resp.status(), 404); - let debug_time_start_5 = std::time::Instant::now(); - println!("Request time: {:?}", debug_time_start_5 - debug_time_start_4); - - // Similarly, request should fail on non-authorized user + // Similarly, request should fail on non-authorized user, with a 404 (hiding the existence of the project) println!("Requesting project as non-authorized user"); let req = test::TestRequest::get() - .uri("/v2/project/G8") - .append_header(("Authorization","mrp_patenemy")) - .to_request(); + .uri("/v2/project/G8") + .append_header(("Authorization","mrp_patenemy")) + .to_request(); let resp = test::call_service(&test_app, req).await; println!("Response: {:?}", resp.response().body()); assert_eq!(resp.status(), 404); - let debug_time_start_6 = std::time::Instant::now(); - println!("Request time: {:?}", debug_time_start_6 - debug_time_start_5); - // Cleanup test db db.cleanup().await; - - let debug_time_start_7 = std::time::Instant::now(); - println!("Cleanup time: {:?}", debug_time_start_7 - debug_time_start_6); 
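For readers tracing the magic strings in the assertions above: "G8" and "Hk" are the base62 encodings of the dummy ids 1000 and 1100 seeded by dummy_data.sql, and the cache key the PROJECTS_NAMESPACE lookup reads follows the "{meta}_{namespace}:{id}" format added to RedisPool earlier in this patch. A hedged sketch, assuming the base62 helpers are exported as in src/models/ids and that the temporary database name doubles as the Redis meta namespace (the concrete database name below is illustrative):

use labrinth::models::ids::base62_impl::{parse_base62, to_base62};

fn main() {
    // Dummy ids from tests/files/dummy_data.sql and their URL forms:
    assert_eq!(to_base62(1000), "G8"); // project 'testslug'
    assert_eq!(to_base62(1100), "Hk"); // its version
    assert_eq!(parse_base62("G8").unwrap(), 1000);

    // The key the cached-project assertion resolves to, following the
    // "{meta}_{namespace}:{id}" convention from src/database/redis.rs:
    let key = format!("{}_{}:{}", "labrinth_tests_db_123456", "projects", 1000);
    assert_eq!(key, "labrinth_tests_db_123456_projects:1000");
}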
- - println!("Total time: {:?}", debug_time_start_7 - debug_time_start_0); - panic!("Test panic"); } #[actix_rt::test] @@ -118,11 +97,7 @@ async fn test_add_project() { let test_app = test::init_service(app).await; // Generate project data. - let jar_bytes: &[u8] = include_bytes!("../tests/files/basic-mod.jar"); - - // let mut data = HashMap::new(); - - let json_data = json!( + let mut json_data = json!( { "title": "Test_Add_Project project", "slug": "demo", @@ -156,12 +131,10 @@ async fn test_add_project() { name: "basic-mod.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(jar_bytes.to_vec()) + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) } ]); - println!("Sending request"); - let req = test::TestRequest::post() .uri("/v2/project") .append_header(("Authorization","mrp_patuser")) @@ -172,12 +145,108 @@ async fn test_add_project() { let resp = test::call_service(&test_app, req).await; let status = resp.status(); + assert_eq!(status, 200); + + // Get the project we just made + let req = test::TestRequest::get() + .uri("/v2/project/demo") + .append_header(("Authorization","mrp_patuser")) + .to_request(); + + let resp = test::call_service(&test_app, req).await; + assert_eq!(resp.status(), 200); + + let body : serde_json::Value = test::read_body_json(resp).await; + let versions = body.get("versions").unwrap().as_array().unwrap(); + assert!(versions.len() == 1); + + // Reusing with a different slug and the same file should fail + // Even if that file is named differently + json_data["slug"] = json!("new_demo"); + json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); + println!("JSON data: {:?}", json_data.to_string()); + let (boundary, multipart) = generate_multipart(vec![ + common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + }, + common::actix::MultipartSegment { + name: "basic-mod-different.jar".to_string(), + filename: Some("basic-mod-different.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + } + ]); + let req = test::TestRequest::post() + .uri("/v2/project") + .append_header(("Authorization","mrp_patuser")) + .append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary))) + .set_payload(multipart) + .to_request(); + + let resp = test::call_service(&test_app, req).await; + println!("Different slug,s same file (with diff name): {:?}", resp.response().body()); println!("Response: {:?}", resp.response().body()); - println!("Response: {:?}", test::read_body(resp).await); + assert_eq!(resp.status(), 400); - assert_eq!(status, 200); + // Reusing with the same slug and a different file should fail + json_data["slug"] = json!("demo"); + json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); + let (boundary, multipart) = generate_multipart(vec![ + common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + }, + common::actix::MultipartSegment { + 
name: "basic-mod-different.jar".to_string(), + filename: Some("basic-mod-different.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec()) + } + ]); + let req = test::TestRequest::post() + .uri("/v2/project") + .append_header(("Authorization","mrp_patuser")) + .append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary))) + .set_payload(multipart) + .to_request(); + + let resp = test::call_service(&test_app, req).await; + println!("Same slug truly different file: {:?}", resp.response().body()); + println!("Response: {:?}", resp.response().body()); + assert_eq!(resp.status(), 400); + + // Different slug, different file should succeed + json_data["slug"] = json!("new_demo"); + json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); + let (boundary, multipart) = generate_multipart(vec![ + common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + }, + common::actix::MultipartSegment { + name: "basic-mod-different.jar".to_string(), + filename: Some("basic-mod-different.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec()) + } + ]); + let req = test::TestRequest::post() + .uri("/v2/project") + .append_header(("Authorization","mrp_patuser")) + .append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary))) + .set_payload(multipart) + .to_request(); + + let resp = test::call_service(&test_app, req).await; + assert_eq!(resp.status(), 200); // Cleanup test db db.cleanup().await; - } From bab47cee5d33f5810dc48a15d65709d32c4d8f6d Mon Sep 17 00:00:00 2001 From: thesuzerain Date: Sun, 24 Sep 2023 14:23:10 -0700 Subject: [PATCH 03/16] added pr to master --- .github/workflows/tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 311a0bd0..cc4ab75f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -4,6 +4,8 @@ on: push: branches: [master] pull_request: + branches: + - master env: CARGO_TERM_COLOR: always From 0bbed01a5166ccf142b145415bedabd9813c4be3 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Mon, 25 Sep 2023 08:47:54 -0700 Subject: [PATCH 04/16] sqlx database setup --- .github/workflows/tests.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cc4ab75f..389026e8 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -51,6 +51,11 @@ jobs: env: SQLX_OFFLINE: true + # sets up database for tests + - uses: actions-rs/cargo@v1 + with: + command: sqlx database setup + - uses: actions-rs/cargo@v1 with: command: test From 100b5659c710912f692fe392e3b07ac593d31c83 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Mon, 25 Sep 2023 09:13:38 -0700 Subject: [PATCH 05/16] switched intial GHA test db --- .github/workflows/tests.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 389026e8..0b5946bd 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -51,7 +51,6 @@ jobs: env: 
SQLX_OFFLINE: true - # sets up database for tests - uses: actions-rs/cargo@v1 with: command: sqlx database setup @@ -68,5 +67,5 @@ jobs: S3_URL: ${{ secrets.S3_URL }} S3_REGION: ${{ secrets.S3_REGION }} S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }} - SQLX_OFFLINE: false - DATABASE_URL: postgresql://labrinth:labrinth@localhost/labrinth + SQLX_OFFLINE: true + DATABASE_URL: postgresql://labrinth:labrinth@localhost/postgres From 3d336b05ccc10a893ce4c14ec117e1cf54318970 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Mon, 25 Sep 2023 11:40:04 -0700 Subject: [PATCH 06/16] removed sqlx database setup --- .github/workflows/tests.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0b5946bd..93e6a72d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -51,10 +51,6 @@ jobs: env: SQLX_OFFLINE: true - - uses: actions-rs/cargo@v1 - with: - command: sqlx database setup - - uses: actions-rs/cargo@v1 with: command: test From 5a829b198fd06f0721fe2fc354a93a83d6c9fdf3 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Mon, 25 Sep 2023 19:43:05 -0700 Subject: [PATCH 07/16] unfinished patch route --- src/database/models/collection_item.rs | 2 - src/database/models/flow_item.rs | 1 - src/database/models/image_item.rs | 2 - src/database/models/pat_item.rs | 6 +- src/database/models/project_item.rs | 2 - src/database/models/session_item.rs | 2 - src/database/models/team_item.rs | 2 - src/database/models/user_item.rs | 4 - src/database/models/version_item.rs | 2 - src/routes/v2/projects.rs | 2 + tests/common/actix.rs | 17 +- tests/files/dummy_data.sql | 56 +++-- tests/project.rs | 314 ++++++++++++++++++------- 13 files changed, 285 insertions(+), 127 deletions(-) diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index 98ef1574..78390b18 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -4,11 +4,9 @@ use crate::database::models::DatabaseError; use crate::models::collections::CollectionStatus; use crate::database::redis::RedisPool; use chrono::{DateTime, Utc}; -use redis::cmd; use serde::{Deserialize, Serialize}; const COLLECTIONS_NAMESPACE: &str = "collections"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes #[derive(Clone)] pub struct CollectionBuilder { diff --git a/src/database/models/flow_item.rs b/src/database/models/flow_item.rs index 4625f0f1..fe474b55 100644 --- a/src/database/models/flow_item.rs +++ b/src/database/models/flow_item.rs @@ -7,7 +7,6 @@ use rand::distributions::Alphanumeric; use rand::Rng; use rand_chacha::rand_core::SeedableRng; use rand_chacha::ChaCha20Rng; -use redis::cmd; use serde::{Deserialize, Serialize}; const FLOWS_NAMESPACE: &str = "flows"; diff --git a/src/database/models/image_item.rs b/src/database/models/image_item.rs index 17236221..f6e2a50a 100644 --- a/src/database/models/image_item.rs +++ b/src/database/models/image_item.rs @@ -2,11 +2,9 @@ use super::ids::*; use crate::database::redis::RedisPool; use crate::{database::models::DatabaseError, models::images::ImageContext}; use chrono::{DateTime, Utc}; -use redis::cmd; use serde::{Deserialize, Serialize}; const IMAGES_NAMESPACE: &str = "images"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Image { diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index 2d82d582..4c524295 100644 --- a/src/database/models/pat_item.rs +++ 
b/src/database/models/pat_item.rs @@ -4,13 +4,11 @@ use crate::database::redis::RedisPool; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::pats::Scopes; use chrono::{DateTime, Utc}; -use redis::cmd; use serde::{Deserialize, Serialize}; const PATS_NAMESPACE: &str = "pats"; const PATS_TOKENS_NAMESPACE: &str = "pats_tokens"; const PATS_USERS_NAMESPACE: &str = "pats_users"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes #[derive(Deserialize, Serialize, Clone)] pub struct PersonalAccessToken { @@ -164,8 +162,8 @@ impl PersonalAccessToken { .await?; for pat in db_pats { - redis.set(PATS_NAMESPACE, pat.id.0, serde_json::to_string(&pat)?, None); - redis.set(PATS_TOKENS_NAMESPACE, pat.access_token.clone(), pat.id.0, None); + redis.set(PATS_NAMESPACE, pat.id.0, serde_json::to_string(&pat)?, None).await?; + redis.set(PATS_TOKENS_NAMESPACE, pat.access_token.clone(), pat.id.0, None).await?; found_pats.push(pat); } } diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 3c27d0c4..cf7a8c3c 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -5,13 +5,11 @@ use crate::database::redis::RedisPool; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::projects::{MonetizationStatus, ProjectStatus}; use chrono::{DateTime, Utc}; -use redis::cmd; use serde::{Deserialize, Serialize}; pub const PROJECTS_NAMESPACE: &str = "projects"; pub const PROJECTS_SLUGS_NAMESPACE: &str = "projects_slugs"; const PROJECTS_DEPENDENCIES_NAMESPACE: &str = "projects_dependencies"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes #[derive(Clone, Debug, Serialize, Deserialize)] pub struct DonationUrl { diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index 5b9373f2..7b39602e 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -3,13 +3,11 @@ use crate::database::models::DatabaseError; use crate::database::redis::RedisPool; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use chrono::{DateTime, Utc}; -use redis::cmd; use serde::{Deserialize, Serialize}; const SESSIONS_NAMESPACE: &str = "sessions"; const SESSIONS_IDS_NAMESPACE: &str = "sessions_ids"; const SESSIONS_USERS_NAMESPACE: &str = "sessions_users"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes pub struct SessionBuilder { pub session: String, diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index 8d43818d..7a69f760 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -1,12 +1,10 @@ use super::ids::*; use crate::{models::teams::Permissions, database::redis::RedisPool}; use itertools::Itertools; -use redis::cmd; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; const TEAMS_NAMESPACE: &str = "teams"; -const DEFAULT_EXPIRY: i64 = 1800; pub struct TeamBuilder { pub members: Vec, diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 3aa3690d..d807a7ac 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -5,14 +5,12 @@ use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::database::redis::RedisPool; use crate::models::users::{Badges, RecipientType, RecipientWallet}; use chrono::{DateTime, Utc}; -use redis::cmd; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; const USERS_NAMESPACE: &str = "users"; const USER_USERNAMES_NAMESPACE: &str = "users_usernames"; // const 
USERS_PROJECTS_NAMESPACE: &str = "users_projects"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes #[derive(Deserialize, Serialize, Clone, Debug)] pub struct User { @@ -333,8 +331,6 @@ impl User { redis: &RedisPool, ) -> Result<(), DatabaseError> { - let mut cmd = cmd("DEL"); - for (id, username) in user_ids { redis.delete(USERS_NAMESPACE, id.0).await?; diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index 564f28f2..ab5bac5f 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -3,7 +3,6 @@ use super::DatabaseError; use crate::models::projects::{FileType, VersionStatus}; use chrono::{DateTime, Utc}; use itertools::Itertools; -use redis::cmd; use serde::{Deserialize, Serialize}; use crate::database::redis::RedisPool; use std::cmp::Ordering; @@ -11,7 +10,6 @@ use std::collections::HashMap; const VERSIONS_NAMESPACE: &str = "versions"; const VERSION_FILES_NAMESPACE: &str = "versions_files"; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes #[derive(Clone)] pub struct VersionBuilder { diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs index ec82646f..88c45a7e 100644 --- a/src/routes/v2/projects.rs +++ b/src/routes/v2/projects.rs @@ -405,6 +405,8 @@ pub async fn project_edit( let string = info.into_inner().0; let result = database::models::Project::get(&string, &**pool, &redis).await?; + println!("user role {:?}", user.name); + println!("user role {}", user.role); if let Some(project_item) = result { let id = project_item.inner.id; diff --git a/tests/common/actix.rs b/tests/common/actix.rs index d9c9930e..df6d222e 100644 --- a/tests/common/actix.rs +++ b/tests/common/actix.rs @@ -1,5 +1,7 @@ +use actix_web::test::TestRequest; use bytes::{BytesMut, Bytes}; +#[derive(Debug, Clone)] pub struct MultipartSegment { pub name : String, pub filename : Option, @@ -7,12 +9,25 @@ pub struct MultipartSegment { pub data : MultipartSegmentData } +#[derive(Debug, Clone)] pub enum MultipartSegmentData { Text(String), Binary(Vec) } -pub fn generate_multipart(data: Vec) -> (String, Bytes) { +pub trait AppendsMultipart { + fn set_multipart(self, data: Vec) -> Self; +} + +impl AppendsMultipart for TestRequest { + fn set_multipart(self, data: Vec) -> Self { + let (boundary, payload) = generate_multipart(data); + self.append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary))) + .set_payload(payload) + } +} + +fn generate_multipart(data: Vec) -> (String, Bytes) { let mut boundary = String::from("----WebKitFormBoundary"); boundary.push_str(&rand::random::().to_string()); boundary.push_str(&rand::random::().to_string()); diff --git a/tests/files/dummy_data.sql b/tests/files/dummy_data.sql index 8fe596dc..eb57e631 100644 --- a/tests/files/dummy_data.sql +++ b/tests/files/dummy_data.sql @@ -10,7 +10,7 @@ ALTER TABLE hashes DISABLE TRIGGER ALL; -- IDs 1-5, 1-5 INSERT INTO users (id, username, name, email, role) VALUES (1, 'admin', 'Administrator Test', 'admin@modrinth.com', 'admin'); -INSERT INTO users (id, username, name, email, role) VALUES (2, 'moderator', 'Moderator Test', 'moderator@modrinth.com', 'mod'); +INSERT INTO users (id, username, name, email, role) VALUES (2, 'moderator', 'Moderator Test', 'moderator@modrinth.com', 'moderator'); INSERT INTO users (id, username, name, email, role) VALUES (3, 'user', 'User Test', 'user@modrinth.com', 'developer'); INSERT INTO users (id, username, name, email, role) VALUES (4, 'friend', 'Friend Test', 'friend@modrinth.com', 'developer'); INSERT INTO users 
(id, username, name, email, role) VALUES (5, 'enemy', 'Enemy Test', 'enemy@modrinth.com', 'developer'); @@ -29,36 +29,18 @@ INSERT INTO loaders (id, loader, icon) VALUES (1, 'fabric', 'svgloadercode'); INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,1);--SELECT 1, id FROM project_types WHERE name = 'mod'; INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,2); --SELECT 1, id FROM project_types WHERE name = 'modpack'; +-- Inserts 2 dummy projects for testing, with slight differences +------------------------------------------------------------ INSERT INTO teams (id) VALUES (100); INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (200, 100, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); -- ID: 1000, G8 -INSERT INTO mods ( - id, team_id, title, description, body, - published, downloads, - status, requested_status, - client_side, server_side, license, - slug, project_type, monetization_status -) -VALUES ( - 1000, 100, 'Test Mod', 'Test mod description', 'Test mod body', - timezone('utc', now()), 0, - 'processing', 'approved', - 1, 2, 'MIT', - 'testslug', 1, 'monetized' -); +INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) +VALUES (1000, 100, 'Test Mod', 'Test mod description', 'Test mod body', timezone('utc', now()), 0, 'processing', 'approved', 1, 2, 'MIT', 'testslug', 1, 'monetized'); -- ID: 1100, Hk -INSERT INTO versions ( - id, mod_id, author_id, name, version_number, - changelog, date_published, downloads, - version_type, featured, status -) -VALUES ( - 1100, 1000, 3, 'v1', 'v1.2.1', - 'No changes', timezone('utc', now()), 0, - 'released', true, 'listed' -); +INSERT INTO versions ( id, mod_id, author_id, name, version_number, changelog, date_published, downloads, version_type, featured, status) +VALUES (1100, 1000, 3, 'v1', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'listed'); INSERT INTO loaders_versions (loader_id, version_id) VALUES (1, 1100); INSERT INTO game_versions_versions (game_version_id, joining_version_id) VALUES (20000, 1100); @@ -68,4 +50,26 @@ INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) VALUES (800, 1100, 'http://www.url.to/myfile.jar', 'myfile.jar', true, 1, 'jar'); INSERT INTO hashes (file_id, algorithm, hash) VALUES (800, 'sha1', '10101010'); -INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (30, 'project', '1000', null); \ No newline at end of file +INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (30, 'project', 1000, null); + +------------------------------------------------------------ +INSERT INTO teams (id) VALUES (101); +INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (201, 101, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); + +-- ID: 1001, G9 +INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) +VALUES (1001, 101, 'Test Mod 2', 'Test mod description 2', 'Test mod body 2', timezone('utc', now()), 0, 'processing', 'approved', 1, 2, 'MIT', 'testslug2', 1, 'monetized'); + +-- ID: 1100, Hl +INSERT INTO versions ( id, mod_id, author_id, name, version_number, changelog, date_published, downloads, version_type, featured, 
status) +VALUES (1101, 1001, 3, 'v1.0', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'listed'); + +INSERT INTO loaders_versions (loader_id, version_id) VALUES (1, 1101); +INSERT INTO game_versions_versions (game_version_id, joining_version_id) VALUES (20000, 1101); + +-- not real hash or file +INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) +VALUES (801, 1101, 'http://www.url.to/myfile2.jar', 'myfile2.jar', true, 1, 'jar'); +INSERT INTO hashes (file_id, algorithm, hash) VALUES (801, 'sha1', '101010101'); + +INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (31, 'project', 1001, null); \ No newline at end of file diff --git a/tests/project.rs b/tests/project.rs index 789f9a7e..ad074ccc 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -3,7 +3,7 @@ use common::database::TemporaryDatabase; use labrinth::database::models::project_item::{PROJECTS_NAMESPACE, PROJECTS_SLUGS_NAMESPACE}; use serde_json::json; -use crate::common::{setup, actix::generate_multipart}; +use crate::common::{setup, actix::AppendsMultipart}; // importing common module. mod common; @@ -96,7 +96,7 @@ async fn test_add_project() { .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); let test_app = test::init_service(app).await; - // Generate project data. + // Generate test project data. let mut json_data = json!( { "title": "Test_Add_Project project", @@ -120,34 +120,68 @@ async fn test_add_project() { } ); - let (boundary, multipart) = generate_multipart(vec![ - common::actix::MultipartSegment { - name: "data".to_string(), - filename: None, - content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) - }, - common::actix::MultipartSegment { - name: "basic-mod.jar".to_string(), - filename: Some("basic-mod.jar".to_string()), - content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) - } - ]); + // Basic json + let json_segment = common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + }; + + // Basic json, with a different file + json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); + let json_diff_file_segment = common::actix::MultipartSegment { + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + ..json_segment.clone() + }; + + // Basic json, with a different file, and a different slug + json_data["slug"] = json!("new_demo"); + json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); + let json_diff_slug_file_segment = common::actix::MultipartSegment { + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + ..json_segment.clone() + }; + + // Basic file + let file_segment = common::actix::MultipartSegment { + name: "basic-mod.jar".to_string(), + filename: Some("basic-mod.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + }; + // Differently named file, with the same content (for hash testing) + let file_diff_name_segment = common::actix::MultipartSegment { + name: 
"basic-mod-different.jar".to_string(), + filename: Some("basic-mod-different.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + }; + + // Differently named file, with different content + let file_diff_name_content_segment = common::actix::MultipartSegment { + name: "basic-mod-different.jar".to_string(), + filename: Some("basic-mod-different.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec()) + }; + + // Add a project- simple, should work. let req = test::TestRequest::post() .uri("/v2/project") .append_header(("Authorization","mrp_patuser")) - .append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary))) - .set_payload(multipart) + .set_multipart(vec![ + json_segment.clone(), + file_segment.clone() + ]) .to_request(); - let resp = test::call_service(&test_app, req).await; let status = resp.status(); assert_eq!(status, 200); - // Get the project we just made + // Get the project we just made, and confirm that it's correct let req = test::TestRequest::get() .uri("/v2/project/demo") .append_header(("Authorization","mrp_patuser")) @@ -159,94 +193,216 @@ async fn test_add_project() { let body : serde_json::Value = test::read_body_json(resp).await; let versions = body.get("versions").unwrap().as_array().unwrap(); assert!(versions.len() == 1); + let uploaded_version_id = &versions[0]; + + // Checks files to ensure they were uploaded and correctly identify the file + let hash = sha1::Sha1::from(include_bytes!("../tests/files/basic-mod.jar").to_vec()).digest().to_string(); + let req = test::TestRequest::get() + .uri(&format!("/v2/version_file/{hash}?algorithm=sha1")) + .append_header(("Authorization","mrp_patuser")) + .to_request(); + + let resp = test::call_service(&test_app, req).await; + assert_eq!(resp.status(), 200); + + let body : serde_json::Value = test::read_body_json(resp).await; + let file_version_id = body.get("id").unwrap(); + assert_eq!(&file_version_id, &uploaded_version_id); // Reusing with a different slug and the same file should fail // Even if that file is named differently - json_data["slug"] = json!("new_demo"); - json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); - println!("JSON data: {:?}", json_data.to_string()); - let (boundary, multipart) = generate_multipart(vec![ - common::actix::MultipartSegment { - name: "data".to_string(), - filename: None, - content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) - }, - common::actix::MultipartSegment { - name: "basic-mod-different.jar".to_string(), - filename: Some("basic-mod-different.jar".to_string()), - content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) - } - ]); let req = test::TestRequest::post() .uri("/v2/project") .append_header(("Authorization","mrp_patuser")) - .append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary))) - .set_payload(multipart) + .set_multipart(vec![ + json_diff_slug_file_segment.clone(), // Different slug, different file name + file_diff_name_segment.clone() // Different file name, same content + ]) .to_request(); let resp = 
test::call_service(&test_app, req).await;
-    println!("Different slug,s same file (with diff name): {:?}", resp.response().body());
-    println!("Response: {:?}", resp.response().body());
+    println!("Different slug, same file: {:?}", resp.response().body());
     assert_eq!(resp.status(), 400);
 
     // Reusing with the same slug and a different file should fail
-    json_data["slug"] = json!("demo");
-    json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar");
-    let (boundary, multipart) = generate_multipart(vec![
-        common::actix::MultipartSegment {
-            name: "data".to_string(),
-            filename: None,
-            content_type: Some("application/json".to_string()),
-            data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap())
-        },
-        common::actix::MultipartSegment {
-            name: "basic-mod-different.jar".to_string(),
-            filename: Some("basic-mod-different.jar".to_string()),
-            content_type: Some("application/java-archive".to_string()),
-            data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec())
-        }
-    ]);
     let req = test::TestRequest::post()
         .uri("/v2/project")
         .append_header(("Authorization","mrp_patuser"))
-        .append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary)))
-        .set_payload(multipart)
+        .set_multipart(vec![
+            json_diff_file_segment.clone(), // Same slug, different file name
+            file_diff_name_content_segment.clone() // Different file name, different content
+        ])
         .to_request();
 
     let resp = test::call_service(&test_app, req).await;
-    println!("Same slug truly different file: {:?}", resp.response().body());
-    println!("Response: {:?}", resp.response().body());
+    println!("Same slug, different file: {:?}", resp.response().body());
     assert_eq!(resp.status(), 400);
 
     // Different slug, different file should succeed
-    json_data["slug"] = json!("new_demo");
-    json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar");
-    let (boundary, multipart) = generate_multipart(vec![
-        common::actix::MultipartSegment {
-            name: "data".to_string(),
-            filename: None,
-            content_type: Some("application/json".to_string()),
-            data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap())
-        },
-        common::actix::MultipartSegment {
-            name: "basic-mod-different.jar".to_string(),
-            filename: Some("basic-mod-different.jar".to_string()),
-            content_type: Some("application/java-archive".to_string()),
-            data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec())
-        }
-    ]);
     let req = test::TestRequest::post()
         .uri("/v2/project")
         .append_header(("Authorization","mrp_patuser"))
-        .append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary)))
-        .set_payload(multipart)
+        .set_multipart(vec![
+            json_diff_slug_file_segment.clone(), // Different slug, different file name
+            file_diff_name_content_segment.clone() // Different file name, different content
+        ])
         .to_request();
 
     let resp = test::call_service(&test_app, req).await;
+    println!("Different slug, different file: {:?}", resp.response().body());
     assert_eq!(resp.status(), 200);
 
     // Cleanup test db
     db.cleanup().await;
 }
+
+#[actix_rt::test]
+pub async fn test_patch_project() {
+    let db = TemporaryDatabase::create_with_dummy().await;
+    let labrinth_config = setup(&db).await;
+    let app = App::new()
+        .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone()));
+    let test_app = test::init_service(app).await;
+
+    // First, we do some patch requests that should fail.
+    // Failure because the user is not authorized.
+    let req = test::TestRequest::patch()
+        .uri("/v2/project/testslug")
+        .append_header(("Authorization","mrp_patenemy"))
+        .set_json(json!({
+            "title": "Test_Add_Project project - test 1",
+        }))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
+    println!("Response: {:?}", resp.response().body());
+    assert_eq!(resp.status(), 401);
+
+    // Failure because we are setting URL fields to invalid URLs.
+    for url_type in ["issues_url", "source_url", "wiki_url", "discord_url"] {
+        let req = test::TestRequest::patch()
+            .uri("/v2/project/testslug")
+            .append_header(("Authorization","mrp_patuser"))
+            .set_json(json!({
+                url_type: "w.fake.url",
+            }))
+            .to_request();
+        let resp = test::call_service(&test_app, req).await;
+        println!("Response: {:?}", resp.response().body());
+        assert_eq!(resp.status(), 400);
+    }
+
+    // Failure because these are illegal requested statuses for a normal user.
+    for status in ["unknown", "processing", "withheld", "scheduled"] {
+        let req = test::TestRequest::patch()
+            .uri("/v2/project/testslug")
+            .append_header(("Authorization","mrp_patuser"))
+            .set_json(json!({
+                "requested_status": status,
+            }))
+            .to_request();
+        let resp = test::call_service(&test_app, req).await;
+        println!("Response: {:?}", resp.response().body());
+        assert_eq!(resp.status(), 400);
+    }
+
+    // Failure because these should not be able to be set by a non-mod
+    for key in ["moderation_message", "moderation_message_body"] {
+        let req = test::TestRequest::patch()
+            .uri("/v2/project/testslug")
+            .append_header(("Authorization","mrp_patuser"))
+            .set_json(json!({
+                key: "test",
+            }))
+            .to_request();
+        let resp = test::call_service(&test_app, req).await;
+        println!("Response: {:?}", resp.response().body());
+        assert_eq!(resp.status(), 401);
+
+        // (should work for a mod, though)
+        let req = test::TestRequest::patch()
+            .uri("/v2/project/testslug")
+            .append_header(("Authorization","mrp_patmoderator"))
+            .set_json(json!({
+                key: "test",
+            }))
+            .to_request();
+        let resp = test::call_service(&test_app, req).await;
+        println!("Response: {:?}", resp.response().body());
+        assert_eq!(resp.status(), 204);
+    }
+
+    // Failure because the slug is already taken.
+    let req = test::TestRequest::patch()
+        .uri("/v2/project/testslug")
+        .append_header(("Authorization","mrp_patuser"))
+        .set_json(json!({
+            "slug": "testslug2", // the other dummy project has this slug
+        }))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
+    println!("Response: {:?}", resp.response().body());
+    assert_eq!(resp.status(), 400);
+
+    // Not allowed to directly set status, as default dummy is "processing"
+    let req = test::TestRequest::patch()
+        .uri("/v2/project/testslug")
+        .append_header(("Authorization","mrp_patuser"))
+        .set_json(json!({
+            "status": "private", // status cannot be set directly by the owner
+        }))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
+    println!("Response: {:?}", resp.response().body());
+    assert_eq!(resp.status(), 401);
+
+    // Successful request to patch many fields.
+ let req = test::TestRequest::patch() + .uri("/v2/project/testslug") + .append_header(("Authorization","mrp_patuser")) + .set_json(json!({ + "slug": "newslug", + "title": "New successful title", + "description": "New successful description", + "body": "New successful body", + "categories": ["fabric"], + "license_id": "MIT", + "issues_url": "https://github.com", + "discord_url": "https://discord.gg", + "wiki_url": "https://wiki.com", + "client_side": "optional", + "server_side": "required", + "donation_urls:": ["https://donate.com"], + })) + .to_request(); + let resp = test::call_service(&test_app, req).await; + println!("Response: {:?}", resp.response().body()); + assert_eq!(resp.status(), 204); + + let req = test::TestRequest::get() + .uri("/v2/project/testslug") + .append_header(("Authorization","mrp_patuser")) + .to_request(); + let resp = test::call_service(&test_app, req).await; + println!("Response: {:?}", resp.response().body()); + assert_eq!(resp.status(), 200); + + let body : serde_json::Value = test::read_body_json(resp).await; + println!("Body: {:?}", body.to_string()); + assert_eq!(body.get("slug").unwrap(), &json!("newslug")); + assert_eq!(body.get("title").unwrap(), &json!("New successful title")); + assert_eq!(body.get("description").unwrap(), &json!("New successful description")); + assert_eq!(body.get("body").unwrap(), &json!("New successful body")); + assert_eq!(body.get("categories").unwrap(), &json!(["fabric"])); + assert_eq!(body.get("license_id").unwrap(), &json!("MIT")); + assert_eq!(body.get("issues_url").unwrap(), &json!("https://github.com")); + assert_eq!(body.get("discord_url").unwrap(), &json!("https://discord.gg")); + assert_eq!(body.get("wiki_url").unwrap(), &json!("https://wiki.com")); + assert_eq!(body.get("client_side").unwrap(), &json!("optional")); + assert_eq!(body.get("server_side").unwrap(), &json!("required")); + assert_eq!(body.get("donation_urls").unwrap(), &json!(["https://donate.com"])); + + + + // Cleanup test db + db.cleanup().await; +} \ No newline at end of file From 85697e1a57ec9ccc18d88b3c172e9b89389216cc Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Thu, 28 Sep 2023 17:38:34 -0700 Subject: [PATCH 08/16] bug fixes + tests --- Cargo.lock | 7 +- Cargo.toml | 1 + src/database/models/project_item.rs | 15 -- src/database/redis.rs | 9 +- src/routes/v2/notifications.rs | 2 +- src/routes/v2/projects.rs | 9 +- src/routes/v2/users.rs | 12 +- tests/common/database.rs | 15 +- tests/files/dummy_data.sql | 18 +- tests/pats.rs | 395 ++++++++++++++++++++++++++++ tests/project.rs | 105 +++++--- 11 files changed, 492 insertions(+), 96 deletions(-) create mode 100644 tests/pats.rs diff --git a/Cargo.lock b/Cargo.lock index cb780278..c0b7170e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -83,9 +83,9 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.3.1" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2079246596c18b4a33e274ae10c0e50613f4d32a4198e09c7b93771013fed74" +checksum = "a92ef85799cba03f76e4f7c10f533e66d87c9a7e7055f3391f09000ad8351bc9" dependencies = [ "actix-codec", "actix-rt", @@ -93,7 +93,7 @@ dependencies = [ "actix-utils", "ahash 0.8.3", "base64 0.21.2", - "bitflags 1.3.2", + "bitflags 2.3.3", "brotli", "bytes", "bytestring", @@ -2230,6 +2230,7 @@ dependencies = [ "actix", "actix-cors", "actix-files", + "actix-http", "actix-multipart", "actix-rt", "actix-web", diff --git a/Cargo.toml b/Cargo.toml index 755adc38..55dff9a0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,6 +18,7 @@ 
actix-multipart = "0.6.0" actix-cors = "0.6.4" actix-ws = "0.2.5" actix-files = "0.6.2" +actix-http = "3.4.0" tokio = { version = "1.29.1", features = ["sync"] } tokio-stream = "0.1.14" diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index cf7a8c3c..af7fc9e4 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -473,8 +473,6 @@ impl Project { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let debug_time_0 = std::time::Instant::now(); - use futures::TryStreamExt; if project_strings.is_empty() { @@ -499,9 +497,6 @@ impl Project { .collect(), ); - let debug_time_1 = std::time::Instant::now(); - println!("Redis time: {:?}", debug_time_1 - debug_time_0); - if !project_ids.is_empty() { let projects = redis.multi_get::(PROJECTS_NAMESPACE, project_ids).await?; for project in projects { @@ -519,9 +514,6 @@ impl Project { } } - let debug_time_2 = std::time::Instant::now(); - println!("Redis time: {:?}", debug_time_2 - debug_time_1); - if !remaining_strings.is_empty() { let project_ids_parsed: Vec = remaining_strings .iter() @@ -564,8 +556,6 @@ impl Project { .try_filter_map(|e| async { Ok(e.right().map(|m| { let id = m.id; - let debug_time_3 = std::time::Instant::now(); - println!("inner SQL time: {:?}", debug_time_3 - debug_time_2); QueryProject { inner: Project { @@ -648,9 +638,6 @@ impl Project { .try_collect::>() .await?; - let debug_time_3 = std::time::Instant::now(); - println!("total SQL time: {:?}", debug_time_3 - debug_time_2); - for project in db_projects { redis.set(PROJECTS_NAMESPACE, project.inner.id.0, serde_json::to_string(&project)?, None).await?; if let Some(slug) = &project.inner.slug { @@ -660,8 +647,6 @@ impl Project { found_projects.push(project); } - let debug_time_4 = std::time::Instant::now(); - println!("Redis time: {:?}", debug_time_4 - debug_time_3); } Ok(found_projects) diff --git a/src/database/redis.rs b/src/database/redis.rs index f79bca64..59c07786 100644 --- a/src/database/redis.rs +++ b/src/database/redis.rs @@ -42,7 +42,6 @@ impl RedisPool { { let mut redis_connection = self.pool.get().await?; - println!("SET: {}_{}:{}", self.meta_namespace, namespace, id); cmd("SET") .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) .arg(data) @@ -60,7 +59,6 @@ impl RedisPool { { let mut redis_connection = self.pool.get().await?; - println!("GET: {}_{}:{}", self.meta_namespace, namespace, id); let res = cmd("GET") .arg(format!("{}_{}:{}", self.meta_namespace, namespace, id)) .query_async::<_, Option>(&mut redis_connection) @@ -72,12 +70,7 @@ impl RedisPool { where T1 : Display, R: FromRedisValue { - let mut redis_connection = self.pool.get().await?; - - for id in &ids { - println!("MGET: {}_{}:{}", self.meta_namespace, namespace, id); - } - + let mut redis_connection = self.pool.get().await?; let res = cmd("MGET") .arg( ids diff --git a/src/routes/v2/notifications.rs b/src/routes/v2/notifications.rs index 80b432fd..376f2859 100644 --- a/src/routes/v2/notifications.rs +++ b/src/routes/v2/notifications.rs @@ -18,7 +18,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("notification") .service(notification_get) - .service(notifications_read) + .service(notification_read) .service(notification_delete), ); } diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs index 88c45a7e..9a912717 100644 --- a/src/routes/v2/projects.rs +++ b/src/routes/v2/projects.rs @@ -152,9 +152,7 @@ pub async fn project_get( ) -> Result { let string = info.into_inner().0; 
- println!("Getting project {}", &string); let project_data = database::models::Project::get(&string, &**pool, &redis).await?; - println!("Got project data {:?}", &project_data); let user_option = get_user_from_headers( &req, &**pool, @@ -165,10 +163,8 @@ pub async fn project_get( .await .map(|x| x.1) .ok(); - println!("Got user option {:?}", &user_option); if let Some(data) = project_data { - println!("Got project data {:?}", &data); if is_authorized(&data.inner, &user_option, &pool).await? { return Ok(HttpResponse::Ok().json(Project::from(data))); } @@ -279,7 +275,7 @@ pub async fn dependency_list( } } -#[derive(Deserialize, Validate)] +#[derive(Serialize, Deserialize, Validate)] pub struct EditProject { #[validate( length(min = 3, max = 64), @@ -405,8 +401,6 @@ pub async fn project_edit( let string = info.into_inner().0; let result = database::models::Project::get(&string, &**pool, &redis).await?; - println!("user role {:?}", user.name); - println!("user role {}", user.role); if let Some(project_item) = result { let id = project_item.inner.id; @@ -983,7 +977,6 @@ pub async fn project_edit( .execute(&mut *transaction) .await?; } - if let Some(donations) = &new_project.donation_urls { if !perms.contains(Permissions::EDIT_DETAILS) { return Err(ApiError::CustomAuthentication( diff --git a/src/routes/v2/users.rs b/src/routes/v2/users.rs index bee91fb1..8110cd18 100644 --- a/src/routes/v2/users.rs +++ b/src/routes/v2/users.rs @@ -67,17 +67,7 @@ pub async fn user_auth_get( user.payout_data = None; } - Ok(HttpResponse::Ok().json( - get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_READ]), - ) - .await? - .1, - )) + Ok(HttpResponse::Ok().json(user)) } #[derive(Serialize, Deserialize)] diff --git a/tests/common/database.rs b/tests/common/database.rs index 989563fa..240dc0d9 100644 --- a/tests/common/database.rs +++ b/tests/common/database.rs @@ -4,6 +4,12 @@ use sqlx::{PgPool, postgres::PgPoolOptions, Executor}; use url::Url; use std::time::Duration; +pub const ADMIN_USER_ID: i64 = 1; +pub const MOD_USER_ID: i64 = 2; +pub const USER_USER_ID: i64 = 3; +pub const FRIEND_USER_ID: i64 = 4; +pub const ENEMY_USER_ID: i64 = 5; + pub struct TemporaryDatabase { pub pool: PgPool, pub redis_pool : RedisPool, @@ -95,12 +101,8 @@ impl TemporaryDatabase { - PATs for each of the five users, with full privileges (for testing purposes). - 'mrp_patadmin' for admin, etc - 1 game version (1.20.1) - - 1 dummy project called 'testslug' with the following properties: - - 1 team (12345) - - 1 team member (test_user) - - 1 mod (testslug) - - 1 category (test_category) - - 1 thread (100) + - 1 dummy project called 'testslug' (and testslug2) with the following properties: + - several categories, tags, etc This is a test function, so it panics on error. 
*/ @@ -108,6 +110,7 @@ impl TemporaryDatabase { let pool = &self.pool.clone(); pool.execute(include_str!("../files/dummy_data.sql")).await.unwrap(); } + } diff --git a/tests/files/dummy_data.sql b/tests/files/dummy_data.sql index eb57e631..e565c383 100644 --- a/tests/files/dummy_data.sql +++ b/tests/files/dummy_data.sql @@ -25,13 +25,21 @@ INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (54, INSERT INTO game_versions (id, version, type, created) VALUES (20000, '1.20.1', 'release', timezone('utc', now())); -INSERT INTO loaders (id, loader, icon) VALUES (1, 'fabric', 'svgloadercode'); -INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,1);--SELECT 1, id FROM project_types WHERE name = 'mod'; -INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,2); --SELECT 1, id FROM project_types WHERE name = 'modpack'; +INSERT INTO loaders (id, loader) VALUES (1, 'fabric'); +INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,1); +INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,2); + +INSERT INTO categories (id, category, project_type) VALUES (1, 'combat', 1); +INSERT INTO categories (id, category, project_type) VALUES (2, 'decoration', 1); +INSERT INTO categories (id, category, project_type) VALUES (3, 'economy', 1); + +INSERT INTO categories (id, category, project_type) VALUES (4, 'combat', 2); +INSERT INTO categories (id, category, project_type) VALUES (5, 'decoration', 2); +INSERT INTO categories (id, category, project_type) VALUES (6, 'economy', 2); -- Inserts 2 dummy projects for testing, with slight differences ------------------------------------------------------------ -INSERT INTO teams (id) VALUES (100); +INSERT INTO teams (id) VALUES (100); -- ID: 100, 1c INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (200, 100, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); -- ID: 1000, G8 @@ -53,7 +61,7 @@ INSERT INTO hashes (file_id, algorithm, hash) VALUES (800, 'sha1', '10101010'); INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (30, 'project', 1000, null); ------------------------------------------------------------ -INSERT INTO teams (id) VALUES (101); +INSERT INTO teams (id) VALUES (101); -- ID: 101, 1d INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (201, 101, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); -- ID: 1001, G9 diff --git a/tests/pats.rs b/tests/pats.rs new file mode 100644 index 00000000..0876212d --- /dev/null +++ b/tests/pats.rs @@ -0,0 +1,395 @@ +use actix_web::{App, test::{self, TestRequest}, dev::ServiceResponse}; +use chrono::Utc; +use common::{database::TemporaryDatabase, actix::AppendsMultipart}; +use labrinth::{models::pats::Scopes, database::{self, models::generate_pat_id}}; +use serde_json::json; + +use crate::common::{setup, database::{USER_USER_ID, ENEMY_USER_ID, ADMIN_USER_ID, FRIEND_USER_ID, MOD_USER_ID}}; + +// importing common module. 
+mod common;
+
+// For each scope, we (using test_scope):
+// - create a PAT with a given set of scopes for a function
+// - create a PAT with all other scopes for a function
+// - test the function with the PAT with the given scopes
+// - test the function with the PAT with all other scopes
+
+// Test for users, emails, and payout scopes (not the user auth scope or notifications)
+#[actix_rt::test]
+async fn test_user_scopes() {
+    // Test setup and dummy data
+    let db = TemporaryDatabase::create_with_dummy().await;
+    let labrinth_config = setup(&db).await;
+    let app = App::new()
+        .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone()));
+    let test_app = test::init_service(app).await;
+
+    // User reading
+    println!("Testing user reading...");
+    let read_user = Scopes::USER_READ;
+    let request_generator = || {
+        test::TestRequest::get()
+            .uri("/v2/user")
+    };
+    let read_user = test_scope(&test_app, &db, request_generator, all_scopes_except(read_user), read_user, USER_USER_ID).await;
+    assert!(read_user["email"].as_str().is_none()); // email should not be present
+    assert!(read_user["payout_data"].as_object().is_none()); // payout should not be present
+
+    // Email reading
+    println!("Testing email reading...");
+    let read_email = Scopes::USER_READ | Scopes::USER_READ_EMAIL;
+    let request_generator = || {
+        test::TestRequest::get()
+            .uri("/v2/user")
+    };
+    let read_email_test = test_scope(&test_app, &db, request_generator, all_scopes_except(read_email), read_email, USER_USER_ID).await;
+    assert_eq!(read_email_test["email"], json!("user@modrinth.com")); // email should be present
+
+    // Payout reading
+    println!("Testing payout reading...");
+    let read_payout = Scopes::USER_READ | Scopes::PAYOUTS_READ;
+    let request_generator = || {
+        test::TestRequest::get()
+            .uri("/v2/user")
+    };
+    let read_payout_test = test_scope(&test_app, &db, request_generator, all_scopes_except(read_payout), read_payout, USER_USER_ID).await;
+    assert!(read_payout_test["payout_data"].as_object().is_some()); // payout should be present
+
+    // User writing
+    // We use the Admin PAT for this test, on the 'user' user
+    println!("Testing user writing...");
+    let write_user = Scopes::USER_WRITE;
+    let request_generator = || {
+        test::TestRequest::patch()
+            .uri("/v2/user/user")
+            .set_json(json!( {
+                // Do not include 'username', so as not to change the rest of the tests
+                "name": "NewName",
+                "bio": "New bio",
+                "location": "New location",
+                "role": "admin",
+                "badges": 5,
+                // Do not include payout info, different scope
+            }))
+    };
+    test_scope(&test_app, &db, request_generator, all_scopes_except(write_user), write_user, ADMIN_USER_ID).await;
+
+    // User payout info writing
+    println!("Testing user payout info writing...");
+    let failure_write_user_payout = all_scopes_except(Scopes::PAYOUTS_WRITE); // Failure case should include USER_WRITE
+    let write_user_payout = Scopes::USER_WRITE | Scopes::PAYOUTS_WRITE;
+    let request_generator = || {
+        test::TestRequest::patch()
+            .uri("/v2/user/user")
+            .set_json(json!( {
+                "payout_data": {
+                    "payout_wallet": "paypal",
+                    "payout_wallet_type": "email",
+                    "payout_address": "test@modrinth.com"
+                }
+            }))
+    };
+    test_scope(&test_app, &db, request_generator, failure_write_user_payout, write_user_payout, USER_USER_ID).await;
+
+    // User deletion
+    // (The failure case runs first; since this is the last test in this function, we can delete the user and reuse the same PAT for both requests)
+    println!("Testing user deletion...");
+    let delete_user = Scopes::USER_DELETE;
+    let request_generator = || {
test::TestRequest::delete() + .uri("/v2/user/enemy") + }; + test_scope(&test_app, &db, request_generator, all_scopes_except(delete_user), delete_user, ENEMY_USER_ID).await; + + // Cleanup test db + db.cleanup().await; +} + +// Notifications +#[actix_rt::test] +pub async fn test_notifications_scopes() { + let db = TemporaryDatabase::create_with_dummy().await; + let labrinth_config = setup(&db).await; + let app = App::new() + .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let test_app = test::init_service(app).await; + + // We will invite user 'friend' to project team, and use that as a notification + // Get notifications + let req = test::TestRequest::post() + .uri("/v2/team/1c/members") + .append_header(("Authorization", "mrp_patuser")) + .set_json(json!( { + "user_id": "4" // friend + })) + .to_request(); + let resp = test::call_service(&test_app, req).await; + assert_eq!(resp.status(), 204); + + // Notification get + println!("Testing getting notifications..."); + let read_notifications = Scopes::NOTIFICATION_READ; + let request_generator = || { + test::TestRequest::get() + .uri("/v2/user/4/notifications") + }; + let notifications = test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, FRIEND_USER_ID).await; + let notification_id = notifications.as_array().unwrap()[0]["id"].as_str().unwrap(); + + let request_generator = || { + test::TestRequest::get() + .uri(&format!("/v2/notifications?ids=[{uri}]", uri=urlencoding::encode(&format!("\"{notification_id}\"")))) + }; + test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, FRIEND_USER_ID).await; + + let request_generator = || { + test::TestRequest::get() + .uri(&format!("/v2/notification/{notification_id}")) + }; + test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, FRIEND_USER_ID).await; + + // Notification mark as read + println!("Testing marking notifications as read..."); + + let write_notifications = Scopes::NOTIFICATION_WRITE; + let request_generator = || { + test::TestRequest::patch() + .uri(&format!("/v2/notifications?ids=[{uri}]", uri=urlencoding::encode(&format!("\"{notification_id}\"")))) + }; + test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, FRIEND_USER_ID).await; + let request_generator = || { + test::TestRequest::patch() + .uri(&format!("/v2/notification/{notification_id}")) + }; + test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, FRIEND_USER_ID).await; + + // Notification delete + println!("Testing deleting notifications..."); + let request_generator = || { + test::TestRequest::delete() + .uri(&format!("/v2/notification/{notification_id}")) + }; + test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, FRIEND_USER_ID).await; + + // Mass notification delete + // We invite mod, get the notification ID, and do mass delete using that + let req = test::TestRequest::post() + .uri("/v2/team/1c/members") + .append_header(("Authorization", "mrp_patuser")) + .set_json(json!( { + "user_id": "2" // mod + })) + .to_request(); + let resp = test::call_service(&test_app, req).await; + assert_eq!(resp.status(), 204); + let read_notifications = Scopes::NOTIFICATION_READ; + let request_generator = || { + test::TestRequest::get() + .uri("/v2/user/2/notifications") + }; + let notifications = 
test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, MOD_USER_ID).await; + let notification_id = notifications.as_array().unwrap()[0]["id"].as_str().unwrap(); + + let request_generator = || { + test::TestRequest::delete() + .uri(&format!("/v2/notifications?ids=[{uri}]", uri=urlencoding::encode(&format!("\"{notification_id}\"")))) + }; + test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, MOD_USER_ID).await; + + // Cleanup test db + db.cleanup().await; +} + + +// User authentication +#[actix_rt::test] +pub async fn test_user_auth() { + let db = TemporaryDatabase::create_with_dummy().await; + let labrinth_config = setup(&db).await; + let app = App::new() + .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let test_app = test::init_service(app).await; + + // TODO: Test user auth scopes + + // Cleanup test db + db.cleanup().await; +} + +// Project version creation scopes +#[actix_rt::test] +pub async fn test_project_version_create() { + let db = TemporaryDatabase::create_with_dummy().await; + let labrinth_config = setup(&db).await; + let app = App::new() + .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let test_app = test::init_service(app).await; + + // Create project + println!("Testing creating project..."); + let create_project = Scopes::PROJECT_CREATE; + let json_data = json!( + { + "title": "Test_Add_Project project", + "slug": "demo", + "description": "Example description.", + "body": "Example body.", + "client_side": "required", + "server_side": "optional", + "initial_versions": [{ + "file_parts": ["basic-mod.jar"], + "version_number": "1.2.3", + "version_title": "start", + "dependencies": [], + "game_versions": ["1.20.1"] , + "release_channel": "release", + "loaders": ["fabric"], + "featured": true + }], + "categories": [], + "license_id": "MIT" + } + ); + let json_segment = common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + }; + let file_segment = common::actix::MultipartSegment { + name: "basic-mod.jar".to_string(), + filename: Some("basic-mod.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + }; + + let request_generator = || { + test::TestRequest::post() + .uri(&format!("/v2/project")) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + }; + let project = test_scope(&test_app, &db, request_generator, all_scopes_except(create_project), create_project, USER_USER_ID).await; + let project_id = project["id"].as_str().unwrap(); + + // Add version to project + println!("Testing adding version to project..."); + let create_version = Scopes::VERSION_CREATE; + let json_data = json!( + { + "project_id": project_id, + "file_parts": ["basic-mod-different.jar"], + "version_number": "1.2.3.4", + "version_title": "start", + "dependencies": [], + "game_versions": ["1.20.1"] , + "release_channel": "release", + "loaders": ["fabric"], + "featured": true + } + ); + let json_segment = common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + }; + let 
file_segment = common::actix::MultipartSegment {
+        name: "basic-mod-different.jar".to_string(),
+        filename: Some("basic-mod-different.jar".to_string()),
+        content_type: Some("application/java-archive".to_string()),
+        data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec())
+    };
+
+    let request_generator = || {
+        test::TestRequest::post()
+            .uri(&format!("/v2/version"))
+            .set_multipart(vec![json_segment.clone(), file_segment.clone()])
+    };
+    test_scope(&test_app, &db, request_generator, all_scopes_except(create_version), create_version, USER_USER_ID).await;
+
+
+    // Cleanup test db
+    db.cleanup().await;
+}
+
+
+// Project scopes
+// Version scopes
+
+// Report scopes
+
+// Thread scopes
+
+// Pat scopes
+
+// Session scopes
+
+// Analytics scopes
+
+// Collection scopes
+
+
+// A reusable test that works for any scope test that:
+// - returns a 401 if the scope is not present
+// - returns a 200-299 if the scope is present
+// - returns a JSON body on a successful request
+// Some tests (e.g. USER_READ_EMAIL) will still need additional checks (e.g. email is present/absent) because the scope doesn't affect the response code
+// test_app is the generated test app from init_service
+// Closure generates a TestRequest. The authorization header (if any) will be overwritten by the generated PAT
+async fn test_scope<T>(test_app: &impl actix_web::dev::Service<actix_http::Request, Response = ServiceResponse, Error = actix_web::Error>, db: &TemporaryDatabase, request_generator: T, failure_scopes: Scopes, success_scopes: Scopes, user_id: i64) -> serde_json::Value
+where T : Fn() -> TestRequest
+{
+    // First, create a PAT with all OTHER scopes
+    let access_token_all_others = create_test_pat(failure_scopes, user_id, &db).await;
+
+    // Create a PAT with the given scopes
+    let access_token = create_test_pat(success_scopes, user_id, &db).await;
+
+    // Perform test twice, once with each PAT
+    // the first time, we expect a 401
+    // the second time, we expect a 200 or 204, and it will return a JSON body of the response
+    let req = request_generator()
+        .append_header(("Authorization", access_token_all_others.as_str()))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
+    assert_eq!(resp.status(), 401);
+
+    let req = request_generator()
+        .append_header(("Authorization", access_token.as_str()))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
+    assert!(resp.status().is_success());
+    let body = if resp.status() == 200 {
+        test::read_body_json(resp).await
+    } else {
+        serde_json::Value::Null
+    };
+    body
+}
+
+// Creates a PAT with the given scopes, and returns the access token
+// this allows us to make PATs with scopes that could not be created through the API
+async fn create_test_pat(scopes : Scopes, user_id : i64, db : &TemporaryDatabase) -> String {
+    let mut transaction = db.pool.begin().await.unwrap();
+    let id = generate_pat_id(&mut transaction).await.unwrap();
+    let pat = database::models::pat_item::PersonalAccessToken {
+        id,
+        name: format!("test_pat_{}", scopes.bits()),
+        access_token: format!("mrp_{}", id.0),
+        scopes,
+        user_id: database::models::ids::UserId(user_id),
+        created: Utc::now(),
+        expires: Utc::now() + chrono::Duration::days(1),
+        last_used: None,
+    };
+    pat.insert(&mut transaction).await.unwrap();
+    transaction.commit().await.unwrap();
+    pat.access_token
+}
+
+// Inversion of scopes for testing
+// i.e. to ensure that ONLY this scope is required, we need to create a PAT with all other scopes
+fn all_scopes_except(success_scopes : Scopes) -> Scopes {
+    Scopes::ALL
^ success_scopes +} diff --git a/tests/project.rs b/tests/project.rs index ad074ccc..23eb0105 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -31,9 +31,9 @@ async fn test_get_project() { let body : serde_json::Value = test::read_body_json(resp).await; assert_eq!(status, 200); - assert!(body.get("id").is_some()); - assert_eq!(body.get("slug").unwrap(), &json!("testslug")); - let versions = body.get("versions").unwrap().as_array().unwrap(); + assert_eq!(body["id"], json!("G8")); + assert_eq!(body["slug"], json!("testslug")); + let versions = body["versions"].as_array().unwrap(); assert!(versions.len() > 0); assert_eq!(versions[0], json!("Hk")); @@ -43,10 +43,7 @@ async fn test_get_project() { let cached_project = db.redis_pool.get::(PROJECTS_NAMESPACE, 1000).await.unwrap().unwrap(); let cached_project : serde_json::Value = serde_json::from_str(&cached_project).unwrap(); - println!("Cached project: {:?}", cached_project); - println!("Cached project: {:?}", cached_project.to_string()); - println!("{:?}",cached_project.as_object().unwrap()); - assert_eq!(cached_project.get("inner").unwrap().get("slug").unwrap(), &json!("testslug")); + assert_eq!(cached_project["inner"]["slug"], json!("testslug")); // Make the request again, this time it should be cached let req = test::TestRequest::get() @@ -58,8 +55,8 @@ async fn test_get_project() { assert_eq!(status, 200); let body : serde_json::Value = test::read_body_json(resp).await; - assert!(body.get("id").is_some()); - assert_eq!(body.get("slug").unwrap(), &json!("testslug")); + assert_eq!(body["id"], json!("G8")); + assert_eq!(body["slug"], json!("testslug")); // Request should fail on non-existent project println!("Requesting non-existent project"); @@ -69,7 +66,6 @@ async fn test_get_project() { .to_request(); let resp = test::call_service(&test_app, req).await; - println!("Response: {:?}", resp.response().body()); assert_eq!(resp.status(), 404); // Similarly, request should fail on non-authorized user, with a 404 (hiding the existence of the project) @@ -80,7 +76,6 @@ async fn test_get_project() { .to_request(); let resp = test::call_service(&test_app, req).await; - println!("Response: {:?}", resp.response().body()); assert_eq!(resp.status(), 404); // Cleanup test db @@ -88,7 +83,7 @@ async fn test_get_project() { } #[actix_rt::test] -async fn test_add_project() { +async fn test_add_remove_project() { // Test setup and dummy data let db = TemporaryDatabase::create_with_dummy().await; let labrinth_config = setup(&db).await; @@ -191,7 +186,7 @@ async fn test_add_project() { assert_eq!(resp.status(), 200); let body : serde_json::Value = test::read_body_json(resp).await; - let versions = body.get("versions").unwrap().as_array().unwrap(); + let versions = body["versions"].as_array().unwrap(); assert!(versions.len() == 1); let uploaded_version_id = &versions[0]; @@ -206,7 +201,7 @@ async fn test_add_project() { assert_eq!(resp.status(), 200); let body : serde_json::Value = test::read_body_json(resp).await; - let file_version_id = body.get("id").unwrap(); + let file_version_id = &body["id"]; assert_eq!(&file_version_id, &uploaded_version_id); // Reusing with a different slug and the same file should fail @@ -252,6 +247,36 @@ async fn test_add_project() { println!("Different slug, different file: {:?}", resp.response().body()); assert_eq!(resp.status(), 200); + // Get + let req = test::TestRequest::get() + .uri("/v2/project/demo") + .append_header(("Authorization","mrp_patuser")) + .to_request(); + let resp = test::call_service(&test_app, 
req).await;
+    assert_eq!(resp.status(), 200);
+    let body : serde_json::Value = test::read_body_json(resp).await;
+    let id = body["id"].to_string();
+
+    // Remove the project
+    let req = test::TestRequest::delete()
+        .uri("/v2/project/demo")
+        .append_header(("Authorization","mrp_patuser"))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
+    assert_eq!(resp.status(), 204);
+
+    // Confirm that the project is gone from the cache
+    assert_eq!(db.redis_pool.get::<String, _>(PROJECTS_SLUGS_NAMESPACE, "demo").await.unwrap(), None);
+    assert_eq!(db.redis_pool.get::<String, _>(PROJECTS_SLUGS_NAMESPACE, id).await.unwrap(), None);
+
+    // Old slug no longer works
+    let req = test::TestRequest::get()
+        .uri("/v2/project/demo")
+        .append_header(("Authorization","mrp_patuser"))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
+    assert_eq!(resp.status(), 404);
+
     // Cleanup test db
     db.cleanup().await;
 }
@@ -274,7 +299,6 @@ pub async fn test_patch_project() {
         }))
         .to_request();
     let resp = test::call_service(&test_app, req).await;
-    println!("Response: {:?}", resp.response().body());
     assert_eq!(resp.status(), 401);

     // Failure because we are setting URL fields to invalid urls.
@@ -287,7 +311,6 @@ pub async fn test_patch_project() {
             }))
             .to_request();
         let resp = test::call_service(&test_app, req).await;
-        println!("Response: {:?}", resp.response().body());
         assert_eq!(resp.status(), 400);
     }
@@ -301,7 +324,6 @@ pub async fn test_patch_project() {
             }))
             .to_request();
         let resp = test::call_service(&test_app, req).await;
-        println!("Response: {:?}", resp.response().body());
         assert_eq!(resp.status(), 400);
     }
@@ -315,7 +337,6 @@ pub async fn test_patch_project() {
             }))
             .to_request();
         let resp = test::call_service(&test_app, req).await;
-        println!("Response: {:?}", resp.response().body());
         assert_eq!(resp.status(), 401);

         // (should work for a mod, though)
@@ -327,7 +348,6 @@ pub async fn test_patch_project() {
             }))
             .to_request();
         let resp = test::call_service(&test_app, req).await;
-        println!("Response: {:?}", resp.response().body());
         assert_eq!(resp.status(), 204);
     }
@@ -340,7 +360,6 @@ pub async fn test_patch_project() {
         }))
         .to_request();
     let resp = test::call_service(&test_app, req).await;
-    println!("Response: {:?}", resp.response().body());
     assert_eq!(resp.status(), 400);

     // Not allowed to directly set status, as default dummy is "processing"
@@ -352,7 +371,6 @@ pub async fn test_patch_project() {
         }))
         .to_request();
     let resp = test::call_service(&test_app, req).await;
-    println!("Response: {:?}", resp.response().body());
     assert_eq!(resp.status(), 401);

     // Successful request to patch many fields.
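A note on the two cache assertions added above: as the RedisPool code later in this series shows, every cached entry is stored under a `{meta_namespace}_{namespace}:{id}` key, so the slug entry and the id entry are independent keys and both must be evicted when the project is deleted. A minimal sketch of the lookup those assertions perform, assuming the slug namespace constant is the string "projects_slugs" (its value is not shown in these patches):

    // Sketch only: how RedisPool::get resolves a namespaced id to a raw Redis key.
    // "projects_slugs" is an assumed namespace value used purely for illustration.
    async fn cached_slug_entry(
        conn: &mut deadpool_redis::Connection,
        meta_namespace: &str,
        slug: &str,
    ) -> redis::RedisResult<Option<String>> {
        redis::cmd("GET")
            .arg(format!("{}_{}:{}", meta_namespace, "projects_slugs", slug))
            .query_async(conn)
            .await
    }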
@@ -364,43 +382,52 @@ pub async fn test_patch_project() {
         "title": "New successful title",
         "description": "New successful description",
         "body": "New successful body",
-        "categories": ["fabric"],
+        "categories": ["combat"],
         "license_id": "MIT",
         "issues_url": "https://github.com",
         "discord_url": "https://discord.gg",
         "wiki_url": "https://wiki.com",
         "client_side": "optional",
         "server_side": "required",
-        "donation_urls:": ["https://donate.com"],
+        "donation_urls": [{
+            "id": "patreon",
+            "platform": "Patreon",
+            "url": "https://patreon.com"
+        }]
     }))
     .to_request();
     let resp = test::call_service(&test_app, req).await;
-    println!("Response: {:?}", resp.response().body());
     assert_eq!(resp.status(), 204);

+    // Old slug no longer works
     let req = test::TestRequest::get()
         .uri("/v2/project/testslug")
         .append_header(("Authorization","mrp_patuser"))
         .to_request();
     let resp = test::call_service(&test_app, req).await;
-    println!("Response: {:?}", resp.response().body());
+    assert_eq!(resp.status(), 404);
+
+    // New slug works
+    let req = test::TestRequest::get()
+        .uri("/v2/project/newslug")
+        .append_header(("Authorization","mrp_patuser"))
+        .to_request();
+    let resp = test::call_service(&test_app, req).await;
     assert_eq!(resp.status(), 200);

     let body : serde_json::Value = test::read_body_json(resp).await;
-    println!("Body: {:?}", body.to_string());
-    assert_eq!(body.get("slug").unwrap(), &json!("newslug"));
-    assert_eq!(body.get("title").unwrap(), &json!("New successful title"));
-    assert_eq!(body.get("description").unwrap(), &json!("New successful description"));
-    assert_eq!(body.get("body").unwrap(), &json!("New successful body"));
-    assert_eq!(body.get("categories").unwrap(), &json!(["fabric"]));
-    assert_eq!(body.get("license_id").unwrap(), &json!("MIT"));
-    assert_eq!(body.get("issues_url").unwrap(), &json!("https://github.com"));
-    assert_eq!(body.get("discord_url").unwrap(), &json!("https://discord.gg"));
-    assert_eq!(body.get("wiki_url").unwrap(), &json!("https://wiki.com"));
-    assert_eq!(body.get("client_side").unwrap(), &json!("optional"));
-    assert_eq!(body.get("server_side").unwrap(), &json!("required"));
-    assert_eq!(body.get("donation_urls").unwrap(), &json!(["https://donate.com"]));
-
+    assert_eq!(body["slug"], json!("newslug"));
+    assert_eq!(body["title"], json!("New successful title"));
+    assert_eq!(body["description"], json!("New successful description"));
+    assert_eq!(body["body"], json!("New successful body"));
+    assert_eq!(body["categories"], json!(["combat"]));
+    assert_eq!(body["license"]["id"], json!("MIT"));
+    assert_eq!(body["issues_url"], json!("https://github.com"));
+    assert_eq!(body["discord_url"], json!("https://discord.gg"));
+    assert_eq!(body["wiki_url"], json!("https://wiki.com"));
+    assert_eq!(body["client_side"], json!("optional"));
+    assert_eq!(body["server_side"], json!("required"));
+    assert_eq!(body["donation_urls"][0]["url"], json!("https://patreon.com"));

     // Cleanup test db

From 54bb6b501f1e29c2016e502fc589df77ff86f641 Mon Sep 17 00:00:00 2001
From: Wyatt Verchere
Date: Tue, 3 Oct 2023 11:08:57 -0700
Subject: [PATCH 09/16] more tests, more fixes, cargo fmt

---
 src/auth/pats.rs | 2 +-
 src/auth/session.rs | 2 +-
 src/database/mod.rs | 2 +-
 src/database/models/categories.rs | 125 ++-
 src/database/models/collection_item.rs | 24 +-
 src/database/models/flow_item.rs | 22 +-
 src/database/models/image_item.rs | 19 +-
 src/database/models/pat_item.rs | 41 +-
 src/database/models/project_item.rs | 51 +-
 src/database/models/session_item.rs | 42 +-
src/database/models/team_item.rs | 20 +- src/database/models/user_item.rs | 40 +- src/database/models/version_item.rs | 48 +- src/database/redis.rs | 88 +- src/lib.rs | 46 +- src/main.rs | 10 +- src/models/pats.rs | 2 +- src/queue/payouts.rs | 2 +- src/queue/session.rs | 6 +- src/routes/analytics.rs | 4 +- src/routes/maven.rs | 2 +- src/routes/v2/admin.rs | 4 +- src/routes/v2/analytics_get.rs | 10 +- src/routes/v2/collections.rs | 2 +- src/routes/v2/images.rs | 2 +- src/routes/v2/notifications.rs | 2 +- src/routes/v2/project_creation.rs | 2 +- src/routes/v2/projects.rs | 2 +- src/routes/v2/reports.rs | 2 +- src/routes/v2/tags.rs | 2 +- src/routes/v2/teams.rs | 2 +- src/routes/v2/threads.rs | 2 +- src/routes/v2/users.rs | 2 +- src/routes/v2/version_creation.rs | 3 + src/routes/v2/version_file.rs | 10 +- src/routes/v2/versions.rs | 2 +- src/util/img.rs | 8 +- src/util/webhook.rs | 2 +- tests/common/actix.rs | 46 +- tests/common/database.rs | 36 +- tests/common/mod.rs | 20 +- tests/files/200x200.png | Bin 0 -> 606 bytes tests/files/dummy_data.sql | 18 +- tests/files/simple-zip.zip | Bin 0 -> 271 bytes tests/pats.rs | 1099 +++++++++++++++++++++--- tests/project.rs | 232 ++--- 46 files changed, 1602 insertions(+), 506 deletions(-) create mode 100644 tests/files/200x200.png create mode 100644 tests/files/simple-zip.zip diff --git a/src/auth/pats.rs b/src/auth/pats.rs index bc56088d..15ff23dc 100644 --- a/src/auth/pats.rs +++ b/src/auth/pats.rs @@ -4,6 +4,7 @@ use crate::database::models::generate_pat_id; use crate::auth::get_user_from_headers; use crate::routes::ApiError; +use crate::database::redis::RedisPool; use actix_web::web::{self, Data}; use actix_web::{delete, get, patch, post, HttpRequest, HttpResponse}; use chrono::{DateTime, Utc}; @@ -11,7 +12,6 @@ use rand::distributions::Alphanumeric; use rand::Rng; use rand_chacha::rand_core::SeedableRng; use rand_chacha::ChaCha20Rng; -use crate::database::redis::RedisPool; use crate::models::pats::{PersonalAccessToken, Scopes}; use crate::queue::session::AuthQueue; diff --git a/src/auth/session.rs b/src/auth/session.rs index a51e5e5a..7d1b7d85 100644 --- a/src/auth/session.rs +++ b/src/auth/session.rs @@ -2,6 +2,7 @@ use crate::auth::{get_user_from_headers, AuthenticationError}; use crate::database::models::session_item::Session as DBSession; use crate::database::models::session_item::SessionBuilder; use crate::database::models::UserId; +use crate::database::redis::RedisPool; use crate::models::pats::Scopes; use crate::models::sessions::Session; use crate::queue::session::AuthQueue; @@ -10,7 +11,6 @@ use crate::util::env::parse_var; use actix_web::http::header::AUTHORIZATION; use actix_web::web::{scope, Data, ServiceConfig}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; -use crate::database::redis::RedisPool; use chrono::Utc; use rand::distributions::Alphanumeric; use rand::{Rng, SeedableRng}; diff --git a/src/database/mod.rs b/src/database/mod.rs index b03354bf..2bba7dca 100644 --- a/src/database/mod.rs +++ b/src/database/mod.rs @@ -1,6 +1,6 @@ pub mod models; -pub mod redis; mod postgres_database; +pub mod redis; pub use models::Image; pub use models::Project; pub use models::Version; diff --git a/src/database/models/categories.rs b/src/database/models/categories.rs index bb470046..6bca5379 100644 --- a/src/database/models/categories.rs +++ b/src/database/models/categories.rs @@ -98,15 +98,14 @@ impl Category { Ok(result.map(|r| CategoryId(r.id))) } - pub async fn list<'a, E>( - exec: E, - redis: &RedisPool, - ) -> Result, 
DatabaseError> + pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(TAGS_NAMESPACE, "category").await? - .and_then(|x| serde_json::from_str::>(&x).ok()); + let res = redis + .get::(TAGS_NAMESPACE, "category") + .await? + .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { return Ok(res); @@ -133,7 +132,14 @@ impl Category { .try_collect::>() .await?; - redis.set(TAGS_NAMESPACE, "category", serde_json::to_string(&result)?, None).await?; + redis + .set( + TAGS_NAMESPACE, + "category", + serde_json::to_string(&result)?, + None, + ) + .await?; Ok(result) } @@ -157,15 +163,14 @@ impl Loader { Ok(result.map(|r| LoaderId(r.id))) } - pub async fn list<'a, E>( - exec: E, - redis: &RedisPool, - ) -> Result, DatabaseError> + pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(TAGS_NAMESPACE, "loader").await? - .and_then(|x| serde_json::from_str::>(&x).ok()); + let res = redis + .get::(TAGS_NAMESPACE, "loader") + .await? + .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { return Ok(res); @@ -198,7 +203,14 @@ impl Loader { .try_collect::>() .await?; - redis.set(TAGS_NAMESPACE, "loader", serde_json::to_string(&result)?, None).await?; + redis + .set( + TAGS_NAMESPACE, + "loader", + serde_json::to_string(&result)?, + None, + ) + .await?; Ok(result) } @@ -236,14 +248,13 @@ impl GameVersion { Ok(result.map(|r| GameVersionId(r.id))) } - pub async fn list<'a, E>( - exec: E, - redis: &RedisPool, - ) -> Result, DatabaseError> + pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(TAGS_NAMESPACE, "game_version").await? + let res = redis + .get::(TAGS_NAMESPACE, "game_version") + .await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -267,7 +278,14 @@ impl GameVersion { .try_collect::>() .await?; - redis.set(TAGS_NAMESPACE, "game_version", serde_json::to_string(&result)?, None).await?; + redis + .set( + TAGS_NAMESPACE, + "game_version", + serde_json::to_string(&result)?, + None, + ) + .await?; Ok(result) } @@ -382,7 +400,9 @@ impl DonationPlatform { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(TAGS_NAMESPACE, "donation_platform").await? + let res = redis + .get::(TAGS_NAMESPACE, "donation_platform") + .await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -405,7 +425,14 @@ impl DonationPlatform { .try_collect::>() .await?; - redis.set(TAGS_NAMESPACE, "donation_platform", serde_json::to_string(&result)?, None).await?; + redis + .set( + TAGS_NAMESPACE, + "donation_platform", + serde_json::to_string(&result)?, + None, + ) + .await?; Ok(result) } @@ -429,14 +456,13 @@ impl ReportType { Ok(result.map(|r| ReportTypeId(r.id))) } - pub async fn list<'a, E>( - exec: E, - redis: &RedisPool, - ) -> Result, DatabaseError> + pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(TAGS_NAMESPACE, "report_type").await? + let res = redis + .get::(TAGS_NAMESPACE, "report_type") + .await? 
.and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -453,7 +479,14 @@ impl ReportType { .try_collect::>() .await?; - redis.set(TAGS_NAMESPACE, "report_type", serde_json::to_string(&result)?, None).await?; + redis + .set( + TAGS_NAMESPACE, + "report_type", + serde_json::to_string(&result)?, + None, + ) + .await?; Ok(result) } @@ -477,14 +510,13 @@ impl ProjectType { Ok(result.map(|r| ProjectTypeId(r.id))) } - pub async fn list<'a, E>( - exec: E, - redis: &RedisPool, - ) -> Result, DatabaseError> + pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(TAGS_NAMESPACE, "project_type").await? + let res = redis + .get::(TAGS_NAMESPACE, "project_type") + .await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -501,7 +533,14 @@ impl ProjectType { .try_collect::>() .await?; - redis.set(TAGS_NAMESPACE, "project_type", serde_json::to_string(&result)?, None).await?; + redis + .set( + TAGS_NAMESPACE, + "project_type", + serde_json::to_string(&result)?, + None, + ) + .await?; Ok(result) } @@ -525,14 +564,13 @@ impl SideType { Ok(result.map(|r| SideTypeId(r.id))) } - pub async fn list<'a, E>( - exec: E, - redis: &RedisPool, - ) -> Result, DatabaseError> + pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(TAGS_NAMESPACE, "side_type").await? + let res = redis + .get::(TAGS_NAMESPACE, "side_type") + .await? .and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -549,7 +587,14 @@ impl SideType { .try_collect::>() .await?; - redis.set(TAGS_NAMESPACE, "side_type", serde_json::to_string(&result)?, None).await?; + redis + .set( + TAGS_NAMESPACE, + "side_type", + serde_json::to_string(&result)?, + None, + ) + .await?; Ok(result) } diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index 78390b18..4aacfe1c 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -1,8 +1,8 @@ use super::ids::*; use crate::database::models; use crate::database::models::DatabaseError; -use crate::models::collections::CollectionStatus; use crate::database::redis::RedisPool; +use crate::models::collections::CollectionStatus; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; @@ -165,8 +165,12 @@ impl Collection { let mut remaining_collections: Vec = collection_ids.to_vec(); if !collection_ids.is_empty() { - let collections = redis.multi_get::( - COLLECTIONS_NAMESPACE, collection_ids.iter().map(|x| x.0).collect()).await?; + let collections = redis + .multi_get::( + COLLECTIONS_NAMESPACE, + collection_ids.iter().map(|x| x.0).collect(), + ) + .await?; for collection in collections { if let Some(collection) = @@ -223,7 +227,14 @@ impl Collection { .await?; for collection in db_collections { - redis.set(COLLECTIONS_NAMESPACE, collection.id.0, serde_json::to_string(&collection)?, None).await?; + redis + .set( + COLLECTIONS_NAMESPACE, + collection.id.0, + serde_json::to_string(&collection)?, + None, + ) + .await?; found_collections.push(collection); } } @@ -231,10 +242,7 @@ impl Collection { Ok(found_collections) } - pub async fn clear_cache( - id: CollectionId, - redis: &RedisPool, - ) -> Result<(), DatabaseError> { + pub async fn clear_cache(id: CollectionId, redis: &RedisPool) -> Result<(), DatabaseError> { redis.delete(COLLECTIONS_NAMESPACE, 
id.0).await?; Ok(()) } diff --git a/src/database/models/flow_item.rs b/src/database/models/flow_item.rs index fe474b55..d9e8cfa3 100644 --- a/src/database/models/flow_item.rs +++ b/src/database/models/flow_item.rs @@ -1,8 +1,8 @@ use super::ids::*; use crate::auth::flows::AuthProvider; use crate::database::models::DatabaseError; -use chrono::Duration; use crate::database::redis::RedisPool; +use chrono::Duration; use rand::distributions::Alphanumeric; use rand::Rng; use rand_chacha::rand_core::SeedableRng; @@ -42,29 +42,29 @@ impl Flow { expires: Duration, redis: &RedisPool, ) -> Result { - let flow = ChaCha20Rng::from_entropy() .sample_iter(&Alphanumeric) .take(32) .map(char::from) .collect::(); - redis.set(FLOWS_NAMESPACE, &flow, serde_json::to_string(&self)?, Some(expires.num_seconds())).await?; + redis + .set( + FLOWS_NAMESPACE, + &flow, + serde_json::to_string(&self)?, + Some(expires.num_seconds()), + ) + .await?; Ok(flow) } - pub async fn get( - id: &str, - redis: &RedisPool, - ) -> Result, DatabaseError> { + pub async fn get(id: &str, redis: &RedisPool) -> Result, DatabaseError> { let res = redis.get::(FLOWS_NAMESPACE, id).await?; Ok(res.and_then(|x| serde_json::from_str(&x).ok())) } - pub async fn remove( - id: &str, - redis: &RedisPool, - ) -> Result, DatabaseError> { + pub async fn remove(id: &str, redis: &RedisPool) -> Result, DatabaseError> { redis.delete(FLOWS_NAMESPACE, id).await?; Ok(Some(())) } diff --git a/src/database/models/image_item.rs b/src/database/models/image_item.rs index f6e2a50a..45f42583 100644 --- a/src/database/models/image_item.rs +++ b/src/database/models/image_item.rs @@ -184,14 +184,15 @@ impl Image { return Ok(Vec::new()); } - let mut found_images = Vec::new(); let mut remaining_ids = image_ids.to_vec(); let image_ids = image_ids.iter().map(|x| x.0).collect::>(); if !image_ids.is_empty() { - let images = redis.multi_get::(IMAGES_NAMESPACE, image_ids).await?; + let images = redis + .multi_get::(IMAGES_NAMESPACE, image_ids) + .await?; for image in images { if let Some(image) = image.and_then(|x| serde_json::from_str::(&x).ok()) { remaining_ids.retain(|x| image.id.0 != x.0); @@ -234,7 +235,14 @@ impl Image { .await?; for image in db_images { - redis.set(IMAGES_NAMESPACE, image.id.0, serde_json::to_string(&image)?, None).await?; + redis + .set( + IMAGES_NAMESPACE, + image.id.0, + serde_json::to_string(&image)?, + None, + ) + .await?; found_images.push(image); } } @@ -242,10 +250,7 @@ impl Image { Ok(found_images) } - pub async fn clear_cache( - id: ImageId, - redis: &RedisPool, - ) -> Result<(), DatabaseError> { + pub async fn clear_cache(id: ImageId, redis: &RedisPool) -> Result<(), DatabaseError> { redis.delete(IMAGES_NAMESPACE, id.0).await?; Ok(()) } diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index 4c524295..cf458ea0 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -10,7 +10,7 @@ const PATS_NAMESPACE: &str = "pats"; const PATS_TOKENS_NAMESPACE: &str = "pats_tokens"; const PATS_USERS_NAMESPACE: &str = "pats_users"; -#[derive(Deserialize, Serialize, Clone)] +#[derive(Deserialize, Serialize, Clone, Debug)] pub struct PersonalAccessToken { pub id: PatId, pub name: String, @@ -93,7 +93,6 @@ impl PersonalAccessToken { return Ok(Vec::new()); } - let mut found_pats = Vec::new(); let mut remaining_strings = pat_strings .iter() @@ -106,14 +105,21 @@ impl PersonalAccessToken { .collect::>(); pat_ids.append( - &mut redis.multi_get::(PATS_TOKENS_NAMESPACE, pat_strings.iter().map(|x| 
x.to_string()).collect()).await? + &mut redis + .multi_get::( + PATS_TOKENS_NAMESPACE, + pat_strings.iter().map(|x| x.to_string()).collect(), + ) + .await? .into_iter() .flatten() - .collect() + .collect(), ); if !pat_ids.is_empty() { - let pats = redis.multi_get::(PATS_NAMESPACE, pat_ids).await?; + let pats = redis + .multi_get::(PATS_NAMESPACE, pat_ids) + .await?; for pat in pats { if let Some(pat) = pat.and_then(|x| serde_json::from_str::(&x).ok()) @@ -162,8 +168,17 @@ impl PersonalAccessToken { .await?; for pat in db_pats { - redis.set(PATS_NAMESPACE, pat.id.0, serde_json::to_string(&pat)?, None).await?; - redis.set(PATS_TOKENS_NAMESPACE, pat.access_token.clone(), pat.id.0, None).await?; + redis + .set(PATS_NAMESPACE, pat.id.0, serde_json::to_string(&pat)?, None) + .await?; + redis + .set( + PATS_TOKENS_NAMESPACE, + pat.access_token.clone(), + pat.id.0, + None, + ) + .await?; found_pats.push(pat); } } @@ -179,7 +194,8 @@ impl PersonalAccessToken { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(PATS_USERS_NAMESPACE, user_id.0) + let res = redis + .get::(PATS_USERS_NAMESPACE, user_id.0) .await? .and_then(|x| serde_json::from_str::>(&x).ok()); @@ -202,7 +218,14 @@ impl PersonalAccessToken { .try_collect::>() .await?; - redis.set(PATS_USERS_NAMESPACE, user_id.0, serde_json::to_string(&db_pats)?, None).await?; + redis + .set( + PATS_USERS_NAMESPACE, + user_id.0, + serde_json::to_string(&db_pats)?, + None, + ) + .await?; Ok(db_pats) } diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index af7fc9e4..9138dad4 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -491,14 +491,24 @@ impl Project { .collect::>(); project_ids.append( - &mut redis.multi_get::(PROJECTS_SLUGS_NAMESPACE, project_strings.iter().map(|x| x.to_string().to_lowercase()).collect()).await? + &mut redis + .multi_get::( + PROJECTS_SLUGS_NAMESPACE, + project_strings + .iter() + .map(|x| x.to_string().to_lowercase()) + .collect(), + ) + .await? 
.into_iter() .flatten() .collect(), ); if !project_ids.is_empty() { - let projects = redis.multi_get::(PROJECTS_NAMESPACE, project_ids).await?; + let projects = redis + .multi_get::(PROJECTS_NAMESPACE, project_ids) + .await?; for project in projects { if let Some(project) = project.and_then(|x| serde_json::from_str::(&x).ok()) @@ -639,14 +649,26 @@ impl Project { .await?; for project in db_projects { - redis.set(PROJECTS_NAMESPACE, project.inner.id.0, serde_json::to_string(&project)?, None).await?; + redis + .set( + PROJECTS_NAMESPACE, + project.inner.id.0, + serde_json::to_string(&project)?, + None, + ) + .await?; if let Some(slug) = &project.inner.slug { - redis.set(PROJECTS_SLUGS_NAMESPACE, - slug.to_lowercase(), project.inner.id.0, None).await?; + redis + .set( + PROJECTS_SLUGS_NAMESPACE, + slug.to_lowercase(), + project.inner.id.0, + None, + ) + .await?; } found_projects.push(project); } - } Ok(found_projects) @@ -664,7 +686,9 @@ impl Project { use futures::stream::TryStreamExt; - let dependencies = redis.get::(PROJECTS_DEPENDENCIES_NAMESPACE, id.0).await?; + let dependencies = redis + .get::(PROJECTS_DEPENDENCIES_NAMESPACE, id.0) + .await?; if let Some(dependencies) = dependencies.and_then(|x| serde_json::from_str::(&x).ok()) { @@ -698,7 +722,14 @@ impl Project { .try_collect::() .await?; - redis.set(PROJECTS_DEPENDENCIES_NAMESPACE, id.0, serde_json::to_string(&dependencies)?, None).await?; + redis + .set( + PROJECTS_DEPENDENCIES_NAMESPACE, + id.0, + serde_json::to_string(&dependencies)?, + None, + ) + .await?; Ok(dependencies) } @@ -760,7 +791,9 @@ impl Project { ) -> Result<(), DatabaseError> { redis.delete(PROJECTS_NAMESPACE, id.0).await?; if let Some(slug) = slug { - redis.delete(PROJECTS_SLUGS_NAMESPACE, slug.to_lowercase()).await?; + redis + .delete(PROJECTS_SLUGS_NAMESPACE, slug.to_lowercase()) + .await?; } if clear_dependencies.unwrap_or(false) { redis.delete(PROJECTS_DEPENDENCIES_NAMESPACE, id.0).await?; diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index 7b39602e..aeb2c849 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -146,14 +146,21 @@ impl Session { .collect::>(); session_ids.append( - &mut redis.multi_get::(SESSIONS_IDS_NAMESPACE, session_strings.iter().map(|x| x.to_string()).collect()).await? + &mut redis + .multi_get::( + SESSIONS_IDS_NAMESPACE, + session_strings.iter().map(|x| x.to_string()).collect(), + ) + .await? .into_iter() .flatten() .collect(), ); if !session_ids.is_empty() { - let sessions = redis.multi_get::(SESSIONS_NAMESPACE, session_ids).await?; + let sessions = redis + .multi_get::(SESSIONS_NAMESPACE, session_ids) + .await?; for session in sessions { if let Some(session) = session.and_then(|x| serde_json::from_str::(&x).ok()) @@ -205,8 +212,22 @@ impl Session { .await?; for session in db_sessions { - redis.set(SESSIONS_NAMESPACE, session.id.0, serde_json::to_string(&session)?, None).await?; - redis.set(SESSIONS_IDS_NAMESPACE, session.session.clone(), session.id.0, None).await?; + redis + .set( + SESSIONS_NAMESPACE, + session.id.0, + serde_json::to_string(&session)?, + None, + ) + .await?; + redis + .set( + SESSIONS_IDS_NAMESPACE, + session.session.clone(), + session.id.0, + None, + ) + .await?; found_sessions.push(session); } } @@ -222,7 +243,9 @@ impl Session { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let res = redis.get::(SESSIONS_USERS_NAMESPACE, user_id.0).await? + let res = redis + .get::(SESSIONS_USERS_NAMESPACE, user_id.0) + .await? 
.and_then(|x| serde_json::from_str::>(&x).ok()); if let Some(res) = res { @@ -244,7 +267,14 @@ impl Session { .try_collect::>() .await?; - redis.set(SESSIONS_USERS_NAMESPACE, user_id.0, serde_json::to_string(&db_sessions)?, None).await?; + redis + .set( + SESSIONS_USERS_NAMESPACE, + user_id.0, + serde_json::to_string(&db_sessions)?, + None, + ) + .await?; Ok(db_sessions) } diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index 7a69f760..5cda1f94 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -1,5 +1,5 @@ use super::ids::*; -use crate::{models::teams::Permissions, database::redis::RedisPool}; +use crate::{database::redis::RedisPool, models::teams::Permissions}; use itertools::Itertools; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; @@ -123,7 +123,9 @@ impl TeamMember { let mut found_teams = Vec::new(); - let teams = redis.multi_get::(TEAMS_NAMESPACE, team_ids_parsed.clone()).await?; + let teams = redis + .multi_get::(TEAMS_NAMESPACE, team_ids_parsed.clone()) + .await?; for team_raw in teams { if let Some(mut team) = team_raw @@ -171,7 +173,14 @@ impl TeamMember { for (id, members) in &teams.into_iter().group_by(|x| x.team_id) { let mut members = members.collect::>(); - redis.set(TEAMS_NAMESPACE, id.0, serde_json::to_string(&members)?, None).await?; + redis + .set( + TEAMS_NAMESPACE, + id.0, + serde_json::to_string(&members)?, + None, + ) + .await?; found_teams.append(&mut members); } } @@ -179,10 +188,7 @@ impl TeamMember { Ok(found_teams) } - pub async fn clear_cache( - id: TeamId, - redis: &RedisPool, - ) -> Result<(), super::DatabaseError> { + pub async fn clear_cache(id: TeamId, redis: &RedisPool) -> Result<(), super::DatabaseError> { redis.delete(TEAMS_NAMESPACE, id.0).await?; Ok(()) } diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index d807a7ac..42de0ea0 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -1,8 +1,8 @@ use super::ids::{ProjectId, UserId}; use super::CollectionId; use crate::database::models::DatabaseError; -use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::database::redis::RedisPool; +use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::users::{Badges, RecipientType, RecipientWallet}; use chrono::{DateTime, Utc}; use rust_decimal::Decimal; @@ -150,14 +150,24 @@ impl User { .collect::>(); user_ids.append( - &mut redis.multi_get::(USER_USERNAMES_NAMESPACE, users_strings.iter().map(|x| x.to_string().to_lowercase()).collect()).await? + &mut redis + .multi_get::( + USER_USERNAMES_NAMESPACE, + users_strings + .iter() + .map(|x| x.to_string().to_lowercase()) + .collect(), + ) + .await? 
.into_iter() .flatten() .collect(), ); if !user_ids.is_empty() { - let users = redis.multi_get::(USERS_NAMESPACE, user_ids).await?; + let users = redis + .multi_get::(USERS_NAMESPACE, user_ids) + .await?; for user in users { if let Some(user) = user.and_then(|x| serde_json::from_str::(&x).ok()) { remaining_strings.retain(|x| { @@ -226,8 +236,22 @@ impl User { .await?; for user in db_users { - redis.set(USERS_NAMESPACE, user.id.0, serde_json::to_string(&user)?, None).await?; - redis.set(USER_USERNAMES_NAMESPACE, user.username.to_lowercase(), user.id.0, None).await?; + redis + .set( + USERS_NAMESPACE, + user.id.0, + serde_json::to_string(&user)?, + None, + ) + .await?; + redis + .set( + USER_USERNAMES_NAMESPACE, + user.username.to_lowercase(), + user.id.0, + None, + ) + .await?; found_users.push(user); } } @@ -330,13 +354,13 @@ impl User { user_ids: &[(UserId, Option)], redis: &RedisPool, ) -> Result<(), DatabaseError> { - for (id, username) in user_ids { redis.delete(USERS_NAMESPACE, id.0).await?; if let Some(username) = username { - redis.delete(USER_USERNAMES_NAMESPACE, - username.to_lowercase()).await?; + redis + .delete(USER_USERNAMES_NAMESPACE, username.to_lowercase()) + .await?; } } diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index ab5bac5f..cf48dfc8 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -1,10 +1,10 @@ use super::ids::*; use super::DatabaseError; +use crate::database::redis::RedisPool; use crate::models::projects::{FileType, VersionStatus}; use chrono::{DateTime, Utc}; use itertools::Itertools; use serde::{Deserialize, Serialize}; -use crate::database::redis::RedisPool; use std::cmp::Ordering; use std::collections::HashMap; @@ -77,7 +77,7 @@ impl DependencyBuilder { } } -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct VersionFileBuilder { pub url: String, pub filename: String, @@ -129,7 +129,7 @@ impl VersionFileBuilder { } } -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct HashBuilder { pub algorithm: String, pub hash: Vec, @@ -423,10 +423,11 @@ impl Version { let mut version_ids_parsed: Vec = version_ids.iter().map(|x| x.0).collect(); - let mut found_versions = Vec::new(); - let versions = redis.multi_get::(VERSIONS_NAMESPACE, version_ids_parsed.clone()).await?; + let versions = redis + .multi_get::(VERSIONS_NAMESPACE, version_ids_parsed.clone()) + .await?; for version in versions { if let Some(version) = @@ -578,7 +579,14 @@ impl Version { .await?; for version in db_versions { - redis.set(VERSIONS_NAMESPACE, version.inner.id.0,serde_json::to_string(&version)?,None).await?; + redis + .set( + VERSIONS_NAMESPACE, + version.inner.id.0, + serde_json::to_string(&version)?, + None, + ) + .await?; found_versions.push(version); } @@ -622,13 +630,17 @@ impl Version { let mut file_ids_parsed = hashes.to_vec(); - let mut found_files = Vec::new(); - let files = redis.multi_get::(VERSION_FILES_NAMESPACE, file_ids_parsed - .iter() - .map(|hash| format!("{}_{}", algorithm, hash)) - .collect::>()).await?; + let files = redis + .multi_get::( + VERSION_FILES_NAMESPACE, + file_ids_parsed + .iter() + .map(|hash| format!("{}_{}", algorithm, hash)) + .collect::>(), + ) + .await?; for file in files { if let Some(mut file) = @@ -704,7 +716,14 @@ impl Version { } for (key, mut files) in save_files { - redis.set(VERSION_FILES_NAMESPACE, key, serde_json::to_string(&files)?, None).await?; + redis + .set( + VERSION_FILES_NAMESPACE, + key, + serde_json::to_string(&files)?, + None, + ) + .await?; 
found_files.append(&mut files); } @@ -717,12 +736,13 @@ impl Version { version: &QueryVersion, redis: &RedisPool, ) -> Result<(), DatabaseError> { - redis.delete(VERSIONS_NAMESPACE, version.inner.id.0).await?; for file in &version.files { for (algo, hash) in &file.hashes { - redis.delete(VERSION_FILES_NAMESPACE, format!("{}_{}", algo, hash)).await?; + redis + .delete(VERSION_FILES_NAMESPACE, format!("{}_{}", algo, hash)) + .await?; } } diff --git a/src/database/redis.rs b/src/database/redis.rs index 59c07786..941cdcd7 100644 --- a/src/database/redis.rs +++ b/src/database/redis.rs @@ -1,44 +1,50 @@ -use std::fmt::Display; -use deadpool_redis::{Runtime, Config}; -use redis::{cmd, ToRedisArgs, FromRedisValue}; use super::models::DatabaseError; +use deadpool_redis::{Config, Runtime}; +use redis::{cmd, FromRedisValue, ToRedisArgs}; +use std::fmt::Display; -const DEFAULT_EXPIRY : i64 = 1800; // 30 minutes +const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes #[derive(Clone)] pub struct RedisPool { - pool : deadpool_redis::Pool, - meta_namespace : String + pool: deadpool_redis::Pool, + meta_namespace: String, } impl RedisPool { - // initiate a new redis pool // testing pool uses a hashmap to mimic redis behaviour for very small data sizes (ie: tests) // PANICS: production pool will panic if redis url is not set - pub fn new(meta_namespace : Option) -> Self { + pub fn new(meta_namespace: Option) -> Self { let redis_pool = Config::from_url(dotenvy::var("REDIS_URL").expect("Redis URL not set")) - .builder() - .expect("Error building Redis pool") - .max_size( - dotenvy::var("DATABASE_MAX_CONNECTIONS") - .ok() - .and_then(|x| x.parse().ok()) - .unwrap_or(10000), - ) - .runtime(Runtime::Tokio1) - .build() - .expect("Redis connection failed"); + .builder() + .expect("Error building Redis pool") + .max_size( + dotenvy::var("DATABASE_MAX_CONNECTIONS") + .ok() + .and_then(|x| x.parse().ok()) + .unwrap_or(10000), + ) + .runtime(Runtime::Tokio1) + .build() + .expect("Redis connection failed"); RedisPool { - pool : redis_pool, - meta_namespace: meta_namespace.unwrap_or("".to_string()) + pool: redis_pool, + meta_namespace: meta_namespace.unwrap_or("".to_string()), } -} + } - pub async fn set(&self, namespace : &str, id : T1, data : T2, expiry : Option) -> Result<(), DatabaseError> - where T1 : Display, - T2 : ToRedisArgs + pub async fn set( + &self, + namespace: &str, + id: T1, + data: T2, + expiry: Option, + ) -> Result<(), DatabaseError> + where + T1: Display, + T2: ToRedisArgs, { let mut redis_connection = self.pool.get().await?; @@ -51,11 +57,12 @@ impl RedisPool { .await?; Ok(()) - } + } - pub async fn get(&self, namespace : &str, id : T1) -> Result, DatabaseError> - where T1 : Display, - R: FromRedisValue + pub async fn get(&self, namespace: &str, id: T1) -> Result, DatabaseError> + where + T1: Display, + R: FromRedisValue, { let mut redis_connection = self.pool.get().await?; @@ -66,15 +73,19 @@ impl RedisPool { Ok(res) } - pub async fn multi_get(&self, namespace : &str, ids : Vec) -> Result>, DatabaseError> - where T1 : Display, - R: FromRedisValue + pub async fn multi_get( + &self, + namespace: &str, + ids: Vec, + ) -> Result>, DatabaseError> + where + T1: Display, + R: FromRedisValue, { - let mut redis_connection = self.pool.get().await?; + let mut redis_connection = self.pool.get().await?; let res = cmd("MGET") .arg( - ids - .iter() + ids.iter() .map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x)) .collect::>(), ) @@ -83,8 +94,9 @@ impl RedisPool { Ok(res) } - pub async fn delete(&self, 
namespace : &str, id : T1) -> Result<(), DatabaseError> - where T1 : Display + pub async fn delete(&self, namespace: &str, id: T1) -> Result<(), DatabaseError> + where + T1: Display, { let mut redis_connection = self.pool.get().await?; @@ -95,6 +107,4 @@ impl RedisPool { Ok(()) } - } - diff --git a/src/lib.rs b/src/lib.rs index 8b9be2ee..26ef471c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,11 +1,12 @@ - - use std::sync::Arc; use actix_web::web; use database::redis::RedisPool; -use log::{warn, info}; -use queue::{session::AuthQueue, socket::ActiveSockets, payouts::PayoutsQueue, analytics::AnalyticsQueue, download::DownloadQueue}; +use log::{info, warn}; +use queue::{ + analytics::AnalyticsQueue, download::DownloadQueue, payouts::PayoutsQueue, session::AuthQueue, + socket::ActiveSockets, +}; use scheduler::Scheduler; use sqlx::Postgres; use tokio::sync::{Mutex, RwLock}; @@ -14,7 +15,9 @@ extern crate clickhouse as clickhouse_crate; use clickhouse_crate::Client; use util::cors::default_cors; -use crate::{queue::payouts::process_payout, search::indexing::index_projects, util::env::parse_var}; +use crate::{ + queue::payouts::process_payout, search::indexing::index_projects, util::env::parse_var, +}; pub mod auth; pub mod clickhouse; @@ -51,8 +54,13 @@ pub struct LabrinthConfig { pub active_sockets: web::Data>, } -pub fn app_setup(pool: sqlx::Pool, redis_pool: RedisPool, clickhouse : &mut Client, file_host : Arc, maxmind : Arc) -> LabrinthConfig { - +pub fn app_setup( + pool: sqlx::Pool, + redis_pool: RedisPool, + clickhouse: &mut Client, + file_host: Arc, + maxmind: Arc, +) -> LabrinthConfig { info!( "Starting Labrinth on {}", dotenvy::var("BIND_ADDR").unwrap() @@ -167,7 +175,6 @@ pub fn app_setup(pool: sqlx::Pool, redis_pool: RedisPool, clickhouse : } }); - let reader = maxmind.clone(); { let reader_ref = reader.clone(); @@ -250,31 +257,26 @@ pub fn app_setup(pool: sqlx::Pool, redis_pool: RedisPool, clickhouse : analytics_queue, active_sockets, } - } pub fn app_config(cfg: &mut web::ServiceConfig, labrinth_config: LabrinthConfig) { cfg.app_data( - web::FormConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), + web::FormConfig::default() + .error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()), ) .app_data( - web::PathConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), + web::PathConfig::default() + .error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()), ) .app_data( - web::QueryConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), + web::QueryConfig::default() + .error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()), ) .app_data( - web::JsonConfig::default().error_handler(|err, _req| { - routes::ApiError::Validation(err.to_string()).into() - }), + web::JsonConfig::default() + .error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()), ) -.app_data(web::Data::new(labrinth_config.redis_pool.clone())) + .app_data(web::Data::new(labrinth_config.redis_pool.clone())) .app_data(web::Data::new(labrinth_config.pool.clone())) .app_data(web::Data::new(labrinth_config.file_host.clone())) .app_data(web::Data::new(labrinth_config.search_config.clone())) diff --git a/src/main.rs b/src/main.rs index 65cb8bd7..f25f7c2a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,12 +1,12 @@ +use actix_web::{App, HttpServer}; +use env_logger::Env; +use 
labrinth::database::redis::RedisPool; use labrinth::file_hosting::S3Host; use labrinth::ratelimit::errors::ARError; use labrinth::ratelimit::memory::{MemoryStore, MemoryStoreActor}; use labrinth::ratelimit::middleware::RateLimiter; -use labrinth::{clickhouse, database, file_hosting, queue}; use labrinth::util::env::{parse_strings_from_var, parse_var}; -use labrinth::database::redis::RedisPool; -use actix_web::{App, HttpServer}; -use env_logger::Env; +use labrinth::{clickhouse, database, file_hosting, queue}; use log::{error, info, warn}; use std::sync::Arc; @@ -124,7 +124,7 @@ async fn main() -> std::io::Result<()> { .with_ignore_key(dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok()), ) .wrap(sentry_actix::Sentry::new()) - .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())) + .configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())) }) .bind(dotenvy::var("BIND_ADDR").unwrap())? .run() diff --git a/src/models/pats.rs b/src/models/pats.rs index 6a906a76..e4e3122e 100644 --- a/src/models/pats.rs +++ b/src/models/pats.rs @@ -51,7 +51,7 @@ bitflags::bitflags! { const VERSION_READ = 1 << 15; // write to a version's data (metadata, files, etc) const VERSION_WRITE = 1 << 16; - // delete a project + // delete a version const VERSION_DELETE = 1 << 17; // create a report diff --git a/src/queue/payouts.rs b/src/queue/payouts.rs index d77d7fa4..73924e9d 100644 --- a/src/queue/payouts.rs +++ b/src/queue/payouts.rs @@ -1,6 +1,6 @@ -use crate::{models::projects::MonetizationStatus, database::redis::RedisPool}; use crate::routes::ApiError; use crate::util::env::parse_var; +use crate::{database::redis::RedisPool, models::projects::MonetizationStatus}; use base64::Engine; use chrono::{DateTime, Datelike, Duration, Utc, Weekday}; use rust_decimal::Decimal; diff --git a/src/queue/session.rs b/src/queue/session.rs index c5728cb9..bbc2896e 100644 --- a/src/queue/session.rs +++ b/src/queue/session.rs @@ -43,11 +43,7 @@ impl AuthQueue { std::mem::replace(&mut queue, HashSet::with_capacity(len)) } - pub async fn index( - &self, - pool: &PgPool, - redis: &RedisPool, - ) -> Result<(), DatabaseError> { + pub async fn index(&self, pool: &PgPool, redis: &RedisPool) -> Result<(), DatabaseError> { let session_queue = self.take_sessions().await; let pat_queue = self.take_pats().await; diff --git a/src/routes/analytics.rs b/src/routes/analytics.rs index 1272154e..5e06b4c5 100644 --- a/src/routes/analytics.rs +++ b/src/routes/analytics.rs @@ -1,17 +1,17 @@ use crate::auth::get_user_from_headers; +use crate::database::redis::RedisPool; use crate::models::analytics::{PageView, Playtime}; use crate::models::pats::Scopes; +use crate::queue::analytics::AnalyticsQueue; use crate::queue::maxmind::MaxMindIndexer; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::util::env::parse_strings_from_var; -use crate::queue::analytics::AnalyticsQueue; use actix_web::{post, web}; use actix_web::{HttpRequest, HttpResponse}; use chrono::Utc; use serde::Deserialize; use sqlx::PgPool; -use crate::database::redis::RedisPool; use std::collections::HashMap; use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr}; use std::sync::Arc; diff --git a/src/routes/maven.rs b/src/routes/maven.rs index f0cae083..e5641106 100644 --- a/src/routes/maven.rs +++ b/src/routes/maven.rs @@ -1,11 +1,11 @@ use crate::database::models::categories::Loader; use crate::database::models::project_item::QueryProject; use crate::database::models::version_item::{QueryFile, QueryVersion}; +use crate::database::redis::RedisPool; 
use crate::models::pats::Scopes; use crate::models::projects::{ProjectId, VersionId}; use crate::queue::session::AuthQueue; use crate::routes::ApiError; -use crate::database::redis::RedisPool; use crate::{ auth::{get_user_from_headers, is_authorized, is_authorized_version}, database, diff --git a/src/routes/v2/admin.rs b/src/routes/v2/admin.rs index b388cc77..be4db052 100644 --- a/src/routes/v2/admin.rs +++ b/src/routes/v2/admin.rs @@ -1,14 +1,14 @@ use crate::auth::validate::get_user_record_from_bearer_token; +use crate::database::redis::RedisPool; use crate::models::analytics::Download; use crate::models::ids::ProjectId; use crate::models::pats::Scopes; use crate::queue::analytics::AnalyticsQueue; +use crate::queue::download::DownloadQueue; use crate::queue::maxmind::MaxMindIndexer; use crate::queue::session::AuthQueue; use crate::routes::ApiError; -use crate::database::redis::RedisPool; use crate::util::guards::admin_key_guard; -use crate::queue::download::DownloadQueue; use actix_web::{patch, web, HttpRequest, HttpResponse}; use chrono::Utc; use serde::Deserialize; diff --git a/src/routes/v2/analytics_get.rs b/src/routes/v2/analytics_get.rs index 6b758c2a..92558cab 100644 --- a/src/routes/v2/analytics_get.rs +++ b/src/routes/v2/analytics_get.rs @@ -1,8 +1,3 @@ -use actix_web::{get, web, HttpRequest, HttpResponse}; -use chrono::{Duration, NaiveDate, Utc}; -use serde::{Deserialize, Serialize}; -use sqlx::PgPool; -use std::collections::HashMap; use crate::database::redis::RedisPool; use crate::{ auth::{filter_authorized_projects, filter_authorized_versions, get_user_from_headers}, @@ -16,6 +11,11 @@ use crate::{ }, queue::session::AuthQueue, }; +use actix_web::{get, web, HttpRequest, HttpResponse}; +use chrono::{Duration, NaiveDate, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use std::collections::HashMap; use super::ApiError; diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs index d957fab8..b76b09ad 100644 --- a/src/routes/v2/collections.rs +++ b/src/routes/v2/collections.rs @@ -2,13 +2,13 @@ use crate::auth::checks::{filter_authorized_collections, is_authorized_collectio use crate::auth::get_user_from_headers; use crate::database; use crate::database::models::{collection_item, generate_collection_id, project_item}; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::collections::{Collection, CollectionStatus}; use crate::models::ids::base62_impl::parse_base62; use crate::models::ids::{CollectionId, ProjectId}; use crate::models::pats::Scopes; use crate::queue::session::AuthQueue; -use crate::database::redis::RedisPool; use crate::routes::ApiError; use crate::util::routes::read_from_payload; use crate::util::validate::validation_errors_to_string; diff --git a/src/routes/v2/images.rs b/src/routes/v2/images.rs index abd57a31..0d1eecbb 100644 --- a/src/routes/v2/images.rs +++ b/src/routes/v2/images.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use crate::auth::{get_user_from_headers, is_authorized, is_authorized_version}; use crate::database; use crate::database::models::{project_item, report_item, thread_item, version_item}; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::ids::{ThreadMessageId, VersionId}; use crate::models::images::{Image, ImageContext}; @@ -12,7 +13,6 @@ use crate::routes::v2::threads::is_authorized_thread; use crate::routes::ApiError; use crate::util::routes::read_from_payload; use actix_web::{post, web, HttpRequest, HttpResponse}; -use 
crate::database::redis::RedisPool; use serde::{Deserialize, Serialize}; use sqlx::PgPool; diff --git a/src/routes/v2/notifications.rs b/src/routes/v2/notifications.rs index 376f2859..8923de57 100644 --- a/src/routes/v2/notifications.rs +++ b/src/routes/v2/notifications.rs @@ -1,12 +1,12 @@ use crate::auth::get_user_from_headers; use crate::database; +use crate::database::redis::RedisPool; use crate::models::ids::NotificationId; use crate::models::notifications::Notification; use crate::models::pats::Scopes; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse}; -use crate::database::redis::RedisPool; use serde::{Deserialize, Serialize}; use sqlx::PgPool; diff --git a/src/routes/v2/project_creation.rs b/src/routes/v2/project_creation.rs index e0685293..c1eda493 100644 --- a/src/routes/v2/project_creation.rs +++ b/src/routes/v2/project_creation.rs @@ -2,6 +2,7 @@ use super::version_creation::InitialVersionData; use crate::auth::{get_user_from_headers, AuthenticationError}; use crate::database::models::thread_item::ThreadBuilder; use crate::database::models::{self, image_item}; +use crate::database::redis::RedisPool; use crate::file_hosting::{FileHost, FileHostingError}; use crate::models::error::ApiError; use crate::models::ids::ImageId; @@ -28,7 +29,6 @@ use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; use sqlx::postgres::PgPool; use std::sync::Arc; -use crate::database::redis::RedisPool; use thiserror::Error; use validator::Validate; diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs index 9a912717..aa303735 100644 --- a/src/routes/v2/projects.rs +++ b/src/routes/v2/projects.rs @@ -3,10 +3,10 @@ use crate::database; use crate::database::models::image_item; use crate::database::models::notification_item::NotificationBuilder; use crate::database::models::thread_item::ThreadMessageBuilder; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models; use crate::models::ids::base62_impl::parse_base62; -use crate::database::redis::RedisPool; use crate::models::images::ImageContext; use crate::models::notifications::NotificationBody; use crate::models::pats::Scopes; diff --git a/src/routes/v2/reports.rs b/src/routes/v2/reports.rs index fa9673e0..f336ef5f 100644 --- a/src/routes/v2/reports.rs +++ b/src/routes/v2/reports.rs @@ -2,6 +2,7 @@ use crate::auth::{check_is_moderator_from_headers, get_user_from_headers}; use crate::database; use crate::database::models::image_item; use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder}; +use crate::database::redis::RedisPool; use crate::models::ids::ImageId; use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId}; use crate::models::images::{Image, ImageContext}; @@ -10,7 +11,6 @@ use crate::models::reports::{ItemType, Report}; use crate::models::threads::{MessageBody, ThreadType}; use crate::queue::session::AuthQueue; use crate::routes::ApiError; -use crate::database::redis::RedisPool; use crate::util::img; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use chrono::Utc; diff --git a/src/routes/v2/tags.rs b/src/routes/v2/tags.rs index 1e1e1265..56ffaac5 100644 --- a/src/routes/v2/tags.rs +++ b/src/routes/v2/tags.rs @@ -1,9 +1,9 @@ use super::ApiError; use crate::database::models; use crate::database::models::categories::{DonationPlatform, ProjectType, ReportType, SideType}; +use crate::database::redis::RedisPool; use actix_web::{get, 
web, HttpResponse}; use chrono::{DateTime, Utc}; -use crate::database::redis::RedisPool; use models::categories::{Category, GameVersion, Loader}; use sqlx::PgPool; diff --git a/src/routes/v2/teams.rs b/src/routes/v2/teams.rs index 0dd234df..6ba7b519 100644 --- a/src/routes/v2/teams.rs +++ b/src/routes/v2/teams.rs @@ -1,6 +1,7 @@ use crate::auth::{get_user_from_headers, is_authorized}; use crate::database::models::notification_item::NotificationBuilder; use crate::database::models::TeamMember; +use crate::database::redis::RedisPool; use crate::models::ids::ProjectId; use crate::models::notifications::NotificationBody; use crate::models::pats::Scopes; @@ -11,7 +12,6 @@ use crate::routes::ApiError; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; -use crate::database::redis::RedisPool; use sqlx::PgPool; pub fn config(cfg: &mut web::ServiceConfig) { diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs index a1990101..2aa68617 100644 --- a/src/routes/v2/threads.rs +++ b/src/routes/v2/threads.rs @@ -5,11 +5,11 @@ use crate::database; use crate::database::models::image_item; use crate::database::models::notification_item::NotificationBuilder; use crate::database::models::thread_item::ThreadMessageBuilder; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::ids::ThreadMessageId; use crate::models::images::{Image, ImageContext}; use crate::models::notifications::NotificationBody; -use crate::database::redis::RedisPool; use crate::models::pats::Scopes; use crate::models::projects::ProjectStatus; use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType}; diff --git a/src/routes/v2/users.rs b/src/routes/v2/users.rs index 8110cd18..bda564a7 100644 --- a/src/routes/v2/users.rs +++ b/src/routes/v2/users.rs @@ -1,12 +1,12 @@ use crate::auth::{get_user_from_headers, AuthenticationError}; use crate::database::models::User; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::collections::{Collection, CollectionStatus}; use crate::models::notifications::Notification; use crate::models::pats::Scopes; use crate::models::projects::Project; use crate::models::users::{Badges, RecipientType, RecipientWallet, Role, UserId}; -use crate::database::redis::RedisPool; use crate::queue::payouts::{PayoutAmount, PayoutItem, PayoutsQueue}; use crate::queue::session::AuthQueue; use crate::routes::ApiError; diff --git a/src/routes/v2/version_creation.rs b/src/routes/v2/version_creation.rs index dc6af30b..44705b16 100644 --- a/src/routes/v2/version_creation.rs +++ b/src/routes/v2/version_creation.rs @@ -697,6 +697,9 @@ async fn upload_file_to_version_inner( } } + // Clear version cache + models::Version::clear_cache(&version, &redis).await?; + Ok(HttpResponse::NoContent().body("")) } diff --git a/src/routes/v2/version_file.rs b/src/routes/v2/version_file.rs index 63a3575b..9fd26ece 100644 --- a/src/routes/v2/version_file.rs +++ b/src/routes/v2/version_file.rs @@ -3,10 +3,10 @@ use crate::auth::{ filter_authorized_projects, filter_authorized_versions, get_user_from_headers, is_authorized_version, }; +use crate::database::redis::RedisPool; use crate::models::ids::VersionId; use crate::models::pats::Scopes; use crate::models::projects::VersionType; -use crate::database::redis::RedisPool; use crate::models::teams::Permissions; use crate::queue::session::AuthQueue; use crate::{database, models}; @@ -22,7 +22,8 @@ pub fn config(cfg: &mut 
web::ServiceConfig) { .service(delete_file) .service(get_version_from_hash) .service(download_version) - .service(get_update_from_hash), + .service(get_update_from_hash) + .service(get_projects_from_hashes), // TODO: confirm this should be added ); cfg.service( @@ -325,6 +326,7 @@ pub async fn get_update_from_hash( // Requests above with multiple versions below #[derive(Deserialize)] pub struct FileHashes { + #[serde(default = "default_algorithm")] pub algorithm: String, pub hashes: Vec, } @@ -391,7 +393,7 @@ pub async fn get_projects_from_hashes( &**pool, &redis, &session_queue, - Some(&[Scopes::VERSION_READ]), + Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]), ) .await .map(|x| x.1) @@ -429,6 +431,7 @@ pub async fn get_projects_from_hashes( #[derive(Deserialize)] pub struct ManyUpdateData { + #[serde(default = "default_algorithm")] pub algorithm: String, pub hashes: Vec, pub loaders: Option>, @@ -532,6 +535,7 @@ pub struct FileUpdateData { #[derive(Deserialize)] pub struct ManyFileUpdateData { + #[serde(default = "default_algorithm")] pub algorithm: String, pub hashes: Vec, } diff --git a/src/routes/v2/versions.rs b/src/routes/v2/versions.rs index ada51517..7a6759e7 100644 --- a/src/routes/v2/versions.rs +++ b/src/routes/v2/versions.rs @@ -3,8 +3,8 @@ use crate::auth::{ filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version, }; use crate::database; -use crate::database::redis::RedisPool; use crate::database::models::image_item; +use crate::database::redis::RedisPool; use crate::models; use crate::models::ids::base62_impl::parse_base62; use crate::models::images::ImageContext; diff --git a/src/util/img.rs b/src/util/img.rs index b8db684c..54fe3604 100644 --- a/src/util/img.rs +++ b/src/util/img.rs @@ -1,11 +1,11 @@ -use color_thief::ColorFormat; -use image::imageops::FilterType; -use image::{EncodableLayout, ImageError}; -use crate::database::redis::RedisPool; use crate::database; use crate::database::models::image_item; +use crate::database::redis::RedisPool; use crate::models::images::ImageContext; use crate::routes::ApiError; +use color_thief::ColorFormat; +use image::imageops::FilterType; +use image::{EncodableLayout, ImageError}; pub fn get_color_from_img(data: &[u8]) -> Result, ImageError> { let image = image::load_from_memory(data)? 
diff --git a/src/util/webhook.rs b/src/util/webhook.rs index 84e46242..8b5b5a65 100644 --- a/src/util/webhook.rs +++ b/src/util/webhook.rs @@ -1,9 +1,9 @@ use crate::database::models::categories::GameVersion; +use crate::database::redis::RedisPool; use crate::models::projects::ProjectId; use crate::routes::ApiError; use chrono::{DateTime, Utc}; use serde::Serialize; -use crate::database::redis::RedisPool; use sqlx::PgPool; use std::usize; diff --git a/tests/common/actix.rs b/tests/common/actix.rs index df6d222e..35bf6483 100644 --- a/tests/common/actix.rs +++ b/tests/common/actix.rs @@ -1,18 +1,18 @@ use actix_web::test::TestRequest; -use bytes::{BytesMut, Bytes}; +use bytes::{Bytes, BytesMut}; #[derive(Debug, Clone)] pub struct MultipartSegment { - pub name : String, - pub filename : Option, - pub content_type : Option, - pub data : MultipartSegmentData + pub name: String, + pub filename: Option, + pub content_type: Option, + pub data: MultipartSegmentData, } #[derive(Debug, Clone)] pub enum MultipartSegmentData { Text(String), - Binary(Vec) + Binary(Vec), } pub trait AppendsMultipart { @@ -22,7 +22,10 @@ pub trait AppendsMultipart { impl AppendsMultipart for TestRequest { fn set_multipart(self, data: Vec) -> Self { let (boundary, payload) = generate_multipart(data); - self.append_header(("Content-Type", format!("multipart/form-data; boundary={}", boundary))) + self.append_header(( + "Content-Type", + format!("multipart/form-data; boundary={}", boundary), + )) .set_payload(payload) } } @@ -36,24 +39,35 @@ fn generate_multipart(data: Vec) -> (String, Bytes) { let mut payload = BytesMut::new(); for segment in data { - payload.extend_from_slice(format!( - "--{boundary}\r\nContent-Disposition: form-data; name=\"{name}\"", - boundary = boundary, - name = segment.name - ).as_bytes()); + payload.extend_from_slice( + format!( + "--{boundary}\r\nContent-Disposition: form-data; name=\"{name}\"", + boundary = boundary, + name = segment.name + ) + .as_bytes(), + ); if let Some(filename) = &segment.filename { - payload.extend_from_slice(format!("; filename=\"{filename}\"", filename = filename).as_bytes()); + payload.extend_from_slice( + format!("; filename=\"{filename}\"", filename = filename).as_bytes(), + ); } if let Some(content_type) = &segment.content_type { - payload.extend_from_slice(format!("\r\nContent-Type: {content_type}", content_type = content_type).as_bytes()); + payload.extend_from_slice( + format!( + "\r\nContent-Type: {content_type}", + content_type = content_type + ) + .as_bytes(), + ); } payload.extend_from_slice(b"\r\n\r\n"); match &segment.data { MultipartSegmentData::Text(text) => { payload.extend_from_slice(text.as_bytes()); - }, + } MultipartSegmentData::Binary(binary) => { payload.extend_from_slice(binary); } @@ -61,6 +75,6 @@ fn generate_multipart(data: Vec) -> (String, Bytes) { payload.extend_from_slice(b"\r\n"); } payload.extend_from_slice(format!("--{boundary}--\r\n", boundary = boundary).as_bytes()); - + (boundary, Bytes::from(payload)) } diff --git a/tests/common/database.rs b/tests/common/database.rs index 240dc0d9..43cee67f 100644 --- a/tests/common/database.rs +++ b/tests/common/database.rs @@ -1,8 +1,7 @@ - use labrinth::database::redis::RedisPool; -use sqlx::{PgPool, postgres::PgPoolOptions, Executor}; -use url::Url; +use sqlx::{postgres::PgPoolOptions, Executor, PgPool}; use std::time::Duration; +use url::Url; pub const ADMIN_USER_ID: i64 = 1; pub const MOD_USER_ID: i64 = 2; @@ -12,19 +11,19 @@ pub const ENEMY_USER_ID: i64 = 5; pub struct TemporaryDatabase { pub 
pool: PgPool, - pub redis_pool : RedisPool, + pub redis_pool: RedisPool, pub database_name: String, } impl TemporaryDatabase { - pub async fn create() -> Self { let temp_database_name = generate_random_database_name(); println!("Creating temporary database: {}", &temp_database_name); let database_url = dotenvy::var("DATABASE_URL").expect("No database URL"); let mut url = Url::parse(&database_url).expect("Invalid database URL"); - let pool = PgPool::connect(&database_url).await + let pool = PgPool::connect(&database_url) + .await .expect("Connection to database failed"); // Create the temporary database @@ -37,15 +36,16 @@ impl TemporaryDatabase { pool.close().await; - // Modify the URL to switch to the temporary database url.set_path(&format!("/{}", &temp_database_name)); let temp_db_url = url.to_string(); let pool = PgPoolOptions::new() - .min_connections(0) - .max_connections(4) - .max_lifetime(Some(Duration::from_secs(60 * 60))).connect(&temp_db_url).await + .min_connections(0) + .max_connections(4) + .max_lifetime(Some(Duration::from_secs(60 * 60))) + .connect(&temp_db_url) + .await .expect("Connection to temporary database failed"); // Performs migrations @@ -58,7 +58,7 @@ impl TemporaryDatabase { Self { pool, database_name: temp_database_name, - redis_pool + redis_pool, } } @@ -82,7 +82,10 @@ impl TemporaryDatabase { "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE datname = '{}' AND pid <> pg_backend_pid()", &self.database_name ); - sqlx::query(&terminate_query).execute(&self.pool).await.unwrap(); + sqlx::query(&terminate_query) + .execute(&self.pool) + .await + .unwrap(); // Execute the deletion query asynchronously let drop_db_query = format!("DROP DATABASE IF EXISTS {}", &self.database_name); @@ -92,7 +95,7 @@ impl TemporaryDatabase { .expect("Database deletion failed"); } - /* + /* Adds the following dummy data to the database: - 5 users (admin, mod, user, friend, enemy) - Admin and mod have special powers, the others do not @@ -108,12 +111,12 @@ impl TemporaryDatabase { */ pub async fn add_dummy_data(&self) { let pool = &self.pool.clone(); - pool.execute(include_str!("../files/dummy_data.sql")).await.unwrap(); + pool.execute(include_str!("../files/dummy_data.sql")) + .await + .unwrap(); } - } - fn generate_random_database_name() -> String { // Generate a random database name here // You can use your logic to create a unique name @@ -126,4 +129,3 @@ fn generate_random_database_name() -> String { database_name.push_str(&rand::random::().to_string()[..6]); database_name } - diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 4b1e24f1..186f8f46 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -1,16 +1,17 @@ - -use std::sync::Arc; -use labrinth::{LabrinthConfig, - util::env::{parse_strings_from_var, parse_var}, - file_hosting, queue}; use labrinth::clickhouse; +use labrinth::{ + file_hosting, queue, + util::env::{parse_strings_from_var, parse_var}, + LabrinthConfig, +}; +use std::sync::Arc; use self::database::TemporaryDatabase; pub mod actix; pub mod database; -pub async fn setup(db : &TemporaryDatabase) -> LabrinthConfig { +pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig { println!("Setting up labrinth config"); dotenvy::dotenv().ok(); @@ -21,7 +22,8 @@ pub async fn setup(db : &TemporaryDatabase) -> LabrinthConfig { let pool = db.pool.clone(); let redis_pool = db.redis_pool.clone(); - let file_host: Arc = Arc::new(file_hosting::MockHost::new()); + let file_host: Arc = + Arc::new(file_hosting::MockHost::new()); let 
mut clickhouse = clickhouse::init_client().await.unwrap(); let maxmind_reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap()); @@ -68,7 +70,9 @@ fn check_test_vars() -> bool { } if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() { - println!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings"); + println!( + "Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings" + ); failed |= true; } diff --git a/tests/files/200x200.png b/tests/files/200x200.png new file mode 100644 index 0000000000000000000000000000000000000000..bb923179eab68f32da03f19083b24ee0eab77799 GIT binary patch literal 606 zcmeAS@N?(olHy`uVBq!ia0vp^CqS5k2}mkgS)K$^jKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85sBugD~Uq{1quc4WB(-978JN-d-|fWMJSqyx^z#-IXWUrFJ+v z=m;fE6zFb=P&uNcD8{+bqr=r{RA}gj1V;}4p;K)0E?b@d1WXeQp00i_>zopr0NRGS AsQ>@~ literal 0 HcmV?d00001 diff --git a/tests/files/dummy_data.sql b/tests/files/dummy_data.sql index e565c383..91ec7ad0 100644 --- a/tests/files/dummy_data.sql +++ b/tests/files/dummy_data.sql @@ -43,10 +43,12 @@ INSERT INTO teams (id) VALUES (100); -- ID: 100, 1c INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (200, 100, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); -- ID: 1000, G8 +-- Approved, viewable INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) -VALUES (1000, 100, 'Test Mod', 'Test mod description', 'Test mod body', timezone('utc', now()), 0, 'processing', 'approved', 1, 2, 'MIT', 'testslug', 1, 'monetized'); +VALUES (1000, 100, 'Test Mod', 'Test mod description', 'Test mod body', timezone('utc', now()), 0, 'approved', 'approved', 1, 2, 'MIT', 'testslug', 1, 'monetized'); -- ID: 1100, Hk +-- Listed, viewable INSERT INTO versions ( id, mod_id, author_id, name, version_number, changelog, date_published, downloads, version_type, featured, status) VALUES (1100, 1000, 3, 'v1', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'listed'); @@ -55,8 +57,8 @@ INSERT INTO game_versions_versions (game_version_id, joining_version_id) VALUES -- not real hash or file INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) -VALUES (800, 1100, 'http://www.url.to/myfile.jar', 'myfile.jar', true, 1, 'jar'); -INSERT INTO hashes (file_id, algorithm, hash) VALUES (800, 'sha1', '10101010'); +VALUES (800, 1100, 'http://www.url.to/myfile.jar', 'myfile.jar', true, 1, 'required-resource-pack'); +INSERT INTO hashes (file_id, algorithm, hash) VALUES (800, 'sha1', '000000000'); INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (30, 'project', 1000, null); @@ -65,19 +67,21 @@ INSERT INTO teams (id) VALUES (101); -- ID: 101, 1d INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (201, 101, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); -- ID: 1001, G9 +-- Processing, and therefore not viewable INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) VALUES (1001, 101, 'Test Mod 2', 'Test mod description 2', 'Test mod body 2', timezone('utc', now()), 0, 'processing', 'approved', 1, 2, 'MIT', 'testslug2', 1, 'monetized'); --- ID: 1100, Hl +-- ID: 1101, Hl +-- Draft, and therefore not viewable INSERT INTO versions ( 
id, mod_id, author_id, name, version_number, changelog, date_published, downloads, version_type, featured, status) -VALUES (1101, 1001, 3, 'v1.0', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'listed'); +VALUES (1101, 1001, 3, 'v1.0', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'draft'); INSERT INTO loaders_versions (loader_id, version_id) VALUES (1, 1101); INSERT INTO game_versions_versions (game_version_id, joining_version_id) VALUES (20000, 1101); -- not real hash or file INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) -VALUES (801, 1101, 'http://www.url.to/myfile2.jar', 'myfile2.jar', true, 1, 'jar'); -INSERT INTO hashes (file_id, algorithm, hash) VALUES (801, 'sha1', '101010101'); +VALUES (801, 1101, 'http://www.url.to/myfile2.jar', 'myfile2.jar', true, 1, 'required-resource-pack'); +INSERT INTO hashes (file_id, algorithm, hash) VALUES (801, 'sha1', '111111111'); INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (31, 'project', 1001, null); \ No newline at end of file diff --git a/tests/files/simple-zip.zip b/tests/files/simple-zip.zip new file mode 100644 index 0000000000000000000000000000000000000000..20bf64b854049a3228250619e57a038d2b2ddecc GIT binary patch literal 271 zcmWIWW@Zs#U|`^2(5-h0@96!{KOM-6V`5<72GT|b1{FZ8SCE(fn&(6D^QV&@B_$+0 z`1+-Qlj)MW!;EH553R_^c`QLaM;+h!@w`5Fat}*U#3k8USxE`AZ^i#TQ&>M)a@#0% zOG^CUYc_cN@sFsc1Mhs>SQ%%nNB824Qc@BgSnheS{O8@N>Nl2_Sy;W`Q1umgk=Geu zxoC;Tv7HweAHJFylJSDO@W}^xtD|P`H(mYdf57D6gO{(KZ#t*Bkdq<6n~_O`0k;o; bo&W*`Fp1)y0B=?{kSHS%S_0_`YvK literal 0 HcmV?d00001 diff --git a/tests/pats.rs b/tests/pats.rs index 0876212d..9f45a2c7 100644 --- a/tests/pats.rs +++ b/tests/pats.rs @@ -1,10 +1,21 @@ -use actix_web::{App, test::{self, TestRequest}, dev::ServiceResponse}; -use chrono::Utc; -use common::{database::TemporaryDatabase, actix::AppendsMultipart}; -use labrinth::{models::pats::Scopes, database::{self, models::generate_pat_id}}; +use actix_web::{ + dev::ServiceResponse, + test::{self, TestRequest}, + App, +}; +use bytes::Bytes; +use chrono::{Duration, Utc}; +use common::{actix::AppendsMultipart, database::TemporaryDatabase}; +use labrinth::{ + database::{self, models::generate_pat_id}, + models::pats::Scopes, +}; use serde_json::json; -use crate::common::{setup, database::{USER_USER_ID, ENEMY_USER_ID, ADMIN_USER_ID, FRIEND_USER_ID, MOD_USER_ID}}; +use crate::common::{ + database::{ADMIN_USER_ID, ENEMY_USER_ID, FRIEND_USER_ID, MOD_USER_ID, USER_USER_ID}, + setup, +}; // importing common module. 
mod common; @@ -21,39 +32,56 @@ async fn test_user_scopes() { // Test setup and dummy data let db = TemporaryDatabase::create_with_dummy().await; let labrinth_config = setup(&db).await; - let app = App::new() - .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); let test_app = test::init_service(app).await; // User reading println!("Testing user reading..."); let read_user = Scopes::USER_READ; - let request_generator = || { - test::TestRequest::get() - .uri("/v2/user") - }; - let read_user = test_scope(&test_app, &db, request_generator, all_scopes_except(read_user), read_user, USER_USER_ID).await; + let request_generator = || test::TestRequest::get().uri("/v2/user"); + let (_, read_user) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_user), + read_user, + USER_USER_ID, + 401, + ) + .await; assert!(read_user["email"].as_str().is_none()); // email should not be present assert!(read_user["payout_data"].as_object().is_none()); // payout should not be present // Email reading println!("Testing email reading..."); let read_email = Scopes::USER_READ | Scopes::USER_READ_EMAIL; - let request_generator = || { - test::TestRequest::get() - .uri("/v2/user") - }; - let read_email_test = test_scope(&test_app, &db, request_generator, all_scopes_except(read_email), read_email, USER_USER_ID).await; + let request_generator = || test::TestRequest::get().uri("/v2/user"); + let (_, read_email_test) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_email), + read_email, + USER_USER_ID, + 401, + ) + .await; assert_eq!(read_email_test["email"], json!("user@modrinth.com")); // email should be present // Payout reading println!("Testing payout reading..."); let read_payout = Scopes::USER_READ | Scopes::PAYOUTS_READ; - let request_generator = || { - test::TestRequest::get() - .uri("/v2/user") - }; - let read_payout_test = test_scope(&test_app, &db, request_generator, all_scopes_except(read_payout), read_payout, USER_USER_ID).await; + let request_generator = || test::TestRequest::get().uri("/v2/user"); + let (_, read_payout_test) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_payout), + read_payout, + USER_USER_ID, + 401, + ) + .await; assert!(read_payout_test["payout_data"].as_object().is_some()); // payout should be present // User writing @@ -62,18 +90,27 @@ async fn test_user_scopes() { let write_user = Scopes::USER_WRITE; let request_generator = || { test::TestRequest::patch() - .uri("/v2/user/user") - .set_json(json!( { - // Do not include 'username', as to not change the rest of the tests - "name": "NewName", - "bio": "New bio", - "location": "New location", - "role": "admin", - "badges": 5, - // Do not include payout info, different scope - })) + .uri("/v2/user/user") + .set_json(json!( { + // Do not include 'username', as to not change the rest of the tests + "name": "NewName", + "bio": "New bio", + "location": "New location", + "role": "admin", + "badges": 5, + // Do not include payout info, different scope + })) }; - test_scope(&test_app, &db, request_generator, all_scopes_except(write_user), write_user, ADMIN_USER_ID).await; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_user), + write_user, + ADMIN_USER_ID, + 401, + ) + .await; // User payout info writing println!("Testing user payout info writing..."); @@ -81,26 +118,41 @@ async fn test_user_scopes() { let 
write_user_payout = Scopes::USER_WRITE | Scopes::PAYOUTS_WRITE;
     let request_generator = || {
         test::TestRequest::patch()
-        .uri("/v2/user/user")
-        .set_json(json!( {
-            "payout_data": {
-                "payout_wallet": "paypal",
-                "payout_wallet_type": "email",
-                "payout_address": "test@modrinth.com"
-            }
-        }))
+            .uri("/v2/user/user")
+            .set_json(json!( {
+                "payout_data": {
+                    "payout_wallet": "paypal",
+                    "payout_wallet_type": "email",
+                    "payout_address": "test@modrinth.com"
+                }
+            }))
     };
-    test_scope(&test_app, &db, request_generator, failure_write_user_payout, write_user_payout, USER_USER_ID).await;
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        failure_write_user_payout,
+        write_user_payout,
+        USER_USER_ID,
+        401,
+    )
+    .await;

     // User deletion
     // (The failure is first, and since this is the last test in this test function, we can delete it and use the same PAT for both tests)
     println!("Testing user deletion...");
     let delete_user = Scopes::USER_DELETE;
-    let request_generator = || {
-        test::TestRequest::delete()
-        .uri("/v2/user/enemy")
-    };
-    test_scope(&test_app, &db, request_generator, all_scopes_except(delete_user), delete_user, ENEMY_USER_ID).await;
+    let request_generator = || test::TestRequest::delete().uri("/v2/user/enemy");
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(delete_user),
+        delete_user,
+        ENEMY_USER_ID,
+        401,
+    )
+    .await;

     // Cleanup test db
     db.cleanup().await;
@@ -111,8 +163,7 @@ async fn test_user_scopes() {
 pub async fn test_notifications_scopes() {
     let db = TemporaryDatabase::create_with_dummy().await;
     let labrinth_config = setup(&db).await;
-    let app = App::new()
-        .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone()));
+    let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
     let test_app = test::init_service(app).await;

     // We will invite user 'friend' to project team, and use that as a notification
@@ -130,88 +181,137 @@ pub async fn test_notifications_scopes() {
     // Notification get
     println!("Testing getting notifications...");
     let read_notifications = Scopes::NOTIFICATION_READ;
-    let request_generator = || {
-        test::TestRequest::get()
-        .uri("/v2/user/4/notifications")
-    };
-    let notifications = test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, FRIEND_USER_ID).await;
+    let request_generator = || test::TestRequest::get().uri("/v2/user/4/notifications");
+    let (_, notifications) = test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(read_notifications),
+        read_notifications,
+        FRIEND_USER_ID,
+        401,
+    )
+    .await;
     let notification_id = notifications.as_array().unwrap()[0]["id"].as_str().unwrap();

     let request_generator = || {
-        test::TestRequest::get()
-        .uri(&format!("/v2/notifications?ids=[{uri}]", uri=urlencoding::encode(&format!("\"{notification_id}\""))))
+        test::TestRequest::get().uri(&format!(
+            "/v2/notifications?ids=[{uri}]",
+            uri = urlencoding::encode(&format!("\"{notification_id}\""))
+        ))
     };
-    test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, FRIEND_USER_ID).await;
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(read_notifications),
+        read_notifications,
+        FRIEND_USER_ID,
+        401,
+    )
+    .await;

-    let request_generator = || {
-        test::TestRequest::get()
-        .uri(&format!("/v2/notification/{notification_id}"))
-    };
-    test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, 
FRIEND_USER_ID).await; + let request_generator = + || test::TestRequest::get().uri(&format!("/v2/notification/{notification_id}")); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_notifications), + read_notifications, + FRIEND_USER_ID, + 401, + ) + .await; // Notification mark as read println!("Testing marking notifications as read..."); - let write_notifications = Scopes::NOTIFICATION_WRITE; let request_generator = || { - test::TestRequest::patch() - .uri(&format!("/v2/notifications?ids=[{uri}]", uri=urlencoding::encode(&format!("\"{notification_id}\"")))) - }; - test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, FRIEND_USER_ID).await; - let request_generator = || { - test::TestRequest::patch() - .uri(&format!("/v2/notification/{notification_id}")) + test::TestRequest::patch().uri(&format!( + "/v2/notifications?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{notification_id}\"")) + )) }; - test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, FRIEND_USER_ID).await; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_notifications), + write_notifications, + FRIEND_USER_ID, + 401, + ) + .await; + let request_generator = + || test::TestRequest::patch().uri(&format!("/v2/notification/{notification_id}")); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_notifications), + write_notifications, + FRIEND_USER_ID, + 401, + ) + .await; // Notification delete println!("Testing deleting notifications..."); - let request_generator = || { - test::TestRequest::delete() - .uri(&format!("/v2/notification/{notification_id}")) - }; - test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, FRIEND_USER_ID).await; + let request_generator = + || test::TestRequest::delete().uri(&format!("/v2/notification/{notification_id}")); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_notifications), + write_notifications, + FRIEND_USER_ID, + 401, + ) + .await; // Mass notification delete // We invite mod, get the notification ID, and do mass delete using that let req = test::TestRequest::post() - .uri("/v2/team/1c/members") - .append_header(("Authorization", "mrp_patuser")) - .set_json(json!( { - "user_id": "2" // mod - })) - .to_request(); + .uri("/v2/team/1c/members") + .append_header(("Authorization", "mrp_patuser")) + .set_json(json!( { + "user_id": "2" // mod + })) + .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 204); let read_notifications = Scopes::NOTIFICATION_READ; - let request_generator = || { - test::TestRequest::get() - .uri("/v2/user/2/notifications") - }; - let notifications = test_scope(&test_app, &db, request_generator, all_scopes_except(read_notifications), read_notifications, MOD_USER_ID).await; + let request_generator = || test::TestRequest::get().uri("/v2/user/2/notifications"); + let (_, notifications) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_notifications), + read_notifications, + MOD_USER_ID, + 401, + ) + .await; let notification_id = notifications.as_array().unwrap()[0]["id"].as_str().unwrap(); - + let request_generator = || { - test::TestRequest::delete() - .uri(&format!("/v2/notifications?ids=[{uri}]", uri=urlencoding::encode(&format!("\"{notification_id}\"")))) + test::TestRequest::delete().uri(&format!( + 
"/v2/notifications?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{notification_id}\"")) + )) }; - test_scope(&test_app, &db, request_generator, all_scopes_except(write_notifications), write_notifications, MOD_USER_ID).await; - - // Cleanup test db - db.cleanup().await; -} - - -// User authentication -#[actix_rt::test] -pub async fn test_user_auth() { - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new() - .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; - - // TODO: Test user auth scopes + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_notifications), + write_notifications, + MOD_USER_ID, + 401, + ) + .await; // Cleanup test db db.cleanup().await; @@ -219,11 +319,10 @@ pub async fn test_user_auth() { // Project version creation scopes #[actix_rt::test] -pub async fn test_project_version_create() { +pub async fn test_project_version_create_scopes() { let db = TemporaryDatabase::create_with_dummy().await; let labrinth_config = setup(&db).await; - let app = App::new() - .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); let test_app = test::init_service(app).await; // Create project @@ -255,21 +354,32 @@ pub async fn test_project_version_create() { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), }; let file_segment = common::actix::MultipartSegment { name: "basic-mod.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + data: common::actix::MultipartSegmentData::Binary( + include_bytes!("../tests/files/basic-mod.jar").to_vec(), + ), }; let request_generator = || { test::TestRequest::post() - .uri(&format!("/v2/project")) - .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + .uri(&format!("/v2/project")) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) }; - let project = test_scope(&test_app, &db, request_generator, all_scopes_except(create_project), create_project, USER_USER_ID).await; + let (_, project) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(create_project), + create_project, + USER_USER_ID, + 401, + ) + .await; let project_id = project["id"].as_str().unwrap(); // Add version to project @@ -292,60 +402,758 @@ pub async fn test_project_version_create() { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), }; let file_segment = common::actix::MultipartSegment { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec()) + data: common::actix::MultipartSegmentData::Binary( + 
include_bytes!("../tests/files/basic-mod-different.jar").to_vec(), + ), + }; + + let request_generator = || { + test::TestRequest::post() + .uri(&format!("/v2/version")) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(create_version), + create_version, + USER_USER_ID, + 401, + ) + .await; + + // Cleanup test db + db.cleanup().await; +} + +// Project management scopes +#[actix_rt::test] +pub async fn test_project_version_reads_scopes() { + let db = TemporaryDatabase::create_with_dummy().await; + let labrinth_config = setup(&db).await; + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); + let test_app = test::init_service(app).await; + + // Project reading + // Uses 404 as the expected failure code (or 200 and an empty list for mass reads) + let read_project = Scopes::PROJECT_READ; + let request_generator = || test::TestRequest::get().uri("/v2/project/G9"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 404, + ) + .await; + + let request_generator = || test::TestRequest::get().uri("/v2/project/G9/dependencies"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 404, + ) + .await; + + let request_generator = || { + test::TestRequest::get().uri(&format!( + "/v2/projects?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{}\"", "G9")) + )) + }; + let (failure, success) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 200, + ) + .await; + assert!(failure.as_array().unwrap().is_empty()); + assert!(!success.as_array().unwrap().is_empty()); + + // Team project reading + let request_generator = || test::TestRequest::get().uri("/v2/project/G9/members"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 404, + ) + .await; + + // Get team members + // In this case, as these are public endpoints, logging in only is relevant to showing permissions + // So for our test project (with 1 user, 'user') we will check the permissions before and after having the scope. 
+ let request_generator = || test::TestRequest::get().uri("/v2/team/1c/members"); + let (failure, success) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 200, + ) + .await; + assert!(!failure.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number()); + assert!(success.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number()); + + let request_generator = || { + test::TestRequest::get().uri(&format!( + "/v2/teams?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{}\"", "1c")) + )) + }; + let (failure, success) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 200, + ) + .await; + assert!(!failure.as_array().unwrap()[0].as_array().unwrap()[0] + .as_object() + .unwrap()["permissions"] + .is_number()); + assert!(success.as_array().unwrap()[0].as_array().unwrap()[0] + .as_object() + .unwrap()["permissions"] + .is_number()); + + // User project reading + // Test user has two projects, one public and one private + let request_generator = || test::TestRequest::get().uri("/v2/user/3/projects"); + let (failure, success) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 200, + ) + .await; + assert!(failure + .as_array() + .unwrap() + .iter() + .find(|x| x["status"] == "processing") + .is_none()); + assert!(success + .as_array() + .unwrap() + .iter() + .find(|x| x["status"] == "processing") + .is_some()); + + // Project metadata reading + let request_generator = + || test::TestRequest::get().uri("/maven/maven/modrinth/G9/maven-metadata.xml"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project), + read_project, + USER_USER_ID, + 404, + ) + .await; + + // Version reading + // First, set version to hidden (which is when the scope is required to read it) + let read_version = Scopes::VERSION_READ; + let req = test::TestRequest::patch() + .uri("/v2/version/Hl") + .append_header(("Authorization", "mrp_patuser")) + .set_json(json!({ + "status": "draft" + })) + .to_request(); + let resp = test::call_service(&test_app, req).await; + assert_eq!(resp.status(), 204); + + let request_generator = || test::TestRequest::get().uri("/v2/version_file/111111111"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_version), + read_version, + USER_USER_ID, + 404, + ) + .await; + + let request_generator = || test::TestRequest::get().uri("/v2/version_file/111111111/download"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_version), + read_version, + USER_USER_ID, + 404, + ) + .await; + + // TODO: it's weird that this is /POST, no? + // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope + // let request_generator = || { + // test::TestRequest::post() + // .uri("/v2/version_file/111111111/update") + // .set_json(json!({})) + // }; + // test_scope(&test_app, &db, request_generator, all_scopes_except(read_version), read_version, USER_USER_ID, 404).await; + + // TODO: this shold get, no? 
with query + let request_generator = || { + test::TestRequest::post() + .uri("/v2/version_files") + .set_json(json!({ + "hashes": ["111111111"] + })) + }; + let (failure, success) = test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_version), + read_version, + USER_USER_ID, + 200, + ) + .await; + assert!(!failure.as_object().unwrap().contains_key("111111111")); + assert!(success.as_object().unwrap().contains_key("111111111")); + + // Update version file + // TODO: weird that this is post + // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope + + // let request_generator = || { + // test::TestRequest::post() + // .uri(&format!("/v2/version_files/update_individual")) + // .set_json(json!({ + // "hashes": [{ + // "hash": "111111111", + // }] + // })) + // }; + // let (failure, success) = test_scope(&test_app, &db, request_generator, all_scopes_except(read_version), read_version, USER_USER_ID, 200).await; + // assert!(!failure.as_object().unwrap().contains_key("111111111")); + // assert!(success.as_object().unwrap().contains_key("111111111")); + + // Update version file + // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope + // let request_generator = || { + // test::TestRequest::post() + // .uri(&format!("/v2/version_files/update")) + // .set_json(json!({ + // "hashes": ["111111111"] + // })) + // }; + // let (failure, success) = test_scope(&test_app, &db, request_generator, all_scopes_except(read_version), read_version, USER_USER_ID, 200).await; + // assert!(!failure.as_object().unwrap().contains_key("111111111")); + // assert!(success.as_object().unwrap().contains_key("111111111")); + + // Both project and version reading + let read_project_and_version = Scopes::PROJECT_READ | Scopes::VERSION_READ; + let request_generator = || test::TestRequest::get().uri("/v2/project/G9/version"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(read_project_and_version), + read_project_and_version, + USER_USER_ID, + 404, + ) + .await; + + // TODO: fails for the same reason as above + // let request_generator = || { + // test::TestRequest::get() + // .uri("/v2/project/G9/version/Hl") + // }; + // test_scope(&test_app, &db, request_generator, all_scopes_except(read_project_and_version), read_project_and_version, USER_USER_ID, 404).await; + + // Cleanup test db + db.cleanup().await; +} + +// Project writing +#[actix_rt::test] +pub async fn test_project_write_scopes() { + let db = TemporaryDatabase::create_with_dummy().await; + let labrinth_config = setup(&db).await; + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); + let test_app = test::init_service(app).await; + + // Projects writing + let write_project = Scopes::PROJECT_WRITE; + let request_generator = || { + test::TestRequest::patch() + .uri("/v2/project/G9") + .set_json(json!( + { + "title": "test_project_version_write_scopes Title", + } + )) + }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + let request_generator = || { + test::TestRequest::patch() + .uri(&format!( + "/v2/projects?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{}\"", "G9")) + )) + .set_json(json!( + { + "description": "test_project_version_write_scopes Description", + } + )) 
+ }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + let request_generator = || { + test::TestRequest::post() + .uri("/v2/project/G8/schedule") // G8 is an *approved* project, so we can schedule it + .set_json(json!( + { + "requested_status": "private", + "time": Utc::now() + Duration::days(1), + } + )) }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + // Icons and gallery images + let request_generator = || { + test::TestRequest::patch() + .uri("/v2/project/G9/icon?ext=png") + .set_payload(Bytes::from( + include_bytes!("../tests/files/200x200.png") as &[u8] + )) + }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + let request_generator = || test::TestRequest::delete().uri("/v2/project/G9/icon"); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + let request_generator = || { + test::TestRequest::post() + .uri("/v2/project/G9/gallery?ext=png&featured=true") + .set_payload(Bytes::from( + include_bytes!("../tests/files/200x200.png") as &[u8] + )) + }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + // Get project, as we need the gallery image url + let request_generator = test::TestRequest::get() + .uri("/v2/project/G9") + .append_header(("Authorization", "mrp_patuser")) + .to_request(); + let resp = test::call_service(&test_app, request_generator).await; + let project: serde_json::Value = test::read_body_json(resp).await; + let gallery_url = project["gallery"][0]["url"].as_str().unwrap(); + let request_generator = + || test::TestRequest::patch().uri(&format!("/v2/project/G9/gallery?url={gallery_url}")); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + let request_generator = + || test::TestRequest::delete().uri(&format!("/v2/project/G9/gallery?url={gallery_url}")); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + // Team scopes - add user 'friend' let request_generator = || { test::TestRequest::post() - .uri(&format!("/v2/version")) - .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + .uri(&format!("/v2/team/1c/members")) + .set_json(json!({ + "user_id": "4" + })) + }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + // Accept team invite as 'friend' + let request_generator = || test::TestRequest::post().uri(&format!("/v2/team/1c/join")); + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + FRIEND_USER_ID, + 401, + ) + .await; + + // Patch 'friend' user + let request_generator = || { + test::TestRequest::patch() + .uri(&format!("/v2/team/1c/members/4")) + .set_json(json!({ + "permissions": 1 + })) + }; + test_scope( + &test_app, + &db, + request_generator, + all_scopes_except(write_project), + write_project, + USER_USER_ID, + 401, + ) + .await; + + // Transfer ownership to 'friend' + let request_generator = || { + 
test::TestRequest::patch()
+            .uri(&format!("/v2/team/1c/owner"))
+            .set_json(json!({
+                "user_id": "4"
+            }))
     };
-    test_scope(&test_app, &db, request_generator, all_scopes_except(create_version), create_version, USER_USER_ID).await;
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(write_project),
+        write_project,
+        USER_USER_ID,
+        401,
+    )
+    .await;
+
+    // Now as 'friend', delete 'user'
+    let request_generator = || test::TestRequest::delete().uri(&format!("/v2/team/1c/members/3"));
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(write_project),
+        write_project,
+        FRIEND_USER_ID,
+        401,
+    )
+    .await;
+    // Delete project
+    // TODO: this route is currently broken,
+    // because the Project::get_id contained within Project::remove doesn't include hidden versions, meaning that if there
+    // is a hidden version, it will fail to delete the project (with a 500 error, as the versions of a project are not all deleted)
+    // let delete_version = Scopes::PROJECT_DELETE;
+    // let request_generator = || {
+    //     test::TestRequest::delete()
+    //         .uri(&format!("/v2/project/G9"))
+    // };
+    // test_scope(&test_app, &db, request_generator, all_scopes_except(delete_version), delete_version, USER_USER_ID, 401).await;

     // Cleanup test db
     db.cleanup().await;
 }

+// Version write
+#[actix_rt::test]
+pub async fn test_version_write_scopes() {
+    let db = TemporaryDatabase::create_with_dummy().await;
+    let labrinth_config = setup(&db).await;
+    let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
+    let test_app = test::init_service(app).await;
+
+    let write_version = Scopes::VERSION_WRITE;
+
+    // Schedule version
+    let request_generator = || {
+        test::TestRequest::post()
+            .uri("/v2/version/Hk/schedule") // Hk is an *approved* version, so we can schedule it
+            .set_json(json!(
+                {
+                    "requested_status": "archived",
+                    "time": Utc::now() + Duration::days(1),
+                }
+            ))
+    };
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(write_version),
+        write_version,
+        USER_USER_ID,
+        401,
+    )
+    .await;
+
+    // Patch version
+    let request_generator = || {
+        test::TestRequest::patch()
+            .uri("/v2/version/Hk")
+            .set_json(json!(
+                {
+                    "version_title": "test_version_write_scopes Title",
+                }
+            ))
+    };
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(write_version),
+        write_version,
+        USER_USER_ID,
+        401,
+    )
+    .await;
+
+    // Generate test project data. (The multipart layout these upload requests use is sketched below.)
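For context, these upload endpoints take `multipart/form-data` bodies: a `data` JSON segment describing the files, followed by one segment per file. `set_multipart` comes from the test helper in `tests/common/actix.rs` (plain actix-test has no multipart support). A rough sketch of what encoding one segment involves; this illustrates the wire format rather than reproducing the helper's actual code, though the segment types match the helper's:

```rust
use crate::common::actix::{MultipartSegment, MultipartSegmentData};

// Sketch only: encodes a single multipart segment. The real helper also
// chooses a boundary, appends the terminating `--boundary--` line, and sets
// the `Content-Type: multipart/form-data` request header.
fn encode_segment(boundary: &str, seg: &MultipartSegment) -> Vec<u8> {
    let mut out = format!("--{boundary}\r\n").into_bytes();
    let disposition = match &seg.filename {
        Some(f) => format!(
            "Content-Disposition: form-data; name=\"{}\"; filename=\"{}\"\r\n",
            seg.name, f
        ),
        None => format!("Content-Disposition: form-data; name=\"{}\"\r\n", seg.name),
    };
    out.extend_from_slice(disposition.as_bytes());
    if let Some(ct) = &seg.content_type {
        out.extend_from_slice(format!("Content-Type: {ct}\r\n").as_bytes());
    }
    out.extend_from_slice(b"\r\n");
    match &seg.data {
        MultipartSegmentData::Text(text) => out.extend_from_slice(text.as_bytes()),
        MultipartSegmentData::Binary(bytes) => out.extend_from_slice(bytes),
    }
    out.extend_from_slice(b"\r\n");
    out
}
```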
+    // Basic json
+    let json_segment = common::actix::MultipartSegment {
+        name: "data".to_string(),
+        filename: None,
+        content_type: Some("application/json".to_string()),
+        data: common::actix::MultipartSegmentData::Text(
+            serde_json::to_string(&json!(
+                {
+                    "file_types": {
+                        "simple-zip.zip": "required-resource-pack"
+                    },
+                }
+            ))
+            .unwrap(),
+        ),
+    };
+
+    // Differently named file, with different content
+    let content_segment = common::actix::MultipartSegment {
+        name: "simple-zip.zip".to_string(),
+        filename: Some("simple-zip.zip".to_string()),
+        content_type: Some("application/zip".to_string()),
+        data: common::actix::MultipartSegmentData::Binary(
+            include_bytes!("../tests/files/simple-zip.zip").to_vec(),
+        ),
+    };
+
+    // Upload version file
+    let request_generator = || {
+        test::TestRequest::post()
+            .uri(&format!("/v2/version/Hk/file"))
+            .set_multipart(vec![json_segment.clone(), content_segment.clone()])
+    };
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(write_version),
+        write_version,
+        USER_USER_ID,
+        401,
+    )
+    .await;
+
+    // Delete version file
+    // TODO: should this be VERSION_DELETE?
+    let request_generator = || {
+        test::TestRequest::delete().uri(&format!("/v2/version_file/000000000")) // Delete from Hk, as we uploaded to Hk, and it needs another file
+    };
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(write_version),
+        write_version,
+        USER_USER_ID,
+        401,
+    )
+    .await;
+
+    // Delete version
+    let delete_version = Scopes::VERSION_DELETE;
+    let request_generator = || test::TestRequest::delete().uri(&format!("/v2/version/Hk"));
+    test_scope(
+        &test_app,
+        &db,
+        request_generator,
+        all_scopes_except(delete_version),
+        delete_version,
+        USER_USER_ID,
+        401,
+    )
+    .await;

-// Project scopes
-// Version scopes
+    // Cleanup test db
+    db.cleanup().await;
+}

 // Report scopes

 // Thread scopes

-// Pat scopes
-
 // Session scopes

 // Analytics scopes

 // Collection scopes

+// User authentication
+
+// Pat scopes
+
+// Organization scopes
+
+// Some hash/version files functions
+
+// Meta pat stuff
+
+#[actix_rt::test]
+pub async fn test_user_auth_scopes() {
+    let db = TemporaryDatabase::create_with_dummy().await;
+    let labrinth_config = setup(&db).await;
+    let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
+    let test_app = test::init_service(app).await;
+
+    // TODO: Test user auth scopes
+
+    // Cleanup test db
+    db.cleanup().await;
+}

 // A reusable test that works for any scope test that:
-// - returns a 401 if the scope is not present
+// - returns a known 'expected_failure_code' if the scope is not present (probably 401)
 // - returns a 200-299 if the scope is present
-// - returns a JSON body on a successful request
+// - returns the failure and success bodies for requests that return 200
 // Some tests (ie: USER_READ_EMAIL) will still need to have additional checks (ie: email is present/absent) because it doesn't affect the response code
 // test_app is the generated test app from init_service
 // Closure generates a TestRequest. 
The authorization header (if any) will be overwritten by the generated PAT -async fn test_scope(test_app : &impl actix_web::dev::Service, db : &TemporaryDatabase, request_generator : T, failure_scopes: Scopes, success_scopes : Scopes, user_id : i64) -> serde_json::Value -where T : Fn() -> TestRequest +async fn test_scope( + test_app: &impl actix_web::dev::Service< + actix_http::Request, + Response = ServiceResponse, + Error = actix_web::Error, + >, + db: &TemporaryDatabase, + request_generator: T, + failure_scopes: Scopes, + success_scopes: Scopes, + user_id: i64, + expected_failure_code: u16, +) -> (serde_json::Value, serde_json::Value) +where + T: Fn() -> TestRequest, { // First, create a PAT with all OTHER scopes let access_token_all_others = create_test_pat(failure_scopes, user_id, &db).await; - + // Create a PAT with the given scopes let access_token = create_test_pat(success_scopes, user_id, &db).await; - + // Perform test twice, once with each PAT // the first time, we expect a 401 // the second time, we expect a 200 or 204, and it will return a JSON body of the response @@ -353,24 +1161,41 @@ where T : Fn() -> TestRequest .append_header(("Authorization", access_token_all_others.as_str())) .to_request(); let resp = test::call_service(&test_app, req).await; - assert_eq!(resp.status(), 401); - + + assert_eq!(expected_failure_code, resp.status().as_u16()); + let failure_body = if resp.status() == 200 + && resp.headers().contains_key("Content-Type") + && resp.headers().get("Content-Type").unwrap() == "application/json" + { + test::read_body_json(resp).await + } else { + serde_json::Value::Null + }; + let req = request_generator() .append_header(("Authorization", access_token.as_str())) .to_request(); let resp = test::call_service(&test_app, req).await; - assert!(resp.status().is_success()); - let body = if resp.status() == 200 { + println!( + "{}: {}", + resp.status().as_u16(), + resp.status().canonical_reason().unwrap() + ); + assert!(resp.status().is_success() || resp.status().is_redirection()); + let success_body = if resp.status() == 200 + && resp.headers().contains_key("Content-Type") + && resp.headers().get("Content-Type").unwrap() == "application/json" + { test::read_body_json(resp).await } else { serde_json::Value::Null }; - body + (failure_body, success_body) } // Creates a PAT with the given scopes, and returns the access token // this allows us to make PATs with scopes that are not allowed to be created by PATs -async fn create_test_pat(scopes : Scopes, user_id : i64, db : &TemporaryDatabase) -> String { +async fn create_test_pat(scopes: Scopes, user_id: i64, db: &TemporaryDatabase) -> String { let mut transaction = db.pool.begin().await.unwrap(); let id = generate_pat_id(&mut transaction).await.unwrap(); let pat = database::models::pat_item::PersonalAccessToken { @@ -390,6 +1215,6 @@ async fn create_test_pat(scopes : Scopes, user_id : i64, db : &TemporaryDatabase // Inversion of scopes for testing // ie: To ensure that ONLY this scope is required, we need to create a PAT with all other scopes -fn all_scopes_except(success_scopes : Scopes) -> Scopes { +fn all_scopes_except(success_scopes: Scopes) -> Scopes { Scopes::ALL ^ success_scopes } diff --git a/tests/project.rs b/tests/project.rs index 23eb0105..55157ed0 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -1,9 +1,9 @@ -use actix_web::{App, test}; +use actix_web::{test, App}; use common::database::TemporaryDatabase; use labrinth::database::models::project_item::{PROJECTS_NAMESPACE, PROJECTS_SLUGS_NAMESPACE}; use 
serde_json::json; -use crate::common::{setup, actix::AppendsMultipart}; +use crate::common::{actix::AppendsMultipart, setup}; // importing common module. mod common; @@ -13,22 +13,26 @@ async fn test_get_project() { // Test setup and dummy data let db = TemporaryDatabase::create_with_dummy().await; let labrinth_config = setup(&db).await; - let app = App::new() - .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); let test_app = test::init_service(app).await; // Cache should default to unpopulated - assert!(db.redis_pool.get::(PROJECTS_NAMESPACE, 1000).await.unwrap().is_none()); + assert!(db + .redis_pool + .get::(PROJECTS_NAMESPACE, 1000) + .await + .unwrap() + .is_none()); // Perform request on dummy data println!("Sending request"); let req = test::TestRequest::get() .uri("/v2/project/G8") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; let status = resp.status(); - let body : serde_json::Value = test::read_body_json(resp).await; + let body: serde_json::Value = test::read_body_json(resp).await; assert_eq!(status, 200); assert_eq!(body["id"], json!("G8")); @@ -39,22 +43,33 @@ async fn test_get_project() { // Confirm that the request was cached println!("Confirming cache"); - assert_eq!(db.redis_pool.get::(PROJECTS_SLUGS_NAMESPACE, "testslug").await.unwrap(), Some(1000)); - - let cached_project = db.redis_pool.get::(PROJECTS_NAMESPACE, 1000).await.unwrap().unwrap(); - let cached_project : serde_json::Value = serde_json::from_str(&cached_project).unwrap(); + assert_eq!( + db.redis_pool + .get::(PROJECTS_SLUGS_NAMESPACE, "testslug") + .await + .unwrap(), + Some(1000) + ); + + let cached_project = db + .redis_pool + .get::(PROJECTS_NAMESPACE, 1000) + .await + .unwrap() + .unwrap(); + let cached_project: serde_json::Value = serde_json::from_str(&cached_project).unwrap(); assert_eq!(cached_project["inner"]["slug"], json!("testslug")); // Make the request again, this time it should be cached let req = test::TestRequest::get() .uri("/v2/project/G8") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; let status = resp.status(); assert_eq!(status, 200); - let body : serde_json::Value = test::read_body_json(resp).await; + let body: serde_json::Value = test::read_body_json(resp).await; assert_eq!(body["id"], json!("G8")); assert_eq!(body["slug"], json!("testslug")); @@ -62,17 +77,17 @@ async fn test_get_project() { println!("Requesting non-existent project"); let req = test::TestRequest::get() .uri("/v2/project/nonexistent") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 404); - // Similarly, request should fail on non-authorized user, with a 404 (hiding the existence of the project) + // Similarly, request should fail on non-authorized user, on a yet-to-be-approved or hidden project, with a 404 (hiding the existence of the project) println!("Requesting project as non-authorized user"); let req = test::TestRequest::get() - .uri("/v2/project/G8") - .append_header(("Authorization","mrp_patenemy")) + .uri("/v2/project/G9") + .append_header(("Authorization", "mrp_patenemy")) .to_request(); let resp = 
test::call_service(&test_app, req).await; @@ -83,12 +98,11 @@ async fn test_get_project() { } #[actix_rt::test] -async fn test_add_remove_project() { +async fn test_add_remove_project() { // Test setup and dummy data let db = TemporaryDatabase::create_with_dummy().await; let labrinth_config = setup(&db).await; - let app = App::new() - .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); let test_app = test::init_service(app).await; // Generate test project data. @@ -120,7 +134,7 @@ async fn test_add_remove_project() { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()) + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), }; // Basic json, with a different file @@ -129,7 +143,7 @@ async fn test_add_remove_project() { data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), ..json_segment.clone() }; - + // Basic json, with a different file, and a different slug json_data["slug"] = json!("new_demo"); json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); @@ -143,7 +157,9 @@ async fn test_add_remove_project() { name: "basic-mod.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + data: common::actix::MultipartSegmentData::Binary( + include_bytes!("../tests/files/basic-mod.jar").to_vec(), + ), }; // Differently named file, with the same content (for hash testing) @@ -151,25 +167,26 @@ async fn test_add_remove_project() { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod-different.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + data: common::actix::MultipartSegmentData::Binary( + include_bytes!("../tests/files/basic-mod.jar").to_vec(), + ), }; - + // Differently named file, with different content let file_diff_name_content_segment = common::actix::MultipartSegment { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod-different.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod-different.jar").to_vec()) + data: common::actix::MultipartSegmentData::Binary( + include_bytes!("../tests/files/basic-mod-different.jar").to_vec(), + ), }; - + // Add a project- simple, should work. 
let req = test::TestRequest::post() .uri("/v2/project") - .append_header(("Authorization","mrp_patuser")) - .set_multipart(vec![ - json_segment.clone(), - file_segment.clone() - ]) + .append_header(("Authorization", "mrp_patuser")) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) .to_request(); let resp = test::call_service(&test_app, req).await; @@ -179,28 +196,30 @@ async fn test_add_remove_project() { // Get the project we just made, and confirm that it's correct let req = test::TestRequest::get() .uri("/v2/project/demo") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 200); - let body : serde_json::Value = test::read_body_json(resp).await; + let body: serde_json::Value = test::read_body_json(resp).await; let versions = body["versions"].as_array().unwrap(); assert!(versions.len() == 1); let uploaded_version_id = &versions[0]; // Checks files to ensure they were uploaded and correctly identify the file - let hash = sha1::Sha1::from(include_bytes!("../tests/files/basic-mod.jar").to_vec()).digest().to_string(); + let hash = sha1::Sha1::from(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + .digest() + .to_string(); let req = test::TestRequest::get() .uri(&format!("/v2/version_file/{hash}?algorithm=sha1")) - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 200); - let body : serde_json::Value = test::read_body_json(resp).await; + let body: serde_json::Value = test::read_body_json(resp).await; let file_version_id = &body["id"]; assert_eq!(&file_version_id, &uploaded_version_id); @@ -208,10 +227,10 @@ async fn test_add_remove_project() { // Even if that file is named differently let req = test::TestRequest::post() .uri("/v2/project") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .set_multipart(vec![ json_diff_slug_file_segment.clone(), // Different slug, different file name - file_diff_name_segment.clone() // Different file name, same content + file_diff_name_segment.clone(), // Different file name, same content ]) .to_request(); @@ -221,58 +240,73 @@ async fn test_add_remove_project() { // Reusing with the same slug and a different file should fail let req = test::TestRequest::post() - .uri("/v2/project") - .append_header(("Authorization","mrp_patuser")) + .uri("/v2/project") + .append_header(("Authorization", "mrp_patuser")) .set_multipart(vec![ json_diff_file_segment.clone(), // Same slug, different file name - file_diff_name_content_segment.clone() // Different file name, different content + file_diff_name_content_segment.clone(), // Different file name, different content ]) .to_request(); let resp = test::call_service(&test_app, req).await; println!("Same slug, different file: {:?}", resp.response().body()); assert_eq!(resp.status(), 400); - + // Different slug, different file should succeed let req = test::TestRequest::post() .uri("/v2/project") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .set_multipart(vec![ json_diff_slug_file_segment.clone(), // Different slug, different file name - file_diff_name_content_segment.clone() // Different file name, same content + file_diff_name_content_segment.clone(), // Different file name, same content ]) .to_request(); let resp 
= test::call_service(&test_app, req).await; - println!("Different slug, different file: {:?}", resp.response().body()); + println!( + "Different slug, different file: {:?}", + resp.response().body() + ); assert_eq!(resp.status(), 200); // Get let req = test::TestRequest::get() .uri("/v2/project/demo") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 200); - let body : serde_json::Value = test::read_body_json(resp).await; + let body: serde_json::Value = test::read_body_json(resp).await; let id = body["id"].to_string(); // Remove the project let req = test::TestRequest::delete() .uri("/v2/project/demo") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 204); // Confirm that the project is gone from the cache - assert_eq!(db.redis_pool.get::(PROJECTS_SLUGS_NAMESPACE, "demo").await.unwrap(), None); - assert_eq!(db.redis_pool.get::(PROJECTS_SLUGS_NAMESPACE, id).await.unwrap(), None); + assert_eq!( + db.redis_pool + .get::(PROJECTS_SLUGS_NAMESPACE, "demo") + .await + .unwrap(), + None + ); + assert_eq!( + db.redis_pool + .get::(PROJECTS_SLUGS_NAMESPACE, id) + .await + .unwrap(), + None + ); // Old slug no longer works let req = test::TestRequest::get() .uri("/v2/project/demo") - .append_header(("Authorization","mrp_patuser")) + .append_header(("Authorization", "mrp_patuser")) .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 404); @@ -285,15 +319,14 @@ async fn test_add_remove_project() { pub async fn test_patch_project() { let db = TemporaryDatabase::create_with_dummy().await; let labrinth_config = setup(&db).await; - let app = App::new() - .configure(|cfg | labrinth::app_config(cfg, labrinth_config.clone())); + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); let test_app = test::init_service(app).await; // First, we do some patch requests that should fail. // Failure because the user is not authorized. let req = test::TestRequest::patch() .uri("/v2/project/testslug") - .append_header(("Authorization","mrp_patenemy")) + .append_header(("Authorization", "mrp_patenemy")) .set_json(json!({ "title": "Test_Add_Project project - test 1", })) @@ -304,25 +337,25 @@ pub async fn test_patch_project() { // Failure because we are setting URL fields to invalid urls. for url_type in ["issues_url", "source_url", "wiki_url", "discord_url"] { let req = test::TestRequest::patch() - .uri("/v2/project/testslug") - .append_header(("Authorization","mrp_patuser")) - .set_json(json!({ - url_type: "w.fake.url", - })) - .to_request(); + .uri("/v2/project/testslug") + .append_header(("Authorization", "mrp_patuser")) + .set_json(json!({ + url_type: "w.fake.url", + })) + .to_request(); let resp = test::call_service(&test_app, req).await; assert_eq!(resp.status(), 400); } // Failure because these are illegal requested statuses for a normal user. 
-    for req in ["unknown","processing", "withheld", "scheduled"] {
+    for req in ["unknown", "processing", "withheld", "scheduled"] {
         let req = test::TestRequest::patch()
-        .uri("/v2/project/testslug")
-        .append_header(("Authorization","mrp_patuser"))
-        .set_json(json!({
-            "requested_status": req,
-        }))
-        .to_request();
+            .uri("/v2/project/testslug")
+            .append_header(("Authorization", "mrp_patuser"))
+            .set_json(json!({
+                "requested_status": req,
+            }))
+            .to_request();
         let resp = test::call_service(&test_app, req).await;
         assert_eq!(resp.status(), 400);
     }
@@ -330,23 +363,23 @@ pub async fn test_patch_project() {
     // Failure because these should not be able to be set by a non-mod
     for key in ["moderation_message", "moderation_message_body"] {
         let req = test::TestRequest::patch()
-        .uri("/v2/project/testslug")
-        .append_header(("Authorization","mrp_patuser"))
-        .set_json(json!({
-            key: "test",
-        }))
-        .to_request();
+            .uri("/v2/project/testslug")
+            .append_header(("Authorization", "mrp_patuser"))
+            .set_json(json!({
+                key: "test",
+            }))
+            .to_request();
         let resp = test::call_service(&test_app, req).await;
         assert_eq!(resp.status(), 401);

         // (should work for a mod, though)
         let req = test::TestRequest::patch()
-        .uri("/v2/project/testslug")
-        .append_header(("Authorization","mrp_patmoderator"))
-        .set_json(json!({
-            key: "test",
-        }))
-        .to_request();
+            .uri("/v2/project/testslug")
+            .append_header(("Authorization", "mrp_patmoderator"))
+            .set_json(json!({
+                key: "test",
+            }))
+            .to_request();
         let resp = test::call_service(&test_app, req).await;
         assert_eq!(resp.status(), 204);
     }
@@ -354,7 +387,7 @@ pub async fn test_patch_project() {
     // Failure because the slug is already taken.
     let req = test::TestRequest::patch()
         .uri("/v2/project/testslug")
-        .append_header(("Authorization","mrp_patuser"))
+        .append_header(("Authorization", "mrp_patuser"))
         .set_json(json!({
             "slug": "testslug2", // the other dummy project has this slug
         }))
         .to_request();
     let resp = test::call_service(&test_app, req).await;
     assert_eq!(resp.status(), 400);

-    // Not allowed to directly set status, as default dummy is "processing"
+    // Not allowed to directly set status, as 'testslug2' (the other project) is "processing" and cannot have its status changed like this.
     let req = test::TestRequest::patch()
-    .uri("/v2/project/testslug")
-    .append_header(("Authorization","mrp_patuser"))
-    .set_json(json!({
-        "status": "private", // the other dummy project has this slug
-    }))
-    .to_request();
+        .uri("/v2/project/testslug2")
+        .append_header(("Authorization", "mrp_patuser"))
+        .set_json(json!({
+            "status": "private"
+        }))
+        .to_request();
     let resp = test::call_service(&test_app, req).await;
     assert_eq!(resp.status(), 401);

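To make the rule above concrete: a normal user proposes a state change through `requested_status`, while `status` itself is moderation-controlled and rejected with a 401. A sketch of the two request bodies (the allowed `requested_status` value here is an assumption; the tests above only pin down the disallowed ones):

```rust
use serde_json::json;

// Accepted path for a normal user: ask for a status change.
let requested = json!({ "requested_status": "archived" });
// Rejected path (the 401 asserted above): write the status directly.
let direct = json!({ "status": "private" });
```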
     // Successful request to patch many fields.
     let req = test::TestRequest::patch()
         .uri("/v2/project/testslug")
-        .append_header(("Authorization","mrp_patuser"))
+        .append_header(("Authorization", "mrp_patuser"))
         .set_json(json!({
             "slug": "newslug",
             "title": "New successful title",
@@ -402,20 +435,20 @@ pub async fn test_patch_project() {
     // Old slug no longer works
     let req = test::TestRequest::get()
         .uri("/v2/project/testslug")
-        .append_header(("Authorization","mrp_patuser"))
+        .append_header(("Authorization", "mrp_patuser"))
         .to_request();
     let resp = test::call_service(&test_app, req).await;
     assert_eq!(resp.status(), 404);

     // Old slug no longer works
     let req = test::TestRequest::get()
-    .uri("/v2/project/newslug")
-    .append_header(("Authorization","mrp_patuser"))
-    .to_request();
+        .uri("/v2/project/newslug")
+        .append_header(("Authorization", "mrp_patuser"))
+        .to_request();
     let resp = test::call_service(&test_app, req).await;
     assert_eq!(resp.status(), 200);

-    let body : serde_json::Value = test::read_body_json(resp).await;
+    let body: serde_json::Value = test::read_body_json(resp).await;
     assert_eq!(body["slug"], json!("newslug"));
     assert_eq!(body["title"], json!("New successful title"));
     assert_eq!(body["description"], json!("New successful description"));
@@ -427,9 +460,14 @@ pub async fn test_patch_project() {
     assert_eq!(body["wiki_url"], json!("https://wiki.com"));
     assert_eq!(body["client_side"], json!("optional"));
     assert_eq!(body["server_side"], json!("required"));
-    assert_eq!(body["donation_urls"][0]["url"], json!("https://patreon.com"));
-
+    assert_eq!(
+        body["donation_urls"][0]["url"],
+        json!("https://patreon.com")
+    );
     // Cleanup test db
     db.cleanup().await;
-}
\ No newline at end of file
+}
+
+// TODO: you are missing a lot of routes on projects here
+// TODO: using permissions/scopes, can we SEE projects' existence that we are not allowed to? (ie 401 instead of 404)

From e9e7fc1662dca0355c1b5644658678e9a341d99e Mon Sep 17 00:00:00 2001
From: Wyatt Verchere
Date: Tue, 3 Oct 2023 11:57:03 -0700
Subject: [PATCH 10/16] merge fixes

---
 src/database/models/organization_item.rs | 110 ++++++++++-------------
 src/routes/v2/organizations.rs           |  21 ++---
 src/routes/v2/teams.rs                   |   2 +-
 3 files changed, 57 insertions(+), 76 deletions(-)

diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs
index 5a52558a..64d0d2ba 100644
--- a/src/database/models/organization_item.rs
+++ b/src/database/models/organization_item.rs
@@ -1,14 +1,11 @@
-use crate::models::ids::base62_impl::{parse_base62, to_base62};
+use crate::{models::ids::base62_impl::{parse_base62, to_base62}, database::redis::RedisPool};

 use super::{ids::*, TeamMember};
-use redis::cmd;
 use serde::{Deserialize, Serialize};

 const ORGANIZATIONS_NAMESPACE: &str = "organizations";
 const ORGANIZATIONS_TITLES_NAMESPACE: &str = "organizations_titles";

-const DEFAULT_EXPIRY: i64 = 1800;
-
 #[derive(Deserialize, Serialize, Clone, Debug)]
 /// An organization of users who together control one or more projects and organizations.
pub struct Organization { @@ -55,7 +52,7 @@ impl Organization { pub async fn get<'a, E>( string: &str, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, super::DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -68,7 +65,7 @@ impl Organization { pub async fn get_id<'a, 'b, E>( id: OrganizationId, exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, super::DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -81,7 +78,7 @@ impl Organization { pub async fn get_many_ids<'a, 'b, E>( organization_ids: &[OrganizationId], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, super::DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -96,7 +93,7 @@ impl Organization { pub async fn get_many<'a, E, T: ToString>( organization_strings: &[T], exec: E, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, super::DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, @@ -107,8 +104,6 @@ impl Organization { return Ok(Vec::new()); } - let mut redis = redis.get().await?; - let mut found_organizations = Vec::new(); let mut remaining_strings = organization_strings .iter() @@ -121,36 +116,24 @@ impl Organization { .collect::>(); organization_ids.append( - &mut cmd("MGET") - .arg( - organization_strings - .iter() - .map(|x| { - format!( - "{}:{}", - ORGANIZATIONS_TITLES_NAMESPACE, - x.to_string().to_lowercase() - ) - }) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await? - .into_iter() - .flatten() - .collect(), + &mut redis + .multi_get::( + ORGANIZATIONS_TITLES_NAMESPACE, + organization_strings + .iter() + .map(|x| x.to_string().to_lowercase()) + .collect(), + ) + .await? + .into_iter() + .flatten() + .collect() ); if !organization_ids.is_empty() { - let organizations = cmd("MGET") - .arg( - organization_ids - .iter() - .map(|x| format!("{}:{}", ORGANIZATIONS_NAMESPACE, x)) - .collect::>(), - ) - .query_async::<_, Vec>>(&mut redis) - .await?; + let organizations = redis + .multi_get::(ORGANIZATIONS_NAMESPACE, organization_ids) + .await?; for organization in organizations { if let Some(organization) = @@ -201,25 +184,23 @@ impl Organization { .await?; for organization in organizations { - cmd("SET") - .arg(format!("{}:{}", ORGANIZATIONS_NAMESPACE, organization.id.0)) - .arg(serde_json::to_string(&organization)?) 
- .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) - .await?; - - cmd("SET") - .arg(format!( - "{}:{}", + redis + .set( + ORGANIZATIONS_NAMESPACE, + organization.id.0, + serde_json::to_string(&organization)?, + None, + ) + .await?; + redis + .set( ORGANIZATIONS_TITLES_NAMESPACE, - organization.title.to_lowercase() - )) - .arg(organization.id.0) - .arg("EX") - .arg(DEFAULT_EXPIRY) - .query_async::<_, ()>(&mut redis) + organization.title.to_lowercase(), + organization.id.0, + None, + ) .await?; + found_organizations.push(organization); } } @@ -265,7 +246,7 @@ impl Organization { pub async fn remove( id: OrganizationId, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result, super::DatabaseError> { use futures::TryStreamExt; @@ -333,19 +314,18 @@ impl Organization { pub async fn clear_cache( id: OrganizationId, title: Option, - redis: &deadpool_redis::Pool, + redis: &RedisPool, ) -> Result<(), super::DatabaseError> { - let mut redis = redis.get().await?; - let mut cmd = cmd("DEL"); - cmd.arg(format!("{}:{}", ORGANIZATIONS_NAMESPACE, id.0)); + + redis.delete(ORGANIZATIONS_NAMESPACE, id.0).await?; if let Some(title) = title { - cmd.arg(format!( - "{}:{}", - ORGANIZATIONS_TITLES_NAMESPACE, - title.to_lowercase() - )); + redis + .delete( + ORGANIZATIONS_TITLES_NAMESPACE, + title.to_lowercase(), + ) + .await?; } - cmd.query_async::<_, ()>(&mut redis).await?; Ok(()) } diff --git a/src/routes/v2/organizations.rs b/src/routes/v2/organizations.rs index 427fe5ee..fe0235d3 100644 --- a/src/routes/v2/organizations.rs +++ b/src/routes/v2/organizations.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use crate::auth::{filter_authorized_projects, get_user_from_headers}; use crate::database::models::team_item::TeamMember; use crate::database::models::{generate_organization_id, team_item, Organization}; +use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::ids::base62_impl::parse_base62; use crate::models::organizations::OrganizationId; @@ -56,7 +57,7 @@ pub async fn organization_create( req: HttpRequest, new_organization: web::Json, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let current_user = get_user_from_headers( @@ -143,7 +144,7 @@ pub async fn organization_get( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let id = info.into_inner().0; @@ -208,7 +209,7 @@ pub async fn organizations_get( req: HttpRequest, web::Query(ids): web::Query, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let ids = serde_json::from_str::>(&ids.ids)?; @@ -298,7 +299,7 @@ pub async fn organizations_edit( info: web::Path<(String,)>, new_organization: web::Json, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -434,7 +435,7 @@ pub async fn organization_delete( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let user = get_user_from_headers( @@ -498,7 +499,7 @@ pub async fn organization_projects_get( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let info = info.into_inner().0; @@ -547,7 +548,7 @@ pub async fn organization_projects_add( info: 
web::Path<(String,)>, project_info: web::Json, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let info = info.into_inner().0; @@ -649,7 +650,7 @@ pub async fn organization_projects_remove( req: HttpRequest, info: web::Path<(String, String)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let (organization_id, project_id) = info.into_inner(); @@ -743,7 +744,7 @@ pub async fn organization_icon_edit( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, mut payload: web::Payload, session_queue: web::Data, @@ -848,7 +849,7 @@ pub async fn delete_organization_icon( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, file_host: web::Data>, session_queue: web::Data, ) -> Result { diff --git a/src/routes/v2/teams.rs b/src/routes/v2/teams.rs index 0240fe44..d16cc314 100644 --- a/src/routes/v2/teams.rs +++ b/src/routes/v2/teams.rs @@ -117,7 +117,7 @@ pub async fn team_members_get_organization( req: HttpRequest, info: web::Path<(String,)>, pool: web::Data, - redis: web::Data, + redis: web::Data, session_queue: web::Data, ) -> Result { let string = info.into_inner().0; From 3cead46a07da13157a6cbc3605f25c221ac563b4 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Wed, 4 Oct 2023 17:44:47 -0700 Subject: [PATCH 11/16] more tests, full reorganization --- Cargo.lock | 8 +- src/auth/validate.rs | 3 +- src/models/pats.rs | 28 +- src/routes/v2/collections.rs | 75 +- src/routes/v2/moderation.rs | 11 +- src/routes/v2/organizations.rs | 11 +- src/routes/v2/reports.rs | 11 +- src/routes/v2/teams.rs | 1 - src/routes/v2/threads.rs | 18 +- tests/common/actix.rs | 2 + tests/common/database.rs | 77 +- tests/common/environment.rs | 191 +++++ tests/common/mod.rs | 6 + tests/files/dummy_data.sql | 28 +- tests/pats.rs | 1416 ++++++-------------------------- tests/project.rs | 160 ++-- tests/scopes.rs | 1001 ++++++++++++++++++++++ 17 files changed, 1692 insertions(+), 1355 deletions(-) create mode 100644 tests/common/environment.rs create mode 100644 tests/scopes.rs diff --git a/Cargo.lock b/Cargo.lock index c0b7170e..030e2fc7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -93,7 +93,7 @@ dependencies = [ "actix-utils", "ahash 0.8.3", "base64 0.21.2", - "bitflags 2.3.3", + "bitflags 2.4.0", "brotli", "bytes", "bytestring", @@ -597,9 +597,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" [[package]] name = "bitvec" @@ -3597,7 +3597,7 @@ version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "errno", "libc", "linux-raw-sys 0.4.3", diff --git a/src/auth/validate.rs b/src/auth/validate.rs index b2599e3c..e37d1415 100644 --- a/src/auth/validate.rs +++ b/src/auth/validate.rs @@ -166,11 +166,12 @@ pub async fn check_is_moderator_from_headers<'a, 'b, E>( executor: E, redis: &RedisPool, session_queue: &AuthQueue, + required_scopes: Option<&[Scopes]>, ) -> Result where E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, { - let 
user = get_user_from_headers(req, executor, redis, session_queue, None) + let user = get_user_from_headers(req, executor, redis, session_queue, required_scopes) .await? .1; diff --git a/src/models/pats.rs b/src/models/pats.rs index 130adfe5..44b9ee9c 100644 --- a/src/models/pats.rs +++ b/src/models/pats.rs @@ -104,7 +104,7 @@ bitflags::bitflags! { const ORGANIZATION_DELETE = 1 << 38; const ALL = 0b111111111111111111111111111111111111111; - const NOT_RESTRICTED = 0b1111111100000011111111111111100111; + const NOT_RESTRICTED = 0b1111111110000000111111111111111111100111; const NONE = 0b0; } } @@ -112,7 +112,7 @@ bitflags::bitflags! { impl Scopes { // these scopes cannot be specified in a personal access token pub fn restricted(&self) -> bool { - self.contains( + self.intersects( Scopes::PAT_CREATE | Scopes::PAT_READ | Scopes::PAT_WRITE @@ -159,3 +159,27 @@ impl PersonalAccessToken { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + pub fn pat_sanity() { + assert_eq!(Scopes::NONE, Scopes::empty()); + + // Ensure PATs add up and match + // (Such as NOT_RESTRICTED lining up with is_restricted()) + let mut calculated_not_restricted = Scopes::NONE; + let mut calculated_all = Scopes::NONE; + for i in 0..64 { + let scope = Scopes::from_bits_truncate(1 << i); + if !scope.restricted() { + calculated_not_restricted |= scope; + } + calculated_all |= scope; + } + assert_eq!(Scopes::ALL | Scopes::NOT_RESTRICTED, calculated_all); + assert_eq!(Scopes::NOT_RESTRICTED, calculated_not_restricted); + } +} diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs index b76b09ad..56b658c1 100644 --- a/src/routes/v2/collections.rs +++ b/src/routes/v2/collections.rs @@ -212,16 +212,15 @@ pub async fn collection_edit( redis: web::Data, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( + let user = get_user_from_headers( &req, &**pool, &redis, &session_queue, Some(&[Scopes::COLLECTION_WRITE]), ) - .await - .map(|x| x.1) - .ok(); + .await? + .1; new_collection .validate() @@ -232,7 +231,7 @@ pub async fn collection_edit( let result = database::models::Collection::get(id, &**pool, &redis).await?; if let Some(collection_item) = result { - if !is_authorized_collection(&collection_item, &user_option).await? 
{ + if collection_item.user_id != user.id.into() && !user.role.is_mod() { return Ok(HttpResponse::Unauthorized().body("")); } @@ -269,27 +268,25 @@ pub async fn collection_edit( } if let Some(status) = &new_collection.status { - if let Some(user) = user_option { - if !(user.role.is_mod() - || collection_item.status.is_approved() && status.can_be_requested()) - { - return Err(ApiError::CustomAuthentication( - "You don't have permission to set this status!".to_string(), - )); - } - - sqlx::query!( - " - UPDATE collections - SET status = $1 - WHERE (id = $2) - ", - status.to_string(), - id as database::models::ids::CollectionId, - ) - .execute(&mut *transaction) - .await?; + if !(user.role.is_mod() + || collection_item.status.is_approved() && status.can_be_requested()) + { + return Err(ApiError::CustomAuthentication( + "You don't have permission to set this status!".to_string(), + )); } + + sqlx::query!( + " + UPDATE collections + SET status = $1 + WHERE (id = $2) + ", + status.to_string(), + id as database::models::ids::CollectionId, + ) + .execute(&mut *transaction) + .await?; } if let Some(new_project_ids) = &new_collection.new_projects { @@ -356,16 +353,15 @@ pub async fn collection_icon_edit( ) -> Result { if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { let cdn_url = dotenvy::var("CDN_URL")?; - let user_option = get_user_from_headers( + let user = get_user_from_headers( &req, &**pool, &redis, &session_queue, Some(&[Scopes::COLLECTION_WRITE]), ) - .await - .map(|x| x.1) - .ok(); + .await? + .1; let string = info.into_inner().0; let id = database::models::CollectionId(parse_base62(&string)? as i64); @@ -375,7 +371,7 @@ pub async fn collection_icon_edit( ApiError::InvalidInput("The specified collection does not exist!".to_string()) })?; - if !is_authorized_collection(&collection_item, &user_option).await? { + if collection_item.user_id != user.id.into() && !user.role.is_mod() { return Ok(HttpResponse::Unauthorized().body("")); } @@ -439,16 +435,16 @@ pub async fn delete_collection_icon( file_host: web::Data>, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( + let user = get_user_from_headers( &req, &**pool, &redis, &session_queue, Some(&[Scopes::COLLECTION_WRITE]), ) - .await - .map(|x| x.1) - .ok(); + .await? + .1; + let string = info.into_inner().0; let id = database::models::CollectionId(parse_base62(&string)? as i64); let collection_item = database::models::Collection::get(id, &**pool, &redis) @@ -456,7 +452,7 @@ pub async fn delete_collection_icon( .ok_or_else(|| { ApiError::InvalidInput("The specified collection does not exist!".to_string()) })?; - if !is_authorized_collection(&collection_item, &user_option).await? { + if collection_item.user_id != user.id.into() && !user.role.is_mod() { return Ok(HttpResponse::Unauthorized().body("")); } @@ -497,16 +493,15 @@ pub async fn collection_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( + let user = get_user_from_headers( &req, &**pool, &redis, &session_queue, Some(&[Scopes::COLLECTION_DELETE]), ) - .await - .map(|x| x.1) - .ok(); + .await? + .1; let string = info.into_inner().0; let id = database::models::CollectionId(parse_base62(&string)? as i64); @@ -515,7 +510,7 @@ pub async fn collection_delete( .ok_or_else(|| { ApiError::InvalidInput("The specified collection does not exist!".to_string()) })?; - if !is_authorized_collection(&collection, &user_option).await? 
{ + if collection.user_id != user.id.into() && !user.role.is_mod() { return Ok(HttpResponse::Unauthorized().body("")); } let mut transaction = pool.begin().await?; diff --git a/src/routes/v2/moderation.rs b/src/routes/v2/moderation.rs index f1d56dd1..ebebf654 100644 --- a/src/routes/v2/moderation.rs +++ b/src/routes/v2/moderation.rs @@ -1,9 +1,9 @@ use super::ApiError; -use crate::auth::check_is_moderator_from_headers; use crate::database; use crate::database::redis::RedisPool; use crate::models::projects::ProjectStatus; use crate::queue::session::AuthQueue; +use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes}; use actix_web::{get, web, HttpRequest, HttpResponse}; use serde::Deserialize; use sqlx::PgPool; @@ -30,7 +30,14 @@ pub async fn get_projects( count: web::Query, session_queue: web::Data, ) -> Result { - check_is_moderator_from_headers(&req, &**pool, &redis, &session_queue).await?; + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await?; use futures::stream::TryStreamExt; diff --git a/src/routes/v2/organizations.rs b/src/routes/v2/organizations.rs index fe0235d3..754d1a1e 100644 --- a/src/routes/v2/organizations.rs +++ b/src/routes/v2/organizations.rs @@ -40,16 +40,14 @@ pub fn config(cfg: &mut web::ServiceConfig) { #[derive(Deserialize, Validate)] pub struct NewOrganization { - #[validate(length(min = 3, max = 256))] - pub description: String, #[validate( length(min = 3, max = 64), regex = "crate::util::validate::RE_URL_SAFE" )] // Title of the organization, also used as slug pub title: String, - #[serde(default = "crate::models::teams::ProjectPermissions::default")] - pub default_project_permissions: ProjectPermissions, + #[validate(length(min = 3, max = 256))] + pub description: String, } #[post("organization")] @@ -290,7 +288,6 @@ pub struct OrganizationEdit { )] // Title of the organization, also used as slug pub title: Option, - pub default_project_permissions: Option, } #[patch("{id}")] @@ -508,7 +505,7 @@ pub async fn organization_projects_get( &**pool, &redis, &session_queue, - Some(&[Scopes::ORGANIZATION_READ]), + Some(&[Scopes::ORGANIZATION_READ, Scopes::PROJECT_READ]), ) .await .map(|x| x.1) @@ -520,7 +517,7 @@ pub async fn organization_projects_get( let project_ids = sqlx::query!( " SELECT m.id FROM organizations o - LEFT JOIN mods m ON m.id = o.id + INNER JOIN mods m ON m.organization_id = o.id WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL) ", possible_organization_id.map(|x| x as i64), diff --git a/src/routes/v2/reports.rs b/src/routes/v2/reports.rs index f336ef5f..c0eba9c3 100644 --- a/src/routes/v2/reports.rs +++ b/src/routes/v2/reports.rs @@ -405,7 +405,7 @@ pub async fn report_edit( let report = crate::database::models::report_item::Report::get(id, &**pool).await?; if let Some(report) = report { - if !user.role.is_mod() && report.user_id != Some(user.id.into()) { + if !user.role.is_mod() && report.reporter != user.id.into() { return Ok(HttpResponse::NotFound().body("")); } @@ -496,7 +496,14 @@ pub async fn report_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - check_is_moderator_from_headers(&req, &**pool, &redis, &session_queue).await?; + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::REPORT_DELETE]), + ) + .await?; let mut transaction = pool.begin().await?; diff --git a/src/routes/v2/teams.rs b/src/routes/v2/teams.rs index d16cc314..866ee436 100644 --- 
a/src/routes/v2/teams.rs
+++ b/src/routes/v2/teams.rs
@@ -453,7 +453,6 @@ pub async fn add_team_member(
     let organization_permissions =
         OrganizationPermissions::get_permissions_by_role(&current_user.role, &member)
             .unwrap_or_default();
-    println!("{:?}", organization_permissions);
     if !organization_permissions.contains(OrganizationPermissions::MANAGE_INVITES) {
         return Err(ApiError::CustomAuthentication(
             "You don't have permission to invite users to this organization".to_string(),
diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs
index 2aa68617..79930a0b 100644
--- a/src/routes/v2/threads.rs
+++ b/src/routes/v2/threads.rs
@@ -512,7 +512,14 @@ pub async fn moderation_inbox(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = check_is_moderator_from_headers(&req, &**pool, &redis, &session_queue).await?;
+    let user = check_is_moderator_from_headers(
+        &req,
+        &**pool,
+        &redis,
+        &session_queue,
+        Some(&[Scopes::THREAD_READ]),
+    )
+    .await?;

     let ids = sqlx::query!(
         "
@@ -540,7 +547,14 @@ pub async fn thread_read(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    check_is_moderator_from_headers(&req, &**pool, &redis, &session_queue).await?;
+    check_is_moderator_from_headers(
+        &req,
+        &**pool,
+        &redis,
+        &session_queue,
+        Some(&[Scopes::THREAD_READ]),
+    )
+    .await?;

     let id = info.into_inner().0;
     let mut transaction = pool.begin().await?;
diff --git a/tests/common/actix.rs b/tests/common/actix.rs
index 35bf6483..03935e50 100644
--- a/tests/common/actix.rs
+++ b/tests/common/actix.rs
@@ -1,6 +1,7 @@
 use actix_web::test::TestRequest;
 use bytes::{Bytes, BytesMut};

+// Multipart functionality (actix-test does not innately support multipart)
 #[derive(Debug, Clone)]
 pub struct MultipartSegment {
     pub name: String,
@@ -10,6 +11,7 @@ pub struct MultipartSegment {
 }

 #[derive(Debug, Clone)]
+#[allow(dead_code)]
 pub enum MultipartSegmentData {
     Text(String),
     Binary(Vec<u8>),
diff --git a/tests/common/database.rs b/tests/common/database.rs
index 43cee67f..2be236cd 100644
--- a/tests/common/database.rs
+++ b/tests/common/database.rs
@@ -1,13 +1,56 @@
+#![allow(dead_code)]
+
 use labrinth::database::redis::RedisPool;
 use sqlx::{postgres::PgPoolOptions, Executor, PgPool};
 use std::time::Duration;
 use url::Url;

-pub const ADMIN_USER_ID: i64 = 1;
-pub const MOD_USER_ID: i64 = 2;
-pub const USER_USER_ID: i64 = 3;
-pub const FRIEND_USER_ID: i64 = 4;
-pub const ENEMY_USER_ID: i64 = 5;
+// The dummy test database adds a fair bit of 'dummy' data to test with.
+// These constants are used to refer to that data, and are described here.
+
+// The user IDs are as follows:
+pub const ADMIN_USER_ID: &str = "1";
+pub const MOD_USER_ID: &str = "2";
+pub const USER_USER_ID: &str = "3"; // This is the 'main' user ID, and is used for most tests.
+pub const FRIEND_USER_ID: &str = "4"; // An equivalent normal user, used to simulate a collaborator given permissions on user's projects (ie: teams, etc)
+pub const ENEMY_USER_ID: &str = "5"; // Another normal user, used to simulate one who is banned or otherwise not given permission (depending on the test)
+
+pub const ADMIN_USER_ID_PARSED: i64 = 1;
+pub const MOD_USER_ID_PARSED: i64 = 2;
+pub const USER_USER_ID_PARSED: i64 = 3;
+pub const FRIEND_USER_ID_PARSED: i64 = 4;
+pub const ENEMY_USER_ID_PARSED: i64 = 5;
+
+// These are full-scoped PATs, as if the user was logged in (including illegal scopes); an example of how they are attached to requests follows.
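These tokens are used by passing them verbatim in the `Authorization` header, exactly as the tests above do; a minimal sketch using the constants defined just below:

```rust
use actix_web::test;
use crate::common::database::{PROJECT_ALPHA_PROJECT_ID, USER_USER_PAT};

// Authenticate a test request as the 'user' dummy account.
let req = test::TestRequest::get()
    .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}"))
    .append_header(("Authorization", USER_USER_PAT))
    .to_request();
```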
+pub const ADMIN_USER_PAT : &str = "mrp_patadmin"; +pub const MOD_USER_PAT : &str = "mrp_patmoderator"; +pub const USER_USER_PAT : &str = "mrp_patuser"; +pub const FRIEND_USER_PAT : &str = "mrp_patfriend"; +pub const ENEMY_USER_PAT : &str = "mrp_patenemy"; + +// There are two test projects. They are both created by user 3 (USER_USER_ID). +// They differ only in that 'ALPHA' is a public, approved project, and 'BETA' is a private project still in the queue. +// The same goes for their corresponding versions: one listed, one draft. +pub const PROJECT_ALPHA_TEAM_ID : &str = "1c"; +pub const PROJECT_BETA_TEAM_ID : &str = "1d"; + +pub const PROJECT_ALPHA_PROJECT_ID : &str = "G8"; +pub const PROJECT_BETA_PROJECT_ID : &str = "G9"; + +pub const PROJECT_ALPHA_PROJECT_SLUG : &str = "testslug"; +pub const PROJECT_BETA_PROJECT_SLUG : &str = "testslug2"; + +pub const PROJECT_ALPHA_VERSION_ID : &str = "Hk"; +pub const PROJECT_BETA_VERSION_ID : &str = "Hl"; + +// These are threads created alongside the projects. +pub const PROJECT_ALPHA_THREAD_ID : &str = "U"; +pub const PROJECT_BETA_THREAD_ID : &str = "V"; + +// These are the hashes of the files attached to their versions: they do not reflect a 'real' hash of data. +// This can be used for /version_file/ type endpoints which get a project's data from its hash. +pub const PROJECT_ALPHA_THREAD_FILE_HASH : &str = "000000000"; +pub const PROJECT_BETA_THREAD_FILE_HASH : &str = "111111111"; pub struct TemporaryDatabase { pub pool: PgPool, @@ -16,6 +59,13 @@ pub struct TemporaryDatabase { } impl TemporaryDatabase { + // Creates a temporary database like sqlx::test does + // 1. Logs into the main database + // 2. Creates a new randomly generated database + // 3. Runs migrations on the new database + // 4. (Optionally, by using create_with_dummy) adds dummy data to the database + // If a db is created with create_with_dummy, it must be cleaned up with cleanup. + // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. pub async fn create() -> Self { let temp_database_name = generate_random_database_name(); println!("Creating temporary database: {}", &temp_database_name); @@ -68,6 +118,9 @@ impl TemporaryDatabase { db } + // Deletes the temporary database + // If a temporary db is created, it must be cleaned up with cleanup. + // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. pub async fn cleanup(mut self) { let database_url = dotenvy::var("DATABASE_URL").expect("No database URL"); self.pool.close().await; @@ -95,20 +148,6 @@ impl TemporaryDatabase { .expect("Database deletion failed"); } - /* - Adds the following dummy data to the database: - - 5 users (admin, mod, user, friend, enemy) - - Admin and mod have special powers, the others do not - - User is our mock user. Friend and enemy can be used to simulate a collaborator to user to be given permnissions on a project, - whereas enemy might be banned or otherwise not given permission. (These are arbitrary and dependent on the test) - - PATs for each of the five users, with full privileges (for testing purposes). - - 'mrp_patadmin' for admin, etc - - 1 game version (1.20.1) - - 1 dummy project called 'testslug' (and testslug2) with the following properties: - - several categories, tags, etc - - This is a test function, so it panics on error.
- */ pub async fn add_dummy_data(&self) { let pool = &self.pool.clone(); pool.execute(include_str!("../files/dummy_data.sql")) diff --git a/tests/common/environment.rs b/tests/common/environment.rs new file mode 100644 index 00000000..b01dbb64 --- /dev/null +++ b/tests/common/environment.rs @@ -0,0 +1,191 @@ +#![allow(dead_code)] + +use actix_web::{ + dev::ServiceResponse, + test::{self, TestRequest}, + App, +}; +use chrono::Utc; +use super::database::{TemporaryDatabase, USER_USER_ID_PARSED}; +use labrinth::{ + database::{self, models::generate_pat_id}, + models::pats::Scopes, +}; +use crate::common::setup; + +// A complete test environment, with a test actix app and a database. +// Must be called in an #[actix_rt::test] context. It also simulates a +// temporary sqlx db like #[sqlx::test] would. +// Use .call(req) on it directly to make a test call as if test::call_service(req) were being used. +pub struct TestEnvironment { + test_app: Box<dyn LocalService>, + pub db: TemporaryDatabase, +} + +impl TestEnvironment { + pub async fn new() -> Self { + let db = TemporaryDatabase::create_with_dummy().await; + let labrinth_config = setup(&db).await; + let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); + let test_app = test::init_service(app).await; + Self { test_app: Box::new(test_app), db } + } + pub async fn cleanup(self) { + self.db.cleanup().await; + } + + pub async fn call(&self, req: actix_http::Request) -> ServiceResponse { + self.test_app.call(req).await.unwrap() + } +} + + +trait LocalService { + fn call(&self, req: actix_http::Request) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<ServiceResponse, actix_web::Error>>>>; +} +impl<S> LocalService for S +where + S: actix_web::dev::Service<actix_http::Request, Response = ServiceResponse, Error = actix_web::Error>, + S::Future: 'static, +{ + fn call(&self, req: actix_http::Request) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<ServiceResponse, actix_web::Error>>>> { + Box::pin(self.call(req)) + } +} + +// A reusable test type that works for any scope test of an endpoint that: +// - returns a known 'expected_failure_code' if the scope is not present (defaults to 401) +// - returns a 200-299 if the scope is present +// - returns failure and success JSON bodies for requests that return 200 (for performing follow-up checks on) +// This uses a builder format, so you can chain methods to set the parameters to non-defaults (most will probably not need to be set). +pub struct ScopeTest<'a> +{ + test_env: &'a TestEnvironment, + // Scopes expected to fail on this test. By default, this is all scopes except the success scopes. + // (To ensure we have isolated the scope we are testing) + failure_scopes: Option<Scopes>, + // User ID to use for the PATs. By default, this is the USER_USER_ID_PARSED constant. + user_id: i64, + // The code that is expected to be returned if the scope is not present. By default, this is 401 (Unauthorized) + expected_failure_code: u16, +} + +impl<'a> ScopeTest<'a> +{ + pub fn new(test_env: &'a TestEnvironment) -> Self { + Self { + test_env, + failure_scopes: None, + user_id: USER_USER_ID_PARSED, + expected_failure_code: 401, + } + } + + // Set non-standard failure scopes + // If not set, it will be set to all scopes except the success scopes + // (eg: if a combination of scopes is needed, but you want to make sure that the endpoint does not work with all-but-one of them) + pub fn with_failure_scopes(mut self, scopes: Scopes) -> Self { + self.failure_scopes = Some(scopes); + self + } + + // Set the user ID to use + // (eg: a moderator, or friend) + pub fn with_user_id(mut self, user_id: i64) -> Self { + self.user_id = user_id; + self + } + + // If a non-401 code is expected.
+ // (eg: a 404 for a hidden resource, or 200 for a resource with hidden values deeper in) + pub fn with_failure_code(mut self, code: u16) -> Self { + self.expected_failure_code = code; + self + } + + // Call the endpoint generated by req_gen twice, once with a PAT with the failure scopes, and once with the success scopes. + // success_scopes: the scopes that we are testing that should succeed + // returns a tuple of (failure_body, success_body) + // Returns a String error on an unexpected status code, allowing unwrapping in tests. + pub async fn test<T>(&self, req_gen: T, success_scopes: Scopes) -> Result<(serde_json::Value, serde_json::Value), String> + where T: Fn() -> TestRequest + { + + // First, create a PAT with failure scopes + let failure_scopes = self.failure_scopes.unwrap_or(Scopes::ALL ^ success_scopes); + let access_token_all_others = create_test_pat(failure_scopes, self.user_id, &self.test_env.db).await; + + // Create a PAT with the success scopes + let access_token = create_test_pat(success_scopes, self.user_id, &self.test_env.db).await; + + // Perform test twice, once with each PAT + // the first time, we expect a 401 (or known failure code) + let req = req_gen() + .append_header(("Authorization", access_token_all_others.as_str())) + .to_request(); + let resp = self.test_env.test_app.call(req).await.unwrap(); + + if resp.status().as_u16() != self.expected_failure_code { + return Err(format!( + "Expected failure code {}, got {}", + self.expected_failure_code, + resp.status().as_u16() + )); + } + + let failure_body = if resp.status() == 200 + && resp.headers().contains_key("Content-Type") + && resp.headers().get("Content-Type").unwrap() == "application/json" + { + test::read_body_json(resp).await + } else { + serde_json::Value::Null + }; + + // The second time, we expect a success code + let req = req_gen() + .append_header(("Authorization", access_token.as_str())) + .to_request(); + let resp = self.test_env.test_app.call(req).await.unwrap(); + + if !(resp.status().is_success() || resp.status().is_redirection()) { + return Err(format!( + "Expected success code, got {}", + resp.status().as_u16() + )); + } + + let success_body = if resp.status() == 200 + && resp.headers().contains_key("Content-Type") + && resp.headers().get("Content-Type").unwrap() == "application/json" + { + test::read_body_json(resp).await + } else { + serde_json::Value::Null + }; + Ok((failure_body, success_body)) + + } + +} + +// Creates a PAT with the given scopes, and returns the access token +// Interfacing with the db directly, rather than using a route, +// allows us to test with scopes that are not allowed to be created by PATs +async fn create_test_pat(scopes: Scopes, user_id: i64, db: &TemporaryDatabase) -> String { + let mut transaction = db.pool.begin().await.unwrap(); + let id = generate_pat_id(&mut transaction).await.unwrap(); + let pat = database::models::pat_item::PersonalAccessToken { + id, + name: format!("test_pat_{}", scopes.bits()), + access_token: format!("mrp_{}", id.0), + scopes, + user_id: database::models::ids::UserId(user_id), + created: Utc::now(), + expires: Utc::now() + chrono::Duration::days(1), + last_used: None, + }; + pat.insert(&mut transaction).await.unwrap(); + transaction.commit().await.unwrap(); + pat.access_token +} diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 186f8f46..122849ea 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -10,7 +10,10 @@ use self::database::TemporaryDatabase; pub mod actix; pub mod database; +pub mod environment;
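+// For illustration only: a minimal sketch of how the ScopeTest builder from
+// environment.rs is intended to be driven (a hypothetical test; the /v2/user
+// route and its USER_READ gating are assumptions for the example, not part
+// of this patch):
+//
+//     use crate::common::environment::{ScopeTest, TestEnvironment};
+//     use labrinth::models::pats::Scopes;
+//
+//     #[actix_rt::test]
+//     async fn user_read_scope() {
+//         let test_env = TestEnvironment::new().await;
+//         // Fails with a PAT holding every scope except USER_READ,
+//         // succeeds with a PAT holding exactly USER_READ.
+//         let (_failure, success) = ScopeTest::new(&test_env)
+//             .test(
+//                 || actix_web::test::TestRequest::get().uri("/v2/user"),
+//                 Scopes::USER_READ,
+//             )
+//             .await
+//             .unwrap();
+//         assert!(success["username"].as_str().is_some());
+//         test_env.cleanup().await;
+//     }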
+// Testing equivalent to 'setup' function, producing a LabrinthConfig +// If making a test, you should probably use environment::TestEnvironment::new() (which calls this) pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig { println!("Setting up labrinth config"); @@ -38,6 +41,9 @@ pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig { } // This is so that env vars not used immediately don't panic at runtime +// Currently, these are the same as main.rs ones. +// TODO: go through after all tests are created and remove any that are not used +// Low priority as .env file should include all of these anyway fn check_test_vars() -> bool { let mut failed = false; diff --git a/tests/files/dummy_data.sql b/tests/files/dummy_data.sql index 91ec7ad0..be118513 100644 --- a/tests/files/dummy_data.sql +++ b/tests/files/dummy_data.sql @@ -1,13 +1,8 @@ -ALTER TABLE threads DISABLE TRIGGER ALL; -ALTER TABLE pats DISABLE TRIGGER ALL; -ALTER TABLE loaders_project_types DISABLE TRIGGER ALL; -ALTER TABLE team_members DISABLE TRIGGER ALL; -ALTER TABLE versions DISABLE TRIGGER ALL; -ALTER TABLE loaders_versions DISABLE TRIGGER ALL; -ALTER TABLE game_versions_versions DISABLE TRIGGER ALL; -ALTER TABLE files DISABLE TRIGGER ALL; -ALTER TABLE hashes DISABLE TRIGGER ALL; +-- Dummy test data for use in tests. +-- IDs are listed as integers, followed by their equivalent base 62 representation. +-- Inserts 5 dummy users for testing, with slight differences +-- 'Friend' and 'enemy' function like 'user', but we can use them to simulate 'other' users that may or may not be able to access certain things -- IDs 1-5, 1-5 INSERT INTO users (id, username, name, email, role) VALUES (1, 'admin', 'Administrator Test', 'admin@modrinth.com', 'admin'); INSERT INTO users (id, username, name, email, role) VALUES (2, 'moderator', 'Moderator Test', 'moderator@modrinth.com', 'moderator'); @@ -15,6 +10,8 @@ INSERT INTO users (id, username, name, email, role) VALUES (3, 'user', 'User Tes INSERT INTO users (id, username, name, email, role) VALUES (4, 'friend', 'Friend Test', 'friend@modrinth.com', 'developer'); INSERT INTO users (id, username, name, email, role) VALUES (5, 'enemy', 'Enemy Test', 'enemy@modrinth.com', 'developer'); +-- Full PATs for each user, with different scopes +-- These are not legal PATs, as they contain all scopes- they mimic permissions of a logged in user -- IDs: 50-54, o p q r s INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (50, 1, 'admin-pat', 'mrp_patadmin', B'11111111111111111111111111111111111'::BIGINT, '2030-08-18 15:48:58.435729+00'); INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (51, 2, 'moderator-pat', 'mrp_patmoderator', B'11111111111111111111111111111111111'::BIGINT, '2030-08-18 15:48:58.435729+00'); @@ -22,6 +19,7 @@ INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (52, INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (53, 4, 'friend-pat', 'mrp_patfriend', B'11111111111111111111111111111111111'::BIGINT, '2030-08-18 15:48:58.435729+00'); INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (54, 5, 'enemy-pat', 'mrp_patenemy', B'11111111111111111111111111111111111'::BIGINT, '2030-08-18 15:48:58.435729+00'); +-- Sample game versions, loaders, categories INSERT INTO game_versions (id, version, type, created) VALUES (20000, '1.20.1', 'release', timezone('utc', now())); @@ -43,12 +41,12 @@ INSERT INTO teams (id) VALUES (100); -- ID: 100, 1c INSERT INTO 
team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (200, 100, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); -- ID: 1000, G8 --- Approved, viewable +-- This project is approved, viewable INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) VALUES (1000, 100, 'Test Mod', 'Test mod description', 'Test mod body', timezone('utc', now()), 0, 'approved', 'approved', 1, 2, 'MIT', 'testslug', 1, 'monetized'); -- ID: 1100, Hk --- Listed, viewable +-- This version is listed, viewable INSERT INTO versions ( id, mod_id, author_id, name, version_number, changelog, date_published, downloads, version_type, featured, status) VALUES (1100, 1000, 3, 'v1', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'listed'); @@ -60,6 +58,7 @@ INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) VALUES (800, 1100, 'http://www.url.to/myfile.jar', 'myfile.jar', true, 1, 'required-resource-pack'); INSERT INTO hashes (file_id, algorithm, hash) VALUES (800, 'sha1', '000000000'); +-- ID: 30, U INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (30, 'project', 1000, null); ------------------------------------------------------------ @@ -67,12 +66,12 @@ INSERT INTO teams (id) VALUES (101); -- ID: 101, 1d INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (201, 101, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); -- ID: 1001, G9 --- Processing, and therefore not viewable +-- This project is processing, and therefore not publicly viewable INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) VALUES (1001, 101, 'Test Mod 2', 'Test mod description 2', 'Test mod body 2', timezone('utc', now()), 0, 'processing', 'approved', 1, 2, 'MIT', 'testslug2', 1, 'monetized'); -- ID: 1101, Hl --- Draft, and therefore not viewable +-- This version is a draft, and therefore not publicly viewable INSERT INTO versions ( id, mod_id, author_id, name, version_number, changelog, date_published, downloads, version_type, featured, status) VALUES (1101, 1001, 3, 'v1.0', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'draft'); @@ -84,4 +83,5 @@ INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) VALUES (801, 1101, 'http://www.url.to/myfile2.jar', 'myfile2.jar', true, 1, 'required-resource-pack'); INSERT INTO hashes (file_id, algorithm, hash) VALUES (801, 'sha1', '111111111'); -INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (31, 'project', 1001, null); \ No newline at end of file +-- ID: 31, V +INSERT INTO threads (id, thread_type, mod_id, report_id, show_in_mod_inbox) VALUES (31, 'project', 1001, null, true); \ No newline at end of file diff --git a/tests/pats.rs b/tests/pats.rs index 9f45a2c7..c5f37ab4 100644 --- a/tests/pats.rs +++ b/tests/pats.rs @@ -1,1220 +1,292 @@ -use actix_web::{ - dev::ServiceResponse, - test::{self, TestRequest}, - App, -}; -use bytes::Bytes; use chrono::{Duration, Utc}; -use common::{actix::AppendsMultipart, database::TemporaryDatabase}; -use labrinth::{ - database::{self, models::generate_pat_id}, - models::pats::Scopes, -}; +use common::database::*; +use labrinth::models::pats::Scopes; use serde_json::json; -use
crate::common::{ - database::{ADMIN_USER_ID, ENEMY_USER_ID, FRIEND_USER_ID, MOD_USER_ID, USER_USER_ID}, - setup, -}; +use crate::common::environment::TestEnvironment; // importing common module. mod common; -// For each scope, we (using test_scope): -// - create a PAT with a given set of scopes for a function -// - create a PAT with all other scopes for a function -// - test the function with the PAT with the given scopes -// - test the function with the PAT with all other scopes - -// Test for users, emails, and payout scopes (not user auth scope or notifs) +// Full pat test: +// - create a PAT and ensure it can be used for the scope +// - ensure access token is not returned for any PAT in GET +// - ensure PAT can be patched to change scopes +// - ensure PAT can be patched to change expiry +// - ensure expired PATs cannot be used +// - ensure PATs can be deleted #[actix_rt::test] -async fn test_user_scopes() { - // Test setup and dummy data - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; - - // User reading - println!("Testing user reading..."); - let read_user = Scopes::USER_READ; - let request_generator = || test::TestRequest::get().uri("/v2/user"); - let (_, read_user) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_user), - read_user, - USER_USER_ID, - 401, - ) - .await; - assert!(read_user["email"].as_str().is_none()); // email should not be present - assert!(read_user["payout_data"].as_object().is_none()); // payout should not be present - - // Email reading - println!("Testing email reading..."); - let read_email = Scopes::USER_READ | Scopes::USER_READ_EMAIL; - let request_generator = || test::TestRequest::get().uri("/v2/user"); - let (_, read_email_test) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_email), - read_email, - USER_USER_ID, - 401, - ) - .await; - assert_eq!(read_email_test["email"], json!("user@modrinth.com")); // email should be present - - // Payout reading - println!("Testing payout reading..."); - let read_payout = Scopes::USER_READ | Scopes::PAYOUTS_READ; - let request_generator = || test::TestRequest::get().uri("/v2/user"); - let (_, read_payout_test) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_payout), - read_payout, - USER_USER_ID, - 401, - ) - .await; - assert!(read_payout_test["payout_data"].as_object().is_some()); // payout should be present - - // User writing - // We use the Admin PAT for this test, on the 'user' user - println!("Testing user writing..."); - let write_user = Scopes::USER_WRITE; - let request_generator = || { - test::TestRequest::patch() - .uri("/v2/user/user") - .set_json(json!( { - // Do not include 'username', as to not change the rest of the tests - "name": "NewName", - "bio": "New bio", - "location": "New location", - "role": "admin", - "badges": 5, - // Do not include payout info, different scope - })) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_user), - write_user, - ADMIN_USER_ID, - 401, - ) - .await; - - // User payout info writing - println!("Testing user payout info writing..."); - let failure_write_user_payout = all_scopes_except(Scopes::PAYOUTS_WRITE); // Failure case should include USER_WRITE - let write_user_payout = Scopes::USER_WRITE | Scopes::PAYOUTS_WRITE; - let request_generator = || { - 
test::TestRequest::patch() - .uri("/v2/user/user") - .set_json(json!( { - "payout_data": { - "payout_wallet": "paypal", - "payout_wallet_type": "email", - "payout_address": "test@modrinth.com" - } - })) - }; - test_scope( - &test_app, - &db, - request_generator, - failure_write_user_payout, - write_user_payout, - USER_USER_ID, - 401, - ) - .await; - - // User deletion - // (The failure is first, and this is the last test for this test function, we can delete it and use the same PAT for both tests) - println!("Testing user deletion..."); - let delete_user = Scopes::USER_DELETE; - let request_generator = || test::TestRequest::delete().uri("/v2/user/enemy"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(delete_user), - delete_user, - ENEMY_USER_ID, - 401, - ) - .await; +pub async fn pat_full_test() { + let test_env = TestEnvironment::new().await; - // Cleanup test db - db.cleanup().await; -} - -// Notifications -#[actix_rt::test] -pub async fn test_notifications_scopes() { - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; - - // We will invite user 'friend' to project team, and use that as a notification - // Get notifications + // Create a PAT for a full test let req = test::TestRequest::post() - .uri("/v2/team/1c/members") - .append_header(("Authorization", "mrp_patuser")) - .set_json(json!( { - "user_id": "4" // friend + .uri(&"/v2/pat".to_string()) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "scopes": Scopes::COLLECTION_CREATE, // Collection create as an easily tested example + "name": "test_pat_scopes Test", + "expires": Utc::now() + Duration::days(1), })) .to_request(); - let resp = test::call_service(&test_app, req).await; - assert_eq!(resp.status(), 204); - - // Notification get - println!("Testing getting notifications..."); - let read_notifications = Scopes::NOTIFICATION_READ; - let request_generator = || test::TestRequest::get().uri("/v2/user/4/notifications"); - let (_, notifications) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_notifications), - read_notifications, - FRIEND_USER_ID, - 401, - ) - .await; - let notification_id = notifications.as_array().unwrap()[0]["id"].as_str().unwrap(); - - let request_generator = || { - test::TestRequest::get().uri(&format!( - "/v2/notifications?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{notification_id}\"")) - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_notifications), - read_notifications, - FRIEND_USER_ID, - 401, - ) - .await; - - let request_generator = - || test::TestRequest::get().uri(&format!("/v2/notification/{notification_id}")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_notifications), - read_notifications, - FRIEND_USER_ID, - 401, - ) - .await; - - // Notification mark as read - println!("Testing marking notifications as read..."); - let write_notifications = Scopes::NOTIFICATION_WRITE; - let request_generator = || { - test::TestRequest::patch().uri(&format!( - "/v2/notifications?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{notification_id}\"")) - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_notifications), - write_notifications, - FRIEND_USER_ID, - 401, - ) - .await; - let request_generator = - || 
test::TestRequest::patch().uri(&format!("/v2/notification/{notification_id}")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_notifications), - write_notifications, - FRIEND_USER_ID, - 401, - ) - .await; - - // Notification delete - println!("Testing deleting notifications..."); - let request_generator = - || test::TestRequest::delete().uri(&format!("/v2/notification/{notification_id}")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_notifications), - write_notifications, - FRIEND_USER_ID, - 401, - ) - .await; + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 200); + let success: serde_json::Value = test::read_body_json(resp).await; + let id = success["id"].as_str().unwrap(); + + // Has access token and correct scopes + assert!(success["access_token"].as_str().is_some()); + assert_eq!( + success["scopes"].as_u64().unwrap(), + Scopes::COLLECTION_CREATE.bits() + ); + let access_token = success["access_token"].as_str().unwrap(); - // Mass notification delete - // We invite mod, get the notification ID, and do mass delete using that - let req = test::TestRequest::post() - .uri("/v2/team/1c/members") - .append_header(("Authorization", "mrp_patuser")) - .set_json(json!( { - "user_id": "2" // mod - })) + // Get PAT again + let req = test::TestRequest::get() + .append_header(("Authorization", USER_USER_PAT)) + .uri(&"/v2/pat".to_string()) .to_request(); - let resp = test::call_service(&test_app, req).await; - assert_eq!(resp.status(), 204); - let read_notifications = Scopes::NOTIFICATION_READ; - let request_generator = || test::TestRequest::get().uri("/v2/user/2/notifications"); - let (_, notifications) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_notifications), - read_notifications, - MOD_USER_ID, - 401, - ) - .await; - let notification_id = notifications.as_array().unwrap()[0]["id"].as_str().unwrap(); - - let request_generator = || { - test::TestRequest::delete().uri(&format!( - "/v2/notifications?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{notification_id}\"")) - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_notifications), - write_notifications, - MOD_USER_ID, - 401, - ) - .await; - - // Cleanup test db - db.cleanup().await; -} - -// Project version creation scopes -#[actix_rt::test] -pub async fn test_project_version_create_scopes() { - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; - - // Create project - println!("Testing creating project..."); - let create_project = Scopes::PROJECT_CREATE; - let json_data = json!( - { - "title": "Test_Add_Project project", - "slug": "demo", - "description": "Example description.", - "body": "Example body.", - "client_side": "required", - "server_side": "optional", - "initial_versions": [{ - "file_parts": ["basic-mod.jar"], - "version_number": "1.2.3", - "version_title": "start", - "dependencies": [], - "game_versions": ["1.20.1"] , - "release_channel": "release", - "loaders": ["fabric"], - "featured": true - }], - "categories": [], - "license_id": "MIT" + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 200); + let success: serde_json::Value = test::read_body_json(resp).await; + + // Ensure access token is NOT returned for any PATs + for pat in 
success.as_array().unwrap() { + assert!(pat["access_token"].as_str().is_none()); + } + + // Create mock test for using PAT + let mock_pat_test = |token: &str| { + let token = token.to_string(); + async { + let req = test::TestRequest::post() + .uri(&"/v2/collection".to_string()) + .append_header(("Authorization", token)) + .set_json(json!({ + "title": "Test Collection 1", + "description": "Test Collection Description" + })) + .to_request(); + let resp = test_env.call(req).await; + resp.status().as_u16() } - ); - let json_segment = common::actix::MultipartSegment { - name: "data".to_string(), - filename: None, - content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), - }; - let file_segment = common::actix::MultipartSegment { - name: "basic-mod.jar".to_string(), - filename: Some("basic-mod.jar".to_string()), - content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( - include_bytes!("../tests/files/basic-mod.jar").to_vec(), - ), - }; - - let request_generator = || { - test::TestRequest::post() - .uri(&format!("/v2/project")) - .set_multipart(vec![json_segment.clone(), file_segment.clone()]) - }; - let (_, project) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(create_project), - create_project, - USER_USER_ID, - 401, - ) - .await; - let project_id = project["id"].as_str().unwrap(); - - // Add version to project - println!("Testing adding version to project..."); - let create_version = Scopes::VERSION_CREATE; - let json_data = json!( - { - "project_id": project_id, - "file_parts": ["basic-mod-different.jar"], - "version_number": "1.2.3.4", - "version_title": "start", - "dependencies": [], - "game_versions": ["1.20.1"] , - "release_channel": "release", - "loaders": ["fabric"], - "featured": true - } - ); - let json_segment = common::actix::MultipartSegment { - name: "data".to_string(), - filename: None, - content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), }; - let file_segment = common::actix::MultipartSegment { - name: "basic-mod-different.jar".to_string(), - filename: Some("basic-mod.jar".to_string()), - content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( - include_bytes!("../tests/files/basic-mod-different.jar").to_vec(), - ), - }; - - let request_generator = || { - test::TestRequest::post() - .uri(&format!("/v2/version")) - .set_multipart(vec![json_segment.clone(), file_segment.clone()]) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(create_version), - create_version, - USER_USER_ID, - 401, - ) - .await; - - // Cleanup test db - db.cleanup().await; -} -// Project management scopes -#[actix_rt::test] -pub async fn test_project_version_reads_scopes() { - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; - - // Project reading - // Uses 404 as the expected failure code (or 200 and an empty list for mass reads) - let read_project = Scopes::PROJECT_READ; - let request_generator = || test::TestRequest::get().uri("/v2/project/G9"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 
404, - ) - .await; - - let request_generator = || test::TestRequest::get().uri("/v2/project/G9/dependencies"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 404, - ) - .await; - - let request_generator = || { - test::TestRequest::get().uri(&format!( - "/v2/projects?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{}\"", "G9")) - )) - }; - let (failure, success) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 200, - ) - .await; - assert!(failure.as_array().unwrap().is_empty()); - assert!(!success.as_array().unwrap().is_empty()); + assert_eq!(mock_pat_test(access_token).await, 200); - // Team project reading - let request_generator = || test::TestRequest::get().uri("/v2/project/G9/members"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 404, - ) - .await; - - // Get team members - // In this case, as these are public endpoints, logging in only is relevant to showing permissions - // So for our test project (with 1 user, 'user') we will check the permissions before and after having the scope. - let request_generator = || test::TestRequest::get().uri("/v2/team/1c/members"); - let (failure, success) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 200, - ) - .await; - assert!(!failure.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number()); - assert!(success.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number()); - - let request_generator = || { - test::TestRequest::get().uri(&format!( - "/v2/teams?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{}\"", "1c")) - )) - }; - let (failure, success) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 200, - ) - .await; - assert!(!failure.as_array().unwrap()[0].as_array().unwrap()[0] - .as_object() - .unwrap()["permissions"] - .is_number()); - assert!(success.as_array().unwrap()[0].as_array().unwrap()[0] - .as_object() - .unwrap()["permissions"] - .is_number()); - - // User project reading - // Test user has two projects, one public and one private - let request_generator = || test::TestRequest::get().uri("/v2/user/3/projects"); - let (failure, success) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 200, - ) - .await; - assert!(failure - .as_array() - .unwrap() - .iter() - .find(|x| x["status"] == "processing") - .is_none()); - assert!(success - .as_array() - .unwrap() - .iter() - .find(|x| x["status"] == "processing") - .is_some()); - - // Project metadata reading - let request_generator = - || test::TestRequest::get().uri("/maven/maven/modrinth/G9/maven-metadata.xml"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project), - read_project, - USER_USER_ID, - 404, - ) - .await; + // Change scopes and test again + let req = test::TestRequest::patch() + .uri(&format!("/v2/pat/{}", id)) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "scopes": 0, + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 204); + assert_eq!(mock_pat_test(access_token).await, 401); // No longer works - // Version reading - // First, set version to hidden (which is when the scope is 
required to read it) - let read_version = Scopes::VERSION_READ; + // Change scopes back, and set expiry to the past, and test again let req = test::TestRequest::patch() - .uri("/v2/version/Hl") - .append_header(("Authorization", "mrp_patuser")) + .uri(&format!("/v2/pat/{}", id)) + .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "status": "draft" + "scopes": Scopes::COLLECTION_CREATE, + "expires": Utc::now() + Duration::seconds(1), // expires in 1 second })) .to_request(); - let resp = test::call_service(&test_app, req).await; - assert_eq!(resp.status(), 204); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 204); - let request_generator = || test::TestRequest::get().uri("/v2/version_file/111111111"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_version), - read_version, - USER_USER_ID, - 404, - ) - .await; + // Wait 1 second before testing again for expiry + tokio::time::sleep(Duration::seconds(1).to_std().unwrap()).await; + assert_eq!(mock_pat_test(access_token).await, 401); // No longer works - let request_generator = || test::TestRequest::get().uri("/v2/version_file/111111111/download"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_version), - read_version, - USER_USER_ID, - 404, - ) - .await; + // Change everything back to normal and test again + let req = test::TestRequest::patch() + .uri(&format!("/v2/pat/{}", id)) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "expires": Utc::now() + Duration::days(1), // no longer expired! + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 204); + assert_eq!(mock_pat_test(access_token).await, 200); // Works again - // TODO: it's weird that this is /POST, no? - // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope - // let request_generator = || { - // test::TestRequest::post() - // .uri("/v2/version_file/111111111/update") - // .set_json(json!({})) - // }; - // test_scope(&test_app, &db, request_generator, all_scopes_except(read_version), read_version, USER_USER_ID, 404).await; + // Patching to a bad expiry should fail + let req = test::TestRequest::patch() + .uri(&format!("/v2/pat/{}", id)) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "expires": Utc::now() - Duration::days(1), // Past + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 400); + + // Similar to above with PAT creation, patching to a bad scope should fail + for i in 0..64 { + let scope = Scopes::from_bits_truncate(1 << i); + if !Scopes::ALL.contains(scope) { + continue; + } - // TODO: this shold get, no? 
with query - let request_generator = || { - test::TestRequest::post() - .uri("/v2/version_files") + let req = test::TestRequest::patch() + .uri(&format!("/v2/pat/{}", id)) + .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "hashes": ["111111111"] + "scopes": scope.bits(), })) - }; - let (failure, success) = test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_version), - read_version, - USER_USER_ID, - 200, - ) - .await; - assert!(!failure.as_object().unwrap().contains_key("111111111")); - assert!(success.as_object().unwrap().contains_key("111111111")); - - // Update version file - // TODO: weird that this is post - // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope - - // let request_generator = || { - // test::TestRequest::post() - // .uri(&format!("/v2/version_files/update_individual")) - // .set_json(json!({ - // "hashes": [{ - // "hash": "111111111", - // }] - // })) - // }; - // let (failure, success) = test_scope(&test_app, &db, request_generator, all_scopes_except(read_version), read_version, USER_USER_ID, 200).await; - // assert!(!failure.as_object().unwrap().contains_key("111111111")); - // assert!(success.as_object().unwrap().contains_key("111111111")); - - // Update version file - // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope - // let request_generator = || { - // test::TestRequest::post() - // .uri(&format!("/v2/version_files/update")) - // .set_json(json!({ - // "hashes": ["111111111"] - // })) - // }; - // let (failure, success) = test_scope(&test_app, &db, request_generator, all_scopes_except(read_version), read_version, USER_USER_ID, 200).await; - // assert!(!failure.as_object().unwrap().contains_key("111111111")); - // assert!(success.as_object().unwrap().contains_key("111111111")); - - // Both project and version reading - let read_project_and_version = Scopes::PROJECT_READ | Scopes::VERSION_READ; - let request_generator = || test::TestRequest::get().uri("/v2/project/G9/version"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(read_project_and_version), - read_project_and_version, - USER_USER_ID, - 404, - ) - .await; - - // TODO: fails for the same reason as above - // let request_generator = || { - // test::TestRequest::get() - // .uri("/v2/project/G9/version/Hl") - // }; - // test_scope(&test_app, &db, request_generator, all_scopes_except(read_project_and_version), read_project_and_version, USER_USER_ID, 404).await; + .to_request(); + let resp = test_env.call(req).await; + assert_eq!( + resp.status().as_u16(), + if scope.restricted() { 400 } else { 204 } + ); + } + + // Delete PAT + let req = test::TestRequest::delete() + .append_header(("Authorization", USER_USER_PAT)) + .uri(&format!("/v2/pat/{}", id)) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 204); // Cleanup test db - db.cleanup().await; + test_env.cleanup().await; } -// Project writing +// Test illegal PAT setting, both in POST and PATCH #[actix_rt::test] -pub async fn test_project_write_scopes() { - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; - - // Projects writing - let write_project 
= Scopes::PROJECT_WRITE; - let request_generator = || { - test::TestRequest::patch() - .uri("/v2/project/G9") - .set_json(json!( - { - "title": "test_project_version_write_scopes Title", - } - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - let request_generator = || { - test::TestRequest::patch() - .uri(&format!( - "/v2/projects?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{}\"", "G9")) - )) - .set_json(json!( - { - "description": "test_project_version_write_scopes Description", - } - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - let request_generator = || { - test::TestRequest::post() - .uri("/v2/project/G8/schedule") // G8 is an *approved* project, so we can schedule it - .set_json(json!( - { - "requested_status": "private", - "time": Utc::now() + Duration::days(1), - } - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; +pub async fn bad_pats() { + let test_env = TestEnvironment::new().await; - // Icons and gallery images - let request_generator = || { - test::TestRequest::patch() - .uri("/v2/project/G9/icon?ext=png") - .set_payload(Bytes::from( - include_bytes!("../tests/files/200x200.png") as &[u8] - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - let request_generator = || test::TestRequest::delete().uri("/v2/project/G9/icon"); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - let request_generator = || { - test::TestRequest::post() - .uri("/v2/project/G9/gallery?ext=png&featured=true") - .set_payload(Bytes::from( - include_bytes!("../tests/files/200x200.png") as &[u8] - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - // Get project, as we need the gallery image url - let request_generator = test::TestRequest::get() - .uri("/v2/project/G9") - .append_header(("Authorization", "mrp_patuser")) + // Creating a PAT with no name should fail + let req = test::TestRequest::post() + .uri(&"/v2/pat".to_string()) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "scopes": Scopes::COLLECTION_CREATE, // Collection create as an easily tested example + "expires": Utc::now() + Duration::days(1), + })) .to_request(); - let resp = test::call_service(&test_app, request_generator).await; - let project: serde_json::Value = test::read_body_json(resp).await; - let gallery_url = project["gallery"][0]["url"].as_str().unwrap(); - - let request_generator = - || test::TestRequest::patch().uri(&format!("/v2/project/G9/gallery?url={gallery_url}")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - let request_generator = - || test::TestRequest::delete().uri(&format!("/v2/project/G9/gallery?url={gallery_url}")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - // Team scopes - add user 'friend' - let request_generator = || { - test::TestRequest::post() - 
.uri(&format!("/v2/team/1c/members")) + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 400); + + // Name too short or too long should fail + for name in ["n", "this_name_is_too_long".repeat(16).as_str()] { + let req = test::TestRequest::post() + .uri(&"/v2/pat".to_string()) + .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "user_id": "4" + "name": name, + "scopes": Scopes::COLLECTION_CREATE, // Collection create as an easily tested example + "expires": Utc::now() + Duration::days(1), })) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - // Accept team invite as 'friend' - let request_generator = || test::TestRequest::post().uri(&format!("/v2/team/1c/join")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - FRIEND_USER_ID, - 401, - ) - .await; + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 400); + } - // Patch 'friend' user - let request_generator = || { - test::TestRequest::patch() - .uri(&format!("/v2/team/1c/members/4")) + // Creating a PAT with an expiry in the past should fail + let req = test::TestRequest::post() + .uri(&"/v2/pat".to_string()) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "scopes": Scopes::COLLECTION_CREATE, // Collection create as an easily tested example + "name": "test_pat_scopes Test", + "expires": Utc::now() - Duration::days(1), + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 400); + + // Make a PAT with each scope, with the result varying by whether that scope is restricted + for i in 0..64 { + let scope = Scopes::from_bits_truncate(1 << i); + if !Scopes::ALL.contains(scope) { + continue; + } + let req = test::TestRequest::post() + .uri(&"/v2/pat".to_string()) + .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "permissions": 1 + "scopes": scope.bits(), + "name": format!("test_pat_scopes Name {}", i), + "expires": Utc::now() + Duration::days(1), })) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - // Transfer ownership to 'friend' - let request_generator = || { - test::TestRequest::patch() - .uri(&format!("/v2/team/1c/owner")) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!( + resp.status().as_u16(), + if scope.restricted() { 400 } else { 200 } + ); + } + + // Create a 'good' PAT for patching + let req = test::TestRequest::post() + .uri(&"/v2/pat".to_string()) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "scopes": Scopes::COLLECTION_CREATE, + "name": "test_pat_scopes Test", + "expires": Utc::now() + Duration::days(1), + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 200); + let success: serde_json::Value = test::read_body_json(resp).await; + let id = success["id"].as_str().unwrap(); + + // Patching to a bad name should fail + for name in ["n", "this_name_is_too_long".repeat(16).as_str()] { + let req = test::TestRequest::post() + .uri(&"/v2/pat".to_string()) + .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "user_id": "4" + "name": name, })) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - USER_USER_ID, - 401, - ) - .await; - - 
// Now as 'friend', delete 'user' - let request_generator = || test::TestRequest::delete().uri(&format!("/v2/team/1c/members/3")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_project), - write_project, - FRIEND_USER_ID, - 401, - ) - .await; - - // Delete project - // TODO: this route is currently broken, - // because the Project::get_id contained within Project::remove doesnt include hidden versions, meaning that if there - // is a hidden version, it will fail to delete the project (with a 500 error, as the versions of a project are not all deleted) - // let delete_version = Scopes::PROJECT_DELETE; - // let request_generator = || { - // test::TestRequest::delete() - // .uri(&format!("/v2/project/G9")) - // }; - // test_scope(&test_app, &db, request_generator, all_scopes_except(delete_version), delete_version, USER_USER_ID, 401).await; - - // Cleanup test db - db.cleanup().await; -} - -// Version write -#[actix_rt::test] -pub async fn test_version_write_scopes() { - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 400); + } - let write_version = Scopes::VERSION_WRITE; - - // Schedule version - let request_generator = || { - test::TestRequest::post() - .uri("/v2/version/Hk/schedule") // Hk is an *approved* version, so we can schedule it - .set_json(json!( - { - "requested_status": "archived", - "time": Utc::now() + Duration::days(1), - } - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_version), - write_version, - USER_USER_ID, - 401, - ) - .await; - - // Patch version - let request_generator = || { - test::TestRequest::patch() - .uri("/v2/version/Hk") - .set_json(json!( - { - "version_title": "test_version_write_scopes Title", - } - )) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_version), - write_version, - USER_USER_ID, - 401, - ) - .await; - - // Generate test project data. - // Basic json - let json_segment = common::actix::MultipartSegment { - name: "data".to_string(), - filename: None, - content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text( - serde_json::to_string(&json!( - { - "file_types": { - "simple-zip.zip": "required-resource-pack" - }, - } - )) - .unwrap(), - ), - }; - - // Differently named file, with different content - let content_segment = common::actix::MultipartSegment { - name: "simple-zip.zip".to_string(), - filename: Some("simple-zip.zip".to_string()), - content_type: Some("application/zip".to_string()), - data: common::actix::MultipartSegmentData::Binary( - include_bytes!("../tests/files/simple-zip.zip").to_vec(), - ), - }; - - // Upload version file - let request_generator = || { - test::TestRequest::post() - .uri(&format!("/v2/version/Hk/file")) - .set_multipart(vec![json_segment.clone(), content_segment.clone()]) - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_version), - write_version, - USER_USER_ID, - 401, - ) - .await; - - // Delete version file - // TODO: should this be VERSION_DELETE? 
- let request_generator = || { - test::TestRequest::delete().uri(&format!("/v2/version_file/000000000")) // Delete from Hk, as we uploaded to Hk, and it needs another file - }; - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(write_version), - write_version, - USER_USER_ID, - 401, - ) - .await; - - // Delete version - let delete_version = Scopes::VERSION_DELETE; - let request_generator = || test::TestRequest::delete().uri(&format!("/v2/version/Hk")); - test_scope( - &test_app, - &db, - request_generator, - all_scopes_except(delete_version), - delete_version, - USER_USER_ID, - 401, - ) - .await; - - // Cleanup test db - db.cleanup().await; -} - -// Report scopes - -// Thread scopes - -// Session scopes - -// Analytics scopes - -// Collection scopes - -// User authentication - -// Pat scopes - -// Organization scopes - -// Some hash/version files functions - -// Meta pat stuff - -#[actix_rt::test] -pub async fn test_user_auth_scopes() { - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; - - // TODO: Test user auth scopes - - // Cleanup test db - db.cleanup().await; -} - -// A reusable test that works for any scope test that: -// - returns a known 'expected_failure_code' if the scope is not present (probably 401) -// - returns a 200-299 if the scope is present -// - returns the failure and success bodies for requests that are 209 -// Some tests (ie: USER_READ_EMAIL) will still need to have additional checks (ie: email is present/absent) because it doesn't affect the response code -// test_app is the generated test app from init_service -// Closure generates a TestRequest. 
The authorization header (if any) will be overwritten by the generated PAT -async fn test_scope( - test_app: &impl actix_web::dev::Service< - actix_http::Request, - Response = ServiceResponse, - Error = actix_web::Error, - >, - db: &TemporaryDatabase, - request_generator: T, - failure_scopes: Scopes, - success_scopes: Scopes, - user_id: i64, - expected_failure_code: u16, -) -> (serde_json::Value, serde_json::Value) -where - T: Fn() -> TestRequest, -{ - // First, create a PAT with all OTHER scopes - let access_token_all_others = create_test_pat(failure_scopes, user_id, &db).await; - - // Create a PAT with the given scopes - let access_token = create_test_pat(success_scopes, user_id, &db).await; - - // Perform test twice, once with each PAT - // the first time, we expect a 401 - // the second time, we expect a 200 or 204, and it will return a JSON body of the response - let req = request_generator() - .append_header(("Authorization", access_token_all_others.as_str())) - .to_request(); - let resp = test::call_service(&test_app, req).await; - - assert_eq!(expected_failure_code, resp.status().as_u16()); - let failure_body = if resp.status() == 200 - && resp.headers().contains_key("Content-Type") - && resp.headers().get("Content-Type").unwrap() == "application/json" - { - test::read_body_json(resp).await - } else { - serde_json::Value::Null - }; - - let req = request_generator() - .append_header(("Authorization", access_token.as_str())) + // Patching to a bad expiry should fail + let req = test::TestRequest::patch() + .uri(&format!("/v2/pat/{}", id)) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "expires": Utc::now() - Duration::days(1), // Past + })) .to_request(); - let resp = test::call_service(&test_app, req).await; - println!( - "{}: {}", - resp.status().as_u16(), - resp.status().canonical_reason().unwrap() - ); - assert!(resp.status().is_success() || resp.status().is_redirection()); - let success_body = if resp.status() == 200 - && resp.headers().contains_key("Content-Type") - && resp.headers().get("Content-Type").unwrap() == "application/json" - { - test::read_body_json(resp).await - } else { - serde_json::Value::Null - }; - (failure_body, success_body) -} + let resp = test_env.call(req).await; + assert_eq!(resp.status().as_u16(), 400); + + // Similar to above with PAT creation, patching to a bad scope should fail + for i in 0..64 { + let scope = Scopes::from_bits_truncate(1 << i); + if !Scopes::ALL.contains(scope) { + continue; + } -// Creates a PAT with the given scopes, and returns the access token -// this allows us to make PATs with scopes that are not allowed to be created by PATs -async fn create_test_pat(scopes: Scopes, user_id: i64, db: &TemporaryDatabase) -> String { - let mut transaction = db.pool.begin().await.unwrap(); - let id = generate_pat_id(&mut transaction).await.unwrap(); - let pat = database::models::pat_item::PersonalAccessToken { - id, - name: format!("test_pat_{}", scopes.bits()), - access_token: format!("mrp_{}", id.0), - scopes, - user_id: database::models::ids::UserId(user_id), - created: Utc::now(), - expires: Utc::now() + chrono::Duration::days(1), - last_used: None, - }; - pat.insert(&mut transaction).await.unwrap(); - transaction.commit().await.unwrap(); - pat.access_token -} + let req = test::TestRequest::patch() + .uri(&format!("/v2/pat/{}", id)) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!({ + "scopes": scope.bits(), + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!( + 
resp.status().as_u16(),
+            if scope.restricted() { 400 } else { 204 }
+        );
+    }
 
-// Inversion of scopes for testing
-// ie: To ensure that ONLY this scope is required, we need to create a PAT with all other scopes
-fn all_scopes_except(success_scopes: Scopes) -> Scopes {
-    Scopes::ALL ^ success_scopes
+    // Cleanup test db
+    test_env.cleanup().await;
 }
diff --git a/tests/project.rs b/tests/project.rs
index 55157ed0..4050e04e 100644
--- a/tests/project.rs
+++ b/tests/project.rs
@@ -1,9 +1,10 @@
-use actix_web::{test, App};
-use common::database::TemporaryDatabase;
+use actix_web::test;
 use labrinth::database::models::project_item::{PROJECTS_NAMESPACE, PROJECTS_SLUGS_NAMESPACE};
 use serde_json::json;
 
-use crate::common::{actix::AppendsMultipart, setup};
+use crate::common::database::*;
+
+use crate::common::{actix::AppendsMultipart, environment::TestEnvironment};
 
 // importing common module.
 mod common;
@@ -11,13 +12,10 @@ mod common;
 #[actix_rt::test]
 async fn test_get_project() {
     // Test setup and dummy data
-    let db = TemporaryDatabase::create_with_dummy().await;
-    let labrinth_config = setup(&db).await;
-    let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
-    let test_app = test::init_service(app).await;
+    let test_env = TestEnvironment::new().await;
 
     // Cache should default to unpopulated
-    assert!(db
+    assert!(test_env.db
        .redis_pool
        .get::<String, _>(PROJECTS_NAMESPACE, 1000)
        .await
        .unwrap()
        .is_none());
 
     // Perform request on dummy data
-    println!("Sending request");
     let req = test::TestRequest::get()
-        .uri("/v2/project/G8")
-        .append_header(("Authorization", "mrp_patuser"))
+        .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}"))
+        .append_header(("Authorization", USER_USER_PAT))
         .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
     let status = resp.status();
     let body: serde_json::Value = test::read_body_json(resp).await;
 
     assert_eq!(status, 200);
-    assert_eq!(body["id"], json!("G8"));
+    assert_eq!(body["id"], json!(PROJECT_ALPHA_PROJECT_ID));
     assert_eq!(body["slug"], json!("testslug"));
     let versions = body["versions"].as_array().unwrap();
-    assert!(versions.len() > 0);
-    assert_eq!(versions[0], json!("Hk"));
+    assert!(!versions.is_empty());
+    assert_eq!(versions[0], json!(PROJECT_ALPHA_VERSION_ID));
 
     // Confirm that the request was cached
-    println!("Confirming cache");
     assert_eq!(
-        db.redis_pool
+        test_env.db.redis_pool
            .get::<i64, _>(PROJECTS_SLUGS_NAMESPACE, "testslug")
            .await
            .unwrap(),
        Some(1000)
    );
 
-    let cached_project = db
+    let cached_project = test_env.db
        .redis_pool
        .get::<String, _>(PROJECTS_NAMESPACE, 1000)
        .await
@@ -62,48 +58,43 @@
 
    // Make the request again, this time it should be cached
    let req = test::TestRequest::get()
-        .uri("/v2/project/G8")
-        .append_header(("Authorization", "mrp_patuser"))
+        .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}"))
+        .append_header(("Authorization", USER_USER_PAT))
        .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    let status = resp.status();
    assert_eq!(status, 200);
 
    let body: serde_json::Value = test::read_body_json(resp).await;
-    assert_eq!(body["id"], json!("G8"));
+    assert_eq!(body["id"], json!(PROJECT_ALPHA_PROJECT_ID));
    assert_eq!(body["slug"], json!("testslug"));
 
    // Request should fail on non-existent project
-    println!("Requesting non-existent project");
    let req = test::TestRequest::get()
        .uri("/v2/project/nonexistent")
-
.append_header(("Authorization", "mrp_patuser")) + .append_header(("Authorization", USER_USER_PAT)) .to_request(); - let resp = test::call_service(&test_app, req).await; + let resp = test_env.call(req).await; assert_eq!(resp.status(), 404); // Similarly, request should fail on non-authorized user, on a yet-to-be-approved or hidden project, with a 404 (hiding the existence of the project) - println!("Requesting project as non-authorized user"); let req = test::TestRequest::get() - .uri("/v2/project/G9") - .append_header(("Authorization", "mrp_patenemy")) + .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + .append_header(("Authorization", ENEMY_USER_PAT)) .to_request(); - let resp = test::call_service(&test_app, req).await; + let resp = test_env.call(req).await; assert_eq!(resp.status(), 404); // Cleanup test db - db.cleanup().await; + test_env.cleanup().await; } #[actix_rt::test] async fn test_add_remove_project() { // Test setup and dummy data - let db = TemporaryDatabase::create_with_dummy().await; - let labrinth_config = setup(&db).await; - let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone())); - let test_app = test::init_service(app).await; + let test_env = TestEnvironment::new().await; // Generate test project data. let mut json_data = json!( @@ -185,10 +176,10 @@ async fn test_add_remove_project() { // Add a project- simple, should work. let req = test::TestRequest::post() .uri("/v2/project") - .append_header(("Authorization", "mrp_patuser")) + .append_header(("Authorization", USER_USER_PAT)) .set_multipart(vec![json_segment.clone(), file_segment.clone()]) .to_request(); - let resp = test::call_service(&test_app, req).await; + let resp = test_env.call(req).await; let status = resp.status(); assert_eq!(status, 200); @@ -196,10 +187,10 @@ async fn test_add_remove_project() { // Get the project we just made, and confirm that it's correct let req = test::TestRequest::get() .uri("/v2/project/demo") - .append_header(("Authorization", "mrp_patuser")) + .append_header(("Authorization", USER_USER_PAT)) .to_request(); - let resp = test::call_service(&test_app, req).await; + let resp = test_env.call(req).await; assert_eq!(resp.status(), 200); let body: serde_json::Value = test::read_body_json(resp).await; @@ -208,15 +199,15 @@ async fn test_add_remove_project() { let uploaded_version_id = &versions[0]; // Checks files to ensure they were uploaded and correctly identify the file - let hash = sha1::Sha1::from(include_bytes!("../tests/files/basic-mod.jar").to_vec()) + let hash = sha1::Sha1::from(include_bytes!("../tests/files/basic-mod.jar")) .digest() .to_string(); let req = test::TestRequest::get() .uri(&format!("/v2/version_file/{hash}?algorithm=sha1")) - .append_header(("Authorization", "mrp_patuser")) + .append_header(("Authorization", USER_USER_PAT)) .to_request(); - let resp = test::call_service(&test_app, req).await; + let resp = test_env.call(req).await; assert_eq!(resp.status(), 200); let body: serde_json::Value = test::read_body_json(resp).await; @@ -227,54 +218,48 @@ async fn test_add_remove_project() { // Even if that file is named differently let req = test::TestRequest::post() .uri("/v2/project") - .append_header(("Authorization", "mrp_patuser")) + .append_header(("Authorization", USER_USER_PAT)) .set_multipart(vec![ json_diff_slug_file_segment.clone(), // Different slug, different file name file_diff_name_segment.clone(), // Different file name, same content ]) .to_request(); - let resp = test::call_service(&test_app, req).await; - 
println!("Different slug, same file: {:?}", resp.response().body());
+    let resp = test_env.call(req).await;
     assert_eq!(resp.status(), 400);
 
     // Reusing with the same slug and a different file should fail
     let req = test::TestRequest::post()
         .uri("/v2/project")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
         .set_multipart(vec![
             json_diff_file_segment.clone(), // Same slug, different file name
             file_diff_name_content_segment.clone(), // Different file name, different content
         ])
         .to_request();
-    let resp = test::call_service(&test_app, req).await;
-    println!("Same slug, different file: {:?}", resp.response().body());
+    let resp = test_env.call(req).await;
     assert_eq!(resp.status(), 400);
 
     // Different slug, different file should succeed
     let req = test::TestRequest::post()
         .uri("/v2/project")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
         .set_multipart(vec![
             json_diff_slug_file_segment.clone(), // Different slug, different file name
             file_diff_name_content_segment.clone(), // Different file name, different content
         ])
         .to_request();
-    let resp = test::call_service(&test_app, req).await;
-    println!(
-        "Different slug, different file: {:?}",
-        resp.response().body()
-    );
+    let resp = test_env.call(req).await;
     assert_eq!(resp.status(), 200);
 
     // Get
     let req = test::TestRequest::get()
         .uri("/v2/project/demo")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
         .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
     assert_eq!(resp.status(), 200);
     let body: serde_json::Value = test::read_body_json(resp).await;
     let id = body["id"].to_string();
@@ -282,21 +267,21 @@ async fn test_add_remove_project() {
     // Remove the project
     let req = test::TestRequest::delete()
         .uri("/v2/project/demo")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
         .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
     assert_eq!(resp.status(), 204);
 
     // Confirm that the project is gone from the cache
     assert_eq!(
-        db.redis_pool
+        test_env.db.redis_pool
            .get::<i64, _>(PROJECTS_SLUGS_NAMESPACE, "demo")
            .await
            .unwrap(),
        None
    );
    assert_eq!(
-        db.redis_pool
+        test_env.db.redis_pool
            .get::<i64, _>(PROJECTS_SLUGS_NAMESPACE, id)
            .await
            .unwrap(),
@@ -306,44 +291,41 @@ async fn test_add_remove_project() {
    // Old slug no longer works
    let req = test::TestRequest::get()
        .uri("/v2/project/demo")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
        .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    assert_eq!(resp.status(), 404);
 
    // Cleanup test db
-    db.cleanup().await;
+    test_env.cleanup().await;
}
 
#[actix_rt::test]
pub async fn test_patch_project() {
-    let db = TemporaryDatabase::create_with_dummy().await;
-    let labrinth_config = setup(&db).await;
-    let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
-    let test_app = test::init_service(app).await;
+    let test_env = TestEnvironment::new().await;
 
    // First, we do some patch requests that should fail.
    // Failure because the user is not authorized.
let req = test::TestRequest::patch()
        .uri("/v2/project/testslug")
-        .append_header(("Authorization", "mrp_patenemy"))
+        .append_header(("Authorization", ENEMY_USER_PAT))
        .set_json(json!({
            "title": "Test_Add_Project project - test 1",
        }))
        .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    assert_eq!(resp.status(), 401);
 
    // Failure because we are setting URL fields to invalid urls.
    for url_type in ["issues_url", "source_url", "wiki_url", "discord_url"] {
        let req = test::TestRequest::patch()
            .uri("/v2/project/testslug")
-            .append_header(("Authorization", "mrp_patuser"))
+            .append_header(("Authorization", USER_USER_PAT))
            .set_json(json!({
                url_type: "w.fake.url",
            }))
            .to_request();
-        let resp = test::call_service(&test_app, req).await;
+        let resp = test_env.call(req).await;
        assert_eq!(resp.status(), 400);
    }
 
@@ -351,12 +333,12 @@
    for req in ["unknown", "processing", "withheld", "scheduled"] {
        let req = test::TestRequest::patch()
            .uri("/v2/project/testslug")
-            .append_header(("Authorization", "mrp_patuser"))
+            .append_header(("Authorization", USER_USER_PAT))
            .set_json(json!({
                "requested_status": req,
            }))
            .to_request();
-        let resp = test::call_service(&test_app, req).await;
+        let resp = test_env.call(req).await;
        assert_eq!(resp.status(), 400);
    }
 
@@ -364,52 +346,52 @@
    for key in ["moderation_message", "moderation_message_body"] {
        let req = test::TestRequest::patch()
            .uri("/v2/project/testslug")
-            .append_header(("Authorization", "mrp_patuser"))
+            .append_header(("Authorization", USER_USER_PAT))
            .set_json(json!({
                key: "test",
            }))
            .to_request();
-        let resp = test::call_service(&test_app, req).await;
+        let resp = test_env.call(req).await;
        assert_eq!(resp.status(), 401);
 
        // (should work for a mod, though)
        let req = test::TestRequest::patch()
            .uri("/v2/project/testslug")
-            .append_header(("Authorization", "mrp_patmoderator"))
+            .append_header(("Authorization", MOD_USER_PAT))
            .set_json(json!({
                key: "test",
            }))
            .to_request();
-        let resp = test::call_service(&test_app, req).await;
+        let resp = test_env.call(req).await;
        assert_eq!(resp.status(), 204);
    }
 
    // Failure because the slug is already taken.
    let req = test::TestRequest::patch()
        .uri("/v2/project/testslug")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
        .set_json(json!({
            "slug": "testslug2", // the other dummy project has this slug
        }))
        .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    assert_eq!(resp.status(), 400);
 
    // Not allowed to directly set status, as 'testslug2' (the other project) is "processing" and cannot have its status changed like this.
    let req = test::TestRequest::patch()
        .uri("/v2/project/testslug2")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
        .set_json(json!({
            "status": "private"
        }))
        .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    assert_eq!(resp.status(), 401);
 
    // Successful request to patch many fields.
let req = test::TestRequest::patch()
        .uri("/v2/project/testslug")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
        .set_json(json!({
            "slug": "newslug",
            "title": "New successful title",
@@ -429,23 +411,23 @@
        }]
    }))
    .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    assert_eq!(resp.status(), 204);
 
    // Old slug no longer works
    let req = test::TestRequest::get()
        .uri("/v2/project/testslug")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
        .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    assert_eq!(resp.status(), 404);
 
    // New slug works
    let req = test::TestRequest::get()
        .uri("/v2/project/newslug")
-        .append_header(("Authorization", "mrp_patuser"))
+        .append_header(("Authorization", USER_USER_PAT))
        .to_request();
-    let resp = test::call_service(&test_app, req).await;
+    let resp = test_env.call(req).await;
    assert_eq!(resp.status(), 200);
    let body: serde_json::Value = test::read_body_json(resp).await;
@@ -466,8 +448,8 @@
    );
 
    // Cleanup test db
-    db.cleanup().await;
+    test_env.cleanup().await;
}
 
-// TODO: you are missing a lot of routes on projects
-// TODO: using permissions/scopes, can we SEE projects existence that we are not allowed to? (ie 401 isntead of 404)
+// TODO: Missing routes on projects
+// TODO: using permissions/scopes, can we SEE the existence of projects that we are not allowed to see? (i.e. 401 instead of 404)
diff --git a/tests/scopes.rs b/tests/scopes.rs
new file mode 100644
index 00000000..8ab06e55
--- /dev/null
+++ b/tests/scopes.rs
@@ -0,0 +1,1001 @@
+
+use actix_web::test::{TestRequest, self};
+use bytes::Bytes;
+use chrono::{Duration, Utc};
+use common::{actix::AppendsMultipart, database::PROJECT_ALPHA_THREAD_ID};
+use labrinth::models::pats::Scopes;
+use serde_json::json;
+
+use crate::common::{
+    database::*,
+    environment::{TestEnvironment, ScopeTest},
+};
+
+// importing common module.
+mod common;
+
+// For each scope, we (using ScopeTest):
+// - create a PAT with a given set of scopes for a function
+// - create a PAT with all other scopes for a function
+// - test the function with the PAT with the given scopes
+// - test the function with the PAT with all other scopes
+
+// Test for users, emails, and payout scopes (not user auth scope or notifs)
+#[actix_rt::test]
+async fn user_scopes() {
+    // Test setup and dummy data
+    let test_env = TestEnvironment::new().await;
+
+    // User reading
+    let read_user = Scopes::USER_READ;
+    let req_gen = || TestRequest::get().uri("/v2/user");
+    let (_, success) = ScopeTest::new(&test_env).test(req_gen, read_user).await.unwrap();
+    assert!(success["email"].as_str().is_none()); // email should not be present
+    assert!(success["payout_data"].as_object().is_none()); // payout should not be present
+
+    // Email reading
+    let read_email = Scopes::USER_READ | Scopes::USER_READ_EMAIL;
+    let req_gen = || TestRequest::get().uri("/v2/user");
+    let (_, success) = ScopeTest::new(&test_env).test(req_gen, read_email).await.unwrap();
+    assert_eq!(success["email"], json!("user@modrinth.com")); // email should be present
+
+    // Payout reading
+    let read_payout = Scopes::USER_READ | Scopes::PAYOUTS_READ;
+    let req_gen = || TestRequest::get().uri("/v2/user");
+    let (_, success) = ScopeTest::new(&test_env).test(req_gen, read_payout).await.unwrap();
+    assert!(success["payout_data"].as_object().is_some()); // payout should be present
+
+    // User writing
+    // We use the Admin PAT for this test, on the 'user' user
+    let write_user = Scopes::USER_WRITE;
+    let req_gen = || {
+        TestRequest::patch()
+            .uri("/v2/user/user")
+            .set_json(json!( {
+                // Do not include 'username', as to not change the rest of the tests
+                "name": "NewName",
+                "bio": "New bio",
+                "location": "New location",
+                "role": "admin",
+                "badges": 5,
+                // Do not include payout info, different scope
+            }))
+    };
+    ScopeTest::new(&test_env).with_user_id(ADMIN_USER_ID_PARSED).test(req_gen, write_user).await.unwrap();
+
+    // User payout info writing
+    let failure_write_user_payout = Scopes::ALL ^ Scopes::PAYOUTS_WRITE; // Failure case should include USER_WRITE
+    let write_user_payout = Scopes::USER_WRITE | Scopes::PAYOUTS_WRITE;
+    let req_gen = || {
+        TestRequest::patch()
+            .uri("/v2/user/user")
+            .set_json(json!( {
+                "payout_data": {
+                    "payout_wallet": "paypal",
+                    "payout_wallet_type": "email",
+                    "payout_address": "test@modrinth.com"
+                }
+            }))
+    };
+    ScopeTest::new(&test_env).with_failure_scopes(failure_write_user_payout).test(req_gen, write_user_payout).await.unwrap();
+
+    // User deletion
+    // (The failure case runs first, and as this is the last test in this function, we can safely delete the user and use the same PAT for both tests)
+    let delete_user = Scopes::USER_DELETE;
+    let req_gen = || TestRequest::delete().uri("/v2/user/enemy");
+    ScopeTest::new(&test_env).with_user_id(ENEMY_USER_ID_PARSED).test(req_gen, delete_user).await.unwrap();
+
+    // Cleanup test db
+    test_env.cleanup().await;
+}
+
+// Notifications
+#[actix_rt::test]
+pub async fn notifications_scopes() {
+    let test_env = TestEnvironment::new().await;
+
+    // We will invite user 'friend' to the project team, and use that as a notification
+    // Get notifications
+    let req = TestRequest::post()
+        .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members"))
+        .append_header(("Authorization", USER_USER_PAT))
+        .set_json(json!( {
+            "user_id": FRIEND_USER_ID // friend
+        }))
+        .to_request();
+    let resp = test_env.call(req).await;
+    assert_eq!(resp.status(),
204); + + // Notification get + let read_notifications = Scopes::NOTIFICATION_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{FRIEND_USER_ID}/notifications")); + let (_, success) = ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + let notification_id = success.as_array().unwrap()[0]["id"].as_str().unwrap(); + + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/notifications?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{notification_id}\"")) + )) + }; + ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + + let req_gen = || test::TestRequest::get().uri(&format!("/v2/notification/{notification_id}")); + ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + + // Notification mark as read + let write_notifications = Scopes::NOTIFICATION_WRITE; + let req_gen = || { + test::TestRequest::patch().uri(&format!( + "/v2/notifications?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{notification_id}\"")) + )) + }; + ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); + + let req_gen = || test::TestRequest::patch().uri(&format!("/v2/notification/{notification_id}")); + ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); + + // Notification delete + let req_gen = + || test::TestRequest::delete().uri(&format!("/v2/notification/{notification_id}")); + ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); + + // Mass notification delete + // We invite mod, get the notification ID, and do mass delete using that + let req = test::TestRequest::post() + .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")) + .append_header(("Authorization", USER_USER_PAT)) + .set_json(json!( { + "user_id": MOD_USER_ID // mod + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status(), 204); + let read_notifications = Scopes::NOTIFICATION_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{MOD_USER_ID}/notifications")); + let (_, success) = ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + let notification_id = success.as_array().unwrap()[0]["id"].as_str().unwrap(); + + let req_gen = || { + test::TestRequest::delete().uri(&format!( + "/v2/notifications?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{notification_id}\"")) + )) + }; + ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Project version creation scopes +#[actix_rt::test] +pub async fn project_version_create_scopes() { + let test_env = TestEnvironment::new().await; + + // Create project + let create_project = Scopes::PROJECT_CREATE; + let json_data = json!( + { + "title": "Test_Add_Project project", + "slug": "demo", + "description": "Example description.", + "body": "Example body.", + "client_side": "required", + "server_side": "optional", + "initial_versions": [{ + "file_parts": ["basic-mod.jar"], + "version_number": "1.2.3", + "version_title": "start", + "dependencies": [], + "game_versions": ["1.20.1"] , + "release_channel": "release", + "loaders": ["fabric"], + "featured": true + }], + "categories": [], + 
"license_id": "MIT" + } + ); + let json_segment = common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + }; + let file_segment = common::actix::MultipartSegment { + name: "basic-mod.jar".to_string(), + filename: Some("basic-mod.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary( + include_bytes!("../tests/files/basic-mod.jar").to_vec(), + ), + }; + + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/project")) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + }; + let (_, success) = ScopeTest::new(&test_env).test(req_gen, create_project).await.unwrap(); + let project_id = success["id"].as_str().unwrap(); + + // Add version to project + let create_version = Scopes::VERSION_CREATE; + let json_data = json!( + { + "project_id": project_id, + "file_parts": ["basic-mod-different.jar"], + "version_number": "1.2.3.4", + "version_title": "start", + "dependencies": [], + "game_versions": ["1.20.1"] , + "release_channel": "release", + "loaders": ["fabric"], + "featured": true + } + ); + let json_segment = common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + }; + let file_segment = common::actix::MultipartSegment { + name: "basic-mod-different.jar".to_string(), + filename: Some("basic-mod.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: common::actix::MultipartSegmentData::Binary( + include_bytes!("../tests/files/basic-mod-different.jar").to_vec(), + ), + }; + + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/version")) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + }; + ScopeTest::new(&test_env).test(req_gen, create_version).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Project management scopes +#[actix_rt::test] +pub async fn project_version_reads_scopes() { + let test_env = TestEnvironment::new().await; + + // Project reading + // Uses 404 as the expected failure code (or 200 and an empty list for mass reads) + let read_project = Scopes::PROJECT_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")); + ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap(); + + let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/dependencies")); + ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap(); + + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/projects?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{PROJECT_BETA_PROJECT_ID}\"")) + )) + }; + let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap(); + assert!(failure.as_array().unwrap().is_empty()); + assert!(!success.as_array().unwrap().is_empty()); + + // Team project reading + let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/members")); + ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap(); + + // Get team members + // In this case, as these are 
public endpoints, logging in is only relevant to whether permissions are shown
+    // So for our test project (with 1 user, 'user') we will check the permissions before and after having the scope.
+    let req_gen = || test::TestRequest::get().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members"));
+    let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap();
+    assert!(!failure.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number());
+    assert!(success.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number());
+
+    let req_gen = || {
+        test::TestRequest::get().uri(&format!(
+            "/v2/teams?ids=[{uri}]",
+            uri = urlencoding::encode(&format!("\"{PROJECT_ALPHA_TEAM_ID}\""))
+        ))
+    };
+    let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap();
+    assert!(!failure.as_array().unwrap()[0].as_array().unwrap()[0]
+        .as_object()
+        .unwrap()["permissions"]
+        .is_number());
+    assert!(success.as_array().unwrap()[0].as_array().unwrap()[0]
+        .as_object()
+        .unwrap()["permissions"]
+        .is_number());
+
+    // User project reading
+    // Test user has two projects, one public and one private
+    let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{USER_USER_ID}/projects"));
+    let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap();
+    assert!(failure
+        .as_array()
+        .unwrap()
+        .iter()
+        .find(|x| x["status"] == "processing")
+        .is_none());
+    assert!(success
+        .as_array()
+        .unwrap()
+        .iter()
+        .find(|x| x["status"] == "processing")
+        .is_some());
+
+    // Project metadata reading
+    let req_gen = || test::TestRequest::get().uri(&format!("/maven/maven/modrinth/{PROJECT_BETA_PROJECT_ID}/maven-metadata.xml"));
+    ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap();
+
+    // Version reading
+    // First, set version to hidden (which is when the scope is required to read it)
+    let read_version = Scopes::VERSION_READ;
+    let req = test::TestRequest::patch()
+        .uri(&format!("/v2/version/{PROJECT_BETA_VERSION_ID}"))
+        .append_header(("Authorization", USER_USER_PAT))
+        .set_json(json!({
+            "status": "draft"
+        }))
+        .to_request();
+    let resp = test_env.call(req).await;
+    assert_eq!(resp.status(), 204);
+
+    let req_gen = || test::TestRequest::get().uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}"));
+    ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_version).await.unwrap();
+
+    let req_gen = || test::TestRequest::get().uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/download"));
+    ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_version).await.unwrap();
+
+    // TODO: Should this be /POST? Looks like /GET
+    // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope
+    // let req_gen = || {
+    //     test::TestRequest::post()
+    //         .uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/update"))
+    //         .set_json(json!({}))
+    // };
+    // ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_version).await.unwrap();
+
+    // TODO: Should this be /POST?
Looks like /GET + let req_gen = || { + test::TestRequest::post() + .uri("/v2/version_files") + .set_json(json!({ + "hashes": [PROJECT_BETA_THREAD_FILE_HASH] + })) + }; + let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); + assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + + // Update version file + // TODO: Should this be /POST? Looks like /GET + // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope + + // let req_gen = || { + // test::TestRequest::post() + // .uri(&format!("/v2/version_files/update_individual")) + // .set_json(json!({ + // "hashes": [{ + // "hash": PROJECT_BETA_THREAD_FILE_HASH, + // }] + // })) + // }; + // let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); + // assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + // assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + + // Update version file + // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope + // let req_gen = || { + // test::TestRequest::post() + // .uri(&format!("/v2/version_files/update")) + // .set_json(json!({ + // "hashes": [PROJECT_BETA_THREAD_FILE_HASH] + // })) + // }; + // let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); + // assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + // assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + + // Both project and version reading + let read_project_and_version = Scopes::PROJECT_READ | Scopes::VERSION_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version")); + ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project_and_version).await.unwrap(); + + // TODO: fails for the same reason as above + // let req_gen = || { + // test::TestRequest::get() + // .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version/{PROJECT_BETA_VERSION_ID}")) + // }; + // ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project_and_version).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Project writing +#[actix_rt::test] +pub async fn project_write_scopes() { + let test_env = TestEnvironment::new().await; + + // Projects writing + let write_project = Scopes::PROJECT_WRITE; + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + .set_json(json!( + { + "title": "test_project_version_write_scopes Title", + } + )) + }; + ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + + let req_gen = || { + test::TestRequest::patch() + .uri(&format!( + "/v2/projects?ids=[{uri}]", + uri = urlencoding::encode(&format!("\"{PROJECT_BETA_PROJECT_ID}\"")) + )) + .set_json(json!( + { + "description": "test_project_version_write_scopes Description", + } + )) + }; + ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}/schedule")) // 
PROJECT_ALPHA_PROJECT_ID is an *approved* project, so we can schedule it
+            .set_json(json!(
+                {
+                    "requested_status": "private",
+                    "time": Utc::now() + Duration::days(1),
+                }
+            ))
+    };
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    // Icons and gallery images
+    let req_gen = || {
+        test::TestRequest::patch()
+            .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/icon?ext=png"))
+            .set_payload(Bytes::from(
+                include_bytes!("../tests/files/200x200.png") as &[u8]
+            ))
+    };
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    let req_gen = || test::TestRequest::delete().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/icon"));
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    let req_gen = || {
+        test::TestRequest::post()
+            .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?ext=png&featured=true"))
+            .set_payload(Bytes::from(
+                include_bytes!("../tests/files/200x200.png") as &[u8]
+            ))
+    };
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    // Get project, as we need the gallery image url
+    let req_gen = test::TestRequest::get()
+        .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}"))
+        .append_header(("Authorization", USER_USER_PAT))
+        .to_request();
+    let resp = test_env.call(req_gen).await;
+    let project: serde_json::Value = test::read_body_json(resp).await;
+    let gallery_url = project["gallery"][0]["url"].as_str().unwrap();
+
+    let req_gen =
+        || test::TestRequest::patch().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}"));
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    let req_gen =
+        || test::TestRequest::delete().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}"));
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    // Team scopes - add user 'friend'
+    let req_gen = || {
+        test::TestRequest::post()
+            .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members"))
+            .set_json(json!({
+                "user_id": FRIEND_USER_ID
+            }))
+    };
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    // Accept team invite as 'friend'
+    let req_gen = || test::TestRequest::post().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/join"));
+    ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_project).await.unwrap();
+
+    // Patch 'friend' team member
+    let req_gen = || {
+        test::TestRequest::patch()
+            .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{FRIEND_USER_ID}"))
+            .set_json(json!({
+                "permissions": 1
+            }))
+    };
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    // Transfer ownership to 'friend'
+    let req_gen = || {
+        test::TestRequest::patch()
+            .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/owner"))
+            .set_json(json!({
+                "user_id": FRIEND_USER_ID
+            }))
+    };
+    ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap();
+
+    // Now as 'friend', delete 'user'
+    let req_gen = || test::TestRequest::delete().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{USER_USER_ID}"));
+    ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_project).await.unwrap();
+
+    // Delete project
+    // TODO: this route is currently broken,
+    // because the Project::get_id contained within Project::remove doesn't include hidden versions, meaning that if there
+    // is a hidden version, it will fail to delete the project (with a 500 error, as the versions of a project are not all deleted)
+    // 
let delete_version = Scopes::PROJECT_DELETE; + // let req_gen = || { + // test::TestRequest::delete() + // .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + // }; + // ScopeTest::new(&test_env).test(req_gen, delete_version).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Version write +#[actix_rt::test] +pub async fn version_write_scopes() { + let test_env = TestEnvironment::new().await; + + let write_version = Scopes::VERSION_WRITE; + + // Schedule version + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}/schedule")) // PROJECT_ALPHA_VERSION_ID is an *approved* version, so we can schedule it + .set_json(json!( + { + "requested_status": "archived", + "time": Utc::now() + Duration::days(1), + } + )) + }; + ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + + // Patch version + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")) + .set_json(json!( + { + "version_title": "test_version_write_scopes Title", + } + )) + }; + ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + + // Generate test project data. + // Basic json + let json_segment = common::actix::MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: common::actix::MultipartSegmentData::Text( + serde_json::to_string(&json!( + { + "file_types": { + "simple-zip.zip": "required-resource-pack" + }, + } + )) + .unwrap(), + ), + }; + + // Differently named file, with different content + let content_segment = common::actix::MultipartSegment { + name: "simple-zip.zip".to_string(), + filename: Some("simple-zip.zip".to_string()), + content_type: Some("application/zip".to_string()), + data: common::actix::MultipartSegmentData::Binary( + include_bytes!("../tests/files/simple-zip.zip").to_vec(), + ), + }; + + // Upload version file + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}/file")) + .set_multipart(vec![json_segment.clone(), content_segment.clone()]) + }; + ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + + // Delete version file + // TODO: Should this scope be VERSION_DELETE? 
+ let req_gen = || { + test::TestRequest::delete().uri(&format!("/v2/version_file/{PROJECT_ALPHA_THREAD_FILE_HASH}")) // Delete from PROJECT_ALPHA_VERSION_ID, as we uploaded to PROJECT_ALPHA_VERSION_ID and it needs another file + }; + ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + + // Delete version + let delete_version = Scopes::VERSION_DELETE; + let req_gen = || test::TestRequest::delete().uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")); + ScopeTest::new(&test_env).test(req_gen, delete_version).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Report scopes +#[actix_rt::test] +pub async fn report_scopes() { + let test_env = TestEnvironment::new().await; + + // Create report + let report_create = Scopes::REPORT_CREATE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/report")) + .set_json(json!({ + "report_type": "copyright", + "item_id": PROJECT_BETA_PROJECT_ID, + "item_type": "project", + "body": "This is a reupload of my mod, ", + })) + }; + ScopeTest::new(&test_env).test(req_gen, report_create).await.unwrap(); + + // Get reports + let report_read = Scopes::REPORT_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/report")); + let (_, success) = ScopeTest::new(&test_env).test(req_gen, report_read).await.unwrap(); + let report_id = success.as_array().unwrap()[0]["id"].as_str().unwrap(); + + let req_gen = || test::TestRequest::get().uri(&format!("/v2/report/{}", report_id)); + ScopeTest::new(&test_env).test(req_gen, report_read).await.unwrap(); + + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/reports?ids=[{}]", + urlencoding::encode(&format!("\"{}\"", report_id)) + )) + }; + ScopeTest::new(&test_env).test(req_gen, report_read).await.unwrap(); + + // Edit report + let report_edit = Scopes::REPORT_WRITE; + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/report/{}", report_id)) + .set_json(json!({ + "body": "This is a reupload of my mod, G8!", + })) + }; + ScopeTest::new(&test_env).test(req_gen, report_edit).await.unwrap(); + + // Delete report + // We use a moderator PAT here, as only moderators can delete reports + let report_delete = Scopes::REPORT_DELETE; + let req_gen = || test::TestRequest::delete().uri(&format!("/v2/report/{}", report_id)); + ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, report_delete).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Thread scopes +#[actix_rt::test] +pub async fn thread_scopes() { + let test_env = TestEnvironment::new().await; + + // Thread read + let thread_read = Scopes::THREAD_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/{PROJECT_ALPHA_THREAD_ID}")); + ScopeTest::new(&test_env).test(req_gen, thread_read).await.unwrap(); + + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/threads?ids=[{}]", + urlencoding::encode(&format!("\"{}\"", "U")) + )) + }; + ScopeTest::new(&test_env).test(req_gen, thread_read).await.unwrap(); + + // Check moderation inbox + // Uses moderator PAT, as only moderators can see the moderation inbox + let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/inbox")); + let (_, success) = ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_read).await.unwrap(); + let thread = success.as_array().unwrap()[0].as_object().unwrap(); + let thread_id = thread["id"].as_str().unwrap(); + + // Moderator 'read' thread + // Uses moderator PAT, as only moderators can 
see the moderation inbox + let req_gen = || test::TestRequest::post().uri(&format!("/v2/thread/{thread_id}/read")); + ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_read).await.unwrap(); + + // Thread write + let thread_write = Scopes::THREAD_WRITE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/thread/{thread_id}")) + .set_json(json!({ + "body": { + "type": "text", + "body": "test_thread_scopes Body" + } + })) + }; + ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_write).await.unwrap(); + + // Delete that message + // First, get message id + let req_gen = test::TestRequest::get() + .uri(&format!("/v2/thread/{thread_id}")) + .append_header(("Authorization", USER_USER_PAT)) + .to_request(); + let resp = test_env.call(req_gen).await; + let success: serde_json::Value = test::read_body_json(resp).await; + let thread_messages = success.as_object().unwrap()["messages"].as_array().unwrap(); + let thread_message_id = thread_messages[0].as_object().unwrap()["id"] + .as_str() + .unwrap(); + let req_gen = || test::TestRequest::delete().uri(&format!("/v2/message/{thread_message_id}")); + ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_write).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Pat scopes +#[actix_rt::test] +pub async fn pat_scopes() { + let test_env = TestEnvironment::new().await; + + // Pat create + let pat_create = Scopes::PAT_CREATE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/pat")) + .set_json(json!({ + "scopes": 1, + "name": "test_pat_scopes Name", + "expires": Utc::now() + Duration::days(1), + })) + }; + let (_, success) = ScopeTest::new(&test_env).test(req_gen, pat_create).await.unwrap(); + let pat_id = success["id"].as_str().unwrap(); + + // Pat write + let pat_write = Scopes::PAT_WRITE; + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/pat/{pat_id}")) + .set_json(json!({})) + }; + ScopeTest::new(&test_env).test(req_gen, pat_write).await.unwrap(); + + // Pat read + let pat_read = Scopes::PAT_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/pat")); + ScopeTest::new(&test_env).test(req_gen, pat_read).await.unwrap(); + + // Pat delete + let pat_delete = Scopes::PAT_DELETE; + let req_gen = || test::TestRequest::delete().uri(&format!("/v2/pat/{pat_id}")); + ScopeTest::new(&test_env).test(req_gen, pat_delete).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Collection scopes +#[actix_rt::test] +pub async fn collections_scopes() { + let test_env = TestEnvironment::new().await; + + // Create collection + let collection_create = Scopes::COLLECTION_CREATE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/collection")) + .set_json(json!({ + "title": "Test Collection", + "description": "Test Collection Description", + "projects": [PROJECT_ALPHA_PROJECT_ID] + })) + }; + let (_, success) = ScopeTest::new(&test_env).test(req_gen, collection_create).await.unwrap(); + let collection_id = success["id"].as_str().unwrap(); + + // Patch collection + // Collections always initialize to public, so we do patch before Get testing + let collection_write = Scopes::COLLECTION_WRITE; + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/collection/{collection_id}")) + .set_json(json!({ + "title": "Test Collection patch", + "status": "private", + })) + }; + ScopeTest::new(&test_env).test(req_gen, collection_write).await.unwrap(); + + // 
Read collection + let collection_read = Scopes::COLLECTION_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/collection/{}", collection_id)); + ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, collection_read).await.unwrap(); + + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/collections?ids=[{}]", + urlencoding::encode(&format!("\"{}\"", collection_id)) + )) + }; + let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, collection_read).await.unwrap(); + assert_eq!(failure.as_array().unwrap().len(), 0); + assert_eq!(success.as_array().unwrap().len(), 1); + + let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{USER_USER_ID}/collections")); + let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, collection_read).await.unwrap(); + assert_eq!(failure.as_array().unwrap().len(), 0); + assert_eq!(success.as_array().unwrap().len(), 1); + + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/collection/{collection_id}/icon?ext=png")) + .set_payload(Bytes::from( + include_bytes!("../tests/files/200x200.png") as &[u8] + )) + }; + ScopeTest::new(&test_env).test(req_gen, collection_write).await.unwrap(); + + let req_gen = + || test::TestRequest::delete().uri(&format!("/v2/collection/{collection_id}/icon")); + ScopeTest::new(&test_env).test(req_gen, collection_write).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// Organization scopes (and a couple PROJECT_WRITE scopes that are only allowed for orgs) +#[actix_rt::test] +pub async fn organization_scopes() { + let test_env = TestEnvironment::new().await; + + // Create organization + let organization_create = Scopes::ORGANIZATION_CREATE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/organization")) + .set_json(json!({ + "title": "TestOrg", + "description": "TestOrg Description", + })) + }; + let (_, success) = ScopeTest::new(&test_env).test(req_gen, organization_create).await.unwrap(); + let organization_id = success["id"].as_str().unwrap(); + + // Patch organization + let organization_edit = Scopes::ORGANIZATION_WRITE; + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/organization/{organization_id}")) + .set_json(json!({ + "description": "TestOrg Patch Description", + })) + }; + ScopeTest::new(&test_env).test(req_gen, organization_edit).await.unwrap(); + + let req_gen = || { + test::TestRequest::patch() + .uri(&format!("/v2/organization/{organization_id}/icon?ext=png")) + .set_payload(Bytes::from( + include_bytes!("../tests/files/200x200.png") as &[u8] + )) + }; + ScopeTest::new(&test_env).test(req_gen, organization_edit).await.unwrap(); + + let req_gen = + || test::TestRequest::delete().uri(&format!("/v2/organization/{organization_id}/icon")); + ScopeTest::new(&test_env).test(req_gen, organization_edit).await.unwrap(); + + // add project + let organization_project_edit = Scopes::PROJECT_WRITE | Scopes::ORGANIZATION_WRITE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/organization/{organization_id}/projects")) + .set_json(json!({ + "project_id": PROJECT_BETA_PROJECT_ID + })) + }; + ScopeTest::new(&test_env).with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE).test(req_gen, organization_project_edit).await.unwrap(); + + // Organization reads + let organization_read = Scopes::ORGANIZATION_READ; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/organization/{organization_id}")); + let (failure, 
success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, organization_read).await.unwrap(); + assert!( + failure.as_object().unwrap()["members"].as_array().unwrap()[0] + .as_object() + .unwrap()["permissions"] + .is_null() + ); + assert!( + !success.as_object().unwrap()["members"].as_array().unwrap()[0] + .as_object() + .unwrap()["permissions"] + .is_null() + ); + + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/organizations?ids=[{}]", + urlencoding::encode(&format!("\"{}\"", organization_id)) + )) + }; + + let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, organization_read).await.unwrap(); + assert!( + failure.as_array().unwrap()[0].as_object().unwrap()["members"] + .as_array() + .unwrap()[0] + .as_object() + .unwrap()["permissions"] + .is_null() + ); + assert!( + !success.as_array().unwrap()[0].as_object().unwrap()["members"] + .as_array() + .unwrap()[0] + .as_object() + .unwrap()["permissions"] + .is_null() + ); + + let organization_project_read = Scopes::PROJECT_READ | Scopes::ORGANIZATION_READ; + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/organization/{organization_id}/projects")); + let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_READ).test(req_gen, organization_project_read).await.unwrap(); + assert!(failure.as_array().unwrap().len() == 0); + assert!(success.as_array().unwrap().len() == 1); + + // remove project (now that we've checked) + let req_gen = || { + test::TestRequest::delete().uri(&format!("/v2/organization/{organization_id}/projects/{PROJECT_BETA_PROJECT_ID}")) + }; + ScopeTest::new(&test_env).with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE).test(req_gen, organization_project_edit).await.unwrap(); + + // Delete organization + let organization_delete = Scopes::ORGANIZATION_DELETE; + let req_gen = + || test::TestRequest::delete().uri(&format!("/v2/organization/{organization_id}")); + ScopeTest::new(&test_env).test(req_gen, organization_delete).await.unwrap(); + + // Cleanup test db + test_env.cleanup().await; +} + +// TODO: Analytics scopes + +// TODO: User authentication, and Session scopes + +// TODO: Some hash/version files functions + +// TODO: Meta pat stuff + +// TODO: Image scopes \ No newline at end of file From bbe6cc94574882608ec08999d402480033ae43ff Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Wed, 4 Oct 2023 17:56:53 -0700 Subject: [PATCH 12/16] fmt, clippy --- src/database/models/organization_item.rs | 51 +- src/database/models/project_item.rs | 2 +- src/database/models/team_item.rs | 5 +- tests/common/database.rs | 38 +- tests/common/environment.rs | 53 +- tests/pats.rs | 18 +- tests/project.rs | 20 +- tests/scopes.rs | 637 +++++++++++++++++------ 8 files changed, 582 insertions(+), 242 deletions(-) diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs index 64d0d2ba..9f435d8f 100644 --- a/src/database/models/organization_item.rs +++ b/src/database/models/organization_item.rs @@ -1,4 +1,7 @@ -use crate::{models::ids::base62_impl::{parse_base62, to_base62}, database::redis::RedisPool}; +use crate::{ + database::redis::RedisPool, + models::ids::base62_impl::{parse_base62, to_base62}, +}; use super::{ids::*, TeamMember}; use serde::{Deserialize, Serialize}; @@ -117,23 +120,23 @@ impl Organization { organization_ids.append( &mut redis - .multi_get::( - ORGANIZATIONS_TITLES_NAMESPACE, - organization_strings - .iter() - .map(|x| 
x.to_string().to_lowercase())
-                    .collect(),
-            )
-            .await?
-            .into_iter()
-            .flatten()
-            .collect()
+            .multi_get::<i64, _>(
+                ORGANIZATIONS_TITLES_NAMESPACE,
+                organization_strings
+                    .iter()
+                    .map(|x| x.to_string().to_lowercase())
+                    .collect(),
+            )
+            .await?
+            .into_iter()
+            .flatten()
+            .collect(),
        );
 
        if !organization_ids.is_empty() {
            let organizations = redis
-                .multi_get::<String, _>(ORGANIZATIONS_NAMESPACE, organization_ids)
-                .await?;
+                .multi_get::<String, _>(ORGANIZATIONS_NAMESPACE, organization_ids)
+                .await?;
 
            for organization in organizations {
                if let Some(organization) =
@@ -185,13 +188,13 @@
        for organization in organizations {
            redis
-                .set(
-                    ORGANIZATIONS_NAMESPACE,
-                    organization.id.0,
-                    serde_json::to_string(&organization)?,
-                    None,
-                )
-                .await?;
+                .set(
+                    ORGANIZATIONS_NAMESPACE,
+                    organization.id.0,
+                    serde_json::to_string(&organization)?,
+                    None,
+                )
+                .await?;
            redis
                .set(
                    ORGANIZATIONS_TITLES_NAMESPACE,
@@ -316,14 +319,10 @@
        title: Option<String>,
        redis: &RedisPool,
    ) -> Result<(), super::DatabaseError> {
-
        redis.delete(ORGANIZATIONS_NAMESPACE, id.0).await?;
 
        if let Some(title) = title {
            redis
-                .delete(
-                    ORGANIZATIONS_TITLES_NAMESPACE,
-                    title.to_lowercase(),
-                )
+                .delete(ORGANIZATIONS_TITLES_NAMESPACE, title.to_lowercase())
                .await?;
        }
 
diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs
index bebe06af..bd59f124 100644
--- a/src/database/models/project_item.rs
+++ b/src/database/models/project_item.rs
@@ -571,7 +571,7 @@ impl Project {
        .try_filter_map(|e| async {
            Ok(e.right().map(|m| {
                let id = m.id;
-
+
                QueryProject {
                    inner: Project {
                        id: ProjectId(id),
diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs
index 5bd3b266..31d60b20 100644
--- a/src/database/models/team_item.rs
+++ b/src/database/models/team_item.rs
@@ -1,5 +1,8 @@
 use super::{ids::*, Organization, Project};
-use crate::{database::redis::RedisPool, models::teams::{OrganizationPermissions, ProjectPermissions}};
+use crate::{
+    database::redis::RedisPool,
+    models::teams::{OrganizationPermissions, ProjectPermissions},
+};
 use itertools::Itertools;
 use rust_decimal::Decimal;
 use serde::{Deserialize, Serialize};
diff --git a/tests/common/database.rs b/tests/common/database.rs
index 2be236cd..5236b076 100644
--- a/tests/common/database.rs
+++ b/tests/common/database.rs
@@ -22,35 +22,35 @@ pub const FRIEND_USER_ID_PARSED: i64 = 4;
 pub const ENEMY_USER_ID_PARSED: i64 = 5;
 
 // These are full-scoped PATs, as if the user was logged in (including illegal scopes).
-pub const ADMIN_USER_PAT : &str = "mrp_patadmin";
-pub const MOD_USER_PAT : &str = "mrp_patmoderator";
-pub const USER_USER_PAT : &str = "mrp_patuser";
-pub const FRIEND_USER_PAT : &str = "mrp_patfriend";
-pub const ENEMY_USER_PAT : &str = "mrp_patenemy";
+pub const ADMIN_USER_PAT: &str = "mrp_patadmin";
+pub const MOD_USER_PAT: &str = "mrp_patmoderator";
+pub const USER_USER_PAT: &str = "mrp_patuser";
+pub const FRIEND_USER_PAT: &str = "mrp_patfriend";
+pub const ENEMY_USER_PAT: &str = "mrp_patenemy";
 
 // There are two test projects. They are both created by user 3 (USER_USER_ID).
 // They differ only in that 'ALPHA' is a public, approved project, and 'BETA' is a private project in the queue.
 // The same goes for their corresponding versions: one listed, one draft.
-pub const PROJECT_ALPHA_TEAM_ID : &str = "1c"; -pub const PROJECT_BETA_TEAM_ID : &str = "1d"; +pub const PROJECT_ALPHA_TEAM_ID: &str = "1c"; +pub const PROJECT_BETA_TEAM_ID: &str = "1d"; -pub const PROJECT_ALPHA_PROJECT_ID : &str = "G8"; -pub const PROJECT_BETA_PROJECT_ID : &str = "G9"; +pub const PROJECT_ALPHA_PROJECT_ID: &str = "G8"; +pub const PROJECT_BETA_PROJECT_ID: &str = "G9"; -pub const PROJECT_ALPHA_PROJECT_SLUG : &str = "testslug"; -pub const PROJECT_BETA_PROJECT_SLUG : &str = "testslug2"; +pub const PROJECT_ALPHA_PROJECT_SLUG: &str = "testslug"; +pub const PROJECT_BETA_PROJECT_SLUG: &str = "testslug2"; -pub const PROJECT_ALPHA_VERSION_ID : &str = "Hk"; -pub const PROJECT_BETA_VERSION_ID : &str = "Hl"; +pub const PROJECT_ALPHA_VERSION_ID: &str = "Hk"; +pub const PROJECT_BETA_VERSION_ID: &str = "Hl"; // These are threads created alongside the projects. -pub const PROJECT_ALPHA_THREAD_ID : &str = "U"; -pub const PROJECT_BETA_THREAD_ID : &str = "V"; +pub const PROJECT_ALPHA_THREAD_ID: &str = "U"; +pub const PROJECT_BETA_THREAD_ID: &str = "V"; // These are the hashes of the files attached to their versions: they do not reflect a 'real' hash of data. // This can be used for /version_file/ type endpoints which get a project's data from its hash. -pub const PROJECT_ALPHA_THREAD_FILE_HASH : &str = "000000000"; -pub const PROJECT_BETA_THREAD_FILE_HASH : &str = "111111111"; +pub const PROJECT_ALPHA_THREAD_FILE_HASH: &str = "000000000"; +pub const PROJECT_BETA_THREAD_FILE_HASH: &str = "111111111"; pub struct TemporaryDatabase { pub pool: PgPool, @@ -65,7 +65,7 @@ impl TemporaryDatabase { // 3. Runs migrations on the new database // 4. (Optionally, by using create_with_dummy) adds dummy data to the database // If a db is created with create_with_dummy, it must be cleaned up with cleanup. - // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. + // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. pub async fn create() -> Self { let temp_database_name = generate_random_database_name(); println!("Creating temporary database: {}", &temp_database_name); @@ -120,7 +120,7 @@ impl TemporaryDatabase { // Deletes the temporary database // If a temporary db is created, it must be cleaned up with cleanup. - // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. + // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. pub async fn cleanup(mut self) { let database_url = dotenvy::var("DATABASE_URL").expect("No database URL"); self.pool.close().await; diff --git a/tests/common/environment.rs b/tests/common/environment.rs index b01dbb64..fb26f373 100644 --- a/tests/common/environment.rs +++ b/tests/common/environment.rs @@ -1,20 +1,20 @@ #![allow(dead_code)] +use super::database::{TemporaryDatabase, USER_USER_ID_PARSED}; +use crate::common::setup; use actix_web::{ dev::ServiceResponse, test::{self, TestRequest}, App, }; use chrono::Utc; -use super::database::{TemporaryDatabase, USER_USER_ID_PARSED}; use labrinth::{ database::{self, models::generate_pat_id}, models::pats::Scopes, }; -use crate::common::setup; // A complete test environment, with a test actix app and a database. -// Must be called in an #[actix_rt::test] context. It also simulates a +// Must be called in an #[actix_rt::test] context. 
It also simulates a
// temporary sqlx db like #[sqlx::test] would.
// Use .call(req) on it directly to make a test call as if test::call_service(req) were being used.
 pub struct TestEnvironment {
@@ -28,7 +28,10 @@ impl TestEnvironment {
         let labrinth_config = setup(&db).await;
         let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
         let test_app = test::init_service(app).await;
-        Self { test_app: Box::new(test_app), db }
+        Self {
+            test_app: Box::new(test_app),
+            db,
+        }
     }
     pub async fn cleanup(self) {
         self.db.cleanup().await;
@@ -39,16 +42,29 @@
     }
 }
-
 trait LocalService {
-    fn call(&self, req: actix_http::Request) -> std::pin::Pin>>>;
+    fn call(
+        &self,
+        req: actix_http::Request,
+    ) -> std::pin::Pin<
+        Box>>,
+    >;
 }
 impl LocalService for S
 where
-    S: actix_web::dev::Service,
+    S: actix_web::dev::Service<
+        actix_http::Request,
+        Response = ServiceResponse,
+        Error = actix_web::Error,
+    >,
     S::Future: 'static,
 {
-    fn call(&self, req: actix_http::Request) -> std::pin::Pin>>> {
+    fn call(
+        &self,
+        req: actix_http::Request,
+    ) -> std::pin::Pin<
+        Box>>,
+    > {
         Box::pin(self.call(req))
     }
 }
@@ -58,8 +74,7 @@ where
 // - returns a 200-299 if the scope is present
 // - returns failure and success JSON bodies for requests that are 200 (for performing non-simple follow-up tests on)
 // This uses a builder format, so you can chain methods to set the parameters to non-defaults (most will probably not need to be set).
-pub struct ScopeTest<'a>
-{
+pub struct ScopeTest<'a> {
     test_env: &'a TestEnvironment,
     // Scopes expected to fail on this test. By default, this is all scopes except the success scopes.
     // (To ensure we have isolated the scope we are testing)
@@ -70,8 +85,7 @@ pub struct ScopeTest<'a>
     expected_failure_code: u16,
 }
 
-impl<'a> ScopeTest<'a>
-{
+impl<'a> ScopeTest<'a> {
     pub fn new(test_env: &'a TestEnvironment) -> Self {
         Self {
             test_env,
@@ -107,13 +121,18 @@ impl<'a> ScopeTest<'a>
     // success_scopes: the scopes that we are testing that should succeed
     // returns a tuple of (failure_body, success_body)
     // Should return a String error on an unexpected status code, allowing unwrapping in tests.
- pub async fn test(&self, req_gen: T, success_scopes: Scopes) -> Result<(serde_json::Value, serde_json::Value), String> - where T: Fn() -> TestRequest + pub async fn test( + &self, + req_gen: T, + success_scopes: Scopes, + ) -> Result<(serde_json::Value, serde_json::Value), String> + where + T: Fn() -> TestRequest, { - // First, create a PAT with failure scopes let failure_scopes = self.failure_scopes.unwrap_or(Scopes::ALL ^ success_scopes); - let access_token_all_others = create_test_pat(failure_scopes, self.user_id, &self.test_env.db).await; + let access_token_all_others = + create_test_pat(failure_scopes, self.user_id, &self.test_env.db).await; // Create a PAT with the success scopes let access_token = create_test_pat(success_scopes, self.user_id, &self.test_env.db).await; @@ -164,9 +183,7 @@ impl<'a> ScopeTest<'a> serde_json::Value::Null }; Ok((failure_body, success_body)) - } - } // Creates a PAT with the given scopes, and returns the access token diff --git a/tests/pats.rs b/tests/pats.rs index c5f37ab4..6cbcb762 100644 --- a/tests/pats.rs +++ b/tests/pats.rs @@ -22,7 +22,7 @@ pub async fn pat_full_test() { // Create a PAT for a full test let req = test::TestRequest::post() - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "scopes": Scopes::COLLECTION_CREATE, // Collection create as an easily tested example @@ -46,7 +46,7 @@ pub async fn pat_full_test() { // Get PAT again let req = test::TestRequest::get() .append_header(("Authorization", USER_USER_PAT)) - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .to_request(); let resp = test_env.call(req).await; assert_eq!(resp.status().as_u16(), 200); @@ -62,7 +62,7 @@ pub async fn pat_full_test() { let token = token.to_string(); async { let req = test::TestRequest::post() - .uri(&"/v2/collection".to_string()) + .uri("/v2/collection") .append_header(("Authorization", token)) .set_json(json!({ "title": "Test Collection 1", @@ -167,7 +167,7 @@ pub async fn bad_pats() { // Creating a PAT with no name should fail let req = test::TestRequest::post() - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "scopes": Scopes::COLLECTION_CREATE, // Collection create as an easily tested example @@ -180,7 +180,7 @@ pub async fn bad_pats() { // Name too short or too long should fail for name in ["n", "this_name_is_too_long".repeat(16).as_str()] { let req = test::TestRequest::post() - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "name": name, @@ -194,7 +194,7 @@ pub async fn bad_pats() { // Creating a PAT with an expiry in the past should fail let req = test::TestRequest::post() - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "scopes": Scopes::COLLECTION_CREATE, // Collection create as an easily tested example @@ -212,7 +212,7 @@ pub async fn bad_pats() { continue; } let req = test::TestRequest::post() - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "scopes": scope.bits(), @@ -229,7 +229,7 @@ pub async fn bad_pats() { // Create a 'good' PAT for patching let req = test::TestRequest::post() - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "scopes": Scopes::COLLECTION_CREATE, @@ -245,7 +245,7 @@ pub async fn bad_pats() { // Patching to a bad name should fail for name 
in ["n", "this_name_is_too_long".repeat(16).as_str()] { let req = test::TestRequest::post() - .uri(&"/v2/pat".to_string()) + .uri("/v2/pat") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "name": name, diff --git a/tests/project.rs b/tests/project.rs index 4050e04e..f7a03008 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -15,7 +15,8 @@ async fn test_get_project() { let test_env = TestEnvironment::new().await; // Cache should default to unpopulated - assert!(test_env.db + assert!(test_env + .db .redis_pool .get::(PROJECTS_NAMESPACE, 1000) .await @@ -40,14 +41,17 @@ async fn test_get_project() { // Confirm that the request was cached assert_eq!( - test_env.db.redis_pool + test_env + .db + .redis_pool .get::(PROJECTS_SLUGS_NAMESPACE, "testslug") .await .unwrap(), Some(1000) ); - let cached_project = test_env.db + let cached_project = test_env + .db .redis_pool .get::(PROJECTS_NAMESPACE, 1000) .await @@ -274,14 +278,18 @@ async fn test_add_remove_project() { // Confirm that the project is gone from the cache assert_eq!( - test_env.db.redis_pool + test_env + .db + .redis_pool .get::(PROJECTS_SLUGS_NAMESPACE, "demo") .await .unwrap(), None ); assert_eq!( - test_env.db.redis_pool + test_env + .db + .redis_pool .get::(PROJECTS_SLUGS_NAMESPACE, id) .await .unwrap(), @@ -385,7 +393,7 @@ pub async fn test_patch_project() { "status": "private" })) .to_request(); - let resp = test_env .call(req).await; + let resp = test_env.call(req).await; assert_eq!(resp.status(), 401); // Sucessful request to patch many fields. diff --git a/tests/scopes.rs b/tests/scopes.rs index 8ab06e55..9145461a 100644 --- a/tests/scopes.rs +++ b/tests/scopes.rs @@ -1,5 +1,4 @@ - -use actix_web::test::{TestRequest, self}; +use actix_web::test::{self, TestRequest}; use bytes::Bytes; use chrono::{Duration, Utc}; use common::{actix::AppendsMultipart, database::PROJECT_ALPHA_THREAD_ID}; @@ -8,7 +7,7 @@ use serde_json::json; use crate::common::{ database::*, - environment::{TestEnvironment, ScopeTest}, + environment::{ScopeTest, TestEnvironment}, }; // importing common module. 
@@ -25,65 +24,82 @@ mod common;
 async fn user_scopes() {
     // Test setup and dummy data
     let test_env = TestEnvironment::new().await;
-    
+
     // User reading
     let read_user = Scopes::USER_READ;
     let req_gen = || TestRequest::get().uri("/v2/user");
-    let (_, success) = ScopeTest::new(&test_env).test(req_gen, read_user).await.unwrap();
+    let (_, success) = ScopeTest::new(&test_env)
+        .test(req_gen, read_user)
+        .await
+        .unwrap();
     assert!(success["email"].as_str().is_none()); // email should not be present
     assert!(success["payout_data"].as_object().is_none()); // payout should not be present
 
     // Email reading
     let read_email = Scopes::USER_READ | Scopes::USER_READ_EMAIL;
     let req_gen = || TestRequest::get().uri("/v2/user");
-    let (_, success) = ScopeTest::new(&test_env).test(req_gen, read_email).await.unwrap();
+    let (_, success) = ScopeTest::new(&test_env)
+        .test(req_gen, read_email)
+        .await
+        .unwrap();
     assert_eq!(success["email"], json!("user@modrinth.com")); // email should be present
 
     // Payout reading
     let read_payout = Scopes::USER_READ | Scopes::PAYOUTS_READ;
     let req_gen = || TestRequest::get().uri("/v2/user");
-    let (_, success) = ScopeTest::new(&test_env).test(req_gen, read_payout).await.unwrap();
+    let (_, success) = ScopeTest::new(&test_env)
+        .test(req_gen, read_payout)
+        .await
+        .unwrap();
     assert!(success["payout_data"].as_object().is_some()); // payout should be present
 
     // User writing
     // We use the Admin PAT for this test, on the 'user' user
     let write_user = Scopes::USER_WRITE;
     let req_gen = || {
-        TestRequest::patch()
-            .uri("/v2/user/user")
-            .set_json(json!( {
-                // Do not include 'username', so as not to change the rest of the tests
-                "name": "NewName",
-                "bio": "New bio",
-                "location": "New location",
-                "role": "admin",
-                "badges": 5,
-                // Do not include payout info, different scope
-            }))
+        TestRequest::patch().uri("/v2/user/user").set_json(json!( {
+            // Do not include 'username', so as not to change the rest of the tests
+            "name": "NewName",
+            "bio": "New bio",
+            "location": "New location",
+            "role": "admin",
+            "badges": 5,
+            // Do not include payout info, different scope
+        }))
     };
-    ScopeTest::new(&test_env).with_user_id(ADMIN_USER_ID_PARSED).test(req_gen, write_user).await.unwrap();
-    
+    ScopeTest::new(&test_env)
+        .with_user_id(ADMIN_USER_ID_PARSED)
+        .test(req_gen, write_user)
+        .await
+        .unwrap();
+
     // User payout info writing
     let failure_write_user_payout = Scopes::ALL ^ Scopes::PAYOUTS_WRITE; // Failure case should include USER_WRITE
     let write_user_payout = Scopes::USER_WRITE | Scopes::PAYOUTS_WRITE;
     let req_gen = || {
-        TestRequest::patch()
-            .uri("/v2/user/user")
-            .set_json(json!( {
-                "payout_data": {
-                    "payout_wallet": "paypal",
-                    "payout_wallet_type": "email",
-                    "payout_address": "test@modrinth.com"
-                }
-            }))
+        TestRequest::patch().uri("/v2/user/user").set_json(json!( {
+            "payout_data": {
+                "payout_wallet": "paypal",
+                "payout_wallet_type": "email",
+                "payout_address": "test@modrinth.com"
+            }
+        }))
     };
-    ScopeTest::new(&test_env).with_failure_scopes(failure_write_user_payout).test(req_gen, write_user_payout).await.unwrap();
+    ScopeTest::new(&test_env)
+        .with_failure_scopes(failure_write_user_payout)
+        .test(req_gen, write_user_payout)
+        .await
+        .unwrap();
 
     // User deletion
     // (The failure is first, and since this is the last test for this test function, we can delete it and use the same PAT for both tests)
     let delete_user = Scopes::USER_DELETE;
     let req_gen = || TestRequest::delete().uri("/v2/user/enemy");
-    ScopeTest::new(&test_env).with_user_id(ENEMY_USER_ID_PARSED).test(req_gen, 
delete_user).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(ENEMY_USER_ID_PARSED) + .test(req_gen, delete_user) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -108,8 +124,13 @@ pub async fn notifications_scopes() { // Notification get let read_notifications = Scopes::NOTIFICATION_READ; - let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{FRIEND_USER_ID}/notifications")); - let (_, success) = ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/user/{FRIEND_USER_ID}/notifications")); + let (_, success) = ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, read_notifications) + .await + .unwrap(); let notification_id = success.as_array().unwrap()[0]["id"].as_str().unwrap(); let req_gen = || { @@ -118,10 +139,18 @@ pub async fn notifications_scopes() { uri = urlencoding::encode(&format!("\"{notification_id}\"")) )) }; - ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, read_notifications) + .await + .unwrap(); let req_gen = || test::TestRequest::get().uri(&format!("/v2/notification/{notification_id}")); - ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, read_notifications) + .await + .unwrap(); // Notification mark as read let write_notifications = Scopes::NOTIFICATION_WRITE; @@ -131,15 +160,27 @@ pub async fn notifications_scopes() { uri = urlencoding::encode(&format!("\"{notification_id}\"")) )) }; - ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); - + ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, write_notifications) + .await + .unwrap(); + let req_gen = || test::TestRequest::patch().uri(&format!("/v2/notification/{notification_id}")); - ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, write_notifications) + .await + .unwrap(); // Notification delete let req_gen = || test::TestRequest::delete().uri(&format!("/v2/notification/{notification_id}")); - ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, write_notifications) + .await + .unwrap(); // Mass notification delete // We invite mod, get the notification ID, and do mass delete using that @@ -154,7 +195,11 @@ pub async fn notifications_scopes() { assert_eq!(resp.status(), 204); let read_notifications = Scopes::NOTIFICATION_READ; let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{MOD_USER_ID}/notifications")); - let (_, success) = ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, read_notifications).await.unwrap(); + let (_, success) = ScopeTest::new(&test_env) + .with_user_id(MOD_USER_ID_PARSED) + .test(req_gen, read_notifications) + .await + .unwrap(); let notification_id = success.as_array().unwrap()[0]["id"].as_str().unwrap(); let req_gen = || { @@ -163,7 +208,11 @@ pub async fn notifications_scopes() { uri = 
urlencoding::encode(&format!("\"{notification_id}\"")) )) }; - ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, write_notifications).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(MOD_USER_ID_PARSED) + .test(req_gen, write_notifications) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -173,7 +222,7 @@ pub async fn notifications_scopes() { #[actix_rt::test] pub async fn project_version_create_scopes() { let test_env = TestEnvironment::new().await; - + // Create project let create_project = Scopes::PROJECT_CREATE; let json_data = json!( @@ -215,10 +264,13 @@ pub async fn project_version_create_scopes() { let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/project")) + .uri("/v2/project") .set_multipart(vec![json_segment.clone(), file_segment.clone()]) }; - let (_, success) = ScopeTest::new(&test_env).test(req_gen, create_project).await.unwrap(); + let (_, success) = ScopeTest::new(&test_env) + .test(req_gen, create_project) + .await + .unwrap(); let project_id = success["id"].as_str().unwrap(); // Add version to project @@ -253,10 +305,13 @@ pub async fn project_version_create_scopes() { let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/version")) + .uri("/v2/version") .set_multipart(vec![json_segment.clone(), file_segment.clone()]) }; - ScopeTest::new(&test_env).test(req_gen, create_version).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, create_version) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -270,11 +325,24 @@ pub async fn project_version_reads_scopes() { // Project reading // Uses 404 as the expected failure code (or 200 and an empty list for mass reads) let read_project = Scopes::PROJECT_READ; - let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")); - ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap(); + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")); + ScopeTest::new(&test_env) + .with_failure_code(404) + .test(req_gen, read_project) + .await + .unwrap(); - let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/dependencies")); - ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap(); + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/project/{PROJECT_BETA_PROJECT_ID}/dependencies" + )) + }; + ScopeTest::new(&test_env) + .with_failure_code(404) + .test(req_gen, read_project) + .await + .unwrap(); let req_gen = || { test::TestRequest::get().uri(&format!( @@ -282,19 +350,33 @@ pub async fn project_version_reads_scopes() { uri = urlencoding::encode(&format!("\"{PROJECT_BETA_PROJECT_ID}\"")) )) }; - let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap(); + let (failure, success) = ScopeTest::new(&test_env) + .with_failure_code(200) + .test(req_gen, read_project) + .await + .unwrap(); assert!(failure.as_array().unwrap().is_empty()); assert!(!success.as_array().unwrap().is_empty()); // Team project reading - let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/members")); - ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap(); + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/members")); + ScopeTest::new(&test_env) + .with_failure_code(404) + 
.test(req_gen, read_project)
+        .await
+        .unwrap();
 
     // Get team members
     // In this case, as these are public endpoints, logging in is only relevant to showing permissions
     // So for our test project (with 1 user, 'user') we will check the permissions before and after having the scope.
-    let req_gen = || test::TestRequest::get().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members"));
-    let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap();
+    let req_gen =
+        || test::TestRequest::get().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members"));
+    let (failure, success) = ScopeTest::new(&test_env)
+        .with_failure_code(200)
+        .test(req_gen, read_project)
+        .await
+        .unwrap();
     assert!(!failure.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number());
     assert!(success.as_array().unwrap()[0].as_object().unwrap()["permissions"].is_number());
 
@@ -304,7 +386,11 @@
             uri = urlencoding::encode(&format!("\"{PROJECT_ALPHA_TEAM_ID}\""))
         ))
     };
-    let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap();
+    let (failure, success) = ScopeTest::new(&test_env)
+        .with_failure_code(200)
+        .test(req_gen, read_project)
+        .await
+        .unwrap();
     assert!(!failure.as_array().unwrap()[0].as_array().unwrap()[0]
         .as_object()
         .unwrap()["permissions"]
@@ -317,23 +403,33 @@
     // User project reading
     // Test user has two projects, one public and one private
     let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{USER_USER_ID}/projects"));
-    let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_project).await.unwrap();
-    assert!(failure
+    let (failure, success) = ScopeTest::new(&test_env)
+        .with_failure_code(200)
+        .test(req_gen, read_project)
+        .await
+        .unwrap();
+    assert!(!failure
         .as_array()
         .unwrap()
         .iter()
-        .find(|x| x["status"] == "processing")
-        .is_none());
+        .any(|x| x["status"] == "processing"));
     assert!(success
         .as_array()
         .unwrap()
         .iter()
-        .find(|x| x["status"] == "processing")
-        .is_some());
+        .any(|x| x["status"] == "processing"));
 
     // Project metadata reading
-    let req_gen = || test::TestRequest::get().uri(&format!("/maven/maven/modrinth/{PROJECT_BETA_PROJECT_ID}/maven-metadata.xml"));
-    ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project).await.unwrap();
+    let req_gen = || {
+        test::TestRequest::get().uri(&format!(
+            "/maven/maven/modrinth/{PROJECT_BETA_PROJECT_ID}/maven-metadata.xml"
+        ))
+    };
+    ScopeTest::new(&test_env)
+        .with_failure_code(404)
+        .test(req_gen, read_project)
+        .await
+        .unwrap();
 
     // Version reading
     // First, set version to hidden (which is when the scope is required to read it)
@@ -348,11 +444,25 @@
     let resp = test_env.call(req).await;
     assert_eq!(resp.status(), 204);
 
-    let req_gen = || test::TestRequest::get().uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}"));
-    ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_version).await.unwrap();
+    let req_gen = || {
+        test::TestRequest::get().uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}"))
+    };
+    ScopeTest::new(&test_env)
+        .with_failure_code(404)
+        .test(req_gen, read_version)
+        .await
+        .unwrap();
 
-    let req_gen = || test::TestRequest::get().uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/download"));
-    
ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_version).await.unwrap(); + let req_gen = || { + test::TestRequest::get().uri(&format!( + "/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/download" + )) + }; + ScopeTest::new(&test_env) + .with_failure_code(404) + .test(req_gen, read_version) + .await + .unwrap(); // TODO: Should this be /POST? Looks like /GET // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope @@ -371,9 +481,19 @@ pub async fn project_version_reads_scopes() { "hashes": [PROJECT_BETA_THREAD_FILE_HASH] })) }; - let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); - assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); - assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + let (failure, success) = ScopeTest::new(&test_env) + .with_failure_code(200) + .test(req_gen, read_version) + .await + .unwrap(); + assert!(!failure + .as_object() + .unwrap() + .contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + assert!(success + .as_object() + .unwrap() + .contains_key(PROJECT_BETA_THREAD_FILE_HASH)); // Update version file // TODO: Should this be /POST? Looks like /GET @@ -407,8 +527,13 @@ pub async fn project_version_reads_scopes() { // Both project and version reading let read_project_and_version = Scopes::PROJECT_READ | Scopes::VERSION_READ; - let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version")); - ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project_and_version).await.unwrap(); + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version")); + ScopeTest::new(&test_env) + .with_failure_code(404) + .test(req_gen, read_project_and_version) + .await + .unwrap(); // TODO: fails for the same reason as above // let req_gen = || { @@ -437,7 +562,10 @@ pub async fn project_write_scopes() { } )) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); let req_gen = || { test::TestRequest::patch() @@ -451,7 +579,10 @@ pub async fn project_write_scopes() { } )) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); let req_gen = || { test::TestRequest::post() @@ -463,29 +594,46 @@ pub async fn project_write_scopes() { } )) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); // Icons and gallery images let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/icon?ext=png")) + .uri(&format!( + "/v2/project/{PROJECT_BETA_PROJECT_ID}/icon?ext=png" + )) .set_payload(Bytes::from( include_bytes!("../tests/files/200x200.png") as &[u8] )) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); - let req_gen = || test::TestRequest::delete().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/icon")); - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + let req_gen = + || test::TestRequest::delete().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/icon")); + 
ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?ext=png&featured=true")) + .uri(&format!( + "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?ext=png&featured=true" + )) .set_payload(Bytes::from( include_bytes!("../tests/files/200x200.png") as &[u8] )) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); // Get project, as we need the gallery image url let req_gen = test::TestRequest::get() @@ -496,13 +644,25 @@ pub async fn project_write_scopes() { let project: serde_json::Value = test::read_body_json(resp).await; let gallery_url = project["gallery"][0]["url"].as_str().unwrap(); - let req_gen = - || test::TestRequest::patch().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}")); - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + let req_gen = || { + test::TestRequest::patch().uri(&format!( + "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}" + )) + }; + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); - let req_gen = - || test::TestRequest::delete().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}")); - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + let req_gen = || { + test::TestRequest::delete().uri(&format!( + "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}" + )) + }; + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); // Team scopes - add user 'friend' let req_gen = || { @@ -512,21 +672,34 @@ pub async fn project_write_scopes() { "user_id": FRIEND_USER_ID })) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); // Accept team invite as 'friend' - let req_gen = || test::TestRequest::post().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/join")); - ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_project).await.unwrap(); + let req_gen = + || test::TestRequest::post().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/join")); + ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, write_project) + .await + .unwrap(); // Patch 'friend' user let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{FRIEND_USER_ID}")) + .uri(&format!( + "/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{FRIEND_USER_ID}" + )) .set_json(json!({ "permissions": 1 })) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); // Transfer ownership to 'friend' let req_gen = || { @@ -536,11 +709,22 @@ pub async fn project_write_scopes() { "user_id": FRIEND_USER_ID })) }; - ScopeTest::new(&test_env).test(req_gen, write_project).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_project) + .await + .unwrap(); // Now as 'friend', delete 'user' - let req_gen = || test::TestRequest::delete().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{USER_USER_ID}")); - ScopeTest::new(&test_env).with_user_id(FRIEND_USER_ID_PARSED).test(req_gen, write_project).await.unwrap(); + let req_gen = || { + test::TestRequest::delete().uri(&format!( + 
"/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{USER_USER_ID}" + )) + }; + ScopeTest::new(&test_env) + .with_user_id(FRIEND_USER_ID_PARSED) + .test(req_gen, write_project) + .await + .unwrap(); // Delete project // TODO: this route is currently broken, @@ -575,7 +759,10 @@ pub async fn version_write_scopes() { } )) }; - ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_version) + .await + .unwrap(); // Patch version let req_gen = || { @@ -587,7 +774,10 @@ pub async fn version_write_scopes() { } )) }; - ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_version) + .await + .unwrap(); // Generate test project data. // Basic json @@ -623,19 +813,31 @@ pub async fn version_write_scopes() { .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}/file")) .set_multipart(vec![json_segment.clone(), content_segment.clone()]) }; - ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_version) + .await + .unwrap(); // Delete version file // TODO: Should this scope be VERSION_DELETE? let req_gen = || { - test::TestRequest::delete().uri(&format!("/v2/version_file/{PROJECT_ALPHA_THREAD_FILE_HASH}")) // Delete from PROJECT_ALPHA_VERSION_ID, as we uploaded to PROJECT_ALPHA_VERSION_ID and it needs another file + test::TestRequest::delete().uri(&format!( + "/v2/version_file/{PROJECT_ALPHA_THREAD_FILE_HASH}" + )) // Delete from PROJECT_ALPHA_VERSION_ID, as we uploaded to PROJECT_ALPHA_VERSION_ID and it needs another file }; - ScopeTest::new(&test_env).test(req_gen, write_version).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, write_version) + .await + .unwrap(); // Delete version let delete_version = Scopes::VERSION_DELETE; - let req_gen = || test::TestRequest::delete().uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")); - ScopeTest::new(&test_env).test(req_gen, delete_version).await.unwrap(); + let req_gen = + || test::TestRequest::delete().uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")); + ScopeTest::new(&test_env) + .test(req_gen, delete_version) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -649,25 +851,32 @@ pub async fn report_scopes() { // Create report let report_create = Scopes::REPORT_CREATE; let req_gen = || { - test::TestRequest::post() - .uri(&format!("/v2/report")) - .set_json(json!({ - "report_type": "copyright", - "item_id": PROJECT_BETA_PROJECT_ID, - "item_type": "project", - "body": "This is a reupload of my mod, ", - })) + test::TestRequest::post().uri("/v2/report").set_json(json!({ + "report_type": "copyright", + "item_id": PROJECT_BETA_PROJECT_ID, + "item_type": "project", + "body": "This is a reupload of my mod, ", + })) }; - ScopeTest::new(&test_env).test(req_gen, report_create).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, report_create) + .await + .unwrap(); // Get reports let report_read = Scopes::REPORT_READ; - let req_gen = || test::TestRequest::get().uri(&format!("/v2/report")); - let (_, success) = ScopeTest::new(&test_env).test(req_gen, report_read).await.unwrap(); + let req_gen = || test::TestRequest::get().uri("/v2/report"); + let (_, success) = ScopeTest::new(&test_env) + .test(req_gen, report_read) + .await + .unwrap(); let report_id = success.as_array().unwrap()[0]["id"].as_str().unwrap(); let req_gen = || test::TestRequest::get().uri(&format!("/v2/report/{}", report_id)); - 
ScopeTest::new(&test_env).test(req_gen, report_read).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, report_read) + .await + .unwrap(); let req_gen = || { test::TestRequest::get().uri(&format!( @@ -675,7 +884,10 @@ pub async fn report_scopes() { urlencoding::encode(&format!("\"{}\"", report_id)) )) }; - ScopeTest::new(&test_env).test(req_gen, report_read).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, report_read) + .await + .unwrap(); // Edit report let report_edit = Scopes::REPORT_WRITE; @@ -686,13 +898,20 @@ pub async fn report_scopes() { "body": "This is a reupload of my mod, G8!", })) }; - ScopeTest::new(&test_env).test(req_gen, report_edit).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, report_edit) + .await + .unwrap(); // Delete report // We use a moderator PAT here, as only moderators can delete reports let report_delete = Scopes::REPORT_DELETE; let req_gen = || test::TestRequest::delete().uri(&format!("/v2/report/{}", report_id)); - ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, report_delete).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(MOD_USER_ID_PARSED) + .test(req_gen, report_delete) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -706,7 +925,10 @@ pub async fn thread_scopes() { // Thread read let thread_read = Scopes::THREAD_READ; let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/{PROJECT_ALPHA_THREAD_ID}")); - ScopeTest::new(&test_env).test(req_gen, thread_read).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, thread_read) + .await + .unwrap(); let req_gen = || { test::TestRequest::get().uri(&format!( @@ -714,19 +936,30 @@ pub async fn thread_scopes() { urlencoding::encode(&format!("\"{}\"", "U")) )) }; - ScopeTest::new(&test_env).test(req_gen, thread_read).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, thread_read) + .await + .unwrap(); // Check moderation inbox // Uses moderator PAT, as only moderators can see the moderation inbox - let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/inbox")); - let (_, success) = ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_read).await.unwrap(); + let req_gen = || test::TestRequest::get().uri("/v2/thread/inbox"); + let (_, success) = ScopeTest::new(&test_env) + .with_user_id(MOD_USER_ID_PARSED) + .test(req_gen, thread_read) + .await + .unwrap(); let thread = success.as_array().unwrap()[0].as_object().unwrap(); let thread_id = thread["id"].as_str().unwrap(); // Moderator 'read' thread // Uses moderator PAT, as only moderators can see the moderation inbox let req_gen = || test::TestRequest::post().uri(&format!("/v2/thread/{thread_id}/read")); - ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_read).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(MOD_USER_ID_PARSED) + .test(req_gen, thread_read) + .await + .unwrap(); // Thread write let thread_write = Scopes::THREAD_WRITE; @@ -740,7 +973,11 @@ pub async fn thread_scopes() { } })) }; - ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_write).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(MOD_USER_ID_PARSED) + .test(req_gen, thread_write) + .await + .unwrap(); // Delete that message // First, get message id @@ -755,7 +992,11 @@ pub async fn thread_scopes() { .as_str() .unwrap(); let req_gen = || test::TestRequest::delete().uri(&format!("/v2/message/{thread_message_id}")); - 
ScopeTest::new(&test_env).with_user_id(MOD_USER_ID_PARSED).test(req_gen, thread_write).await.unwrap(); + ScopeTest::new(&test_env) + .with_user_id(MOD_USER_ID_PARSED) + .test(req_gen, thread_write) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -769,15 +1010,16 @@ pub async fn pat_scopes() { // Pat create let pat_create = Scopes::PAT_CREATE; let req_gen = || { - test::TestRequest::post() - .uri(&format!("/v2/pat")) - .set_json(json!({ - "scopes": 1, - "name": "test_pat_scopes Name", - "expires": Utc::now() + Duration::days(1), - })) + test::TestRequest::post().uri("/v2/pat").set_json(json!({ + "scopes": 1, + "name": "test_pat_scopes Name", + "expires": Utc::now() + Duration::days(1), + })) }; - let (_, success) = ScopeTest::new(&test_env).test(req_gen, pat_create).await.unwrap(); + let (_, success) = ScopeTest::new(&test_env) + .test(req_gen, pat_create) + .await + .unwrap(); let pat_id = success["id"].as_str().unwrap(); // Pat write @@ -787,17 +1029,26 @@ pub async fn pat_scopes() { .uri(&format!("/v2/pat/{pat_id}")) .set_json(json!({})) }; - ScopeTest::new(&test_env).test(req_gen, pat_write).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, pat_write) + .await + .unwrap(); // Pat read let pat_read = Scopes::PAT_READ; - let req_gen = || test::TestRequest::get().uri(&format!("/v2/pat")); - ScopeTest::new(&test_env).test(req_gen, pat_read).await.unwrap(); + let req_gen = || test::TestRequest::get().uri("/v2/pat"); + ScopeTest::new(&test_env) + .test(req_gen, pat_read) + .await + .unwrap(); // Pat delete let pat_delete = Scopes::PAT_DELETE; let req_gen = || test::TestRequest::delete().uri(&format!("/v2/pat/{pat_id}")); - ScopeTest::new(&test_env).test(req_gen, pat_delete).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, pat_delete) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -812,14 +1063,17 @@ pub async fn collections_scopes() { let collection_create = Scopes::COLLECTION_CREATE; let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/collection")) + .uri("/v2/collection") .set_json(json!({ "title": "Test Collection", "description": "Test Collection Description", "projects": [PROJECT_ALPHA_PROJECT_ID] })) }; - let (_, success) = ScopeTest::new(&test_env).test(req_gen, collection_create).await.unwrap(); + let (_, success) = ScopeTest::new(&test_env) + .test(req_gen, collection_create) + .await + .unwrap(); let collection_id = success["id"].as_str().unwrap(); // Patch collection @@ -833,12 +1087,19 @@ pub async fn collections_scopes() { "status": "private", })) }; - ScopeTest::new(&test_env).test(req_gen, collection_write).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, collection_write) + .await + .unwrap(); // Read collection let collection_read = Scopes::COLLECTION_READ; let req_gen = || test::TestRequest::get().uri(&format!("/v2/collection/{}", collection_id)); - ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, collection_read).await.unwrap(); + ScopeTest::new(&test_env) + .with_failure_code(404) + .test(req_gen, collection_read) + .await + .unwrap(); let req_gen = || { test::TestRequest::get().uri(&format!( @@ -846,12 +1107,20 @@ pub async fn collections_scopes() { urlencoding::encode(&format!("\"{}\"", collection_id)) )) }; - let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, collection_read).await.unwrap(); + let (failure, success) = ScopeTest::new(&test_env) + .with_failure_code(200) + .test(req_gen, collection_read) + .await + .unwrap(); 
assert_eq!(failure.as_array().unwrap().len(), 0); assert_eq!(success.as_array().unwrap().len(), 1); let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{USER_USER_ID}/collections")); - let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, collection_read).await.unwrap(); + let (failure, success) = ScopeTest::new(&test_env) + .with_failure_code(200) + .test(req_gen, collection_read) + .await + .unwrap(); assert_eq!(failure.as_array().unwrap().len(), 0); assert_eq!(success.as_array().unwrap().len(), 1); @@ -862,11 +1131,17 @@ pub async fn collections_scopes() { include_bytes!("../tests/files/200x200.png") as &[u8] )) }; - ScopeTest::new(&test_env).test(req_gen, collection_write).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, collection_write) + .await + .unwrap(); let req_gen = || test::TestRequest::delete().uri(&format!("/v2/collection/{collection_id}/icon")); - ScopeTest::new(&test_env).test(req_gen, collection_write).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, collection_write) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -875,19 +1150,22 @@ pub async fn collections_scopes() { // Organization scopes (and a couple PROJECT_WRITE scopes that are only allowed for orgs) #[actix_rt::test] pub async fn organization_scopes() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::new().await; // Create organization let organization_create = Scopes::ORGANIZATION_CREATE; let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/organization")) + .uri("/v2/organization") .set_json(json!({ "title": "TestOrg", "description": "TestOrg Description", })) }; - let (_, success) = ScopeTest::new(&test_env).test(req_gen, organization_create).await.unwrap(); + let (_, success) = ScopeTest::new(&test_env) + .test(req_gen, organization_create) + .await + .unwrap(); let organization_id = success["id"].as_str().unwrap(); // Patch organization @@ -899,7 +1177,10 @@ pub async fn organization_scopes() { "description": "TestOrg Patch Description", })) }; - ScopeTest::new(&test_env).test(req_gen, organization_edit).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, organization_edit) + .await + .unwrap(); let req_gen = || { test::TestRequest::patch() @@ -908,11 +1189,17 @@ pub async fn organization_scopes() { include_bytes!("../tests/files/200x200.png") as &[u8] )) }; - ScopeTest::new(&test_env).test(req_gen, organization_edit).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, organization_edit) + .await + .unwrap(); let req_gen = || test::TestRequest::delete().uri(&format!("/v2/organization/{organization_id}/icon")); - ScopeTest::new(&test_env).test(req_gen, organization_edit).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, organization_edit) + .await + .unwrap(); // add project let organization_project_edit = Scopes::PROJECT_WRITE | Scopes::ORGANIZATION_WRITE; @@ -923,12 +1210,20 @@ pub async fn organization_scopes() { "project_id": PROJECT_BETA_PROJECT_ID })) }; - ScopeTest::new(&test_env).with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE).test(req_gen, organization_project_edit).await.unwrap(); + ScopeTest::new(&test_env) + .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE) + .test(req_gen, organization_project_edit) + .await + .unwrap(); // Organization reads let organization_read = Scopes::ORGANIZATION_READ; let req_gen = || test::TestRequest::get().uri(&format!("/v2/organization/{organization_id}")); - let (failure, 
success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, organization_read).await.unwrap(); + let (failure, success) = ScopeTest::new(&test_env) + .with_failure_code(200) + .test(req_gen, organization_read) + .await + .unwrap(); assert!( failure.as_object().unwrap()["members"].as_array().unwrap()[0] .as_object() @@ -948,8 +1243,12 @@ pub async fn organization_scopes() { urlencoding::encode(&format!("\"{}\"", organization_id)) )) }; - - let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, organization_read).await.unwrap(); + + let (failure, success) = ScopeTest::new(&test_env) + .with_failure_code(200) + .test(req_gen, organization_read) + .await + .unwrap(); assert!( failure.as_array().unwrap()[0].as_object().unwrap()["members"] .as_array() @@ -970,21 +1269,35 @@ pub async fn organization_scopes() { let organization_project_read = Scopes::PROJECT_READ | Scopes::ORGANIZATION_READ; let req_gen = || test::TestRequest::get().uri(&format!("/v2/organization/{organization_id}/projects")); - let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_READ).test(req_gen, organization_project_read).await.unwrap(); - assert!(failure.as_array().unwrap().len() == 0); - assert!(success.as_array().unwrap().len() == 1); + let (failure, success) = ScopeTest::new(&test_env) + .with_failure_code(200) + .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_READ) + .test(req_gen, organization_project_read) + .await + .unwrap(); + assert!(failure.as_array().unwrap().is_empty()); + assert!(!success.as_array().unwrap().is_empty()); // remove project (now that we've checked) let req_gen = || { - test::TestRequest::delete().uri(&format!("/v2/organization/{organization_id}/projects/{PROJECT_BETA_PROJECT_ID}")) + test::TestRequest::delete().uri(&format!( + "/v2/organization/{organization_id}/projects/{PROJECT_BETA_PROJECT_ID}" + )) }; - ScopeTest::new(&test_env).with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE).test(req_gen, organization_project_edit).await.unwrap(); + ScopeTest::new(&test_env) + .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE) + .test(req_gen, organization_project_edit) + .await + .unwrap(); // Delete organization let organization_delete = Scopes::ORGANIZATION_DELETE; let req_gen = || test::TestRequest::delete().uri(&format!("/v2/organization/{organization_id}")); - ScopeTest::new(&test_env).test(req_gen, organization_delete).await.unwrap(); + ScopeTest::new(&test_env) + .test(req_gen, organization_delete) + .await + .unwrap(); // Cleanup test db test_env.cleanup().await; @@ -998,4 +1311,4 @@ pub async fn organization_scopes() { // TODO: Meta pat stuff -// TODO: Image scopes \ No newline at end of file +// TODO: Image scopes From 87a3c35a9c5f3790ae10703ad755ce8bcf095643 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Wed, 4 Oct 2023 18:16:39 -0700 Subject: [PATCH 13/16] sqlx-data --- sqlx-data.json | 68 +++++++++++++++++++++++++------------------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/sqlx-data.json b/sqlx-data.json index 46c3d051..eabec5b7 100644 --- a/sqlx-data.json +++ b/sqlx-data.json @@ -1,5 +1,26 @@ { "db": "PostgreSQL", + "009bce5eee6ed65d9dc0899a4e24da528507a3f00b7ec997fa9ccdd7599655b1": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + } + }, + "query": "\n SELECT m.id FROM 
organizations o\n INNER JOIN mods m ON m.organization_id = o.id\n WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL)\n " + }, "010cafcafb6adc25b00e3c81d844736b0245e752a90334c58209d8a02536c800": { "describe": { "columns": [], @@ -3669,6 +3690,19 @@ }, "query": "\n SELECT n.id FROM notifications n\n WHERE n.user_id = $1\n " }, + "7b6b76f383adcbe2afbd2a2e87e66fd2a0d9d05b68b27823c1395e7cc3b8c0a2": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Varchar", + "Int8" + ] + } + }, + "query": "\n UPDATE collections\n SET status = $1\n WHERE (id = $2)\n " + }, "7c0cdacf0898155c94008a96a0b918550df4475b9e3362a926d4d00e001880c1": { "describe": { "columns": [ @@ -3821,19 +3855,6 @@ }, "query": "\n SELECT name FROM side_types\n " }, - "86049f204c9eda5241403d22b5f8ffe13b258ddfffb81a1a9ee8602e21c64723": { - "describe": { - "columns": [], - "nullable": [], - "parameters": { - "Left": [ - "Varchar", - "Int8" - ] - } - }, - "query": "\n UPDATE collections\n SET status = $1\n WHERE (id = $2)\n " - }, "868ee76d507cc9e94cd3c2e44770faff127e2b3c5f49b8100a9a37ac4d7b1f1d": { "describe": { "columns": [], @@ -6131,27 +6152,6 @@ }, "query": "\n UPDATE versions\n SET featured = $1\n WHERE (id = $2)\n " }, - "e60561aeefbc2bed1f77ff4bbca763b5be84bd6bc3eff75ca57e3590be286d45": { - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Int8" - } - ], - "nullable": [ - false - ], - "parameters": { - "Left": [ - "Int8", - "Text" - ] - } - }, - "query": "\n SELECT m.id FROM organizations o\n LEFT JOIN mods m ON m.id = o.id\n WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL)\n " - }, "e60ea75112db37d3e73812e21b1907716e4762e06aa883af878e3be82e3f87d3": { "describe": { "columns": [ From e4e86ba39a577231f6b9ff3aa321504d6952c564 Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Thu, 5 Oct 2023 17:53:35 -0700 Subject: [PATCH 14/16] revs --- Cargo.toml | 6 +- src/auth/pats.rs | 4 +- src/auth/validate.rs | 4 +- src/database/models/collection_item.rs | 5 +- src/database/models/organization_item.rs | 3 +- src/database/models/pat_item.rs | 18 +- src/database/models/project_item.rs | 29 +-- src/database/models/session_item.rs | 18 +- src/database/models/thread_item.rs | 6 +- src/database/models/user_item.rs | 5 +- src/database/redis.rs | 22 +- src/lib.rs | 120 ++++++++++- src/main.rs | 122 +----------- src/models/pats.rs | 52 ++--- src/routes/v2/collections.rs | 17 +- src/routes/v2/threads.rs | 1 - src/routes/v2/version_file.rs | 7 +- tests/common/database.rs | 33 ++- tests/common/dummy_data.rs | 229 +++++++++++++++++++++ tests/common/environment.rs | 165 ++------------- tests/common/mod.rs | 104 +--------- tests/common/pats.rs | 30 +++ tests/common/scopes.rs | 124 ++++++++++++ tests/files/dummy-project-alpha.jar | Bin 0 -> 680 bytes tests/files/dummy-project-beta.jar | Bin 0 -> 678 bytes tests/files/dummy_data.sql | 80 ++++---- tests/pats.rs | 16 +- tests/project.rs | 68 +++---- tests/scopes.rs | 243 ++++++++++++----------- 29 files changed, 845 insertions(+), 686 deletions(-) create mode 100644 tests/common/dummy_data.rs create mode 100644 tests/common/pats.rs create mode 100644 tests/common/scopes.rs create mode 100644 tests/files/dummy-project-alpha.jar create mode 100644 tests/files/dummy-project-beta.jar diff --git a/Cargo.toml b/Cargo.toml index 55dff9a0..8e464ad5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,7 +18,6 @@ actix-multipart = "0.6.0" actix-cors = "0.6.4" actix-ws = "0.2.5" actix-files = "0.6.2" -actix-http = 
"3.4.0" tokio = { version = "1.29.1", features = ["sync"] } tokio-stream = "0.1.14" @@ -92,4 +91,7 @@ color-thief = "0.2.2" woothee = "0.13.0" -lettre = "0.10.4" \ No newline at end of file +lettre = "0.10.4" + +[dev-dependencies] +actix-http = "3.4.0" diff --git a/src/auth/pats.rs b/src/auth/pats.rs index 15ff23dc..b8b2d918 100644 --- a/src/auth/pats.rs +++ b/src/auth/pats.rs @@ -81,7 +81,7 @@ pub async fn create_pat( .validate() .map_err(|err| ApiError::InvalidInput(validation_errors_to_string(err, None)))?; - if info.scopes.restricted() { + if info.scopes.is_restricted() { return Err(ApiError::InvalidInput( "Invalid scopes requested!".to_string(), )); @@ -181,7 +181,7 @@ pub async fn edit_pat( let mut transaction = pool.begin().await?; if let Some(scopes) = &info.scopes { - if scopes.restricted() { + if scopes.is_restricted() { return Err(ApiError::InvalidInput( "Invalid scopes requested!".to_string(), )); diff --git a/src/auth/validate.rs b/src/auth/validate.rs index e37d1415..34a0d128 100644 --- a/src/auth/validate.rs +++ b/src/auth/validate.rs @@ -141,7 +141,7 @@ where session_queue.add_session(session.id, metadata).await; } - user.map(|x| (Scopes::ALL, x)) + user.map(|x| (Scopes::all(), x)) } Some(("github", _)) | Some(("gho", _)) | Some(("ghp", _)) => { let user = AuthProvider::GitHub.get_user(token).await?; @@ -154,7 +154,7 @@ where ) .await?; - user.map(|x| (Scopes::NOT_RESTRICTED, x)) + user.map(|x| ((Scopes::all() ^ Scopes::restricted()), x)) } _ => return Err(AuthenticationError::InvalidAuthMethod), }; diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index 4aacfe1c..12ff7838 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -166,10 +166,7 @@ impl Collection { if !collection_ids.is_empty() { let collections = redis - .multi_get::( - COLLECTIONS_NAMESPACE, - collection_ids.iter().map(|x| x.0).collect(), - ) + .multi_get::(COLLECTIONS_NAMESPACE, collection_ids.iter().map(|x| x.0)) .await?; for collection in collections { diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs index 9f435d8f..5e910bf6 100644 --- a/src/database/models/organization_item.rs +++ b/src/database/models/organization_item.rs @@ -124,8 +124,7 @@ impl Organization { ORGANIZATIONS_TITLES_NAMESPACE, organization_strings .iter() - .map(|x| x.to_string().to_lowercase()) - .collect(), + .map(|x| x.to_string().to_lowercase()), ) .await? .into_iter() diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index cf458ea0..bfb881a9 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -108,7 +108,7 @@ impl PersonalAccessToken { &mut redis .multi_get::( PATS_TOKENS_NAMESPACE, - pat_strings.iter().map(|x| x.to_string()).collect(), + pat_strings.iter().map(|x| x.to_string()), ) .await? 
.into_iter() @@ -238,15 +238,13 @@ impl PersonalAccessToken { } for (id, token, user_id) in clear_pats { - if let Some(id) = id { - redis.delete(PATS_NAMESPACE, id.0).await?; - } - if let Some(token) = token { - redis.delete(PATS_TOKENS_NAMESPACE, token).await?; - } - if let Some(user_id) = user_id { - redis.delete(PATS_USERS_NAMESPACE, user_id.0).await?; - } + redis + .delete_many([ + (PATS_NAMESPACE, id.map(|i| i.0.to_string())), + (PATS_TOKENS_NAMESPACE, token), + (PATS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())), + ]) + .await?; } Ok(()) diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index bd59f124..f841f934 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -499,10 +499,7 @@ impl Project { &mut redis .multi_get::( PROJECTS_SLUGS_NAMESPACE, - project_strings - .iter() - .map(|x| x.to_string().to_lowercase()) - .collect(), + project_strings.iter().map(|x| x.to_string().to_lowercase()), ) .await? .into_iter() @@ -795,16 +792,20 @@ impl Project { clear_dependencies: Option, redis: &RedisPool, ) -> Result<(), DatabaseError> { - redis.delete(PROJECTS_NAMESPACE, id.0).await?; - if let Some(slug) = slug { - redis - .delete(PROJECTS_SLUGS_NAMESPACE, slug.to_lowercase()) - .await?; - } - if clear_dependencies.unwrap_or(false) { - redis.delete(PROJECTS_DEPENDENCIES_NAMESPACE, id.0).await?; - } - + redis + .delete_many([ + (PROJECTS_NAMESPACE, Some(id.0.to_string())), + (PROJECTS_SLUGS_NAMESPACE, slug.map(|x| x.to_lowercase())), + ( + PROJECTS_DEPENDENCIES_NAMESPACE, + if clear_dependencies.unwrap_or(false) { + Some(id.0.to_string()) + } else { + None + }, + ), + ]) + .await?; Ok(()) } } diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index aeb2c849..b14e2dba 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -149,7 +149,7 @@ impl Session { &mut redis .multi_get::( SESSIONS_IDS_NAMESPACE, - session_strings.iter().map(|x| x.to_string()).collect(), + session_strings.iter().map(|x| x.to_string()), ) .await? 
.into_iter() @@ -288,15 +288,13 @@ impl Session { } for (id, session, user_id) in clear_sessions { - if let Some(id) = id { - redis.delete(SESSIONS_NAMESPACE, id.0).await?; - } - if let Some(session) = session { - redis.delete(SESSIONS_IDS_NAMESPACE, session).await?; - } - if let Some(user_id) = user_id { - redis.delete(SESSIONS_USERS_NAMESPACE, user_id.0).await?; - } + redis + .delete_many([ + (SESSIONS_NAMESPACE, id.map(|i| i.0.to_string())), + (SESSIONS_IDS_NAMESPACE, session), + (SESSIONS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())), + ]) + .await?; } Ok(()) diff --git a/src/database/models/thread_item.rs b/src/database/models/thread_item.rs index 091eece3..c81b2db4 100644 --- a/src/database/models/thread_item.rs +++ b/src/database/models/thread_item.rs @@ -2,7 +2,7 @@ use super::ids::*; use crate::database::models::DatabaseError; use crate::models::threads::{MessageBody, ThreadType}; use chrono::{DateTime, Utc}; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; pub struct ThreadBuilder { pub type_: ThreadType, @@ -11,7 +11,7 @@ pub struct ThreadBuilder { pub report_id: Option, } -#[derive(Clone)] +#[derive(Clone, Serialize)] pub struct Thread { pub id: ThreadId, @@ -30,7 +30,7 @@ pub struct ThreadMessageBuilder { pub thread_id: ThreadId, } -#[derive(Deserialize, Clone)] +#[derive(Serialize, Deserialize, Clone)] pub struct ThreadMessage { pub id: ThreadMessageId, pub thread_id: ThreadId, diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 42de0ea0..04cac010 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -153,10 +153,7 @@ impl User { &mut redis .multi_get::( USER_USERNAMES_NAMESPACE, - users_strings - .iter() - .map(|x| x.to_string().to_lowercase()) - .collect(), + users_strings.iter().map(|x| x.to_string().to_lowercase()), ) .await? 
.into_iter()
diff --git a/src/database/redis.rs b/src/database/redis.rs
index 941cdcd7..35a17c5f 100644
--- a/src/database/redis.rs
+++ b/src/database/redis.rs
@@ -76,7 +76,7 @@ impl RedisPool {
     pub async fn multi_get<R, T1>(
         &self,
         namespace: &str,
-        ids: Vec<T1>,
+        ids: impl IntoIterator<Item = T1>,
     ) -> Result<Vec<Option<R>>, DatabaseError>
     where
         T1: Display,
@@ -85,7 +85,7 @@
         let mut redis_connection = self.pool.get().await?;
         let res = cmd("MGET")
             .arg(
-                ids.iter()
+                ids.into_iter()
                     .map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x))
                     .collect::<Vec<_>>(),
             )
@@ -107,4 +107,22 @@
 
         Ok(())
     }
+
+    pub async fn delete_many(
+        &self,
+        iter: impl IntoIterator<Item = (&str, Option<String>)>,
+    ) -> Result<(), DatabaseError> {
+        let mut redis_connection = self.pool.get().await?;
+
+        let mut cmd = cmd("DEL");
+        for (namespace, id) in iter {
+            if let Some(id) = id {
+                cmd.arg(format!("{}_{}:{}", self.meta_namespace, namespace, id));
+            }
+        }
+        cmd.query_async::<_, ()>(&mut redis_connection).await?;
+
+        Ok(())
+    }
 }
diff --git a/src/lib.rs b/src/lib.rs
index 26ef471c..01ff0bcd 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -16,7 +16,9 @@ use clickhouse_crate::Client;
 use util::cors::default_cors;
 
 use crate::{
-    queue::payouts::process_payout, search::indexing::index_projects, util::env::parse_var,
+    queue::payouts::process_payout,
+    search::indexing::index_projects,
+    util::env::{parse_strings_from_var, parse_var},
 };
 
 pub mod auth;
@@ -293,3 +295,119 @@ pub fn app_config(cfg: &mut web::ServiceConfig, labrinth_config: LabrinthConfig)
         .configure(routes::root_config)
         .default_service(web::get().wrap(default_cors()).to(routes::not_found));
 }
+
+// This is so that env vars not used immediately don't panic at runtime
+pub fn check_env_vars() -> bool {
+    let mut failed = false;
+
+    fn check_var<T: std::str::FromStr>(var: &'static str) -> bool {
+        let check = parse_var::<T>(var).is_none();
+        if check {
+            warn!(
+                "Variable `{}` missing in dotenv or not of type `{}`",
+                var,
+                std::any::type_name::<T>()
+            );
+        }
+        check
+    }
+
+    failed |= check_var::("SITE_URL");
+    failed |= check_var::("CDN_URL");
+    failed |= check_var::("LABRINTH_ADMIN_KEY");
+    failed |= check_var::("RATE_LIMIT_IGNORE_KEY");
+    failed |= check_var::("DATABASE_URL");
+    failed |= check_var::("MEILISEARCH_ADDR");
+    failed |= check_var::("MEILISEARCH_KEY");
+    failed |= check_var::("REDIS_URL");
+    failed |= check_var::("BIND_ADDR");
+    failed |= check_var::("SELF_ADDR");
+
+    failed |= check_var::("STORAGE_BACKEND");
+
+    let storage_backend = dotenvy::var("STORAGE_BACKEND").ok();
+    match storage_backend.as_deref() {
+        Some("backblaze") => {
+            failed |= check_var::("BACKBLAZE_KEY_ID");
+            failed |= check_var::("BACKBLAZE_KEY");
+            failed |= check_var::("BACKBLAZE_BUCKET_ID");
+        }
+        Some("s3") => {
+            failed |= check_var::("S3_ACCESS_TOKEN");
+            failed |= check_var::("S3_SECRET");
+            failed |= check_var::("S3_URL");
+            failed |= check_var::("S3_REGION");
+            failed |= check_var::("S3_BUCKET_NAME");
+        }
+        Some("local") => {
+            failed |= check_var::("MOCK_FILE_PATH");
+        }
+        Some(backend) => {
+            warn!("Variable `STORAGE_BACKEND` contains an invalid value: {}. Expected \"backblaze\", \"s3\", or \"local\".", backend);
+            failed |= true;
+        }
+        _ => {
+            warn!("Variable `STORAGE_BACKEND` is not set!");
+            failed |= true;
+        }
+    }
+
+    failed |= check_var::("LOCAL_INDEX_INTERVAL");
+    failed |= check_var::("VERSION_INDEX_INTERVAL");
+
+    if parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").is_none() {
+        warn!("Variable `WHITELISTED_MODPACK_DOMAINS` missing in dotenv or not a json array of strings");
+        failed |= true;
+    }
+
+    if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() {
+        warn!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings");
+        failed |= true;
+    }
+
+    failed |= check_var::("PAYPAL_API_URL");
+    failed |= check_var::("PAYPAL_CLIENT_ID");
+    failed |= check_var::("PAYPAL_CLIENT_SECRET");
+
+    failed |= check_var::("GITHUB_CLIENT_ID");
+    failed |= check_var::("GITHUB_CLIENT_SECRET");
+    failed |= check_var::("GITLAB_CLIENT_ID");
+    failed |= check_var::("GITLAB_CLIENT_SECRET");
+    failed |= check_var::("DISCORD_CLIENT_ID");
+    failed |= check_var::("DISCORD_CLIENT_SECRET");
+    failed |= check_var::("MICROSOFT_CLIENT_ID");
+    failed |= check_var::("MICROSOFT_CLIENT_SECRET");
+    failed |= check_var::("GOOGLE_CLIENT_ID");
+    failed |= check_var::("GOOGLE_CLIENT_SECRET");
+    failed |= check_var::("STEAM_API_KEY");
+
+    failed |= check_var::("TURNSTILE_SECRET");
+
+    failed |= check_var::("SMTP_USERNAME");
+    failed |= check_var::("SMTP_PASSWORD");
+    failed |= check_var::("SMTP_HOST");
+
+    failed |= check_var::("SITE_VERIFY_EMAIL_PATH");
+    failed |= check_var::("SITE_RESET_PASSWORD_PATH");
+
+    failed |= check_var::("BEEHIIV_PUBLICATION_ID");
+    failed |= check_var::("BEEHIIV_API_KEY");
+
+    if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() {
+        warn!(
+            "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings"
+        );
+        failed |= true;
+    }
+
+    failed |= check_var::("CLICKHOUSE_URL");
+    failed |= check_var::("CLICKHOUSE_USER");
+    failed |= check_var::("CLICKHOUSE_PASSWORD");
+    failed |= check_var::("CLICKHOUSE_DATABASE");
+
+    failed |= check_var::("MAXMIND_LICENSE_KEY");
+
+    failed |= check_var::("PAYOUTS_BUDGET");
+
+    failed
+}
diff --git a/src/main.rs b/src/main.rs
index f25f7c2a..e0d0e0ff 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -5,9 +5,9 @@ use labrinth::file_hosting::S3Host;
 use labrinth::ratelimit::errors::ARError;
 use labrinth::ratelimit::memory::{MemoryStore, MemoryStoreActor};
 use labrinth::ratelimit::middleware::RateLimiter;
-use labrinth::util::env::{parse_strings_from_var, parse_var};
-use labrinth::{clickhouse, database, file_hosting, queue};
-use log::{error, info, warn};
+use labrinth::util::env::parse_var;
+use labrinth::{check_env_vars, clickhouse, database, file_hosting, queue};
+use log::{error, info};
 
 use std::sync::Arc;
 
@@ -130,119 +130,3 @@ async fn main() -> std::io::Result<()> {
     .run()
     .await
 }
-
-// This is so that env vars not used immediately don't panic at runtime
-fn check_env_vars() -> bool {
-    let mut failed = false;
-
-    fn check_var<T: std::str::FromStr>(var: &'static str) -> bool {
-        let check = parse_var::<T>(var).is_none();
-        if check {
-            warn!(
-                "Variable `{}` missing in dotenv or not of type `{}`",
-                var,
-                std::any::type_name::<T>()
-            );
-        }
-        check
-    }
-
-    failed |= check_var::("SITE_URL");
-    failed |= check_var::("CDN_URL");
-    failed |= check_var::("LABRINTH_ADMIN_KEY");
-    failed |= check_var::("RATE_LIMIT_IGNORE_KEY");
-    failed |= check_var::("DATABASE_URL");
-    failed |= check_var::("MEILISEARCH_ADDR");
-    failed |= check_var::("MEILISEARCH_KEY");
-    failed |=
check_var::("REDIS_URL"); - failed |= check_var::("BIND_ADDR"); - failed |= check_var::("SELF_ADDR"); - - failed |= check_var::("STORAGE_BACKEND"); - - let storage_backend = dotenvy::var("STORAGE_BACKEND").ok(); - match storage_backend.as_deref() { - Some("backblaze") => { - failed |= check_var::("BACKBLAZE_KEY_ID"); - failed |= check_var::("BACKBLAZE_KEY"); - failed |= check_var::("BACKBLAZE_BUCKET_ID"); - } - Some("s3") => { - failed |= check_var::("S3_ACCESS_TOKEN"); - failed |= check_var::("S3_SECRET"); - failed |= check_var::("S3_URL"); - failed |= check_var::("S3_REGION"); - failed |= check_var::("S3_BUCKET_NAME"); - } - Some("local") => { - failed |= check_var::("MOCK_FILE_PATH"); - } - Some(backend) => { - warn!("Variable `STORAGE_BACKEND` contains an invalid value: {}. Expected \"backblaze\", \"s3\", or \"local\".", backend); - failed |= true; - } - _ => { - warn!("Variable `STORAGE_BACKEND` is not set!"); - failed |= true; - } - } - - failed |= check_var::("LOCAL_INDEX_INTERVAL"); - failed |= check_var::("VERSION_INDEX_INTERVAL"); - - if parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").is_none() { - warn!("Variable `WHITELISTED_MODPACK_DOMAINS` missing in dotenv or not a json array of strings"); - failed |= true; - } - - if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() { - warn!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings"); - failed |= true; - } - - failed |= check_var::("PAYPAL_API_URL"); - failed |= check_var::("PAYPAL_CLIENT_ID"); - failed |= check_var::("PAYPAL_CLIENT_SECRET"); - - failed |= check_var::("GITHUB_CLIENT_ID"); - failed |= check_var::("GITHUB_CLIENT_SECRET"); - failed |= check_var::("GITLAB_CLIENT_ID"); - failed |= check_var::("GITLAB_CLIENT_SECRET"); - failed |= check_var::("DISCORD_CLIENT_ID"); - failed |= check_var::("DISCORD_CLIENT_SECRET"); - failed |= check_var::("MICROSOFT_CLIENT_ID"); - failed |= check_var::("MICROSOFT_CLIENT_SECRET"); - failed |= check_var::("GOOGLE_CLIENT_ID"); - failed |= check_var::("GOOGLE_CLIENT_SECRET"); - failed |= check_var::("STEAM_API_KEY"); - - failed |= check_var::("TURNSTILE_SECRET"); - - failed |= check_var::("SMTP_USERNAME"); - failed |= check_var::("SMTP_PASSWORD"); - failed |= check_var::("SMTP_HOST"); - - failed |= check_var::("SITE_VERIFY_EMAIL_PATH"); - failed |= check_var::("SITE_RESET_PASSWORD_PATH"); - - failed |= check_var::("BEEHIIV_PUBLICATION_ID"); - failed |= check_var::("BEEHIIV_API_KEY"); - - if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() { - warn!( - "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings" - ); - failed |= true; - } - - failed |= check_var::("CLICKHOUSE_URL"); - failed |= check_var::("CLICKHOUSE_USER"); - failed |= check_var::("CLICKHOUSE_PASSWORD"); - failed |= check_var::("CLICKHOUSE_DATABASE"); - - failed |= check_var::("MAXMIND_LICENSE_KEY"); - - failed |= check_var::("PAYOUTS_BUDGET"); - - failed -} diff --git a/src/models/pats.rs b/src/models/pats.rs index 44b9ee9c..5d3f65ca 100644 --- a/src/models/pats.rs +++ b/src/models/pats.rs @@ -103,26 +103,26 @@ bitflags::bitflags! 
{ // delete an organization const ORGANIZATION_DELETE = 1 << 38; - const ALL = 0b111111111111111111111111111111111111111; - const NOT_RESTRICTED = 0b1111111110000000111111111111111111100111; const NONE = 0b0; } } impl Scopes { // these scopes cannot be specified in a personal access token - pub fn restricted(&self) -> bool { - self.intersects( - Scopes::PAT_CREATE - | Scopes::PAT_READ - | Scopes::PAT_WRITE - | Scopes::PAT_DELETE - | Scopes::SESSION_READ - | Scopes::SESSION_DELETE - | Scopes::USER_AUTH_WRITE - | Scopes::USER_DELETE - | Scopes::PERFORM_ANALYTICS, - ) + pub fn restricted() -> Scopes { + Scopes::PAT_CREATE + | Scopes::PAT_READ + | Scopes::PAT_WRITE + | Scopes::PAT_DELETE + | Scopes::SESSION_READ + | Scopes::SESSION_DELETE + | Scopes::USER_AUTH_WRITE + | Scopes::USER_DELETE + | Scopes::PERFORM_ANALYTICS + } + + pub fn is_restricted(&self) -> bool { + self.intersects(Self::restricted()) } } @@ -159,27 +159,3 @@ impl PersonalAccessToken { } } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - pub fn pat_sanity() { - assert_eq!(Scopes::NONE, Scopes::empty()); - - // Ensure PATs add up and match - // (Such as NOT_RESTRICTED lining up with is_restricted()) - let mut calculated_not_restricted = Scopes::NONE; - let mut calculated_all = Scopes::NONE; - for i in 0..64 { - let scope = Scopes::from_bits_truncate(1 << i); - if !scope.restricted() { - calculated_not_restricted |= scope; - } - calculated_all |= scope; - } - assert_eq!(Scopes::ALL | Scopes::NOT_RESTRICTED, calculated_all); - assert_eq!(Scopes::NOT_RESTRICTED, calculated_not_restricted); - } -} diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs index 56b658c1..01372b0e 100644 --- a/src/routes/v2/collections.rs +++ b/src/routes/v2/collections.rs @@ -1,6 +1,5 @@ use crate::auth::checks::{filter_authorized_collections, is_authorized_collection}; use crate::auth::get_user_from_headers; -use crate::database; use crate::database::models::{collection_item, generate_collection_id, project_item}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; @@ -12,6 +11,7 @@ use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::util::routes::read_from_payload; use crate::util::validate::validation_errors_to_string; +use crate::{database, models}; use actix_web::web::Data; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use chrono::Utc; @@ -231,7 +231,7 @@ pub async fn collection_edit( let result = database::models::Collection::get(id, &**pool, &redis).await?; if let Some(collection_item) = result { - if collection_item.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection_item, &user) { return Ok(HttpResponse::Unauthorized().body("")); } @@ -371,7 +371,7 @@ pub async fn collection_icon_edit( ApiError::InvalidInput("The specified collection does not exist!".to_string()) })?; - if collection_item.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection_item, &user) { return Ok(HttpResponse::Unauthorized().body("")); } @@ -452,7 +452,7 @@ pub async fn delete_collection_icon( .ok_or_else(|| { ApiError::InvalidInput("The specified collection does not exist!".to_string()) })?; - if collection_item.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection_item, &user) { return Ok(HttpResponse::Unauthorized().body("")); } @@ -510,7 +510,7 @@ pub async fn collection_delete( .ok_or_else(|| { ApiError::InvalidInput("The specified collection does not 
exist!".to_string()) })?; - if collection.user_id != user.id.into() && !user.role.is_mod() { + if !can_modify_collection(&collection, &user) { return Ok(HttpResponse::Unauthorized().body("")); } let mut transaction = pool.begin().await?; @@ -527,3 +527,10 @@ pub async fn collection_delete( Ok(HttpResponse::NotFound().body("")) } } + +fn can_modify_collection( + collection: &database::models::Collection, + user: &models::users::User, +) -> bool { + collection.user_id == user.id.into() || user.role.is_mod() +} diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs index 79930a0b..af2a5782 100644 --- a/src/routes/v2/threads.rs +++ b/src/routes/v2/threads.rs @@ -535,7 +535,6 @@ pub async fn moderation_inbox( let threads_data = database::models::Thread::get_many(&ids, &**pool).await?; let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?; - Ok(HttpResponse::Ok().json(threads)) } diff --git a/src/routes/v2/version_file.rs b/src/routes/v2/version_file.rs index 392d5fe3..171788b1 100644 --- a/src/routes/v2/version_file.rs +++ b/src/routes/v2/version_file.rs @@ -23,7 +23,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { .service(get_version_from_hash) .service(download_version) .service(get_update_from_hash) - .service(get_projects_from_hashes), // TODO: confirm this should be added + .service(get_projects_from_hashes), ); cfg.service( @@ -34,7 +34,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { ); } -#[derive(Deserialize)] +#[derive(Serialize, Deserialize)] pub struct HashQuery { #[serde(default = "default_algorithm")] pub algorithm: String, @@ -65,7 +65,6 @@ pub async fn get_version_from_hash( .await .map(|x| x.1) .ok(); - let hash = info.into_inner().0.to_lowercase(); let file = database::models::Version::get_file_from_hash( hash_query.algorithm.clone(), @@ -75,10 +74,8 @@ pub async fn get_version_from_hash( &redis, ) .await?; - if let Some(file) = file { let version = database::models::Version::get(file.version_id, &**pool, &redis).await?; - if let Some(version) = version { if !is_authorized_version(&version.inner, &user_option, &pool).await? { return Ok(HttpResponse::NotFound().body("")); diff --git a/tests/common/database.rs b/tests/common/database.rs index 5236b076..c1208d51 100644 --- a/tests/common/database.rs +++ b/tests/common/database.rs @@ -31,26 +31,26 @@ pub const ENEMY_USER_PAT: &str = "mrp_patenemy"; // There are two test projects. They are both created by user 3 (USER_USER_ID). // They differ only in that 'ALPHA' is a public, approved project, and 'BETA' is a private, project in queue. // The same goes for their corresponding versions- one listed, one draft. -pub const PROJECT_ALPHA_TEAM_ID: &str = "1c"; -pub const PROJECT_BETA_TEAM_ID: &str = "1d"; +// pub const PROJECT_ALPHA_TEAM_ID: &str = "1c"; +// pub const PROJECT_BETA_TEAM_ID: &str = "1d"; -pub const PROJECT_ALPHA_PROJECT_ID: &str = "G8"; -pub const PROJECT_BETA_PROJECT_ID: &str = "G9"; +// pub const PROJECT_ALPHA_PROJECT_ID: &str = "G8"; +// pub const PROJECT_BETA_PROJECT_ID: &str = "G9"; -pub const PROJECT_ALPHA_PROJECT_SLUG: &str = "testslug"; -pub const PROJECT_BETA_PROJECT_SLUG: &str = "testslug2"; +// pub const PROJECT_ALPHA_PROJECT_SLUG: &str = "testslug"; +// pub const PROJECT_BETA_PROJECT_SLUG: &str = "testslug2"; -pub const PROJECT_ALPHA_VERSION_ID: &str = "Hk"; -pub const PROJECT_BETA_VERSION_ID: &str = "Hl"; +// pub const alpha_version_id: &str = "Hk"; +// pub const beta_version_id: &str = "Hl"; // These are threads created alongside the projects. 
-pub const PROJECT_ALPHA_THREAD_ID: &str = "U"; -pub const PROJECT_BETA_THREAD_ID: &str = "V"; +// pub const alpha_thread_id: &str = "U"; +// pub const PROJECT_BETA_THREAD_ID: &str = "V"; // These are the hashes of the files attached to their versions: they do not reflect a 'real' hash of data. // This can be used for /version_file/ type endpoints which get a project's data from its hash. -pub const PROJECT_ALPHA_THREAD_FILE_HASH: &str = "000000000"; -pub const PROJECT_BETA_THREAD_FILE_HASH: &str = "111111111"; +// pub const alpha_file_hash: &str = "000000000"; +// pub const beta_file_hash: &str = "111111111"; pub struct TemporaryDatabase { pub pool: PgPool, @@ -112,12 +112,6 @@ impl TemporaryDatabase { } } - pub async fn create_with_dummy() -> Self { - let db = Self::create().await; - db.add_dummy_data().await; - db - } - // Deletes the temporary database // If a temporary db is created, it must be cleaned up with cleanup. // This means that dbs will only 'remain' if a test fails (for examination of the db), and will be cleaned up otherwise. @@ -149,10 +143,13 @@ impl TemporaryDatabase { } pub async fn add_dummy_data(&self) { + // Adds basic dummy data to the database directly with sql (user, pats) let pool = &self.pool.clone(); pool.execute(include_str!("../files/dummy_data.sql")) .await .unwrap(); + + // Adds dummy data to the database with sqlx (projects, versions, threads) } } diff --git a/tests/common/dummy_data.rs b/tests/common/dummy_data.rs new file mode 100644 index 00000000..d3cd9667 --- /dev/null +++ b/tests/common/dummy_data.rs @@ -0,0 +1,229 @@ +use actix_web::test::{self, TestRequest}; +use labrinth::{models::projects::Project, models::projects::Version}; +use serde_json::json; +use sqlx::Executor; + +use crate::common::{ + actix::AppendsMultipart, + database::{MOD_USER_PAT, USER_USER_PAT}, +}; + +use super::{ + actix::{MultipartSegment, MultipartSegmentData}, + environment::TestEnvironment, +}; + +pub struct DummyData { + pub alpha_team_id: String, + pub beta_team_id: String, + + pub alpha_project_id: String, + pub beta_project_id: String, + + pub alpha_project_slug: String, + pub beta_project_slug: String, + + pub alpha_version_id: String, + pub beta_version_id: String, + + pub alpha_thread_id: String, + pub beta_thread_id: String, + + pub alpha_file_hash: String, + pub beta_file_hash: String, +} + +pub async fn add_dummy_data(test_env: &TestEnvironment) -> DummyData { + // Adds basic dummy data to the database directly with sql (user, pats) + let pool = &test_env.db.pool.clone(); + pool.execute(include_str!("../files/dummy_data.sql")) + .await + .unwrap(); + + let (alpha_project, alpha_version) = add_project_alpha(test_env).await; + let (beta_project, beta_version) = add_project_beta(test_env).await; + + DummyData { + alpha_team_id: alpha_project.team.to_string(), + beta_team_id: beta_project.team.to_string(), + + alpha_project_id: alpha_project.id.to_string(), + beta_project_id: beta_project.id.to_string(), + + alpha_project_slug: alpha_project.slug.unwrap(), + beta_project_slug: beta_project.slug.unwrap(), + + alpha_version_id: alpha_version.id.to_string(), + beta_version_id: beta_version.id.to_string(), + + alpha_thread_id: alpha_project.thread_id.to_string(), + beta_thread_id: beta_project.thread_id.to_string(), + + alpha_file_hash: alpha_version.files[0].hashes["sha1"].clone(), + beta_file_hash: beta_version.files[0].hashes["sha1"].clone(), + } +} + +pub async fn add_project_alpha(test_env: &TestEnvironment) -> (Project, Version) { + // Adds dummy data to the 
database with sqlx (projects, versions, threads) + // Generate test project data. + let json_data = json!( + { + "title": "Test Project Alpha", + "slug": "alpha", + "description": "A dummy project for testing with.", + "body": "This project is approved, and versions are listed.", + "client_side": "required", + "server_side": "optional", + "initial_versions": [{ + "file_parts": ["dummy-project-alpha.jar"], + "version_number": "1.2.3", + "version_title": "start", + "dependencies": [], + "game_versions": ["1.20.1"] , + "release_channel": "release", + "loaders": ["fabric"], + "featured": true + }], + "categories": [], + "license_id": "MIT" + } + ); + + // Basic json + let json_segment = MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + }; + + // Basic file + let file_segment = MultipartSegment { + name: "dummy-project-alpha.jar".to_string(), + filename: Some("dummy-project-alpha.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: MultipartSegmentData::Binary( + include_bytes!("../../tests/files/dummy-project-alpha.jar").to_vec(), + ), + }; + + // Add a project. + let req = TestRequest::post() + .uri("/v2/project") + .append_header(("Authorization", USER_USER_PAT)) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status(), 200); + + // Approve as a moderator. + let req = TestRequest::patch() + .uri("/v2/project/alpha") + .append_header(("Authorization", MOD_USER_PAT)) + .set_json(json!( + { + "status": "approved" + } + )) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status(), 204); + + // Get project + let req = TestRequest::get() + .uri("/v2/project/alpha") + .append_header(("Authorization", USER_USER_PAT)) + .to_request(); + let resp = test_env.call(req).await; + let project: Project = test::read_body_json(resp).await; + + // Get project's versions + let req = TestRequest::get() + .uri("/v2/project/alpha/version") + .append_header(("Authorization", USER_USER_PAT)) + .to_request(); + let resp = test_env.call(req).await; + let versions: Vec = test::read_body_json(resp).await; + let version = versions.into_iter().next().unwrap(); + + (project, version) +} + +pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version) { + // Adds dummy data to the database with sqlx (projects, versions, threads) + // Generate test project data. 
+ let json_data = json!( + { + "title": "Test Project Beta", + "slug": "beta", + "description": "A dummy project for testing with.", + "body": "This project is not-yet-approved, and versions are draft.", + "client_side": "required", + "server_side": "optional", + "initial_versions": [{ + "file_parts": ["dummy-project-beta.jar"], + "version_number": "1.2.3", + "version_title": "start", + "status": "unlisted", + "requested_status": "unlisted", + "dependencies": [], + "game_versions": ["1.20.1"] , + "release_channel": "release", + "loaders": ["fabric"], + "featured": true + }], + "status": "private", + "requested_status": "private", + "categories": [], + "license_id": "MIT" + } + ); + + // Basic json + let json_segment = MultipartSegment { + name: "data".to_string(), + filename: None, + content_type: Some("application/json".to_string()), + data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + }; + + // Basic file + let file_segment = MultipartSegment { + name: "dummy-project-beta.jar".to_string(), + filename: Some("dummy-project-beta.jar".to_string()), + content_type: Some("application/java-archive".to_string()), + data: MultipartSegmentData::Binary( + include_bytes!("../../tests/files/dummy-project-beta.jar").to_vec(), + ), + }; + + // Add a project. + let req = TestRequest::post() + .uri("/v2/project") + .append_header(("Authorization", USER_USER_PAT)) + .set_multipart(vec![json_segment.clone(), file_segment.clone()]) + .to_request(); + let resp = test_env.call(req).await; + + assert_eq!(resp.status(), 200); + + // Get project + let req = TestRequest::get() + .uri("/v2/project/beta") + .append_header(("Authorization", USER_USER_PAT)) + .to_request(); + let resp = test_env.call(req).await; + let project: Project = test::read_body_json(resp).await; + + // Get project's versions + let req = TestRequest::get() + .uri("/v2/project/beta/version") + .append_header(("Authorization", USER_USER_PAT)) + .to_request(); + let resp = test_env.call(req).await; + let versions: Vec = test::read_body_json(resp).await; + let version = versions.into_iter().next().unwrap(); + + (project, version) +} diff --git a/tests/common/environment.rs b/tests/common/environment.rs index fb26f373..bcf5c686 100644 --- a/tests/common/environment.rs +++ b/tests/common/environment.rs @@ -1,17 +1,8 @@ #![allow(dead_code)] -use super::database::{TemporaryDatabase, USER_USER_ID_PARSED}; +use super::{database::TemporaryDatabase, dummy_data}; use crate::common::setup; -use actix_web::{ - dev::ServiceResponse, - test::{self, TestRequest}, - App, -}; -use chrono::Utc; -use labrinth::{ - database::{self, models::generate_pat_id}, - models::pats::Scopes, -}; +use actix_web::{dev::ServiceResponse, test, App}; // A complete test environment, with a test actix app and a database. // Must be called in an #[actix_rt::test] context. 
It also simulates a
@@ -20,17 +11,27 @@ use labrinth::{
 pub struct TestEnvironment {
     test_app: Box,
     pub db: TemporaryDatabase,
+
+    pub dummy: Option<dummy_data::DummyData>,
 }
 
 impl TestEnvironment {
-    pub async fn new() -> Self {
-        let db = TemporaryDatabase::create_with_dummy().await;
+    pub async fn build_with_dummy() -> Self {
+        let mut test_env = Self::build().await;
+        let dummy = dummy_data::add_dummy_data(&test_env).await;
+        test_env.dummy = Some(dummy);
+        test_env
+    }
+
+    pub async fn build() -> Self {
+        let db = TemporaryDatabase::create().await;
         let labrinth_config = setup(&db).await;
         let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
         let test_app = test::init_service(app).await;
         Self {
             test_app: Box::new(test_app),
             db,
+            dummy: None,
         }
     }
     pub async fn cleanup(self) {
@@ -68,141 +69,3 @@ where
         Box::pin(self.call(req))
     }
 }
-
-// A reusable test type that works for any scope test testing an endpoint that:
-// - returns a known 'expected_failure_code' if the scope is not present (defaults to 401)
-// - returns a 200-299 if the scope is present
-// - returns failure and success JSON bodies for requests that are 200 (for performing non-simple follow-up tests on)
-// This uses a builder format, so you can chain methods to set the parameters to non-defaults (most will probably be not need to be set).
-pub struct ScopeTest<'a> {
-    test_env: &'a TestEnvironment,
-    // Scopes expected to fail on this test. By default, this is all scopes except the success scopes.
-    // (To ensure we have isolated the scope we are testing)
-    failure_scopes: Option<Scopes>,
-    // User ID to use for the PATs. By default, this is the USER_USER_ID_PARSED constant.
-    user_id: i64,
-    // The code that is expected to be returned if the scope is not present. By default, this is 401 (Unauthorized)
-    expected_failure_code: u16,
-}
-
-impl<'a> ScopeTest<'a> {
-    pub fn new(test_env: &'a TestEnvironment) -> Self {
-        Self {
-            test_env,
-            failure_scopes: None,
-            user_id: USER_USER_ID_PARSED,
-            expected_failure_code: 401,
-        }
-    }
-
-    // Set non-standard failure scopes
-    // If not set, it will be set to all scopes except the success scopes
-    // (eg: if a combination of scopes is needed, but you want to make sure that the endpoint does not work with all-but-one of them)
-    pub fn with_failure_scopes(mut self, scopes: Scopes) -> Self {
-        self.failure_scopes = Some(scopes);
-        self
-    }
-
-    // Set the user ID to use
-    // (eg: a moderator, or friend)
-    pub fn with_user_id(mut self, user_id: i64) -> Self {
-        self.user_id = user_id;
-        self
-    }
-
-    // If a non-401 code is expected.
-    // (eg: a 404 for a hidden resource, or 200 for a resource with hidden values deeper in)
-    pub fn with_failure_code(mut self, code: u16) -> Self {
-        self.expected_failure_code = code;
-        self
-    }
-
-    // Call the endpoint generated by req_gen twice, once with a PAT with the failure scopes, and once with the success scopes.
-    // success_scopes : the scopes that we are testing that should succeed
-    // returns a tuple of (failure_body, success_body)
-    // Should return a String error if on unexpected status code, allowing unwrapping in tests.
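// For reference, a minimal usage sketch for the builders above; `example` is a
// hypothetical test name, everything else is defined in tests/common by this patch:
//
//     #[actix_rt::test]
//     async fn example() {
//         let test_env = TestEnvironment::build_with_dummy().await;
//         let alpha_project_id = test_env.dummy.as_ref().unwrap().alpha_project_id.clone();
//         let req = test::TestRequest::get()
//             .uri(&format!("/v2/project/{alpha_project_id}"))
//             .to_request();
//         let resp = test_env.call(req).await;
//         assert!(resp.status().is_success());
//         test_env.cleanup().await; // drops the temporary database
//     }
//
// (Below: the remainder of the old ScopeTest implementation being removed from this file.)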
- pub async fn test( - &self, - req_gen: T, - success_scopes: Scopes, - ) -> Result<(serde_json::Value, serde_json::Value), String> - where - T: Fn() -> TestRequest, - { - // First, create a PAT with failure scopes - let failure_scopes = self.failure_scopes.unwrap_or(Scopes::ALL ^ success_scopes); - let access_token_all_others = - create_test_pat(failure_scopes, self.user_id, &self.test_env.db).await; - - // Create a PAT with the success scopes - let access_token = create_test_pat(success_scopes, self.user_id, &self.test_env.db).await; - - // Perform test twice, once with each PAT - // the first time, we expect a 401 (or known failure code) - let req = req_gen() - .append_header(("Authorization", access_token_all_others.as_str())) - .to_request(); - let resp = self.test_env.test_app.call(req).await.unwrap(); - - if resp.status().as_u16() != self.expected_failure_code { - return Err(format!( - "Expected failure code {}, got {}", - self.expected_failure_code, - resp.status().as_u16() - )); - } - - let failure_body = if resp.status() == 200 - && resp.headers().contains_key("Content-Type") - && resp.headers().get("Content-Type").unwrap() == "application/json" - { - test::read_body_json(resp).await - } else { - serde_json::Value::Null - }; - - // The second time, we expect a success code - let req = req_gen() - .append_header(("Authorization", access_token.as_str())) - .to_request(); - let resp = self.test_env.test_app.call(req).await.unwrap(); - - if !(resp.status().is_success() || resp.status().is_redirection()) { - return Err(format!( - "Expected success code, got {}", - resp.status().as_u16() - )); - } - - let success_body = if resp.status() == 200 - && resp.headers().contains_key("Content-Type") - && resp.headers().get("Content-Type").unwrap() == "application/json" - { - test::read_body_json(resp).await - } else { - serde_json::Value::Null - }; - Ok((failure_body, success_body)) - } -} - -// Creates a PAT with the given scopes, and returns the access token -// Interfacing with the db directly, rather than using a ourte, -// allows us to test with scopes that are not allowed to be created by PATs -async fn create_test_pat(scopes: Scopes, user_id: i64, db: &TemporaryDatabase) -> String { - let mut transaction = db.pool.begin().await.unwrap(); - let id = generate_pat_id(&mut transaction).await.unwrap(); - let pat = database::models::pat_item::PersonalAccessToken { - id, - name: format!("test_pat_{}", scopes.bits()), - access_token: format!("mrp_{}", id.0), - scopes, - user_id: database::models::ids::UserId(user_id), - created: Utc::now(), - expires: Utc::now() + chrono::Duration::days(1), - last_used: None, - }; - pat.insert(&mut transaction).await.unwrap(); - transaction.commit().await.unwrap(); - pat.access_token -} diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 122849ea..cde6fc8d 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -1,25 +1,24 @@ -use labrinth::clickhouse; -use labrinth::{ - file_hosting, queue, - util::env::{parse_strings_from_var, parse_var}, - LabrinthConfig, -}; +use labrinth::{check_env_vars, clickhouse}; +use labrinth::{file_hosting, queue, LabrinthConfig}; use std::sync::Arc; use self::database::TemporaryDatabase; pub mod actix; pub mod database; +pub mod dummy_data; pub mod environment; +pub mod pats; +pub mod scopes; // Testing equivalent to 'setup' function, producing a LabrinthConfig -// If making a test, you should probably use environment::TestEnvironment::new() (which calls this) +// If making a test, you should probably use 
environment::TestEnvironment::build_with_dummy() (which calls this) pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig { println!("Setting up labrinth config"); dotenvy::dotenv().ok(); - if check_test_vars() { + if check_env_vars() { println!("Some environment variables are missing!"); } @@ -39,92 +38,3 @@ pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig { maxmind_reader.clone(), ) } - -// This is so that env vars not used immediately don't panic at runtime -// Currently, these are the same as main.rs ones. -// TODO: go through after all tests are created and remove any that are not used -// Low priority as .env file should include all of these anyway -fn check_test_vars() -> bool { - let mut failed = false; - - fn check_var(var: &'static str) -> bool { - let check = parse_var::(var).is_none(); - if check { - println!( - "Variable `{}` missing in dotenv or not of type `{}`", - var, - std::any::type_name::() - ); - } - check - } - - failed |= check_var::("DATABASE_URL"); - failed |= check_var::("MEILISEARCH_ADDR"); - failed |= check_var::("MEILISEARCH_KEY"); - failed |= check_var::("BIND_ADDR"); - failed |= check_var::("SELF_ADDR"); - - failed |= check_var::("MOCK_FILE_PATH"); - - failed |= check_var::("LOCAL_INDEX_INTERVAL"); - failed |= check_var::("VERSION_INDEX_INTERVAL"); - - if parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").is_none() { - println!("Variable `WHITELISTED_MODPACK_DOMAINS` missing in dotenv or not a json array of strings"); - failed |= true; - } - - if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() { - println!( - "Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings" - ); - failed |= true; - } - - failed |= check_var::("PAYPAL_API_URL"); - failed |= check_var::("PAYPAL_CLIENT_ID"); - failed |= check_var::("PAYPAL_CLIENT_SECRET"); - - failed |= check_var::("GITHUB_CLIENT_ID"); - failed |= check_var::("GITHUB_CLIENT_SECRET"); - failed |= check_var::("GITLAB_CLIENT_ID"); - failed |= check_var::("GITLAB_CLIENT_SECRET"); - failed |= check_var::("DISCORD_CLIENT_ID"); - failed |= check_var::("DISCORD_CLIENT_SECRET"); - failed |= check_var::("MICROSOFT_CLIENT_ID"); - failed |= check_var::("MICROSOFT_CLIENT_SECRET"); - failed |= check_var::("GOOGLE_CLIENT_ID"); - failed |= check_var::("GOOGLE_CLIENT_SECRET"); - failed |= check_var::("STEAM_API_KEY"); - - failed |= check_var::("TURNSTILE_SECRET"); - - failed |= check_var::("SMTP_USERNAME"); - failed |= check_var::("SMTP_PASSWORD"); - failed |= check_var::("SMTP_HOST"); - - failed |= check_var::("SITE_VERIFY_EMAIL_PATH"); - failed |= check_var::("SITE_RESET_PASSWORD_PATH"); - - failed |= check_var::("BEEHIIV_PUBLICATION_ID"); - failed |= check_var::("BEEHIIV_API_KEY"); - - if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() { - println!( - "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings" - ); - failed |= true; - } - - failed |= check_var::("CLICKHOUSE_URL"); - failed |= check_var::("CLICKHOUSE_USER"); - failed |= check_var::("CLICKHOUSE_PASSWORD"); - failed |= check_var::("CLICKHOUSE_DATABASE"); - - failed |= check_var::("MAXMIND_LICENSE_KEY"); - - failed |= check_var::("PAYOUTS_BUDGET"); - - failed -} diff --git a/tests/common/pats.rs b/tests/common/pats.rs new file mode 100644 index 00000000..d63517cf --- /dev/null +++ b/tests/common/pats.rs @@ -0,0 +1,30 @@ +#![allow(dead_code)] + +use chrono::Utc; +use labrinth::{ + database::{self, models::generate_pat_id}, + models::pats::Scopes, +}; + +use 
super::database::TemporaryDatabase;
+
+// Creates a PAT with the given scopes, and returns the access token
+// Interfacing with the db directly, rather than using a route,
+// allows us to test with scopes that are not allowed to be created by PATs
+pub async fn create_test_pat(scopes: Scopes, user_id: i64, db: &TemporaryDatabase) -> String {
+    let mut transaction = db.pool.begin().await.unwrap();
+    let id = generate_pat_id(&mut transaction).await.unwrap();
+    let pat = database::models::pat_item::PersonalAccessToken {
+        id,
+        name: format!("test_pat_{}", scopes.bits()),
+        access_token: format!("mrp_{}", id.0),
+        scopes,
+        user_id: database::models::ids::UserId(user_id),
+        created: Utc::now(),
+        expires: Utc::now() + chrono::Duration::days(1),
+        last_used: None,
+    };
+    pat.insert(&mut transaction).await.unwrap();
+    transaction.commit().await.unwrap();
+    pat.access_token
+}
diff --git a/tests/common/scopes.rs b/tests/common/scopes.rs
new file mode 100644
index 00000000..44a4b7df
--- /dev/null
+++ b/tests/common/scopes.rs
@@ -0,0 +1,124 @@
+#![allow(dead_code)]
+use actix_web::test::{self, TestRequest};
+use labrinth::models::pats::Scopes;
+
+use super::{database::USER_USER_ID_PARSED, environment::TestEnvironment, pats::create_test_pat};
+
+// A reusable test type that works for any scope test testing an endpoint that:
+// - returns a known 'expected_failure_code' if the scope is not present (defaults to 401)
+// - returns a 200-299 if the scope is present
+// - returns failure and success JSON bodies for requests that are 200 (for performing non-simple follow-up tests on)
+// This uses a builder format, so you can chain methods to set the parameters to non-defaults (most will probably not need to be set).
+pub struct ScopeTest<'a> {
+    test_env: &'a TestEnvironment,
+    // Scopes expected to fail on this test. By default, this is all scopes except the success scopes.
+    // (To ensure we have isolated the scope we are testing)
+    failure_scopes: Option<Scopes>,
+    // User ID to use for the PATs. By default, this is the USER_USER_ID_PARSED constant.
+    user_id: i64,
+    // The code that is expected to be returned if the scope is not present. By default, this is 401 (Unauthorized)
+    expected_failure_code: u16,
+}
+
+impl<'a> ScopeTest<'a> {
+    pub fn new(test_env: &'a TestEnvironment) -> Self {
+        Self {
+            test_env,
+            failure_scopes: None,
+            user_id: USER_USER_ID_PARSED,
+            expected_failure_code: 401,
+        }
+    }
+
+    // Set non-standard failure scopes
+    // If not set, it will be set to all scopes except the success scopes
+    // (eg: if a combination of scopes is needed, but you want to make sure that the endpoint does not work with all-but-one of them)
+    pub fn with_failure_scopes(mut self, scopes: Scopes) -> Self {
+        self.failure_scopes = Some(scopes);
+        self
+    }
+
+    // Set the user ID to use
+    // (eg: a moderator, or friend)
+    pub fn with_user_id(mut self, user_id: i64) -> Self {
+        self.user_id = user_id;
+        self
+    }
+
+    // If a non-401 code is expected.
+    // (eg: a 404 for a hidden resource, or 200 for a resource with hidden values deeper in)
+    pub fn with_failure_code(mut self, code: u16) -> Self {
+        self.expected_failure_code = code;
+        self
+    }
+
+    // Call the endpoint generated by req_gen twice, once with a PAT with the failure scopes, and once with the success scopes.
+    // success_scopes : the scopes that we are testing that should succeed
+    // returns a tuple of (failure_body, success_body)
+    // Should return a String error if on unexpected status code, allowing unwrapping in tests.
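// For reference, a minimal sketch of how the helpers above combine in a scope
// test, mirroring the user_scopes test in tests/scopes.rs (assumes a dummy-data
// environment and the Scopes type from this patch):
//
//     let test_env = TestEnvironment::build_with_dummy().await;
//     let req_gen = || TestRequest::get().uri("/v2/user");
//     let (_failure, success) = ScopeTest::new(&test_env)
//         .test(req_gen, Scopes::USER_READ)
//         .await
//         .unwrap();
//     // `success` holds the JSON body returned when the USER_READ scope is present
//
// (The `test` method invoked here is defined next.)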
+ pub async fn test( + &self, + req_gen: T, + success_scopes: Scopes, + ) -> Result<(serde_json::Value, serde_json::Value), String> + where + T: Fn() -> TestRequest, + { + // First, create a PAT with failure scopes + let failure_scopes = self + .failure_scopes + .unwrap_or(Scopes::all() ^ success_scopes); + let access_token_all_others = + create_test_pat(failure_scopes, self.user_id, &self.test_env.db).await; + + // Create a PAT with the success scopes + let access_token = create_test_pat(success_scopes, self.user_id, &self.test_env.db).await; + + // Perform test twice, once with each PAT + // the first time, we expect a 401 (or known failure code) + let req = req_gen() + .append_header(("Authorization", access_token_all_others.as_str())) + .to_request(); + let resp = self.test_env.call(req).await; + + if resp.status().as_u16() != self.expected_failure_code { + return Err(format!( + "Expected failure code {}, got {}", + self.expected_failure_code, + resp.status().as_u16() + )); + } + + let failure_body = if resp.status() == 200 + && resp.headers().contains_key("Content-Type") + && resp.headers().get("Content-Type").unwrap() == "application/json" + { + test::read_body_json(resp).await + } else { + serde_json::Value::Null + }; + + // The second time, we expect a success code + let req = req_gen() + .append_header(("Authorization", access_token.as_str())) + .to_request(); + let resp = self.test_env.call(req).await; + + if !(resp.status().is_success() || resp.status().is_redirection()) { + return Err(format!( + "Expected success code, got {}", + resp.status().as_u16() + )); + } + + let success_body = if resp.status() == 200 + && resp.headers().contains_key("Content-Type") + && resp.headers().get("Content-Type").unwrap() == "application/json" + { + test::read_body_json(resp).await + } else { + serde_json::Value::Null + }; + Ok((failure_body, success_body)) + } +} diff --git a/tests/files/dummy-project-alpha.jar b/tests/files/dummy-project-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..61f82078c78f2bd825f2f329f4b588a7102a6f0c GIT binary patch literal 680 zcmWIWW@Zs#U|`^2h>mp)*AnHP^9RU#$H>6I52Vu)lZrBv^>Xu5^s^ zX!&h(^kiv4(Uge`7A^D;zqml;*!DT$R_~+Y;{7yE?S4F8!_nkIZ@Sy-zu)$}vQoX) zXr3>?I`hi9*Vm68RrF3>_Mm6gv6o>1d+)un+kI5+@x$<9hK+*GySBwkEK&C3%gkXl zRzDtC^kd%qn=@D!o%$4!e0oZcuch7g8ynjbj{WNQ);y87IdYn7wVYB=(XCDECv)?j zzu5F=d6&{>`Li*5OwMuYPPiiaZGHQ(#%+Q^!Y=||cJY=RI`O2Dt9g^K*w(zjuK32! zH9H?yHka&nl{{T_t53g@ z4uA*Lc8j%PlPyd>?*Il+A~2vp!RPB5;;8HC=ce!L=;!I?8XThM>lWMRnBU|e(E48G z5Bs|`b)Ai&J7-PvetB@lJ$H#SD(~K;9dP+q8z>yPXW#qT=Ih-a_We5eXX3XvOV8*$ z$^6u-{`6Q#*W1>#ZLHkiT? 
z-!a{6sGDdxJHF%!d(Mh85o|&3x{LcVC#n4p@MdHZ0s9Jjq5yiAfssK0L;(R>5<%9C gEucVJfMEs%nm`I$umyOtvVr88fUq7&3p0Uu01;dcb^rhX literal 0 HcmV?d00001 diff --git a/tests/files/dummy-project-beta.jar b/tests/files/dummy-project-beta.jar new file mode 100644 index 0000000000000000000000000000000000000000..1b072b2076b1e97727e1716a99ff6949a7a2ab6c GIT binary patch literal 678 zcmWIWW@Zs#U|`^2NQrd~_uaZt`xlV+hLM4RA4sPqCKY8S>*eOB=w%h>=k*5c&O2y3cm4FHjPuDZUVhQvOm~(%w2YeY zKO!*Y-{G0P)5AV@h-vzCVqRf^o-7v%ul`Q zPmhIky=_g~#>)N8wSG_e$5_VM;;VliOZs+?bspEgJm!(raHT_Wg- zn#NiFex1dZ6AlY_JYJ?-a0^?E>>6LU%b9n;N* zx`~#v<4dlv=d3sr!4~AMySOiNlG^_OZ$>5&u&=Nu37~fw7#S2m6cC^#5M<5R0t%!B c7-m4A4y3RJTYxt!8%UlB2(PROJECTS_NAMESPACE, 1000) - .await - .unwrap() - .is_none()); + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_project_id = &test_env.dummy.as_ref().unwrap().alpha_project_id; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id; + let alpha_project_slug = &test_env.dummy.as_ref().unwrap().alpha_project_slug; + let alpha_version_id = &test_env.dummy.as_ref().unwrap().alpha_version_id; // Perform request on dummy data let req = test::TestRequest::get() - .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}")) + .uri(&format!("/v2/project/{alpha_project_id}")) .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = test_env.call(req).await; @@ -33,36 +29,36 @@ async fn test_get_project() { let body: serde_json::Value = test::read_body_json(resp).await; assert_eq!(status, 200); - assert_eq!(body["id"], json!(PROJECT_ALPHA_PROJECT_ID)); - assert_eq!(body["slug"], json!("testslug")); + assert_eq!(body["id"], json!(alpha_project_id)); + assert_eq!(body["slug"], json!(alpha_project_slug)); let versions = body["versions"].as_array().unwrap(); assert!(!versions.is_empty()); - assert_eq!(versions[0], json!(PROJECT_ALPHA_VERSION_ID)); + assert_eq!(versions[0], json!(alpha_version_id)); // Confirm that the request was cached assert_eq!( test_env .db .redis_pool - .get::(PROJECTS_SLUGS_NAMESPACE, "testslug") + .get::(PROJECTS_SLUGS_NAMESPACE, alpha_project_slug) .await .unwrap(), - Some(1000) + Some(parse_base62(alpha_project_id).unwrap() as i64) ); let cached_project = test_env .db .redis_pool - .get::(PROJECTS_NAMESPACE, 1000) + .get::(PROJECTS_NAMESPACE, parse_base62(alpha_project_id).unwrap()) .await .unwrap() .unwrap(); let cached_project: serde_json::Value = serde_json::from_str(&cached_project).unwrap(); - assert_eq!(cached_project["inner"]["slug"], json!("testslug")); + assert_eq!(cached_project["inner"]["slug"], json!(alpha_project_slug)); // Make the request again, this time it should be cached let req = test::TestRequest::get() - .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}")) + .uri(&format!("/v2/project/{alpha_project_id}")) .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = test_env.call(req).await; @@ -70,8 +66,8 @@ async fn test_get_project() { assert_eq!(status, 200); let body: serde_json::Value = test::read_body_json(resp).await; - assert_eq!(body["id"], json!(PROJECT_ALPHA_PROJECT_ID)); - assert_eq!(body["slug"], json!("testslug")); + assert_eq!(body["id"], json!(alpha_project_id)); + assert_eq!(body["slug"], json!(alpha_project_slug)); // Request should fail on non-existent project let req = test::TestRequest::get() @@ -84,7 +80,7 @@ async fn test_get_project() { // Similarly, request should fail on non-authorized user, on a yet-to-be-approved or hidden project, with a 404 (hiding the existence of the project) let req = 
test::TestRequest::get() - .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + .uri(&format!("/v2/project/{beta_project_id}")) .append_header(("Authorization", ENEMY_USER_PAT)) .to_request(); @@ -98,7 +94,7 @@ async fn test_get_project() { #[actix_rt::test] async fn test_add_remove_project() { // Test setup and dummy data - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // Generate test project data. let mut json_data = json!( @@ -310,12 +306,14 @@ async fn test_add_remove_project() { #[actix_rt::test] pub async fn test_patch_project() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_project_slug = &test_env.dummy.as_ref().unwrap().alpha_project_slug; + let beta_project_slug = &test_env.dummy.as_ref().unwrap().beta_project_slug; // First, we do some patch requests that should fail. // Failure because the user is not authorized. let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", ENEMY_USER_PAT)) .set_json(json!({ "title": "Test_Add_Project project - test 1", @@ -327,7 +325,7 @@ pub async fn test_patch_project() { // Failure because we are setting URL fields to invalid urls. for url_type in ["issues_url", "source_url", "wiki_url", "discord_url"] { let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ url_type: "w.fake.url", @@ -340,7 +338,7 @@ pub async fn test_patch_project() { // Failure because these are illegal requested statuses for a normal user. for req in ["unknown", "processing", "withheld", "scheduled"] { let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "requested_status": req, @@ -353,7 +351,7 @@ pub async fn test_patch_project() { // Failure because these should not be able to be set by a non-mod for key in ["moderation_message", "moderation_message_body"] { let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ key: "test", @@ -364,7 +362,7 @@ pub async fn test_patch_project() { // (should work for a mod, though) let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", MOD_USER_PAT)) .set_json(json!({ key: "test", @@ -376,18 +374,18 @@ pub async fn test_patch_project() { // Failure because the slug is already taken. let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "slug": "testslug2", // the other dummy project has this slug + "slug": beta_project_slug, // the other dummy project has this slug })) .to_request(); let resp = test_env.call(req).await; assert_eq!(resp.status(), 400); - // Not allowed to directly set status, as 'testslug2' (the other project) is "processing" and cannot have its status changed like this. + // Not allowed to directly set status, as 'beta_project_slug' (the other project) is "processing" and cannot have its status changed like this. 
let req = test::TestRequest::patch() - .uri("/v2/project/testslug2") + .uri(&format!("/v2/project/{beta_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "status": "private" @@ -398,7 +396,7 @@ pub async fn test_patch_project() { // Sucessful request to patch many fields. let req = test::TestRequest::patch() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "slug": "newslug", @@ -424,7 +422,7 @@ pub async fn test_patch_project() { // Old slug no longer works let req = test::TestRequest::get() - .uri("/v2/project/testslug") + .uri(&format!("/v2/project/{alpha_project_slug}")) .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = test_env.call(req).await; diff --git a/tests/scopes.rs b/tests/scopes.rs index 9145461a..806905ab 100644 --- a/tests/scopes.rs +++ b/tests/scopes.rs @@ -1,14 +1,11 @@ use actix_web::test::{self, TestRequest}; use bytes::Bytes; use chrono::{Duration, Utc}; -use common::{actix::AppendsMultipart, database::PROJECT_ALPHA_THREAD_ID}; +use common::actix::AppendsMultipart; use labrinth::models::pats::Scopes; use serde_json::json; -use crate::common::{ - database::*, - environment::{ScopeTest, TestEnvironment}, -}; +use crate::common::{database::*, environment::TestEnvironment, scopes::ScopeTest}; // importing common module. mod common; @@ -23,7 +20,7 @@ mod common; #[actix_rt::test] async fn user_scopes() { // Test setup and dummy data - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // User reading let read_user = Scopes::USER_READ; @@ -74,7 +71,7 @@ async fn user_scopes() { .unwrap(); // User payout info writing - let failure_write_user_payout = Scopes::ALL ^ Scopes::PAYOUTS_WRITE; // Failure case should include USER_WRITE + let failure_write_user_payout = Scopes::all() ^ Scopes::PAYOUTS_WRITE; // Failure case should include USER_WRITE let write_user_payout = Scopes::USER_WRITE | Scopes::PAYOUTS_WRITE; let req_gen = || { TestRequest::patch().uri("/v2/user/user").set_json(json!( { @@ -108,12 +105,13 @@ async fn user_scopes() { // Notifications #[actix_rt::test] pub async fn notifications_scopes() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id.clone(); // We will invite user 'friend' to project team, and use that as a notification // Get notifications let req = TestRequest::post() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")) + .uri(&format!("/v2/team/{alpha_team_id}/members")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!( { "user_id": FRIEND_USER_ID // friend @@ -185,7 +183,7 @@ pub async fn notifications_scopes() { // Mass notification delete // We invite mod, get the notification ID, and do mass delete using that let req = test::TestRequest::post() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")) + .uri(&format!("/v2/team/{alpha_team_id}/members")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!( { "user_id": MOD_USER_ID // mod @@ -221,7 +219,7 @@ pub async fn notifications_scopes() { // Project version creation scopes #[actix_rt::test] pub async fn project_version_create_scopes() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // Create project let create_project = Scopes::PROJECT_CREATE; @@ -320,24 +318,24 
@@ pub async fn project_version_create_scopes() { // Project management scopes #[actix_rt::test] pub async fn project_version_reads_scopes() { - let test_env = TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); + let beta_version_id = &test_env.dummy.as_ref().unwrap().beta_version_id.clone(); + let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id.clone(); + let beta_file_hash = &test_env.dummy.as_ref().unwrap().beta_file_hash.clone(); // Project reading // Uses 404 as the expected failure code (or 200 and an empty list for mass reads) let read_project = Scopes::PROJECT_READ; - let req_gen = - || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")); + let req_gen = || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project) .await .unwrap(); - let req_gen = || { - test::TestRequest::get().uri(&format!( - "/v2/project/{PROJECT_BETA_PROJECT_ID}/dependencies" - )) - }; + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}/dependencies")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project) @@ -347,7 +345,7 @@ pub async fn project_version_reads_scopes() { let req_gen = || { test::TestRequest::get().uri(&format!( "/v2/projects?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{PROJECT_BETA_PROJECT_ID}\"")) + uri = urlencoding::encode(&format!("\"{beta_project_id}\"")) )) }; let (failure, success) = ScopeTest::new(&test_env) @@ -360,7 +358,7 @@ pub async fn project_version_reads_scopes() { // Team project reading let req_gen = - || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/members")); + || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}/members")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project) @@ -370,8 +368,7 @@ pub async fn project_version_reads_scopes() { // Get team members // In this case, as these are public endpoints, logging in only is relevant to showing permissions // So for our test project (with 1 user, 'user') we will check the permissions before and after having the scope. 
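// For reference, the pattern used on these public team endpoints, as defined in
// tests/common/scopes.rs: with a 200 failure code both requests succeed, and the
// test instead compares the two JSON bodies.
//
//     let (failure, success) = ScopeTest::new(&test_env)
//         .with_failure_code(200)
//         .test(req_gen, read_project)
//         .await
//         .unwrap();
//     // e.g. member permissions would only appear in `success` (hypothetical check):
//     // assert!(failure[0]["permissions"].is_null() && !success[0]["permissions"].is_null());
//
// (The original req_gen for this check continues below.)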
- let req_gen = - || test::TestRequest::get().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members")); + let req_gen = || test::TestRequest::get().uri(&format!("/v2/team/{alpha_team_id}/members")); let (failure, success) = ScopeTest::new(&test_env) .with_failure_code(200) .test(req_gen, read_project) @@ -383,7 +380,7 @@ pub async fn project_version_reads_scopes() { let req_gen = || { test::TestRequest::get().uri(&format!( "/v2/teams?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{PROJECT_ALPHA_TEAM_ID}\"")) + uri = urlencoding::encode(&format!("\"{alpha_team_id}\"")) )) }; let (failure, success) = ScopeTest::new(&test_env) @@ -422,7 +419,7 @@ pub async fn project_version_reads_scopes() { // Project metadata reading let req_gen = || { test::TestRequest::get().uri(&format!( - "/maven/maven/modrinth/{PROJECT_BETA_PROJECT_ID}/maven-metadata.xml" + "/maven/maven/modrinth/{beta_project_id}/maven-metadata.xml" )) }; ScopeTest::new(&test_env) @@ -435,7 +432,7 @@ pub async fn project_version_reads_scopes() { // First, set version to hidden (which is when the scope is required to read it) let read_version = Scopes::VERSION_READ; let req = test::TestRequest::patch() - .uri(&format!("/v2/version/{PROJECT_BETA_VERSION_ID}")) + .uri(&format!("/v2/version/{beta_version_id}")) .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ "status": "draft" @@ -444,20 +441,15 @@ pub async fn project_version_reads_scopes() { let resp = test_env.call(req).await; assert_eq!(resp.status(), 204); - let req_gen = || { - test::TestRequest::get().uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}")) - }; + let req_gen = || test::TestRequest::get().uri(&format!("/v2/version_file/{beta_file_hash}")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_version) .await .unwrap(); - let req_gen = || { - test::TestRequest::get().uri(&format!( - "/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/download" - )) - }; + let req_gen = + || test::TestRequest::get().uri(&format!("/v2/version_file/{beta_file_hash}/download")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_version) @@ -468,7 +460,7 @@ pub async fn project_version_reads_scopes() { // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope // let req_gen = || { // test::TestRequest::post() - // .uri(&format!("/v2/version_file/{PROJECT_BETA_THREAD_FILE_HASH}/update")) + // .uri(&format!("/v2/version_file/{beta_file_hash}/update")) // .set_json(json!({})) // }; // ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_version).await.unwrap(); @@ -478,7 +470,7 @@ pub async fn project_version_reads_scopes() { test::TestRequest::post() .uri("/v2/version_files") .set_json(json!({ - "hashes": [PROJECT_BETA_THREAD_FILE_HASH] + "hashes": [beta_file_hash] })) }; let (failure, success) = ScopeTest::new(&test_env) @@ -486,14 +478,8 @@ pub async fn project_version_reads_scopes() { .test(req_gen, read_version) .await .unwrap(); - assert!(!failure - .as_object() - .unwrap() - .contains_key(PROJECT_BETA_THREAD_FILE_HASH)); - assert!(success - .as_object() - .unwrap() - .contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + assert!(!failure.as_object().unwrap().contains_key(beta_file_hash)); + assert!(success.as_object().unwrap().contains_key(beta_file_hash)); // Update version file // TODO: Should this be /POST? 
Looks like /GET @@ -504,13 +490,13 @@ pub async fn project_version_reads_scopes() { // .uri(&format!("/v2/version_files/update_individual")) // .set_json(json!({ // "hashes": [{ - // "hash": PROJECT_BETA_THREAD_FILE_HASH, + // "hash": beta_file_hash, // }] // })) // }; // let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); - // assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); - // assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + // assert!(!failure.as_object().unwrap().contains_key(beta_file_hash)); + // assert!(success.as_object().unwrap().contains_key(beta_file_hash)); // Update version file // TODO: this scope doesn't actually affect anything, because the Project::get_id contained within disallows hidden versions, which is the point of this scope @@ -518,17 +504,17 @@ pub async fn project_version_reads_scopes() { // test::TestRequest::post() // .uri(&format!("/v2/version_files/update")) // .set_json(json!({ - // "hashes": [PROJECT_BETA_THREAD_FILE_HASH] + // "hashes": [beta_file_hash] // })) // }; // let (failure, success) = ScopeTest::new(&test_env).with_failure_code(200).test(req_gen, read_version).await.unwrap(); - // assert!(!failure.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); - // assert!(success.as_object().unwrap().contains_key(PROJECT_BETA_THREAD_FILE_HASH)); + // assert!(!failure.as_object().unwrap().contains_key(beta_file_hash)); + // assert!(success.as_object().unwrap().contains_key(beta_file_hash)); // Both project and version reading let read_project_and_version = Scopes::PROJECT_READ | Scopes::VERSION_READ; let req_gen = - || test::TestRequest::get().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version")); + || test::TestRequest::get().uri(&format!("/v2/project/{beta_project_id}/version")); ScopeTest::new(&test_env) .with_failure_code(404) .test(req_gen, read_project_and_version) @@ -538,7 +524,7 @@ pub async fn project_version_reads_scopes() { // TODO: fails for the same reason as above // let req_gen = || { // test::TestRequest::get() - // .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/version/{PROJECT_BETA_VERSION_ID}")) + // .uri(&format!("/v2/project/{beta_project_id}/version/{beta_version_id}")) // }; // ScopeTest::new(&test_env).with_failure_code(404).test(req_gen, read_project_and_version).await.unwrap(); @@ -549,13 +535,16 @@ pub async fn project_version_reads_scopes() { // Project writing #[actix_rt::test] pub async fn project_write_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); + let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id.clone(); // Projects writing let write_project = Scopes::PROJECT_WRITE; let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + .uri(&format!("/v2/project/{beta_project_id}")) .set_json(json!( { "title": "test_project_version_write_scopes Title", @@ -571,7 +560,7 @@ pub async fn project_write_scopes() { test::TestRequest::patch() .uri(&format!( "/v2/projects?ids=[{uri}]", - uri = urlencoding::encode(&format!("\"{PROJECT_BETA_PROJECT_ID}\"")) + uri = urlencoding::encode(&format!("\"{beta_project_id}\"")) )) .set_json(json!( { @@ -584,9 +573,20 @@ pub async fn project_write_scopes() { .await .unwrap(); + // Approve beta 
as private so we can schedule it
+    let req = test::TestRequest::patch()
+        .uri(&format!("/v2/project/{beta_project_id}"))
+        .append_header(("Authorization", MOD_USER_PAT))
+        .set_json(json!({
+            "status": "private"
+        }))
+        .to_request();
+    let resp = test_env.call(req).await;
+    assert_eq!(resp.status(), 204);
+
     let req_gen = || {
         test::TestRequest::post()
-            .uri(&format!("/v2/project/{PROJECT_ALPHA_PROJECT_ID}/schedule")) // PROJECT_ALPHA_PROJECT_ID is an *approved* project, so we can schedule it
+            .uri(&format!("/v2/project/{beta_project_id}/schedule")) // beta_project_id is an unpublished project, so we can schedule it
             .set_json(json!(
                 {
                     "requested_status": "private",
@@ -602,9 +602,7 @@ pub async fn project_write_scopes() {
     // Icons and gallery images
     let req_gen = || {
         test::TestRequest::patch()
-            .uri(&format!(
-                "/v2/project/{PROJECT_BETA_PROJECT_ID}/icon?ext=png"
-            ))
+            .uri(&format!("/v2/project/{beta_project_id}/icon?ext=png"))
             .set_payload(Bytes::from(
                 include_bytes!("../tests/files/200x200.png") as &[u8]
             ))
@@ -615,7 +613,7 @@ pub async fn project_write_scopes() {
         .unwrap();
 
     let req_gen =
-        || test::TestRequest::delete().uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}/icon"));
+        || test::TestRequest::delete().uri(&format!("/v2/project/{beta_project_id}/icon"));
     ScopeTest::new(&test_env)
         .test(req_gen, write_project)
         .await
@@ -624,7 +622,7 @@ pub async fn project_write_scopes() {
     let req_gen = || {
         test::TestRequest::post()
             .uri(&format!(
-                "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?ext=png&featured=true"
+                "/v2/project/{beta_project_id}/gallery?ext=png&featured=true"
             ))
             .set_payload(Bytes::from(
                 include_bytes!("../tests/files/200x200.png") as &[u8]
@@ -637,7 +635,7 @@ pub async fn project_write_scopes() {
 
     // Get project, as we need the gallery image url
     let req_gen = test::TestRequest::get()
-        .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}"))
+        .uri(&format!("/v2/project/{beta_project_id}"))
         .append_header(("Authorization", USER_USER_PAT))
         .to_request();
     let resp = test_env.call(req_gen).await;
@@ -646,7 +644,7 @@ pub async fn project_write_scopes() {
 
     let req_gen = || {
         test::TestRequest::patch().uri(&format!(
-            "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}"
+            "/v2/project/{beta_project_id}/gallery?url={gallery_url}"
         ))
     };
     ScopeTest::new(&test_env)
@@ -656,7 +654,7 @@ pub async fn project_write_scopes() {
 
     let req_gen = || {
         test::TestRequest::delete().uri(&format!(
-            "/v2/project/{PROJECT_BETA_PROJECT_ID}/gallery?url={gallery_url}"
+            "/v2/project/{beta_project_id}/gallery?url={gallery_url}"
         ))
     };
     ScopeTest::new(&test_env)
@@ -667,7 +665,7 @@ pub async fn project_write_scopes() {
     // Team scopes - add user 'friend'
     let req_gen = || {
         test::TestRequest::post()
-            .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/members"))
+            .uri(&format!("/v2/team/{alpha_team_id}/members"))
             .set_json(json!({
                 "user_id": FRIEND_USER_ID
             }))
@@ -678,8 +676,7 @@ pub async fn project_write_scopes() {
         .unwrap();
 
     // Accept team invite as 'friend'
-    let req_gen =
-        || test::TestRequest::post().uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/join"));
+    let req_gen = || test::TestRequest::post().uri(&format!("/v2/team/{alpha_team_id}/join"));
     ScopeTest::new(&test_env)
         .with_user_id(FRIEND_USER_ID_PARSED)
         .test(req_gen, write_project)
@@ -690,7 +687,7 @@ pub async fn project_write_scopes() {
     let req_gen = || {
         test::TestRequest::patch()
             .uri(&format!(
-                "/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{FRIEND_USER_ID}"
+                "/v2/team/{alpha_team_id}/members/{FRIEND_USER_ID}"
             ))
             .set_json(json!({
"permissions": 1 @@ -704,7 +701,7 @@ pub async fn project_write_scopes() { // Transfer ownership to 'friend' let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/team/{PROJECT_ALPHA_TEAM_ID}/owner")) + .uri(&format!("/v2/team/{alpha_team_id}/owner")) .set_json(json!({ "user_id": FRIEND_USER_ID })) @@ -716,9 +713,7 @@ pub async fn project_write_scopes() { // Now as 'friend', delete 'user' let req_gen = || { - test::TestRequest::delete().uri(&format!( - "/v2/team/{PROJECT_ALPHA_TEAM_ID}/members/{USER_USER_ID}" - )) + test::TestRequest::delete().uri(&format!("/v2/team/{alpha_team_id}/members/{USER_USER_ID}")) }; ScopeTest::new(&test_env) .with_user_id(FRIEND_USER_ID_PARSED) @@ -733,7 +728,7 @@ pub async fn project_write_scopes() { // let delete_version = Scopes::PROJECT_DELETE; // let req_gen = || { // test::TestRequest::delete() - // .uri(&format!("/v2/project/{PROJECT_BETA_PROJECT_ID}")) + // .uri(&format!("/v2/project/{beta_project_id}")) // }; // ScopeTest::new(&test_env).test(req_gen, delete_version).await.unwrap(); @@ -744,14 +739,29 @@ pub async fn project_write_scopes() { // Version write #[actix_rt::test] pub async fn version_write_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_version_id = &test_env.dummy.as_ref().unwrap().beta_version_id.clone(); + let beta_version_id = &test_env.dummy.as_ref().unwrap().beta_version_id.clone(); + let alpha_file_hash = &test_env.dummy.as_ref().unwrap().beta_file_hash.clone(); let write_version = Scopes::VERSION_WRITE; + // Approve beta version as private so we can schedule it + let req = test::TestRequest::patch() + .uri(&format!("/v2/version/{beta_version_id}")) + .append_header(("Authorization", MOD_USER_PAT)) + .set_json(json!({ + "status": "unlisted" + })) + .to_request(); + let resp = test_env.call(req).await; + assert_eq!(resp.status(), 204); + // Schedule version let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}/schedule")) // PROJECT_ALPHA_VERSION_ID is an *approved* version, so we can schedule it + .uri(&format!("/v2/version/{beta_version_id}/schedule")) // beta_version_id is an *approved* version, so we can schedule it .set_json(json!( { "requested_status": "archived", @@ -767,7 +777,7 @@ pub async fn version_write_scopes() { // Patch version let req_gen = || { test::TestRequest::patch() - .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")) + .uri(&format!("/v2/version/{alpha_version_id}")) .set_json(json!( { "version_title": "test_version_write_scopes Title", @@ -810,7 +820,7 @@ pub async fn version_write_scopes() { // Upload version file let req_gen = || { test::TestRequest::post() - .uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}/file")) + .uri(&format!("/v2/version/{alpha_version_id}/file")) .set_multipart(vec![json_segment.clone(), content_segment.clone()]) }; ScopeTest::new(&test_env) @@ -821,9 +831,8 @@ pub async fn version_write_scopes() { // Delete version file // TODO: Should this scope be VERSION_DELETE? 
let req_gen = || { - test::TestRequest::delete().uri(&format!( - "/v2/version_file/{PROJECT_ALPHA_THREAD_FILE_HASH}" - )) // Delete from PROJECT_ALPHA_VERSION_ID, as we uploaded to PROJECT_ALPHA_VERSION_ID and it needs another file + test::TestRequest::delete().uri(&format!("/v2/version_file/{alpha_file_hash}")) + // Delete from alpha_version_id, as we uploaded to alpha_version_id and it needs another file }; ScopeTest::new(&test_env) .test(req_gen, write_version) @@ -832,8 +841,7 @@ pub async fn version_write_scopes() { // Delete version let delete_version = Scopes::VERSION_DELETE; - let req_gen = - || test::TestRequest::delete().uri(&format!("/v2/version/{PROJECT_ALPHA_VERSION_ID}")); + let req_gen = || test::TestRequest::delete().uri(&format!("/v2/version/{alpha_version_id}")); ScopeTest::new(&test_env) .test(req_gen, delete_version) .await @@ -846,14 +854,16 @@ pub async fn version_write_scopes() { // Report scopes #[actix_rt::test] pub async fn report_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); // Create report let report_create = Scopes::REPORT_CREATE; let req_gen = || { test::TestRequest::post().uri("/v2/report").set_json(json!({ "report_type": "copyright", - "item_id": PROJECT_BETA_PROJECT_ID, + "item_id": beta_project_id, "item_type": "project", "body": "This is a reupload of my mod, ", })) @@ -920,11 +930,14 @@ pub async fn report_scopes() { // Thread scopes #[actix_rt::test] pub async fn thread_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_thread_id = &test_env.dummy.as_ref().unwrap().alpha_thread_id.clone(); + let beta_thread_id = &test_env.dummy.as_ref().unwrap().beta_thread_id.clone(); // Thread read let thread_read = Scopes::THREAD_READ; - let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/{PROJECT_ALPHA_THREAD_ID}")); + let req_gen = || test::TestRequest::get().uri(&format!("/v2/thread/{alpha_thread_id}")); ScopeTest::new(&test_env) .test(req_gen, thread_read) .await @@ -941,6 +954,24 @@ pub async fn thread_scopes() { .await .unwrap(); + // Thread write (to also push to moderator inbox) + let thread_write = Scopes::THREAD_WRITE; + let req_gen = || { + test::TestRequest::post() + .uri(&format!("/v2/thread/{beta_thread_id}")) + .set_json(json!({ + "body": { + "type": "text", + "body": "test_thread_scopes Body" + } + })) + }; + ScopeTest::new(&test_env) + .with_user_id(USER_USER_ID_PARSED) + .test(req_gen, thread_write) + .await + .unwrap(); + // Check moderation inbox // Uses moderator PAT, as only moderators can see the moderation inbox let req_gen = || test::TestRequest::get().uri("/v2/thread/inbox"); @@ -961,24 +992,6 @@ pub async fn thread_scopes() { .await .unwrap(); - // Thread write - let thread_write = Scopes::THREAD_WRITE; - let req_gen = || { - test::TestRequest::post() - .uri(&format!("/v2/thread/{thread_id}")) - .set_json(json!({ - "body": { - "type": "text", - "body": "test_thread_scopes Body" - } - })) - }; - ScopeTest::new(&test_env) - .with_user_id(MOD_USER_ID_PARSED) - .test(req_gen, thread_write) - .await - .unwrap(); - // Delete that message // First, get message id let req_gen = test::TestRequest::get() @@ -1005,7 +1018,7 @@ pub async fn thread_scopes() { // Pat scopes #[actix_rt::test] pub async fn pat_scopes() { - let test_env = 
TestEnvironment::new().await; + let test_env = TestEnvironment::build_with_dummy().await; // Pat create let pat_create = Scopes::PAT_CREATE; @@ -1057,7 +1070,9 @@ pub async fn pat_scopes() { // Collection scopes #[actix_rt::test] pub async fn collections_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let alpha_project_id = &test_env.dummy.as_ref().unwrap().alpha_project_id.clone(); // Create collection let collection_create = Scopes::COLLECTION_CREATE; @@ -1067,7 +1082,7 @@ pub async fn collections_scopes() { .set_json(json!({ "title": "Test Collection", "description": "Test Collection Description", - "projects": [PROJECT_ALPHA_PROJECT_ID] + "projects": [alpha_project_id] })) }; let (_, success) = ScopeTest::new(&test_env) @@ -1150,7 +1165,9 @@ pub async fn collections_scopes() { // Organization scopes (and a couple PROJECT_WRITE scopes that are only allowed for orgs) #[actix_rt::test] pub async fn organization_scopes() { - let test_env = TestEnvironment::new().await; + // Test setup and dummy data + let test_env = TestEnvironment::build_with_dummy().await; + let beta_project_id = &test_env.dummy.as_ref().unwrap().beta_project_id.clone(); // Create organization let organization_create = Scopes::ORGANIZATION_CREATE; @@ -1207,11 +1224,11 @@ pub async fn organization_scopes() { test::TestRequest::post() .uri(&format!("/v2/organization/{organization_id}/projects")) .set_json(json!({ - "project_id": PROJECT_BETA_PROJECT_ID + "project_id": beta_project_id })) }; ScopeTest::new(&test_env) - .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE) + .with_failure_scopes(Scopes::all() ^ Scopes::ORGANIZATION_WRITE) .test(req_gen, organization_project_edit) .await .unwrap(); @@ -1271,7 +1288,7 @@ pub async fn organization_scopes() { || test::TestRequest::get().uri(&format!("/v2/organization/{organization_id}/projects")); let (failure, success) = ScopeTest::new(&test_env) .with_failure_code(200) - .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_READ) + .with_failure_scopes(Scopes::all() ^ Scopes::ORGANIZATION_READ) .test(req_gen, organization_project_read) .await .unwrap(); @@ -1281,11 +1298,11 @@ pub async fn organization_scopes() { // remove project (now that we've checked) let req_gen = || { test::TestRequest::delete().uri(&format!( - "/v2/organization/{organization_id}/projects/{PROJECT_BETA_PROJECT_ID}" + "/v2/organization/{organization_id}/projects/{beta_project_id}" )) }; ScopeTest::new(&test_env) - .with_failure_scopes(Scopes::ALL ^ Scopes::ORGANIZATION_WRITE) + .with_failure_scopes(Scopes::all() ^ Scopes::ORGANIZATION_WRITE) .test(req_gen, organization_project_edit) .await .unwrap(); From 79df302c026384ba89b818c6b895f9d160e36b1c Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Thu, 5 Oct 2023 17:55:52 -0700 Subject: [PATCH 15/16] removed comments --- tests/common/database.rs | 39 +++------------------------- tests/files/dummy_data.sql | 53 +------------------------------------- 2 files changed, 4 insertions(+), 88 deletions(-) diff --git a/tests/common/database.rs b/tests/common/database.rs index c1208d51..63535125 100644 --- a/tests/common/database.rs +++ b/tests/common/database.rs @@ -1,12 +1,13 @@ #![allow(dead_code)] use labrinth::database::redis::RedisPool; -use sqlx::{postgres::PgPoolOptions, Executor, PgPool}; +use sqlx::{postgres::PgPoolOptions, PgPool}; use std::time::Duration; use url::Url; // The dummy test database adds a fair bit of 'dummy' data to test with. 
-// These constants are used to refer to that data, and are described here. +// Some constants are used to refer to that data, and are described here. +// The rest can be accessed in the TestEnvironment 'dummy' field. // The user IDs are as follows: pub const ADMIN_USER_ID: &str = "1"; @@ -28,30 +29,6 @@ pub const USER_USER_PAT: &str = "mrp_patuser"; pub const FRIEND_USER_PAT: &str = "mrp_patfriend"; pub const ENEMY_USER_PAT: &str = "mrp_patenemy"; -// There are two test projects. They are both created by user 3 (USER_USER_ID). -// They differ only in that 'ALPHA' is a public, approved project, and 'BETA' is a private, project in queue. -// The same goes for their corresponding versions- one listed, one draft. -// pub const PROJECT_ALPHA_TEAM_ID: &str = "1c"; -// pub const PROJECT_BETA_TEAM_ID: &str = "1d"; - -// pub const PROJECT_ALPHA_PROJECT_ID: &str = "G8"; -// pub const PROJECT_BETA_PROJECT_ID: &str = "G9"; - -// pub const PROJECT_ALPHA_PROJECT_SLUG: &str = "testslug"; -// pub const PROJECT_BETA_PROJECT_SLUG: &str = "testslug2"; - -// pub const alpha_version_id: &str = "Hk"; -// pub const beta_version_id: &str = "Hl"; - -// These are threads created alongside the projects. -// pub const alpha_thread_id: &str = "U"; -// pub const PROJECT_BETA_THREAD_ID: &str = "V"; - -// These are the hashes of the files attached to their versions: they do not reflect a 'real' hash of data. -// This can be used for /version_file/ type endpoints which get a project's data from its hash. -// pub const alpha_file_hash: &str = "000000000"; -// pub const beta_file_hash: &str = "111111111"; - pub struct TemporaryDatabase { pub pool: PgPool, pub redis_pool: RedisPool, @@ -141,16 +118,6 @@ impl TemporaryDatabase { .await .expect("Database deletion failed"); } - - pub async fn add_dummy_data(&self) { - // Adds basic dummy data to the database directly with sql (user, pats) - let pool = &self.pool.clone(); - pool.execute(include_str!("../files/dummy_data.sql")) - .await - .unwrap(); - - // Adds dummy data to the database with sqlx (projects, versions, threads) - } } fn generate_random_database_name() -> String { diff --git a/tests/files/dummy_data.sql b/tests/files/dummy_data.sql index c80a55af..59391f48 100644 --- a/tests/files/dummy_data.sql +++ b/tests/files/dummy_data.sql @@ -33,55 +33,4 @@ INSERT INTO categories (id, category, project_type) VALUES (3, 'economy', 1); INSERT INTO categories (id, category, project_type) VALUES (4, 'combat', 2); INSERT INTO categories (id, category, project_type) VALUES (5, 'decoration', 2); -INSERT INTO categories (id, category, project_type) VALUES (6, 'economy', 2); - --- -- Inserts 2 dummy projects for testing, with slight differences --- ------------------------------------------------------------ --- INSERT INTO teams (id) VALUES (100); -- ID: 100, 1c --- INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (200, 100, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); - --- -- ID: 1000, G8 --- -- This project is approved, viewable --- INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) --- VALUES (1000, 100, 'Test Mod', 'Test mod description', 'Test mod body', timezone('utc', now()), 0, 'approved', 'approved', 1, 2, 'MIT', 'testslug', 1, 'monetized'); - --- -- ID: 1100, Hk --- -- This version is listed, viewable --- INSERT INTO versions ( id, mod_id, author_id, name, version_number, 
changelog, date_published, downloads, version_type, featured, status) --- VALUES (1100, 1000, 3, 'v1', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'listed'); - --- INSERT INTO loaders_versions (loader_id, version_id) VALUES (1, 1100); --- INSERT INTO game_versions_versions (game_version_id, joining_version_id) VALUES (20000, 1100); - --- -- not real hash or file --- INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) --- VALUES (800, 1100, 'http://www.url.to/myfile.jar', 'myfile.jar', true, 1, 'required-resource-pack'); --- INSERT INTO hashes (file_id, algorithm, hash) VALUES (800, 'sha1', '000000000'); - --- -- ID: 30, U --- INSERT INTO threads (id, thread_type, mod_id, report_id) VALUES (30, 'project', 1000, null); - --- ------------------------------------------------------------ --- INSERT INTO teams (id) VALUES (101); -- ID: 101, 1d --- INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted, payouts_split, ordering) VALUES (201, 101, 3, 'Owner', B'1111111111'::BIGINT, true, 100.0, 0); - --- -- ID: 1001, G9 --- -- This project is processing, and therefore not publically viewable --- INSERT INTO mods (id, team_id, title, description, body, published, downloads, status, requested_status, client_side, server_side, license, slug, project_type, monetization_status) --- VALUES (1001, 101, 'Test Mod 2', 'Test mod description 2', 'Test mod body 2', timezone('utc', now()), 0, 'processing', 'approved', 1, 2, 'MIT', 'testslug2', 1, 'monetized'); - --- -- ID: 1101, Hl --- -- This version is a draft, and therefore not publically viewable --- INSERT INTO versions ( id, mod_id, author_id, name, version_number, changelog, date_published, downloads, version_type, featured, status) --- VALUES (1101, 1001, 3, 'v1.0', 'v1.2.1', 'No changes', timezone('utc', now()), 0,'released', true, 'draft'); - --- INSERT INTO loaders_versions (loader_id, version_id) VALUES (1, 1101); --- INSERT INTO game_versions_versions (game_version_id, joining_version_id) VALUES (20000, 1101); - --- -- not real hash or file --- INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type) --- VALUES (801, 1101, 'http://www.url.to/myfile2.jar', 'myfile2.jar', true, 1, 'required-resource-pack'); --- INSERT INTO hashes (file_id, algorithm, hash) VALUES (801, 'sha1', '111111111'); - --- -- ID: 31, V --- INSERT INTO threads (id, thread_type, mod_id, report_id, show_in_mod_inbox) VALUES (31, 'project', 1001, null, true); \ No newline at end of file +INSERT INTO categories (id, category, project_type) VALUES (6, 'economy', 2); \ No newline at end of file From a2ad467ef985aef25065a717ceb99a2ed57baded Mon Sep 17 00:00:00 2001 From: Wyatt Verchere Date: Fri, 6 Oct 2023 09:27:25 -0700 Subject: [PATCH 16/16] delete revs --- src/database/models/organization_item.rs | 16 +++++++++------- src/database/models/pat_item.rs | 12 ++++++------ src/database/models/session_item.rs | 23 +++++++++++++---------- src/database/models/user_item.rs | 21 +++++++++++---------- src/database/models/version_item.rs | 22 ++++++++++++---------- 5 files changed, 51 insertions(+), 43 deletions(-) diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs index 5e910bf6..15e880cd 100644 --- a/src/database/models/organization_item.rs +++ b/src/database/models/organization_item.rs @@ -318,13 +318,15 @@ impl Organization { title: Option, redis: &RedisPool, ) -> Result<(), super::DatabaseError> { - redis.delete(ORGANIZATIONS_NAMESPACE, id.0).await?; 
- if let Some(title) = title { - redis - .delete(ORGANIZATIONS_TITLES_NAMESPACE, title.to_lowercase()) - .await?; - } - + redis + .delete_many([ + (ORGANIZATIONS_NAMESPACE, Some(id.0.to_string())), + ( + ORGANIZATIONS_TITLES_NAMESPACE, + title.map(|x| x.to_lowercase()), + ), + ]) + .await?; Ok(()) } } diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index bfb881a9..ac1a17e9 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -237,15 +237,15 @@ impl PersonalAccessToken { return Ok(()); } - for (id, token, user_id) in clear_pats { - redis - .delete_many([ + redis + .delete_many(clear_pats.into_iter().flat_map(|(id, token, user_id)| { + [ (PATS_NAMESPACE, id.map(|i| i.0.to_string())), (PATS_TOKENS_NAMESPACE, token), (PATS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())), - ]) - .await?; - } + ] + })) + .await?; Ok(()) } diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index b14e2dba..3cf7d2b8 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -287,16 +287,19 @@ impl Session { return Ok(()); } - for (id, session, user_id) in clear_sessions { - redis - .delete_many([ - (SESSIONS_NAMESPACE, id.map(|i| i.0.to_string())), - (SESSIONS_IDS_NAMESPACE, session), - (SESSIONS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())), - ]) - .await?; - } - + redis + .delete_many( + clear_sessions + .into_iter() + .flat_map(|(id, session, user_id)| { + [ + (SESSIONS_NAMESPACE, id.map(|i| i.0.to_string())), + (SESSIONS_IDS_NAMESPACE, session), + (SESSIONS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())), + ] + }), + ) + .await?; Ok(()) } diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 04cac010..5f732e2c 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -351,16 +351,17 @@ impl User { user_ids: &[(UserId, Option)], redis: &RedisPool, ) -> Result<(), DatabaseError> { - for (id, username) in user_ids { - redis.delete(USERS_NAMESPACE, id.0).await?; - - if let Some(username) = username { - redis - .delete(USER_USERNAMES_NAMESPACE, username.to_lowercase()) - .await?; - } - } - + redis + .delete_many(user_ids.into_iter().flat_map(|(id, username)| { + [ + (USERS_NAMESPACE, Some(id.0.to_string())), + ( + USER_USERNAMES_NAMESPACE, + username.clone().map(|i| i.to_lowercase()), + ), + ] + })) + .await?; Ok(()) } diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index cf48dfc8..f917b20d 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -7,6 +7,7 @@ use itertools::Itertools; use serde::{Deserialize, Serialize}; use std::cmp::Ordering; use std::collections::HashMap; +use std::iter; const VERSIONS_NAMESPACE: &str = "versions"; const VERSION_FILES_NAMESPACE: &str = "versions_files"; @@ -736,16 +737,17 @@ impl Version { version: &QueryVersion, redis: &RedisPool, ) -> Result<(), DatabaseError> { - redis.delete(VERSIONS_NAMESPACE, version.inner.id.0).await?; - - for file in &version.files { - for (algo, hash) in &file.hashes { - redis - .delete(VERSION_FILES_NAMESPACE, format!("{}_{}", algo, hash)) - .await?; - } - } - + redis + .delete_many( + iter::once((VERSIONS_NAMESPACE, Some(version.inner.id.0.to_string()))).chain( + version.files.iter().flat_map(|file| { + file.hashes.iter().map(|(algo, hash)| { + (VERSION_FILES_NAMESPACE, Some(format!("{}_{}", algo, hash))) + }) + }), + ), + ) + .await?; Ok(()) } }
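For context on the refactor in this final patch: every call site above leans on the `RedisPool::delete_many` helper in src/database/redis.rs (the pool type introduced earlier in this series), and the point of the change is to collapse what used to be one Redis `DEL` per key into a single batched round trip, with `None` ids silently skipped. The helper's real implementation is not part of this excerpt, so the following is only a minimal sketch of the shape these call sites assume; the `meta_namespace` field name, the `"{prefix}_{namespace}:{id}"` key layout, and the boxed error type are illustrative guesses inferred from the diffs, not the crate's confirmed API.

use deadpool_redis::Pool;

pub struct RedisPool {
    pub pool: Pool,
    pub meta_namespace: String, // assumed name for the key prefix
}

impl RedisPool {
    // Deletes a batch of namespaced cache keys with a single DEL command.
    // Entries whose id is `None` are skipped, which is why call sites can
    // pass e.g. `title.map(|x| x.to_lowercase())` without filtering first.
    pub async fn delete_many(
        &self,
        iter: impl IntoIterator<Item = (&str, Option<String>)>,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let mut del = redis::cmd("DEL");
        let mut any = false;
        for (namespace, id) in iter {
            if let Some(id) = id {
                // Assumed key layout: "<prefix>_<namespace>:<id>".
                del.arg(format!("{}_{}:{}", self.meta_namespace, namespace, id));
                any = true;
            }
        }
        if any {
            let mut conn = self.pool.get().await?;
            del.query_async::<_, ()>(&mut conn).await?;
        }
        Ok(())
    }
}

With a helper of this shape, each per-key loop removed in this patch becomes one network round trip, and callers can assemble their key lists with plain iterator combinators (`flat_map`, `iter::once(..).chain(..)`) exactly as the diffs above do.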