diff --git a/migrations/20231005230721_dynamic-fields.sql b/migrations/20231005230721_dynamic-fields.sql
index d85bea57..3887ed74 100644
--- a/migrations/20231005230721_dynamic-fields.sql
+++ b/migrations/20231005230721_dynamic-fields.sql
@@ -1,23 +1,26 @@
+CREATE TABLE loader_field_enums (
+    id serial PRIMARY KEY,
+    enum_name varchar(64) NOT NULL,
+    ordering int NULL,
+    hidable BOOLEAN NOT NULL DEFAULT FALSE,
+    metadata varchar(128)
+);
+
 CREATE TABLE loader_fields (
     id serial PRIMARY KEY,
     loader_id integer REFERENCES loaders ON UPDATE CASCADE NOT NULL,
     field varchar(64) NOT NULL,
+    -- "int", "text", "enum", "bool",
+    -- "array(int)", "array(text)", "array(enum)", "array(bool)"
     field_type varchar(64) NOT NULL,
+    -- only for enum
    enum_type integer REFERENCES loader_field_enums ON UPDATE CASCADE NULL,
     optional BOOLEAN NOT NULL DEFAULT true,
-    -- for int- min/max val, for text- min len, for enum- min items, for bool- nth
+    -- for int- min/max val, for text- min len, for enum- min items, for bool- nothing
     min_val integer NULL,
     max_val integer NULL
 );
 
-CREATE TABLE loader_field_enums (
-    id serial PRIMARY KEY,
-    enum_name varchar(64) NOT NULL,
-    ordering int NULL,
-    hidable BOOLEAN NOT NULL DEFAULT FALSE,
-    metadata
-);
-
 ALTER TABLE loaders ADD COLUMN hidable boolean NOT NULL default false;
 
 CREATE TABLE version_fields (
@@ -28,4 +31,7 @@ CREATE TABLE version_fields (
     int_value integer NULL,
     enum_value integer REFERENCES loader_field_enums ON UPDATE CASCADE NULL,
     string_value text NULL
-);
\ No newline at end of file
+);
+
+-- DROP TABLE side_types;
+-- DROP TABLE game_versions;
diff --git a/src/routes/v2/admin.rs b/src/routes/v2/admin.rs
index cc5bd9e6..a8889277 100644
--- a/src/routes/v2/admin.rs
+++ b/src/routes/v2/admin.rs
@@ -10,6 +10,7 @@ use crate::queue::download::DownloadQueue;
 use crate::queue::maxmind::MaxMindIndexer;
 use crate::queue::session::AuthQueue;
 use crate::routes::ApiError;
+use crate::search::SearchConfig;
 use crate::util::date::get_current_tenths_of_ms;
 use crate::util::guards::admin_key_guard;
 use crate::util::routes::read_from_payload;
@@ -28,7 +29,8 @@ pub fn config(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::scope("admin")
             .service(count_download)
-            .service(trolley_webhook),
+            .service(trolley_webhook)
+            .service(force_reindex)
     );
 }
 
@@ -319,3 +321,13 @@ pub async fn trolley_webhook(
 
     Ok(HttpResponse::NoContent().finish())
 }
+
+#[post("/_force_reindex")]
+pub async fn force_reindex(
+    pool: web::Data<PgPool>,
+    config: web::Data<SearchConfig>,
+) -> Result<HttpResponse, ApiError> {
+    use crate::search::indexing::index_projects;
+    index_projects(pool.as_ref().clone(), &config).await?;
+    Ok(HttpResponse::NoContent().finish())
+}
\ No newline at end of file
diff --git a/tests/common/api_v2.rs b/tests/common/api_v2.rs
index 3bd98576..d182a758 100644
--- a/tests/common/api_v2.rs
+++ b/tests/common/api_v2.rs
@@ -19,6 +19,7 @@ use labrinth::models::{
 use serde_json::json;
 use std::rc::Rc;
 
+#[derive(Clone)]
 pub struct ApiV2 {
     pub test_app: Rc>,
 }
diff --git a/tests/common/database.rs b/tests/common/database.rs
index 483a44d9..40c778f4 100644
--- a/tests/common/database.rs
+++ b/tests/common/database.rs
@@ -70,7 +70,7 @@ impl TemporaryDatabase {
         let pool = PgPoolOptions::new()
             .min_connections(0)
-            .max_connections(4)
+            .max_connections(16)
             .max_lifetime(Some(Duration::from_secs(60 * 60)))
             .connect(&temp_db_url)
             .await
diff --git a/tests/common/dummy_data.rs b/tests/common/dummy_data.rs
index e66a88bc..ccc0a69f 100644
--- a/tests/common/dummy_data.rs
+++ b/tests/common/dummy_data.rs
@@ -1,7 +1,10 @@
+use std::io::{Write, Cursor};
+
 use actix_web::test::{self, TestRequest};
 use labrinth::{models::projects::Project, models::projects::Version};
 use serde_json::json;
 use sqlx::Executor;
+use zip::{write::FileOptions, ZipWriter, CompressionMethod};
 
 use crate::common::{actix::AppendsMultipart, database::USER_USER_PAT};
 
@@ -23,13 +26,110 @@ pub const DUMMY_CATEGORIES: &'static [&str] = &[
 ];
 
 #[allow(dead_code)]
-pub enum DummyJarFile {
+pub enum TestFile {
     DummyProjectAlpha,
     DummyProjectBeta,
     BasicMod,
     BasicModDifferent,
+    // Randomly generates a valid .jar with a random hash.
+    // Unlike the other dummy jar files, this one is not a static file,
+    // and BasicModRandom.bytes() will return a different file each time.
+    BasicModRandom {
+        filename: String,
+        bytes: Vec<u8>,
+    },
+    BasicModpackRandom {
+        filename: String,
+        bytes: Vec<u8>,
+    },
 }
 
+impl TestFile {
+    pub fn build_random_jar() -> Self {
+        let filename = format!("random-mod-{}.jar", rand::random::<u64>());
+
+        let fabric_mod_json = serde_json::json!({
+            "schemaVersion": 1,
+            "id": filename,
+            "version": "1.0.1",
+
+            "name": filename,
+            "description": "Does nothing",
+            "authors": [
+                "user"
+            ],
+            "contact": {
+                "homepage": "https://www.modrinth.com",
+                "sources": "https://www.modrinth.com",
+                "issues": "https://www.modrinth.com"
+            },
+
+            "license": "MIT",
+            "icon": "none.png",
+
+            "environment": "client",
+            "entrypoints": {
+                "main": [
+                    "io.github.modrinth.Modrinth"
+                ]
+            },
+            "depends": {
+                "minecraft": ">=1.20-"
+            }
+        }
+        ).to_string();
+
+        // Create a simulated zip file
+        let mut cursor = Cursor::new(Vec::new());
+        {
+            let mut zip = ZipWriter::new(&mut cursor);
+            zip.start_file("fabric.mod.json", FileOptions::default().compression_method(CompressionMethod::Stored)).unwrap();
+            zip.write_all(fabric_mod_json.as_bytes()).unwrap();
+            zip.finish().unwrap();
+        }
+        let bytes = cursor.into_inner();
+
+        TestFile::BasicModRandom {
+            filename,
+            bytes,
+        }
+    }
+
+    pub fn build_random_mrpack() -> Self {
+        let filename = format!("random-modpack-{}.mrpack", rand::random::<u64>());
+
+        let modrinth_index_json = serde_json::json!({
+            "formatVersion": 1,
+            "game": "minecraft",
+            "versionId": "1.20.1-9.6",
+            "name": filename,
+            "files": [],
+            "dependencies": {
+                "fabric-loader": "0.14.22",
+                "minecraft": "1.20.1"
+            }
+        }
+        ).to_string();
+
+        // Create a simulated zip file
+        let mut cursor = Cursor::new(Vec::new());
+        {
+            let mut zip = ZipWriter::new(&mut cursor);
+            zip.start_file("modrinth.index.json", FileOptions::default().compression_method(CompressionMethod::Stored)).unwrap();
+            zip.write_all(modrinth_index_json.as_bytes()).unwrap();
+            zip.finish().unwrap();
+        }
+        let bytes = cursor.into_inner();
+
+        TestFile::BasicModpackRandom {
+            filename,
+            bytes,
+        }
+    }
+
+}
+
+#[derive(Clone)]
 pub struct DummyData {
     pub alpha_team_id: String,
     pub beta_team_id: String,
@@ -86,7 +186,7 @@ pub async fn add_project_alpha(test_env: &TestEnvironment) -> (Project, Version)
         .v2
         .add_public_project(get_public_project_creation_data(
             "alpha",
-            DummyJarFile::DummyProjectAlpha,
+            TestFile::DummyProjectAlpha,
         ))
         .await
 }
@@ -94,7 +194,7 @@
 pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version) {
     // Adds dummy data to the database with sqlx (projects, versions, threads)
     // Generate test project data.
-    let jar = DummyJarFile::DummyProjectBeta;
+    let jar = TestFile::DummyProjectBeta;
     let json_data = json!(
         {
             "title": "Test Project Beta",
@@ -168,29 +268,45 @@ pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version)
     (project, version)
 }
 
-impl DummyJarFile {
+impl TestFile {
     pub fn filename(&self) -> String {
         match self {
-            DummyJarFile::DummyProjectAlpha => "dummy-project-alpha.jar",
-            DummyJarFile::DummyProjectBeta => "dummy-project-beta.jar",
-            DummyJarFile::BasicMod => "basic-mod.jar",
-            DummyJarFile::BasicModDifferent => "basic-mod-different.jar",
+            TestFile::DummyProjectAlpha => "dummy-project-alpha.jar",
+            TestFile::DummyProjectBeta => "dummy-project-beta.jar",
+            TestFile::BasicMod => "basic-mod.jar",
+            TestFile::BasicModDifferent => "basic-mod-different.jar",
+            TestFile::BasicModRandom { filename, .. } => filename,
+            TestFile::BasicModpackRandom { filename, .. } => filename,
         }
         .to_string()
     }
 
     pub fn bytes(&self) -> Vec<u8> {
         match self {
-            DummyJarFile::DummyProjectAlpha => {
+            TestFile::DummyProjectAlpha => {
                 include_bytes!("../../tests/files/dummy-project-alpha.jar").to_vec()
             }
-            DummyJarFile::DummyProjectBeta => {
+            TestFile::DummyProjectBeta => {
                 include_bytes!("../../tests/files/dummy-project-beta.jar").to_vec()
             }
-            DummyJarFile::BasicMod => include_bytes!("../../tests/files/basic-mod.jar").to_vec(),
-            DummyJarFile::BasicModDifferent => {
+            TestFile::BasicMod => include_bytes!("../../tests/files/basic-mod.jar").to_vec(),
+            TestFile::BasicModDifferent => {
                 include_bytes!("../../tests/files/basic-mod-different.jar").to_vec()
-            }
+            },
+            TestFile::BasicModRandom { bytes, .. } => bytes.clone(),
+            TestFile::BasicModpackRandom { bytes, .. } => bytes.clone(),
         }
     }
+
+    pub fn project_type(&self) -> String {
+        match self {
+            TestFile::DummyProjectAlpha => "mod",
+            TestFile::DummyProjectBeta => "mod",
+            TestFile::BasicMod => "mod",
+            TestFile::BasicModDifferent => "mod",
+            TestFile::BasicModRandom { .. } => "mod",
+
+            TestFile::BasicModpackRandom { .. } => "modpack",
+        }.to_string()
+    }
 }
diff --git a/tests/common/environment.rs b/tests/common/environment.rs
index e3aa2ca9..a1a242e7 100644
--- a/tests/common/environment.rs
+++ b/tests/common/environment.rs
@@ -29,6 +29,7 @@ where
 // Must be called in an #[actix_rt::test] context. It also simulates a
 // temporary sqlx db like #[sqlx::test] would.
 // Use .call(req) on it directly to make a test call as if test::call_service(req) were being used.
+#[derive(Clone)]
 pub struct TestEnvironment {
     test_app: Rc>,
     pub db: TemporaryDatabase,
diff --git a/tests/common/request_data.rs b/tests/common/request_data.rs
index 85dce64b..d0342b86 100644
--- a/tests/common/request_data.rs
+++ b/tests/common/request_data.rs
@@ -1,22 +1,36 @@
 use serde_json::json;
 
-use super::{actix::MultipartSegment, dummy_data::DummyJarFile};
+use super::{actix::MultipartSegment, dummy_data::TestFile};
 use crate::common::actix::MultipartSegmentData;
 
 pub struct ProjectCreationRequestData {
     pub slug: String,
-    pub jar: DummyJarFile,
+    pub jar: TestFile,
     pub segment_data: Vec<MultipartSegment>,
 }
 
 pub fn get_public_project_creation_data(
     slug: &str,
-    jar: DummyJarFile,
+    jar: TestFile,
 ) -> ProjectCreationRequestData {
-    let json_data = json!(
+    let json_data = get_public_project_creation_data_json(slug, &jar);
+    let multipart_data = get_public_project_creation_data_multipart(&json_data, &jar);
+    ProjectCreationRequestData {
+        slug: slug.to_string(),
+        jar,
+        segment_data: multipart_data,
+    }
+}
+
+pub fn get_public_project_creation_data_json(
+    slug: &str,
+    jar: &TestFile,
+) -> serde_json::Value {
+    json!(
         {
             "title": format!("Test Project {slug}"),
             "slug": slug,
+            "project_type": jar.project_type(),
             "description": "A dummy project for testing with.",
             "body": "This project is approved, and versions are listed.",
             "client_side": "required",
@@ -32,16 +46,21 @@
             "featured": true
         }],
         "categories": [],
-        "license_id": "MIT"
+        "license_id": "MIT",
     }
-    );
+    )
+}
 
+pub fn get_public_project_creation_data_multipart(
+    json_data: &serde_json::Value,
+    jar: &TestFile,
+) -> Vec<MultipartSegment> {
     // Basic json
     let json_segment = MultipartSegment {
         name: "data".to_string(),
         filename: None,
         content_type: Some("application/json".to_string()),
-        data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
+        data: MultipartSegmentData::Text(serde_json::to_string(json_data).unwrap()),
     };
 
     // Basic file
@@ -52,9 +71,5 @@
         data: MultipartSegmentData::Binary(jar.bytes()),
     };
 
-    ProjectCreationRequestData {
-        slug: slug.to_string(),
-        jar,
-        segment_data: vec![json_segment.clone(), file_segment.clone()],
-    }
-}
+    vec![json_segment, file_segment]
+}
\ No newline at end of file
diff --git a/tests/search.rs b/tests/search.rs
new file mode 100644
index 00000000..07615a26
--- /dev/null
+++ b/tests/search.rs
@@ -0,0 +1,216 @@
+use std::collections::HashMap;
+use std::sync::Arc;
+use actix_web::test;
+use common::dummy_data::TestFile;
+use common::request_data;
+use labrinth::models::ids::base62_impl::parse_base62;
+use labrinth::models::projects::Project;
+use labrinth::search::SearchResults;
+use serde_json::json;
+use futures::stream::StreamExt;
+use crate::common::database::*;
+use crate::common::dummy_data::DUMMY_CATEGORIES;
+use crate::common::{actix::AppendsMultipart, environment::TestEnvironment};
+
+// importing common module.
+mod common;
+
+#[actix_rt::test]
+async fn search_projects() {
+    // Test setup and dummy data
+    let test_env = TestEnvironment::build_with_dummy().await;
+    let test_name = test_env.db.database_name.clone();
+    // Add dummy projects of various categories for searchability
+    let mut project_creation_futures = vec![];
+
+    let create_async_future = |id: u64, pat: &'static str, is_modpack : bool, modify_json : Box<dyn Fn(&mut serde_json::Value)>| {
+        let test_env = test_env.clone();
+        let slug = format!("{test_name}-searchable-project-{id}");
+
+        let jar = if is_modpack {
+            TestFile::build_random_mrpack()
+        } else {
+            TestFile::build_random_jar()
+        };
+        let mut basic_project_json = request_data::get_public_project_creation_data_json(&slug, &jar);
+        modify_json(&mut basic_project_json);
+
+        let basic_project_multipart =
+            request_data::get_public_project_creation_data_multipart(&basic_project_json, &jar);
+        // Add a project- simple, should work.
+        let req = test::TestRequest::post()
+            .uri("/v2/project")
+            .append_header(("Authorization", pat))
+            .set_multipart(basic_project_multipart)
+            .to_request();
+
+        async move {
+            let resp = test_env.call(req).await;
+            assert_eq!(resp.status(), 200);
+
+            let project : Project = test::read_body_json(resp).await;
+
+            // Approve, so that the project is searchable
+            let req = test::TestRequest::patch()
+                .uri(&format!("/v2/project/{project_id}", project_id = project.id))
+                .append_header(("Authorization", MOD_USER_PAT))
+                .set_json(json!({
+                    "status": "approved"
+                }))
+                .to_request();
+
+            let resp = test_env.call(req).await;
+            assert_eq!(resp.status(), 204);
+            (project.id.0, id)
+    }};
+
+    let id = 0;
+    let modify_json = | json : &mut serde_json::Value| {
+        json["categories"] = json!(DUMMY_CATEGORIES[4..6]);
+        json["server_side"] = json!("required");
+        json["license_id"] = json!("LGPL-3.0-or-later");
+    };
+    project_creation_futures.push(create_async_future(id, USER_USER_PAT, false, Box::new(modify_json)));
+
+    let id = 1;
+    let modify_json = | json : &mut serde_json::Value| {
+        json["categories"] = json!(DUMMY_CATEGORIES[0..2]);
+        json["client_side"] = json!("optional");
+    };
+    project_creation_futures.push(create_async_future(id, USER_USER_PAT, false, Box::new(modify_json)));
+
+    let id = 2;
+    let modify_json = | json : &mut serde_json::Value| {
+        json["categories"] = json!(DUMMY_CATEGORIES[0..2]);
+        json["server_side"] = json!("required");
+        json["title"] = json!("Mysterious Project");
+    };
+    project_creation_futures.push(create_async_future(id, USER_USER_PAT, false, Box::new(modify_json)));
+
+    let id = 3;
+    let modify_json = | json : &mut serde_json::Value| {
+        json["categories"] = json!(DUMMY_CATEGORIES[0..3]);
+        json["server_side"] = json!("required");
+        json["initial_versions"][0]["version_number"] = json!("1.2.4");
+        json["title"] = json!("Mysterious Project");
+        json["license_id"] = json!("LicenseRef-All-Rights-Reserved"); // closed source
+    };
+    project_creation_futures.push(create_async_future(id, FRIEND_USER_PAT, false, Box::new(modify_json)));
+
+    let id = 4;
+    let modify_json = | json : &mut serde_json::Value| {
+        json["categories"] = json!(DUMMY_CATEGORIES[0..3]);
+        json["client_side"] = json!("optional");
+        json["initial_versions"][0]["version_number"] = json!("1.2.5");
+    };
+    project_creation_futures.push(create_async_future(id, USER_USER_PAT, true, Box::new(modify_json)));
+
+    let id = 5;
+    let modify_json = | json : &mut serde_json::Value| {
+        json["categories"] = json!(DUMMY_CATEGORIES[5..6]);
+        json["client_side"] = json!("optional");
+        json["initial_versions"][0]["version_number"] = json!("1.2.5");
+        json["license_id"] = json!("LGPL-3.0-or-later");
+    };
+    project_creation_futures.push(create_async_future(id, USER_USER_PAT, false, Box::new(modify_json)));
+
+    let id = 6;
+    let modify_json = | json : &mut serde_json::Value| {
+        json["categories"] = json!(DUMMY_CATEGORIES[5..6]);
+        json["client_side"] = json!("optional");
+        json["server_side"] = json!("required");
+        json["license_id"] = json!("LGPL-3.0-or-later");
+    };
+    project_creation_futures.push(create_async_future(id, FRIEND_USER_PAT, false, Box::new(modify_json)));
+
+    // Await all project creation
+    // Returns a mapping of:
+    // project id -> test id
+    let id_conversion : Arc<HashMap<u64, u64>> = Arc::new(futures::future::join_all(project_creation_futures).await.into_iter().collect());
+
+    // Pairs of:
+    // 1. vec of search facets
+    // 2. expected project ids to be returned by this search
+    let pairs = vec![
+        (json!([
+            ["categories:fabric"]
+        ]), vec![0,1,2,3,4,5,6]),
+        (json!([
+            ["categories:forge"]
+        ]), vec![]),
+        (json!([
+            ["categories:fabric", "categories:forge"]
+        ]), vec![0,1,2,3,4,5,6]),
+        (json!([
+            ["categories:fabric"],
+            ["categories:forge"]
+        ]), vec![]),
+        (json!([
+            ["categories:fabric"],
+            [&format!("categories:{}", DUMMY_CATEGORIES[0])],
+        ]), vec![1,2,3,4]),
+        (json!([
+            ["project_type:modpack"]
+        ]), vec![4]),
+        (json!([
+            ["client_side:required"]
+        ]), vec![0,2,3]),
+        (json!([
+            ["server_side:required"]
+        ]), vec![0,2,3,6]),
+        (json!([
+            ["open_source:true"]
+        ]), vec![0,1,2,4,5,6]),
+        (json!([
+            ["license:MIT"]
+        ]), vec![1,2,4]),
+        (json!([
+            [r#"title:'Mysterious Project'"#]
+        ]), vec![2,3]),
+        (json!([
+            ["author:user"]
+        ]), vec![0,1,2,4,5])
+    ];
+    // TODO: versions, game versions
+
+    // Untested:
+    // - downloads (not varied)
+    // - color (not varied)
+    // - created_timestamp (not varied)
+    // - modified_timestamp (not varied)
+
+    // Forcibly reset the search index
+    let req = test::TestRequest::post()
+        .uri("/v2/admin/_force_reindex")
+        .append_header(("Modrinth-Admin", dotenvy::var("LABRINTH_ADMIN_KEY").unwrap()))
+        .to_request();
+    let resp = test_env.call(req).await;
+    assert_eq!(resp.status(), 204);
+
+    // Test searches
+    let stream = futures::stream::iter(pairs);
+    stream.for_each_concurrent(10, |(facets, mut expected_project_ids)| {
+        let test_env = test_env.clone();
+        let id_conversion = id_conversion.clone();
+        let test_name = test_name.clone();
+        async move {
+            let req = test::TestRequest::get()
+                .uri(&format!("/v2/search?query={test_name}&facets={facets}", facets=urlencoding::encode(&facets.to_string())))
+                .append_header(("Authorization", USER_USER_PAT))
+                .set_json(&facets)
+                .to_request();
+            let resp = test_env.call(req).await;
+            let status = resp.status();
+            assert_eq!(status, 200);
+            let projects : SearchResults = test::read_body_json(resp).await;
+            let mut found_project_ids : Vec<u64> = projects.hits.into_iter().map(|p| id_conversion[&parse_base62(&p.project_id).unwrap()]).collect();
+            expected_project_ids.sort();
+            found_project_ids.sort();
+            assert_eq!(found_project_ids, expected_project_ids);
+        }
+    }).await;
+
+    // Cleanup test db
+    test_env.cleanup().await;
+}
\ No newline at end of file
diff --git a/tests/user.rs b/tests/user.rs
index efb1f920..930f4d11 100644
--- a/tests/user.rs
+++ b/tests/user.rs
@@ -3,7 +3,7 @@ use common::{
     environment::with_test_environment,
 };
 
-use crate::common::{dummy_data::DummyJarFile, request_data::get_public_project_creation_data};
+use crate::common::{dummy_data::TestFile, request_data::get_public_project_creation_data};
 
 mod common;
 
@@ -17,7 +17,7 @@ pub async fn get_user_projects_after_creating_project_returns_new_project() {
     let (project, _) = api
         .add_public_project(get_public_project_creation_data(
             "slug",
-            DummyJarFile::BasicMod,
+            TestFile::BasicMod,
         ))
         .await;
 
@@ -36,7 +36,7 @@ pub async fn get_user_projects_after_deleting_project_shows_removal() {
     let (project, _) = api
         .add_public_project(get_public_project_creation_data(
             "iota",
-            DummyJarFile::BasicMod,
+            TestFile::BasicMod,
         ))
         .await;
     api.get_user_projects_deserialized(USER_USER_ID, USER_USER_PAT)