diff --git a/.sqlx/query-834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937.json b/.sqlx/query-834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937.json
deleted file mode 100644
index e1cc97f1..00000000
--- a/.sqlx/query-834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n                SELECT m.id FROM mods m\n                INNER JOIN team_members tm ON tm.team_id = m.team_id\n                WHERE tm.user_id = $1 AND tm.is_owner = TRUE\n                ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8"
-      ]
-    },
-    "nullable": [
-      false
-    ]
-  },
-  "hash": "834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937"
-}
diff --git a/.sqlx/query-cd564263de068c5e6e4b5f32587c65fa62d431aa0d7130427f27a809457be33e.json b/.sqlx/query-cd564263de068c5e6e4b5f32587c65fa62d431aa0d7130427f27a809457be33e.json
new file mode 100644
index 00000000..8f20c07c
--- /dev/null
+++ b/.sqlx/query-cd564263de068c5e6e4b5f32587c65fa62d431aa0d7130427f27a809457be33e.json
@@ -0,0 +1,15 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n            UPDATE team_members\n            SET user_id = $1\n            WHERE (user_id = $2 AND is_owner = TRUE)\n            ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Int8"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "cd564263de068c5e6e4b5f32587c65fa62d431aa0d7130427f27a809457be33e"
+}
diff --git a/.sqlx/query-f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62.json b/.sqlx/query-f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62.json
deleted file mode 100644
index d24f2e9a..00000000
--- a/.sqlx/query-f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n                UPDATE team_members\n                SET user_id = $1\n                WHERE (user_id = $2 AND is_owner = TRUE)\n                ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62"
-}
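The `.sqlx/query-*.json` files above are sqlx's offline query cache (maintained by sqlx-cli via `cargo sqlx prepare`); each entry's filename and `hash` are derived from the exact query text, so deleting a query — or merely re-indenting one — shows up as a delete/add pair like `f73a…`/`cd56…`. The deleted `834be…` entry is the owned-projects lookup that disappears from `User::remove` below; the kept `UPDATE` corresponds to this invocation in `src/database/models/user_item.rs` in this diff:

```rust
// Ownership transfer this diff keeps: team slots owned by the deleted
// user are repointed at the DELETED_USER sentinel account.
sqlx::query!(
    "
    UPDATE team_members
    SET user_id = $1
    WHERE (user_id = $2 AND is_owner = TRUE)
    ",
    deleted_user as UserId,
    id as UserId,
)
.execute(&mut **transaction)
.await?;
```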
diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs
index 630eba57..b624948c 100644
--- a/src/database/models/project_item.rs
+++ b/src/database/models/project_item.rs
@@ -550,7 +550,7 @@ impl Project {
             .collect();
         let slugs = remaining_strings
             .into_iter()
-            .map(|x| x.to_string().to_lowercase())
+            .map(|x| x.to_lowercase())
             .collect::<Vec<_>>();
 
         let all_version_ids = DashSet::new();
@@ -569,15 +569,18 @@ impl Project {
             .collect::<Vec<_>>()
         )
         .fetch(&mut *exec)
-        .try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
-            let version_id = VersionId(m.id);
-            let date_published = m.date_published;
-            all_version_ids.insert(version_id);
-            acc.entry(ProjectId(m.mod_id))
-                .or_default()
-                .push((version_id, date_published));
-            async move { Ok(acc) }
-        })
+        .try_fold(
+            DashMap::new(),
+            |acc: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
+                let version_id = VersionId(m.id);
+                let date_published = m.date_published;
+                all_version_ids.insert(version_id);
+                acc.entry(ProjectId(m.mod_id))
+                    .or_default()
+                    .push((version_id, date_published));
+                async move { Ok(acc) }
+            },
+        )
         .await?;
 
         let loader_field_ids = DashSet::new();
@@ -592,25 +595,26 @@ impl Project {
             &all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
        )
         .fetch(&mut *exec)
-        .try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<QueryVersionField>>, m| {
-            let qvf = QueryVersionField {
-                version_id: VersionId(m.version_id),
-                field_id: LoaderFieldId(m.field_id),
-                int_value: m.int_value,
-                enum_value: m.enum_value.map(LoaderFieldEnumValueId),
-                string_value: m.string_value,
-            };
-
-            loader_field_ids.insert(LoaderFieldId(m.field_id));
-            if let Some(enum_value) = m.enum_value {
-                loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
-            }
-
-            acc.entry(ProjectId(m.mod_id))
-                .or_default()
-                .push(qvf);
-            async move { Ok(acc) }
-        })
+        .try_fold(
+            DashMap::new(),
+            |acc: DashMap<ProjectId, Vec<QueryVersionField>>, m| {
+                let qvf = QueryVersionField {
+                    version_id: VersionId(m.version_id),
+                    field_id: LoaderFieldId(m.field_id),
+                    int_value: m.int_value,
+                    enum_value: m.enum_value.map(LoaderFieldEnumValueId),
+                    string_value: m.string_value,
+                };
+
+                loader_field_ids.insert(LoaderFieldId(m.field_id));
+                if let Some(enum_value) = m.enum_value {
+                    loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
+                }
+
+                acc.entry(ProjectId(m.mod_id)).or_default().push(qvf);
+                async move { Ok(acc) }
+            },
+        )
         .await?;
 
         let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs
index 799fd027..6f821db8 100644
--- a/src/database/models/user_item.rs
+++ b/src/database/models/user_item.rs
@@ -434,7 +434,6 @@ impl User {
 
     pub async fn remove(
         id: UserId,
-        full: bool,
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
         redis: &RedisPool,
     ) -> Result<Option<()>, DatabaseError> {
@@ -445,38 +444,17 @@ impl User {
 
         let deleted_user: UserId = crate::models::users::DELETED_USER.into();
 
-        if full {
-            let projects: Vec<ProjectId> = sqlx::query!(
-                "
-                SELECT m.id FROM mods m
-                INNER JOIN team_members tm ON tm.team_id = m.team_id
-                WHERE tm.user_id = $1 AND tm.is_owner = TRUE
-                ",
-                id as UserId,
-            )
-            .fetch_many(&mut **transaction)
-            .try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) })
-            .try_collect::<Vec<ProjectId>>()
-            .await?;
-
-            for project_id in projects {
-                let _result =
-                    super::project_item::Project::remove(project_id, transaction, redis)
-                        .await?;
-            }
-        } else {
-            sqlx::query!(
-                "
-                UPDATE team_members
-                SET user_id = $1
-                WHERE (user_id = $2 AND is_owner = TRUE)
-                ",
-                deleted_user as UserId,
-                id as UserId,
-            )
-            .execute(&mut **transaction)
-            .await?;
-        }
+        sqlx::query!(
+            "
+            UPDATE team_members
+            SET user_id = $1
+            WHERE (user_id = $2 AND is_owner = TRUE)
+            ",
+            deleted_user as UserId,
+            id as UserId,
+        )
+        .execute(&mut **transaction)
+        .await?;
 
         sqlx::query!(
             "
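The `user_item.rs` change above is the behavioral core of this diff: `User::remove` loses its `full` flag, so user deletion no longer optionally cascades into `Project::remove` for every project the user owns — the only remaining behavior is the ownership transfer to `DELETED_USER`. Call sites shrink accordingly; both forms below are taken from the v3 route diff later in this patch:

```rust
// Before: the caller chose between partial and full removal at the route layer.
let result = User::remove(
    id,
    removal_type.removal_type == "full",
    &mut transaction,
    &redis,
)
.await?;

// After: one code path — owned team slots are transferred, projects survive.
let result = User::remove(id, &mut transaction, &redis).await?;
```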
diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs
index c7f5afb2..8fc2de5d 100644
--- a/src/database/models/version_item.rs
+++ b/src/database/models/version_item.rs
@@ -540,25 +540,26 @@ impl Version {
             &version_ids_parsed
         )
         .fetch(&mut *exec)
-        .try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<QueryVersionField>>, m| {
-            let qvf = QueryVersionField {
-                version_id: VersionId(m.version_id),
-                field_id: LoaderFieldId(m.field_id),
-                int_value: m.int_value,
-                enum_value: m.enum_value.map(LoaderFieldEnumValueId),
-                string_value: m.string_value,
-            };
-
-            loader_field_ids.insert(LoaderFieldId(m.field_id));
-            if let Some(enum_value) = m.enum_value {
-                loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
-            }
+        .try_fold(
+            DashMap::new(),
+            |acc: DashMap<VersionId, Vec<QueryVersionField>>, m| {
+                let qvf = QueryVersionField {
+                    version_id: VersionId(m.version_id),
+                    field_id: LoaderFieldId(m.field_id),
+                    int_value: m.int_value,
+                    enum_value: m.enum_value.map(LoaderFieldEnumValueId),
+                    string_value: m.string_value,
+                };
 
-            acc.entry(VersionId(m.version_id))
-                .or_default()
-                .push(qvf);
-            async move { Ok(acc) }
-        })
+                loader_field_ids.insert(LoaderFieldId(m.field_id));
+                if let Some(enum_value) = m.enum_value {
+                    loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
+                }
+
+                acc.entry(VersionId(m.version_id)).or_default().push(qvf);
+                async move { Ok(acc) }
+            },
+        )
         .await?;
 
         let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
@@ -692,7 +693,7 @@ impl Version {
             &file_ids.iter().map(|x| x.0).collect::<Vec<_>>()
         )
         .fetch(&mut *exec)
-        .try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<Hash>>, m| {
+        .try_fold(DashMap::new(), |acc: DashMap<VersionId, Vec<Hash>>, m| {
             if let Some(found_hash) = m.hash {
                 let hash = Hash {
                     file_id: FileId(m.file_id),
                     algorithm: m.algorithm,
                     hash: found_hash,
                 };
 
-                let version_id = *reverse_file_map.get(&FileId(m.file_id)).unwrap();
-
-                acc.entry(version_id).or_default().push(hash);
+                if let Some(version_id) = reverse_file_map.get(&FileId(m.file_id)) {
+                    acc.entry(*version_id).or_default().push(hash);
+                }
             }
             async move { Ok(acc) }
         })
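Beyond the rustfmt churn, `version_item.rs` also swaps an `unwrap()` for an `if let`: a hash row whose `file_id` is missing from `reverse_file_map` is now skipped instead of panicking the whole query. A standalone sketch of the pattern (the map contents and ids here are made up for illustration):

```rust
use std::collections::HashMap;

fn main() {
    // Stand-ins for reverse_file_map and a dangling file row.
    let reverse_file_map: HashMap<u64, u64> = HashMap::from([(1, 100)]);
    let dangling_file_id = 42u64;

    // Before: `*reverse_file_map.get(&dangling_file_id).unwrap()` would panic here.
    // After: the row is dropped and the fold keeps going.
    if let Some(version_id) = reverse_file_map.get(&dangling_file_id) {
        println!("file {dangling_file_id} -> version {version_id}");
    } else {
        println!("file {dangling_file_id} has no version; skipped");
    }
}
```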
diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs
index 80c43c8c..b107bdb2 100644
--- a/src/routes/v2/projects.rs
+++ b/src/routes/v2/projects.rs
@@ -67,16 +67,15 @@ pub async fn project_search(
             .into_iter()
             .map(|facets| {
                 facets
-                .into_iter()
-                .map(|facet| {
-                    if facet.is_array() {
-                        serde_json::from_value::<Vec<String>>(facet).unwrap_or_default()
-                    } else {
-                        vec![serde_json::from_value::<String>(facet.clone())
-                            .unwrap_or_default()]
-                    }
-                })
-                .collect_vec()
+                    .into_iter()
+                    .map(|facet| {
+                        if facet.is_array() {
+                            serde_json::from_value::<Vec<String>>(facet).unwrap_or_default()
+                        } else {
+                            vec![serde_json::from_value::<String>(facet).unwrap_or_default()]
+                        }
+                    })
+                    .collect_vec()
             })
             .collect_vec();
diff --git a/src/routes/v2/users.rs b/src/routes/v2/users.rs
index 37da2f9e..6d2d60ab 100644
--- a/src/routes/v2/users.rs
+++ b/src/routes/v2/users.rs
@@ -178,38 +178,17 @@ pub async fn user_icon_edit(
     .or_else(v2_reroute::flatten_404_error)
 }
 
-#[derive(Deserialize)]
-pub struct RemovalType {
-    #[serde(default = "default_removal")]
-    removal_type: String,
-}
-
-fn default_removal() -> String {
-    "partial".into()
-}
-
 #[delete("{id}")]
 pub async fn user_delete(
     req: HttpRequest,
     info: web::Path<(String,)>,
     pool: web::Data<PgPool>,
-    removal_type: web::Query<RemovalType>,
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let removal_type = removal_type.into_inner();
-    v3::users::user_delete(
-        req,
-        info,
-        pool,
-        web::Query(v3::users::RemovalType {
-            removal_type: removal_type.removal_type,
-        }),
-        redis,
-        session_queue,
-    )
-    .await
-    .or_else(v2_reroute::flatten_404_error)
+    v3::users::user_delete(req, info, pool, redis, session_queue)
+        .await
+        .or_else(v2_reroute::flatten_404_error)
 }
 
 #[get("{id}/follows")]
diff --git a/src/routes/v2/versions.rs b/src/routes/v2/versions.rs
index 86d3209c..94468f34 100644
--- a/src/routes/v2/versions.rs
+++ b/src/routes/v2/versions.rs
@@ -8,6 +8,7 @@ use crate::models::projects::{Dependency, FileType, Version, VersionStatus, VersionType};
 use crate::models::v2::projects::LegacyVersion;
 use crate::queue::session::AuthQueue;
 use crate::routes::{v2_reroute, v3};
+use crate::search::SearchConfig;
 use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
@@ -263,12 +264,13 @@ pub async fn version_edit(
 #[delete("{version_id}")]
 pub async fn version_delete(
     req: HttpRequest,
-    info: web::Path<(models::ids::VersionId,)>,
+    info: web::Path<(VersionId,)>,
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
+    search_config: web::Data<SearchConfig>,
 ) -> Result<HttpResponse, ApiError> {
-    v3::versions::version_delete(req, info, pool, redis, session_queue)
+    v3::versions::version_delete(req, info, pool, redis, session_queue, search_config)
         .await
         .or_else(v2_reroute::flatten_404_error)
 }
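The v2 route changes are shim maintenance: `project_search` normalizes v2's mixed facet payloads — each facet may arrive as a bare JSON string or an array of strings — into the `Vec<Vec<String>>` shape v3 expects, and the dropped `.clone()` was simply redundant because `facet` is consumed by value on both branches; `version_delete` additionally threads `web::Data<SearchConfig>` through so the v3 handler can touch the search index. A self-contained sketch of the facet normalization (inputs invented for illustration):

```rust
use serde_json::{json, Value};

// Mirrors the closure in v2 project_search: a bare string and an array of
// strings both normalize to one OR-group of facet strings.
fn normalize(facet: Value) -> Vec<String> {
    if facet.is_array() {
        serde_json::from_value::<Vec<String>>(facet).unwrap_or_default()
    } else {
        vec![serde_json::from_value::<String>(facet).unwrap_or_default()]
    }
}

fn main() {
    assert_eq!(normalize(json!("categories:forge")), vec!["categories:forge"]);
    assert_eq!(
        normalize(json!(["categories:forge", "categories:fabric"])),
        vec!["categories:forge", "categories:fabric"]
    );
}
```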
diff --git a/src/routes/v3/projects.rs b/src/routes/v3/projects.rs
index 3b35d163..6cc8b319 100644
--- a/src/routes/v3/projects.rs
+++ b/src/routes/v3/projects.rs
@@ -21,6 +21,7 @@ use crate::models::teams::ProjectPermissions;
 use crate::models::threads::MessageBody;
 use crate::queue::session::AuthQueue;
 use crate::routes::ApiError;
+use crate::search::indexing::remove_documents;
 use crate::search::{search_for_project, SearchConfig, SearchError};
 use crate::util::img;
 use crate::util::routes::read_from_payload;
@@ -28,7 +29,6 @@ use crate::util::validate::validation_errors_to_string;
 use actix_web::{web, HttpRequest, HttpResponse};
 use chrono::{DateTime, Utc};
 use futures::TryStreamExt;
-use meilisearch_sdk::indexes::IndexesResults;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use sqlx::PgPool;
@@ -231,7 +231,7 @@ pub async fn project_edit(
     req: HttpRequest,
     info: web::Path<(String,)>,
     pool: web::Data<PgPool>,
-    config: web::Data<SearchConfig>,
+    search_config: web::Data<SearchConfig>,
     new_project: web::Json<EditProject>,
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
@@ -472,7 +472,15 @@ pub async fn project_edit(
             .await?;
 
             if project_item.inner.status.is_searchable() && !status.is_searchable() {
-                delete_from_index(id.into(), config).await?;
+                remove_documents(
+                    &project_item
+                        .versions
+                        .into_iter()
+                        .map(|x| x.into())
+                        .collect::<Vec<_>>(),
+                    &search_config,
+                )
+                .await?;
             }
         }
@@ -910,21 +918,6 @@ pub async fn project_search(
     Ok(HttpResponse::Ok().json(results))
 }
 
-pub async fn delete_from_index(
-    id: ProjectId,
-    config: web::Data<SearchConfig>,
-) -> Result<(), meilisearch_sdk::errors::Error> {
-    let client = meilisearch_sdk::client::Client::new(&*config.address, &*config.key);
-
-    let indexes: IndexesResults = client.get_indexes().await?;
-
-    for index in indexes.results {
-        index.delete_document(id.to_string()).await?;
-    }
-
-    Ok(())
-}
-
 //checks the validity of a project id or slug
 pub async fn project_get_check(
     info: web::Path<(String,)>,
@@ -2045,7 +2038,7 @@ pub async fn project_delete(
     info: web::Path<(String,)>,
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
-    config: web::Data<SearchConfig>,
+    search_config: web::Data<SearchConfig>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
     let user = get_user_from_headers(
@@ -2118,7 +2111,15 @@ pub async fn project_delete(
 
     transaction.commit().await?;
 
-    delete_from_index(project.inner.id.into(), config).await?;
+    remove_documents(
+        &project
+            .versions
+            .into_iter()
+            .map(|x| x.into())
+            .collect::<Vec<_>>(),
+        &search_config,
+    )
+    .await?;
 
     if result.is_some() {
         Ok(HttpResponse::NoContent().body(""))
diff --git a/src/routes/v3/users.rs b/src/routes/v3/users.rs
index 7d14279c..2f8ec604 100644
--- a/src/routes/v3/users.rs
+++ b/src/routes/v3/users.rs
@@ -546,21 +546,10 @@ pub async fn user_icon_edit(
     }
 }
 
-#[derive(Deserialize)]
-pub struct RemovalType {
-    #[serde(default = "default_removal")]
-    pub removal_type: String,
-}
-
-fn default_removal() -> String {
-    "partial".into()
-}
-
 pub async fn user_delete(
     req: HttpRequest,
     info: web::Path<(String,)>,
     pool: web::Data<PgPool>,
-    removal_type: web::Query<RemovalType>,
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
@@ -584,13 +573,7 @@ pub async fn user_delete(
 
     let mut transaction = pool.begin().await?;
 
-    let result = User::remove(
-        id,
-        removal_type.removal_type == "full",
-        &mut transaction,
-        &redis,
-    )
-    .await?;
+    let result = User::remove(id, &mut transaction, &redis).await?;
 
     transaction.commit().await?;
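This is the bug the search-side changes revolve around: the Meilisearch documents are per-version, keyed by `version_id` (see `create_index(name, Some("version_id"))` in `src/search/indexing/mod.rs` below), while the removed `delete_from_index` deleted a single document keyed by *project* id — so in practice it missed the per-version documents entirely. The replacement maps a project to its version ids and deletes those:

```rust
// Shape of the new call, as used in project_delete above: database VersionIds
// convert into model ids, and remove_documents deletes the matching documents
// from every index.
let version_ids: Vec<crate::models::ids::VersionId> =
    project.versions.into_iter().map(|x| x.into()).collect();
remove_documents(&version_ids, &search_config).await?;
```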
diff --git a/src/routes/v3/versions.rs b/src/routes/v3/versions.rs
index 0b55c865..54da5fbb 100644
--- a/src/routes/v3/versions.rs
+++ b/src/routes/v3/versions.rs
@@ -20,6 +20,8 @@ use crate::models::projects::{skip_nulls, Loader};
 use crate::models::projects::{Dependency, FileType, VersionStatus, VersionType};
 use crate::models::teams::ProjectPermissions;
 use crate::queue::session::AuthQueue;
+use crate::search::indexing::remove_documents;
+use crate::search::SearchConfig;
 use crate::util::img;
 use crate::util::validate::validation_errors_to_string;
 use actix_web::{web, HttpRequest, HttpResponse};
@@ -934,6 +936,7 @@ pub async fn version_delete(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
+    search_config: web::Data<SearchConfig>,
 ) -> Result<HttpResponse, ApiError> {
     let user = get_user_from_headers(
         &req,
@@ -1001,7 +1004,7 @@ pub async fn version_delete(
     let result =
         database::models::Version::remove_full(version.inner.id, &redis, &mut transaction).await?;
-
+    remove_documents(&[version.inner.id.into()], &search_config).await?;
     database::models::Project::clear_cache(version.inner.project_id, None, Some(true), &redis)
         .await?;
diff --git a/src/search/indexing/mod.rs b/src/search/indexing/mod.rs
index 99c72bb5..ddc12e7a 100644
--- a/src/search/indexing/mod.rs
+++ b/src/search/indexing/mod.rs
@@ -1,11 +1,12 @@
 /// This module is used for the indexing from any source.
 pub mod local_import;
 
+use itertools::Itertools;
 use std::collections::HashMap;
 
 use crate::database::redis::RedisPool;
+use crate::models::ids::base62_impl::to_base62;
 use crate::search::{SearchConfig, UploadSearchProject};
-use itertools::Itertools;
 use local_import::index_local;
 use log::info;
 use meilisearch_sdk::client::Client;
@@ -34,23 +35,41 @@ pub enum IndexingError {
 
 // The chunk size for adding projects to the indexing database. If the request size
 // is too large (>10MiB) then the request fails with an error. This chunk size
-// assumes a max average size of 1KiB per project to avoid this cap.
-const MEILISEARCH_CHUNK_SIZE: usize = 10000;
-
+// assumes a max average size of 4KiB per project to avoid this cap.
+const MEILISEARCH_CHUNK_SIZE: usize = 2500;
 // Should be less than FETCH_PROJECT_SIZE
 const FETCH_PROJECT_SIZE: usize = 5000;
 
+const TIMEOUT: std::time::Duration = std::time::Duration::from_secs(60);
+
+pub async fn remove_documents(
+    ids: &[crate::models::ids::VersionId],
+    config: &SearchConfig,
+) -> Result<(), meilisearch_sdk::errors::Error> {
+    let indexes = get_indexes(config).await?;
+
+    for index in indexes {
+        index
+            .delete_documents(&ids.iter().map(|x| to_base62(x.0)).collect::<Vec<_>>())
+            .await?;
+    }
+
+    Ok(())
+}
+
 pub async fn index_projects(
     pool: PgPool,
     redis: RedisPool,
     config: &SearchConfig,
 ) -> Result<(), IndexingError> {
-    let mut docs_to_add: Vec<UploadSearchProject> = vec![];
-    let mut additional_fields: Vec<String> = vec![];
+    info!("Indexing projects.");
+
+    let indices = get_indexes(config).await?;
 
     let all_ids = get_all_ids(pool.clone()).await?;
     let all_ids_len = all_ids.len();
     info!("Got all ids, indexing {} projects", all_ids_len);
 
-    let mut so_far = 0;
+    let mut so_far = 0;
     let as_chunks: Vec<_> = all_ids
         .into_iter()
         .chunks(FETCH_PROJECT_SIZE)
@@ -74,51 +93,74 @@ pub async fn index_projects(
                 (version_id, (project_id, owner_username.to_lowercase()))
             })
             .collect::<HashMap<_, _>>();
-        let (mut uploads, mut loader_fields) = index_local(&pool, &redis, id_chunk).await?;
-        docs_to_add.append(&mut uploads);
-        additional_fields.append(&mut loader_fields);
-    }
+        let (uploads, loader_fields) = index_local(&pool, &redis, id_chunk).await?;
 
-    info!("Got all ids, indexing...");
-    // Write Indices
-    add_projects(docs_to_add, additional_fields, config).await?;
+        info!("Got chunk, adding to docs_to_add");
+        add_projects(&indices, uploads, loader_fields, config).await?;
+    }
 
+    info!("Done adding projects.");
     Ok(())
 }
 
-async fn create_index(
+pub async fn get_indexes(
+    config: &SearchConfig,
+) -> Result<Vec<Index>, meilisearch_sdk::errors::Error> {
+    let client = config.make_client();
+
+    let projects_index = create_or_update_index(&client, "projects", None).await?;
+    let projects_filtered_index = create_or_update_index(
+        &client,
+        "projects_filtered",
+        Some(&[
+            "sort",
+            "words",
+            "typo",
+            "proximity",
+            "attribute",
+            "exactness",
+        ]),
+    )
+    .await?;
+
+    Ok(vec![projects_index, projects_filtered_index])
+}
+
+async fn create_or_update_index(
     client: &Client,
     name: &'static str,
     custom_rules: Option<&'static [&'static str]>,
-) -> Result<Index, IndexingError> {
-    client
-        .delete_index(name)
-        .await?
-        .wait_for_completion(client, None, None)
-        .await?;
+) -> Result<Index, meilisearch_sdk::errors::Error> {
+    info!("Updating/creating index.");
 
     match client.get_index(name).await {
         Ok(index) => {
+            info!("Updating index settings.");
+
+            let mut settings = default_settings();
+
+            if let Some(custom_rules) = custom_rules {
+                settings = settings.with_ranking_rules(custom_rules);
+            }
+
             index
-                .set_settings(&default_settings())
+                .set_settings(&settings)
                 .await?
-                .wait_for_completion(client, None, None)
+                .wait_for_completion(client, None, Some(TIMEOUT))
                 .await?;
-
             Ok(index)
         }
-        Err(meilisearch_sdk::errors::Error::Meilisearch(
-            meilisearch_sdk::errors::MeilisearchError {
-                error_code: meilisearch_sdk::errors::ErrorCode::IndexNotFound,
-                ..
-            },
-        )) => {
+        _ => {
+            info!("Creating index.");
+
             // Only create index and set settings if the index doesn't already exist
             let task = client.create_index(name, Some("version_id")).await?;
-            let task = task.wait_for_completion(client, None, None).await?;
+            let task = task
+                .wait_for_completion(client, None, Some(TIMEOUT))
+                .await?;
             let index = task
                 .try_make_index(client)
-                .map_err(|_| IndexingError::Task)?;
+                .map_err(|x| x.unwrap_failure())?;
 
             let mut settings = default_settings();
 
@@ -129,47 +171,47 @@ async fn create_index(
             index
                 .set_settings(&settings)
                 .await?
-                .wait_for_completion(client, None, None)
+                .wait_for_completion(client, None, Some(TIMEOUT))
                 .await?;
 
             Ok(index)
         }
-        Err(e) => {
-            log::warn!("Unhandled error while creating index: {}", e);
-            Err(IndexingError::Indexing(e))
-        }
     }
 }
 
 async fn add_to_index(
     client: &Client,
-    index: Index,
+    index: &Index,
     mods: &[UploadSearchProject],
 ) -> Result<(), IndexingError> {
     for chunk in mods.chunks(MEILISEARCH_CHUNK_SIZE) {
+        info!(
+            "Adding chunk starting with version id {}",
+            chunk[0].version_id
+        );
         index
-            .add_documents(chunk, Some("version_id"))
+            .add_or_replace(chunk, Some("version_id"))
            .await?
-            .wait_for_completion(client, None, None)
+            .wait_for_completion(client, None, Some(std::time::Duration::from_secs(3600)))
             .await?;
+        info!("Added chunk of {} projects to index", chunk.len());
     }
+
     Ok(())
 }
 
-async fn create_and_add_to_index(
+async fn update_and_add_to_index(
     client: &Client,
+    index: &Index,
     projects: &[UploadSearchProject],
     additional_fields: &[String],
-    name: &'static str,
-    custom_rules: Option<&'static [&'static str]>,
 ) -> Result<(), IndexingError> {
-    let index = create_index(client, name, custom_rules).await?;
-
-    let mut new_filterable_attributes = index.get_filterable_attributes().await?;
+    let mut new_filterable_attributes: Vec<String> = index.get_filterable_attributes().await?;
     let mut new_displayed_attributes = index.get_displayed_attributes().await?;
 
     new_filterable_attributes.extend(additional_fields.iter().map(|s| s.to_string()));
     new_displayed_attributes.extend(additional_fields.iter().map(|s| s.to_string()));
 
+    info!("add attributes.");
     index
         .set_filterable_attributes(new_filterable_attributes)
         .await?;
@@ -177,34 +219,23 @@ async fn create_and_add_to_index(
     index
         .set_displayed_attributes(new_displayed_attributes)
         .await?;
 
+    info!("Adding to index.");
+
     add_to_index(client, index, projects).await?;
 
     Ok(())
 }
 
 pub async fn add_projects(
+    indices: &[Index],
     projects: Vec<UploadSearchProject>,
     additional_fields: Vec<String>,
     config: &SearchConfig,
 ) -> Result<(), IndexingError> {
     let client = config.make_client();
-
-    create_and_add_to_index(&client, &projects, &additional_fields, "projects", None).await?;
-
-    create_and_add_to_index(
-        &client,
-        &projects,
-        &additional_fields,
-        "projects_filtered",
-        Some(&[
-            "sort",
-            "words",
-            "typo",
-            "proximity",
-            "attribute",
-            "exactness",
-        ]),
-    )
-    .await?;
+    for index in indices {
+        info!("adding projects part1 or 2.");
+        update_and_add_to_index(&client, index, &projects, &additional_fields).await?;
+    }
 
     Ok(())
 }
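A note on the retuned chunk size above: Meilisearch rejects payloads over roughly 10 MiB, so the chunk must satisfy `chunk_size × assumed_document_size ≤ cap`. At the assumed 4 KiB average, 2,500 documents is 10,240,000 bytes — just under the 10,485,760-byte cap — whereas the old `10000 × 1 KiB` budget stopped holding once version documents grew. Restating the comment's arithmetic as a compile-time check (the cap and the 4 KiB average are the comment's assumptions, not measured values):

```rust
const MAX_REQUEST_BYTES: usize = 10 * 1024 * 1024; // ~10 MiB request cap
const ASSUMED_DOC_BYTES: usize = 4 * 1024; // assumed average document size
const MEILISEARCH_CHUNK_SIZE: usize = 2500;

// 2500 * 4096 = 10_240_000 <= 10_485_760
const _: () = assert!(MEILISEARCH_CHUNK_SIZE * ASSUMED_DOC_BYTES <= MAX_REQUEST_BYTES);
```

Relatedly, the new explicit timeouts (60 s for settings/creation tasks, 3600 s for document addition) replace the old `wait_for_completion(client, None, None)` calls, which fell back to the SDK's much shorter default timeout — too short for a full index rebuild.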
diff --git a/src/search/mod.rs b/src/search/mod.rs
index 83160325..d7e7026f 100644
--- a/src/search/mod.rs
+++ b/src/search/mod.rs
@@ -164,8 +164,8 @@ pub struct ResultSearchProject {
     pub requested_status: Option<String>,
     pub loaders: Vec<String>, // Search uses loaders as categories- this is purely for the Project model.
     pub links: Vec<String>,
-    pub games: Vec<String>, // Todo: in future, could be a searchable field.
     pub gallery_items: Vec<String>, // Gallery *only* urls are stored in gallery, but the gallery items are stored here- required for the Project model.
+    pub games: Vec<String>, // Todo: in future, could be a searchable field.
     pub organization_id: Option<String>, // Todo: in future, could be a searchable field.
 
     #[serde(flatten)]
@@ -237,7 +237,7 @@ pub async fn search_for_project(
                     if facet.is_array() {
                         serde_json::from_value::<Vec<String>>(facet).unwrap_or_default()
                     } else {
-                        vec![serde_json::from_value::<String>(facet.clone())
+                        vec![serde_json::from_value::<String>(facet)
                             .unwrap_or_default()]
                     }
                 })
diff --git a/tests/oauth.rs b/tests/oauth.rs
index 7821eb61..101cefbf 100644
--- a/tests/oauth.rs
+++ b/tests/oauth.rs
@@ -82,7 +82,7 @@ async fn oauth_flow_happy_path() {
 #[actix_rt::test]
 async fn oauth_authorize_for_already_authorized_scopes_returns_auth_code() {
     with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
-        let DummyOAuthClientAlpha { client_id, .. } = env.dummy.unwrap().oauth_client_alpha.clone();
+        let DummyOAuthClientAlpha { client_id, .. } = env.dummy.unwrap().oauth_client_alpha;
 
         let resp = env
             .api
@@ -119,7 +119,7 @@ async fn get_oauth_token_with_already_used_auth_code_fails() {
         client_id,
         client_secret,
         ..
-    } = env.dummy.unwrap().oauth_client_alpha.clone();
+    } = env.dummy.unwrap().oauth_client_alpha;
 
     let resp = env
         .api