This repository has been archived by the owner on Oct 19, 2024. It is now read-only.

Commit

Testing search prod (#791)
* testing push

* lowers it

* removed unwrap

* reduced to 500

* Really took down time

* reorders chunking

* rebuild docker

* reverted most changes

* cargo fmt

* reduced meilisearch limit

* added logs, removed deletion of index

* one client creation

* changes

* reverted gallery change

* testing re-splitting again

* Remove chunking + index deletion

* Bring back chunking

* Update chunk size

---------

Co-authored-by: Jai A <[email protected]>
Co-authored-by: Geometrically <[email protected]>
3 people authored Dec 12, 2023
1 parent 6217523 commit 90954da
Showing 15 changed files with 223 additions and 264 deletions.


62 changes: 33 additions & 29 deletions src/database/models/project_item.rs
@@ -550,7 +550,7 @@ impl Project {
.collect();
let slugs = remaining_strings
.into_iter()
.map(|x| x.to_string().to_lowercase())
.map(|x| x.to_lowercase())
.collect::<Vec<_>>();

let all_version_ids = DashSet::new();
@@ -569,15 +569,18 @@
.collect::<Vec<String>>()
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
let version_id = VersionId(m.id);
let date_published = m.date_published;
all_version_ids.insert(version_id);
acc.entry(ProjectId(m.mod_id))
.or_default()
.push((version_id, date_published));
async move { Ok(acc) }
})
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
let version_id = VersionId(m.id);
let date_published = m.date_published;
all_version_ids.insert(version_id);
acc.entry(ProjectId(m.mod_id))
.or_default()
.push((version_id, date_published));
async move { Ok(acc) }
},
)
.await?;

let loader_field_ids = DashSet::new();
@@ -592,25 +595,26 @@
&all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
string_value: m.string_value,
};

loader_field_ids.insert(LoaderFieldId(m.field_id));
if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}

acc.entry(ProjectId(m.mod_id))
.or_default()
.push(qvf);
async move { Ok(acc) }
})
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
string_value: m.string_value,
};

loader_field_ids.insert(LoaderFieldId(m.field_id));
if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}

acc.entry(ProjectId(m.mod_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
.await?;

let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
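
The two reformatted hunks above are mostly `cargo fmt` noise, but they show the pattern this file relies on: the sqlx row stream is folded with `try_fold` into a `DashMap`, grouping each project's versions (and loader fields) by project id as rows arrive. A minimal sketch of that grouping pattern, using an in-memory stream and placeholder row/id types rather than the crate's query:

```rust
// A minimal sketch (not the crate's code) of the try_fold-into-DashMap grouping.
// Assumed Cargo deps: dashmap = "5", futures = "0.3".
use dashmap::DashMap;
use futures::{stream, TryStreamExt};

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ProjectId(i64);
#[derive(Clone, Copy, Debug)]
struct VersionId(i64);

// Stand-in for the rows the `versions` query yields.
struct Row {
    id: i64,
    mod_id: i64,
}

fn main() {
    // In the real code this stream comes from `.fetch(&mut *exec)`.
    let rows = vec![
        Ok::<Row, ()>(Row { id: 10, mod_id: 1 }),
        Ok(Row { id: 11, mod_id: 1 }),
        Ok(Row { id: 20, mod_id: 2 }),
    ];

    let grouped = futures::executor::block_on(stream::iter(rows).try_fold(
        DashMap::new(),
        |acc: DashMap<ProjectId, Vec<VersionId>>, m| {
            // Mutate the accumulator, then hand it back through a future whose
            // error type matches the stream's.
            acc.entry(ProjectId(m.mod_id))
                .or_default()
                .push(VersionId(m.id));
            async move { Ok(acc) }
        },
    ))
    .expect("in-memory rows cannot fail");

    assert_eq!(grouped.get(&ProjectId(1)).map(|v| v.len()), Some(2));
}
```

Returning the accumulator from `async move { Ok(acc) }` is what lets the same map thread through every row while the stream stays fallible.
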
44 changes: 11 additions & 33 deletions src/database/models/user_item.rs
@@ -434,7 +434,6 @@ impl User {

pub async fn remove(
id: UserId,
full: bool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -445,38 +444,17 @@

let deleted_user: UserId = crate::models::users::DELETED_USER.into();

if full {
let projects: Vec<ProjectId> = sqlx::query!(
"
SELECT m.id FROM mods m
INNER JOIN team_members tm ON tm.team_id = m.team_id
WHERE tm.user_id = $1 AND tm.is_owner = TRUE
",
id as UserId,
)
.fetch_many(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) })
.try_collect::<Vec<ProjectId>>()
.await?;

for project_id in projects {
let _result =
super::project_item::Project::remove(project_id, transaction, redis)
.await?;
}
} else {
sqlx::query!(
"
UPDATE team_members
SET user_id = $1
WHERE (user_id = $2 AND is_owner = TRUE)
",
deleted_user as UserId,
id as UserId,
)
.execute(&mut **transaction)
.await?;
}
sqlx::query!(
"
UPDATE team_members
SET user_id = $1
WHERE (user_id = $2 AND is_owner = TRUE)
",
deleted_user as UserId,
id as UserId,
)
.execute(&mut **transaction)
.await?;

sqlx::query!(
"
45 changes: 23 additions & 22 deletions src/database/models/version_item.rs
@@ -540,25 +540,26 @@ impl Version {
&version_ids_parsed
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
string_value: m.string_value,
};

loader_field_ids.insert(LoaderFieldId(m.field_id));
if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}
.try_fold(
DashMap::new(),
|acc: DashMap<VersionId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
string_value: m.string_value,
};

acc.entry(VersionId(m.version_id))
.or_default()
.push(qvf);
async move { Ok(acc) }
})
loader_field_ids.insert(LoaderFieldId(m.field_id));
if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}

acc.entry(VersionId(m.version_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
.await?;

let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
@@ -692,17 +693,17 @@ impl Version {
&file_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<Hash>>, m| {
.try_fold(DashMap::new(), |acc: DashMap<VersionId, Vec<Hash>>, m| {
if let Some(found_hash) = m.hash {
let hash = Hash {
file_id: FileId(m.file_id),
algorithm: m.algorithm,
hash: found_hash,
};

let version_id = *reverse_file_map.get(&FileId(m.file_id)).unwrap();

acc.entry(version_id).or_default().push(hash);
if let Some(version_id) = reverse_file_map.get(&FileId(m.file_id)) {
acc.entry(*version_id).or_default().push(hash);
}
}
async move { Ok(acc) }
})
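
Besides formatting, this hunk changes behaviour: the `reverse_file_map` lookup drops its `.unwrap()`, so a hash row whose file id has no mapped version is now skipped instead of panicking. A std-only sketch of the same guard, with hypothetical id types:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(i64);
#[derive(Clone, Copy, Debug)]
struct VersionId(i64);

fn main() {
    let reverse_file_map: HashMap<FileId, VersionId> =
        HashMap::from([(FileId(1), VersionId(100))]);

    for file_id in [FileId(1), FileId(2)] {
        // Before: `*reverse_file_map.get(&file_id).unwrap()` would panic on FileId(2).
        // After: unmapped file ids are silently skipped.
        if let Some(version_id) = reverse_file_map.get(&file_id) {
            println!("file {:?} belongs to version {:?}", file_id, version_id);
        }
    }
}
```
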
19 changes: 9 additions & 10 deletions src/routes/v2/projects.rs
@@ -67,16 +67,15 @@ pub async fn project_search(
.into_iter()
.map(|facets| {
facets
.into_iter()
.map(|facet| {
if facet.is_array() {
serde_json::from_value::<Vec<String>>(facet).unwrap_or_default()
} else {
vec![serde_json::from_value::<String>(facet.clone())
.unwrap_or_default()]
}
})
.collect_vec()
.into_iter()
.map(|facet| {
if facet.is_array() {
serde_json::from_value::<Vec<String>>(facet).unwrap_or_default()
} else {
vec![serde_json::from_value::<String>(facet).unwrap_or_default()]
}
})
.collect_vec()
})
.collect_vec();

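
The re-indented facet handling in `project_search` also drops a redundant `.clone()`. Each facet is either a JSON array of strings or a single string, and both shapes are normalised to a `Vec<String>`. A sketch of that normalisation with a made-up facet payload (the `categories:`/`versions:` values are illustrative; assumed deps: serde_json = "1", itertools = "0.12"):

```rust
use itertools::Itertools;
use serde_json::{json, Value};

fn main() {
    // One facet group containing an array-shaped facet and a string-shaped one.
    let facets: Vec<Vec<Value>> = vec![vec![
        json!(["categories:fabric", "categories:forge"]),
        json!("versions:1.20.1"),
    ]];

    let normalised = facets
        .into_iter()
        .map(|facets| {
            facets
                .into_iter()
                .map(|facet| {
                    if facet.is_array() {
                        serde_json::from_value::<Vec<String>>(facet).unwrap_or_default()
                    } else {
                        vec![serde_json::from_value::<String>(facet).unwrap_or_default()]
                    }
                })
                .collect_vec()
        })
        .collect_vec();

    assert_eq!(normalised[0][1], vec!["versions:1.20.1".to_string()]);
}
```
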
27 changes: 3 additions & 24 deletions src/routes/v2/users.rs
@@ -178,38 +178,17 @@ pub async fn user_icon_edit(
.or_else(v2_reroute::flatten_404_error)
}

#[derive(Deserialize)]
pub struct RemovalType {
#[serde(default = "default_removal")]
removal_type: String,
}

fn default_removal() -> String {
"partial".into()
}

#[delete("{id}")]
pub async fn user_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
removal_type: web::Query<RemovalType>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let removal_type = removal_type.into_inner();
v3::users::user_delete(
req,
info,
pool,
web::Query(v3::users::RemovalType {
removal_type: removal_type.removal_type,
}),
redis,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)
v3::users::user_delete(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)
}

#[get("{id}/follows")]
6 changes: 4 additions & 2 deletions src/routes/v2/versions.rs
@@ -8,6 +8,7 @@ use crate::models::projects::{Dependency, FileType, Version, VersionStatus, Vers
use crate::models::v2::projects::LegacyVersion;
use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3};
use crate::search::SearchConfig;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@@ -263,12 +264,13 @@ pub async fn version_edit(
#[delete("{version_id}")]
pub async fn version_delete(
req: HttpRequest,
info: web::Path<(models::ids::VersionId,)>,
info: web::Path<(VersionId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
search_config: web::Data<SearchConfig>,
) -> Result<HttpResponse, ApiError> {
v3::versions::version_delete(req, info, pool, redis, session_queue)
v3::versions::version_delete(req, info, pool, redis, session_queue, search_config)
.await
.or_else(v2_reroute::flatten_404_error)
}
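
In `v2/versions.rs` the delete route now extracts `web::Data<SearchConfig>` and passes it through to the v3 handler, so the search index can be updated when a version is removed. A hedged sketch of registering and extracting such shared config in actix-web; the `SearchConfig` fields, route path, and response body below are placeholders, not the crate's real definitions (assumed dep: actix-web = "4"):

```rust
use actix_web::{delete, web, App, HttpResponse, HttpServer};

// Hypothetical stand-in for the crate's SearchConfig.
#[derive(Clone)]
struct SearchConfig {
    address: String,
}

#[delete("/version/{version_id}")]
async fn version_delete(
    path: web::Path<(String,)>,
    search_config: web::Data<SearchConfig>,
) -> HttpResponse {
    let (version_id,) = path.into_inner();
    // A real handler would delete the version, then remove its document from
    // the search index reachable at `search_config.address`.
    HttpResponse::Ok().body(format!(
        "would delete {version_id} and update the index at {}",
        search_config.address
    ))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let config = web::Data::new(SearchConfig {
        address: "http://localhost:7700".into(),
    });
    HttpServer::new(move || App::new().app_data(config.clone()).service(version_delete))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```
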