Merge branch 'master' into permissions-analytics
Geometrically authored Nov 21, 2023
2 parents 358e13f + dfba6c7 commit ccc35ab
Showing 26 changed files with 367 additions and 140 deletions.
13 changes: 13 additions & 0 deletions .codecov.yml
@@ -0,0 +1,13 @@
comment: false

coverage:
  status:
    project:
      default:
        threshold: 60% # make CI green
    patch:
      default:
        threshold: 60% # make CI green

ignore: # ignore code coverage on following paths
  - "**/tests"
44 changes: 44 additions & 0 deletions .github/workflows/coverage.yml
@@ -0,0 +1,44 @@
name: Coverage-Tarpaulin

env:
  CARGO_TERM_COLOR: always
  SQLX_OFFLINE: true

on:
  push:
    branches: [ master ]
  # Uncomment to allow PRs to trigger the workflow
  # pull_request:
  #   branches: [ master ]
jobs:
  citarp:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      # Start Docker Compose
      - name: Start Docker Compose
        run: docker-compose up -d

      - name: Install cargo tarpaulin
        uses: taiki-e/install-action@cargo-tarpaulin
      - name: Generate code coverage
        run: |
          cargo tarpaulin --verbose --all-features --timeout 120 --out xml
        env:
          BACKBLAZE_BUCKET_ID: ${{ secrets.BACKBLAZE_BUCKET_ID }}
          BACKBLAZE_KEY: ${{ secrets.BACKBLAZE_KEY }}
          BACKBLAZE_KEY_ID: ${{ secrets.BACKBLAZE_KEY_ID }}
          S3_ACCESS_TOKEN: ${{ secrets.S3_ACCESS_TOKEN }}
          S3_SECRET: ${{ secrets.S3_SECRET }}
          S3_URL: ${{ secrets.S3_URL }}
          S3_REGION: ${{ secrets.S3_REGION }}
          S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }}
          SQLX_OFFLINE: true
          DATABASE_URL: postgresql://labrinth:labrinth@localhost/postgres

      - name: Upload to codecov.io
        uses: codecov/codecov-action@v2
        with:
          # token: ${{secrets.CODECOV_TOKEN}} # not required for public repos
          fail_ci_if_error: true
2 changes: 2 additions & 0 deletions .gitignore
@@ -1,3 +1,5 @@
+codecov.json
+
 # Created by https://www.gitignore.io/api/rust,clion
 # Edit at https://www.gitignore.io/?templates=rust,clion
 
6 changes: 6 additions & 0 deletions Cargo.toml
@@ -109,3 +109,9 @@ derive-new = "0.5.9"
 
 [dev-dependencies]
 actix-http = "3.4.0"
+
+[profile.dev]
+opt-level = 0 # Minimal optimization, speeds up compilation
+lto = false # Disables Link Time Optimization
+incremental = true # Enables incremental compilation
+codegen-units = 16 # Higher number can improve compile times but reduce runtime performance
8 changes: 8 additions & 0 deletions src/database/models/categories.rs
@@ -90,6 +90,8 @@ impl Category {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
+
         let res: Option<Vec<Category>> = redis
             .get_deserialized_from_json(TAGS_NAMESPACE, "category")
             .await?;
@@ -155,6 +157,8 @@ impl DonationPlatform {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
+
         let res: Option<Vec<DonationPlatform>> = redis
             .get_deserialized_from_json(TAGS_NAMESPACE, "donation_platform")
             .await?;
@@ -209,6 +213,8 @@ impl ReportType {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
+
         let res: Option<Vec<String>> = redis
             .get_deserialized_from_json(TAGS_NAMESPACE, "report_type")
             .await?;
@@ -257,6 +263,8 @@ impl ProjectType {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
+
         let res: Option<Vec<String>> = redis
             .get_deserialized_from_json(TAGS_NAMESPACE, "project_type")
             .await?;
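
For context on the edit repeated across these model files: call sites no longer run cache operations directly on the RedisPool; they first check out a connection with redis.connect().await? and use that handle for every read and write. Below is a minimal, self-contained sketch of the resulting read-through pattern. The RedisPool/RedisConnection stubs, their signatures, and the namespace value are assumptions made for illustration; only the method names and the call shape come from the diff.

// Hypothetical stand-ins for labrinth's RedisPool / RedisConnection. Bodies and
// signatures are simplified assumptions; only the call shape mirrors the diff.
#[derive(Debug)]
pub struct DatabaseError;

pub struct RedisPool;
pub struct RedisConnection;

impl RedisPool {
    // New step introduced by this commit: check out an explicit connection first.
    pub async fn connect(&self) -> Result<RedisConnection, DatabaseError> {
        Ok(RedisConnection)
    }
}

impl RedisConnection {
    // The real method deserializes a cached JSON payload; this stub always misses.
    pub async fn get_deserialized_from_json<T>(
        &mut self,
        _namespace: &str,
        _key: &str,
    ) -> Result<Option<T>, DatabaseError> {
        Ok(None)
    }
}

const TAGS_NAMESPACE: &str = "tags"; // placeholder value for the sketch

// Shape of Category::list and friends after this commit: connect, try the
// cache, and only fall back to the database on a miss.
pub async fn list_categories(redis: &RedisPool) -> Result<Vec<String>, DatabaseError> {
    let mut redis = redis.connect().await?;

    if let Some(cached) = redis
        .get_deserialized_from_json::<Vec<String>>(TAGS_NAMESPACE, "category")
        .await?
    {
        return Ok(cached);
    }

    // Placeholder for the sqlx query that the cached branch short-circuits.
    Ok(vec!["adventure".to_string(), "utility".to_string()])
}

Shadowing redis (first the pool, then the checked-out connection) is what lets the rest of each method body stay untouched in the hunks above.
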
9 changes: 8 additions & 1 deletion src/database/models/collection_item.rs
@@ -157,6 +157,8 @@ impl Collection {
     {
         use futures::TryStreamExt;
 
+        let mut redis = redis.connect().await?;
+
         if collection_ids.is_empty() {
             return Ok(Vec::new());
         }
@@ -166,7 +168,10 @@
 
         if !collection_ids.is_empty() {
             let collections = redis
-                .multi_get::<String, _>(COLLECTIONS_NAMESPACE, collection_ids.iter().map(|x| x.0))
+                .multi_get::<String>(
+                    COLLECTIONS_NAMESPACE,
+                    collection_ids.iter().map(|x| x.0.to_string()),
+                )
                 .await?;
 
             for collection in collections {
@@ -240,6 +245,8 @@ impl Collection {
     }
 
     pub async fn clear_cache(id: CollectionId, redis: &RedisPool) -> Result<(), DatabaseError> {
+        let mut redis = redis.connect().await?;
+
         redis.delete(COLLECTIONS_NAMESPACE, id.0).await?;
         Ok(())
     }
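
The other recurring edit is to multi_get: the old call sites used a second, inferred type parameter for the key (multi_get::<String, _>), while the new form is parameterized only by the cached value type and takes keys the caller has already made printable (stringified ids here, raw integer ids in loader_fields.rs below). A rough sketch of a helper with that shape follows; the signature and the namespace value are assumptions, and only the call shape is taken from the diff.

use std::fmt::Display;

#[derive(Debug)]
pub struct DatabaseError;
pub struct RedisConnection;

impl RedisConnection {
    // Assumed shape of the new multi_get: one type parameter for the cached
    // value, keys as anything renderable to a string. The stub reports a miss
    // for every requested key.
    pub async fn multi_get<T>(
        &mut self,
        _namespace: &str,
        keys: impl IntoIterator<Item = impl Display>,
    ) -> Result<Vec<Option<T>>, DatabaseError> {
        Ok(keys.into_iter().map(|_| None).collect())
    }
}

const COLLECTIONS_NAMESPACE: &str = "collections"; // placeholder value for the sketch

// Call shape after this commit: ids are stringified at the call site and the
// raw cached JSON strings come back for the caller to deserialize.
pub async fn cached_collections_json(
    redis: &mut RedisConnection,
    collection_ids: &[u64],
) -> Result<Vec<String>, DatabaseError> {
    let hits = redis
        .multi_get::<String>(
            COLLECTIONS_NAMESPACE,
            collection_ids.iter().map(|x| x.to_string()),
        )
        .await?;
    Ok(hits.into_iter().flatten().collect())
}

Dropping the key type parameter is why the call sites in the rest of the diff stringify or collect their keys up front.
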
6 changes: 6 additions & 0 deletions src/database/models/flow_item.rs
@@ -58,6 +58,8 @@ impl Flow {
         expires: Duration,
         redis: &RedisPool,
     ) -> Result<String, DatabaseError> {
+        let mut redis = redis.connect().await?;
+
         let flow = ChaCha20Rng::from_entropy()
             .sample_iter(&Alphanumeric)
             .take(32)
@@ -71,6 +73,8 @@
     }
 
     pub async fn get(id: &str, redis: &RedisPool) -> Result<Option<Flow>, DatabaseError> {
+        let mut redis = redis.connect().await?;
+
         redis.get_deserialized_from_json(FLOWS_NAMESPACE, id).await
     }
 
@@ -91,6 +95,8 @@
     }
 
     pub async fn remove(id: &str, redis: &RedisPool) -> Result<Option<()>, DatabaseError> {
+        let mut redis = redis.connect().await?;
+
         redis.delete(FLOWS_NAMESPACE, id).await?;
         Ok(Some(()))
     }
5 changes: 4 additions & 1 deletion src/database/models/image_item.rs
@@ -180,6 +180,7 @@ impl Image {
     {
         use futures::TryStreamExt;
 
+        let mut redis = redis.connect().await?;
         if image_ids.is_empty() {
             return Ok(Vec::new());
         }
@@ -191,7 +192,7 @@
 
         if !image_ids.is_empty() {
             let images = redis
-                .multi_get::<String, _>(IMAGES_NAMESPACE, image_ids)
+                .multi_get::<String>(IMAGES_NAMESPACE, image_ids.iter().map(|x| x.to_string()))
                 .await?;
             for image in images {
                 if let Some(image) = image.and_then(|x| serde_json::from_str::<Image>(&x).ok()) {
@@ -246,6 +247,8 @@ impl Image {
     }
 
     pub async fn clear_cache(id: ImageId, redis: &RedisPool) -> Result<(), DatabaseError> {
+        let mut redis = redis.connect().await?;
+
         redis.delete(IMAGES_NAMESPACE, id.0).await?;
         Ok(())
     }
12 changes: 10 additions & 2 deletions src/database/models/loader_fields.rs
@@ -44,6 +44,7 @@ impl Game {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
         let cached_games: Option<Vec<Game>> = redis
             .get_deserialized_from_json(GAMES_LIST_NAMESPACE, "games")
             .await?;
@@ -95,6 +96,7 @@ impl Loader {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
         let cached_id: Option<i32> = redis.get_deserialized_from_json(LOADER_ID, name).await?;
         if let Some(cached_id) = cached_id {
             return Ok(Some(LoaderId(cached_id)));
@@ -124,6 +126,7 @@
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
         let cached_loaders: Option<Vec<Loader>> = redis
             .get_deserialized_from_json(LOADERS_LIST_NAMESPACE, "all")
             .await?;
@@ -318,9 +321,11 @@ impl LoaderField {
     {
         type RedisLoaderFieldTuple = (LoaderId, Vec<LoaderField>);
 
+        let mut redis = redis.connect().await?;
+
         let mut loader_ids = loader_ids.to_vec();
         let cached_fields: Vec<RedisLoaderFieldTuple> = redis
-            .multi_get::<String, _>(LOADER_FIELDS_NAMESPACE, loader_ids.iter().map(|x| x.0))
+            .multi_get::<String>(LOADER_FIELDS_NAMESPACE, loader_ids.iter().map(|x| x.0))
             .await?
             .into_iter()
             .flatten()
@@ -399,6 +404,8 @@ impl LoaderFieldEnum {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
+
         let cached_enum = redis
             .get_deserialized_from_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name)
             .await?;
@@ -488,12 +495,13 @@ impl LoaderFieldEnumValue {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
+        let mut redis = redis.connect().await?;
         let mut found_enums = Vec::new();
         let mut remaining_enums: Vec<LoaderFieldEnumId> = loader_field_enum_ids.to_vec();
 
         if !remaining_enums.is_empty() {
             let enums = redis
-                .multi_get::<String, _>(
+                .multi_get::<String>(
                     LOADER_FIELD_ENUM_VALUES_NAMESPACE,
                     loader_field_enum_ids.iter().map(|x| x.0),
                 )
6 changes: 5 additions & 1 deletion src/database/models/notification_item.rs
@@ -174,8 +174,10 @@ impl Notification {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
+        let mut redis = redis.connect().await?;
+
         let cached_notifications: Option<Vec<Notification>> = redis
-            .get_deserialized_from_json(USER_NOTIFICATIONS_NAMESPACE, user_id.0)
+            .get_deserialized_from_json(USER_NOTIFICATIONS_NAMESPACE, &user_id.0.to_string())
             .await?;
 
         if let Some(notifications) = cached_notifications {
@@ -319,6 +321,8 @@ impl Notification {
         user_ids: impl IntoIterator<Item = &UserId>,
         redis: &RedisPool,
     ) -> Result<(), DatabaseError> {
+        let mut redis = redis.connect().await?;
+
         redis
             .delete_many(
                 user_ids
18 changes: 13 additions & 5 deletions src/database/models/organization_item.rs
@@ -103,6 +103,8 @@ impl Organization {
     {
         use futures::stream::TryStreamExt;
 
+        let mut redis = redis.connect().await?;
+
         if organization_strings.is_empty() {
             return Ok(Vec::new());
         }
@@ -120,11 +122,12 @@
 
         organization_ids.append(
             &mut redis
-                .multi_get::<i64, _>(
+                .multi_get::<i64>(
                     ORGANIZATIONS_TITLES_NAMESPACE,
                     organization_strings
                         .iter()
-                        .map(|x| x.to_string().to_lowercase()),
+                        .map(|x| x.to_string().to_lowercase())
+                        .collect::<Vec<_>>(),
                 )
                 .await?
                 .into_iter()
@@ -134,7 +137,10 @@
 
         if !organization_ids.is_empty() {
             let organizations = redis
-                .multi_get::<String, _>(ORGANIZATIONS_NAMESPACE, organization_ids)
+                .multi_get::<String>(
+                    ORGANIZATIONS_NAMESPACE,
+                    organization_ids.iter().map(|x| x.to_string()),
+                )
                 .await?;
 
             for organization in organizations {
@@ -197,8 +203,8 @@ impl Organization {
         redis
             .set(
                 ORGANIZATIONS_TITLES_NAMESPACE,
-                organization.title.to_lowercase(),
-                organization.id.0,
+                &organization.title.to_lowercase(),
+                &organization.id.0.to_string(),
                 None,
             )
             .await?;
@@ -318,6 +324,8 @@ impl Organization {
         title: Option<String>,
         redis: &RedisPool,
     ) -> Result<(), super::DatabaseError> {
+        let mut redis = redis.connect().await?;
+
         redis
             .delete_many([
                 (ORGANIZATIONS_NAMESPACE, Some(id.0.to_string())),
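
The clear_cache methods follow the same two steps: check out a connection, then invalidate either a single key with delete or a batch with delete_many over (namespace, Option<key>) pairs, as in the notification and organization hunks above. A small sketch of that call shape follows; the delete_many signature and the namespace values are assumptions, the first pair comes from the diff, and the second pair is a guess based on the title: Option<String> parameter of the truncated hunk.

#[derive(Debug)]
pub struct DatabaseError;
pub struct RedisConnection;

impl RedisConnection {
    // Assumed signature: each entry pairs a namespace with an optional key, so
    // callers can pass values that may be absent (e.g. an optional cached title).
    // The real implementation issues the deletes; this stub is a no-op.
    pub async fn delete_many(
        &mut self,
        _keys: impl IntoIterator<Item = (&'static str, Option<String>)>,
    ) -> Result<(), DatabaseError> {
        Ok(())
    }
}

// Placeholder namespace values for the sketch.
const ORGANIZATIONS_NAMESPACE: &str = "organizations";
const ORGANIZATIONS_TITLES_NAMESPACE: &str = "organizations_titles";

// Call shape mirroring Organization::clear_cache after this commit: the id key
// and (when present) the lowercased title key are dropped in one call.
pub async fn clear_organization_cache(
    redis: &mut RedisConnection,
    id: u64,
    title: Option<String>,
) -> Result<(), DatabaseError> {
    redis
        .delete_many([
            (ORGANIZATIONS_NAMESPACE, Some(id.to_string())),
            (ORGANIZATIONS_TITLES_NAMESPACE, title.map(|x| x.to_lowercase())),
        ])
        .await?;
    Ok(())
}
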
