This repository has been archived by the owner on Oct 19, 2024. It is now read-only.

Tests v2 recreate #760

Merged · 13 commits · Nov 25, 2023


22 changes: 22 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -109,3 +109,4 @@ derive-new = "0.5.9"

[dev-dependencies]
actix-http = "3.4.0"
json-patch = "*"
6 changes: 4 additions & 2 deletions src/models/v2/projects.rs
@@ -233,10 +233,12 @@ pub struct LegacyVersion {
/// and are now part of the dynamic fields system
/// A list of game versions this project supports
pub game_versions: Vec<String>,
/// A list of loaders this project supports

/// A list of loaders this project supports (has a newtype struct)
pub loaders: Vec<Loader>,

// TODO: remove this once we have v3 testing, as this is a v3 field and tests for it should be isolated to v3
// TODO: should we remove this? as this is a v3 field and tests for it should be isolated to v3
// it allows us to keep tests that use this struct in common
pub ordering: Option<i32>,

pub id: VersionId,
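
The `Loader` newtype referenced in the new doc comment is not shown in this diff; from its use in `version_creation.rs` below (`Loader("mrpack".to_string())`), it is presumably a thin wrapper over the loader's string name, roughly:

```rust
// Presumed shape, inferred from `Loader("mrpack".to_string())` below;
// the real definition (and any serde derives) lives elsewhere in the crate.
pub struct Loader(pub String);
```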
42 changes: 34 additions & 8 deletions src/routes/v2/version_creation.rs
@@ -100,14 +100,40 @@ pub async fn version_create(
fields.insert("client_side".to_string(), json!("required"));
fields.insert("server_side".to_string(), json!("optional"));

// TODO: Some kind of handling here to ensure project type is fine.
// We expect the version uploaded to be of loader type modpack, but there might not be a way to check here for that.
// After all, theoretically, they could be creating a genuine 'fabric' mod, and modpack no longer carries information on whether its a mod or modpack,
// as those are out to the versions.
// Handle project type via file extension prediction
let mut project_type = None;
for file_part in &legacy_create.file_parts {
if let Some(ext) = file_part.split('.').last() {
match ext {
"jar" => {
project_type = Some("mod");
break;
Member review comment: "see other pr, should probably port this over. we should only handle this for modpack .mrpack files" (a sketch of this suggestion follows after this file's diff).
}
"mrpack" | "zip" => {
project_type = Some("modpack");
break;
}
_ => {}
}
break;
}
}
let project_type = project_type.ok_or(CreateError::InvalidInput(
"Could not determine project type from file parts for v2 version creation."
.to_string(),
))?;

// Ideally this would, if the project 'should' be a modpack:
// - change the loaders to mrpack only
// - add loader fields to the project for the corresponding loaders
// Modpacks now use the "mrpack" loader, and loaders are converted to loader fields.
// Setting of 'project_type' directly is removed, it's loader-based now.
if project_type == "modpack" {
fields.insert("mrpack_loaders".to_string(), json!(legacy_create.loaders));
}

let loaders = if project_type == "modpack" {
vec![Loader("mrpack".to_string())]
} else {
legacy_create.loaders
};

Ok(v3::version_creation::InitialVersionData {
project_id: legacy_create.project_id,
@@ -117,7 +143,7 @@
version_body: legacy_create.version_body,
dependencies: legacy_create.dependencies,
release_channel: legacy_create.release_channel,
loaders: legacy_create.loaders,
loaders,
featured: legacy_create.featured,
primary_file: legacy_create.primary_file,
status: legacy_create.status,
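Picking up the reviewer's comment above: a minimal sketch of what restricting the special handling to `.mrpack` files could look like, with a hypothetical helper name and the fallback behavior as an assumption (not this PR's code):

```rust
// Hypothetical alternative per the review comment: only a .mrpack file
// marks the upload as a modpack; any other extension is treated as a
// plain mod instead of failing version creation.
fn infer_project_type(file_parts: &[String]) -> &'static str {
    let is_modpack = file_parts
        .iter()
        .any(|part| part.rsplit('.').next() == Some("mrpack"));
    if is_modpack { "modpack" } else { "mod" }
}
```

Under that scheme the `CreateError::InvalidInput` fallback above would no longer be reachable, since every upload resolves to a type.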
1 change: 0 additions & 1 deletion src/routes/v3/project_creation.rs
@@ -894,7 +894,6 @@ async fn create_initial_version(
&mut loader_field_enum_values,
)?;

println!("Made it past here");
let dependencies = version_data
.dependencies
.iter()
216 changes: 110 additions & 106 deletions tests/analytics.rs
@@ -1,127 +1,131 @@
use chrono::{DateTime, Duration, Utc};
use common::database::*;
use common::environment::TestEnvironment;
use common::{
api_v3::ApiV3,
database::*,
environment::{with_test_environment, TestEnvironment},
};
use itertools::Itertools;
use labrinth::models::ids::base62_impl::parse_base62;
use rust_decimal::{prelude::ToPrimitive, Decimal};

use crate::common::api_common::ApiProject;

mod common;

#[actix_rt::test]
pub async fn analytics_revenue() {
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v3;

let alpha_project_id = test_env
.dummy
.as_ref()
.unwrap()
.project_alpha
.project_id
.clone();
with_test_environment(None, |test_env: TestEnvironment<ApiV3>| async move {
let api = &test_env.api;

let pool = test_env.db.pool.clone();
let alpha_project_id = test_env
.dummy
.as_ref()
.unwrap()
.project_alpha
.project_id
.clone();

// Generate sample revenue data- directly insert into sql
let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) =
(Vec::new(), Vec::new(), Vec::new(), Vec::new());
let pool = test_env.db.pool.clone();

// Note: these go from most recent to least recent
let money_time_pairs: [(f64, DateTime<Utc>); 10] = [
(50.0, Utc::now() - Duration::minutes(5)),
(50.1, Utc::now() - Duration::minutes(10)),
(101.0, Utc::now() - Duration::days(1)),
(200.0, Utc::now() - Duration::days(2)),
(311.0, Utc::now() - Duration::days(3)),
(400.0, Utc::now() - Duration::days(4)),
(526.0, Utc::now() - Duration::days(5)),
(633.0, Utc::now() - Duration::days(6)),
(800.0, Utc::now() - Duration::days(14)),
(800.0, Utc::now() - Duration::days(800)),
];
// Generate sample revenue data- directly insert into sql
let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) =
(Vec::new(), Vec::new(), Vec::new(), Vec::new());

let project_id = parse_base62(&alpha_project_id).unwrap() as i64;
for (money, time) in money_time_pairs.iter() {
insert_user_ids.push(USER_USER_ID_PARSED);
insert_project_ids.push(project_id);
insert_payouts.push(Decimal::from_f64_retain(*money).unwrap());
insert_starts.push(*time);
}
// Note: these go from most recent to least recent
let money_time_pairs: [(f64, DateTime<Utc>); 10] = [
(50.0, Utc::now() - Duration::minutes(5)),
(50.1, Utc::now() - Duration::minutes(10)),
(101.0, Utc::now() - Duration::days(1)),
(200.0, Utc::now() - Duration::days(2)),
(311.0, Utc::now() - Duration::days(3)),
(400.0, Utc::now() - Duration::days(4)),
(526.0, Utc::now() - Duration::days(5)),
(633.0, Utc::now() - Duration::days(6)),
(800.0, Utc::now() - Duration::days(14)),
(800.0, Utc::now() - Duration::days(800)),
];

sqlx::query!(
"
INSERT INTO payouts_values (user_id, mod_id, amount, created)
SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[])
",
&insert_user_ids[..],
&insert_project_ids[..],
&insert_payouts[..],
&insert_starts[..]
)
.execute(&pool)
.await
.unwrap();
let project_id = parse_base62(&alpha_project_id).unwrap() as i64;
for (money, time) in money_time_pairs.iter() {
insert_user_ids.push(USER_USER_ID_PARSED);
insert_project_ids.push(project_id);
insert_payouts.push(Decimal::from_f64_retain(*money).unwrap());
insert_starts.push(*time);
}

let day = 86400;

// Test analytics endpoint with default values
// - all time points in the last 2 weeks
// - 1 day resolution
let analytics = api
.get_analytics_revenue_deserialized(
vec![&alpha_project_id],
None,
None,
None,
USER_USER_PAT,
sqlx::query!(
"
INSERT INTO payouts_values (user_id, mod_id, amount, created)
SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[])
",
&insert_user_ids[..],
&insert_project_ids[..],
&insert_payouts[..],
&insert_starts[..]
)
.await;
assert_eq!(analytics.len(), 1); // 1 project
let project_analytics = analytics.get(&alpha_project_id).unwrap();
assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included
// sorted_by_key, values in the order of smallest to largest key
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
.iter()
.sorted_by_key(|(k, _)| *k)
.rev()
.unzip();
assert_eq!(
vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0],
to_f64_vec_rounded_up(sorted_by_key)
);
// Ensure that the keys are in multiples of 1 day
for k in sorted_keys {
assert_eq!(k % day, 0);
}
.execute(&pool)
.await
.unwrap();

// Test analytics with last 900 days to include all data
// keep resolution at default
let analytics = api
.get_analytics_revenue_deserialized(
vec![&alpha_project_id],
Some(Utc::now() - Duration::days(801)),
None,
None,
USER_USER_PAT,
)
.await;
let project_analytics = analytics.get(&alpha_project_id).unwrap();
assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
.iter()
.sorted_by_key(|(k, _)| *k)
.rev()
.unzip();
assert_eq!(
vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0],
to_f64_vec_rounded_up(sorted_by_key)
);
for k in sorted_keys {
assert_eq!(k % day, 0);
}
let day = 86400;

// Test analytics endpoint with default values
// - all time points in the last 2 weeks
// - 1 day resolution
let analytics = api
.get_analytics_revenue_deserialized(
vec![&alpha_project_id],
None,
None,
None,
USER_USER_PAT,
)
.await;
assert_eq!(analytics.len(), 1); // 1 project
let project_analytics = analytics.get(&alpha_project_id).unwrap();
assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included
// sorted_by_key, values in the order of smallest to largest key
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
.iter()
.sorted_by_key(|(k, _)| *k)
.rev()
.unzip();
assert_eq!(
vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0],
to_f64_vec_rounded_up(sorted_by_key)
);
// Ensure that the keys are in multiples of 1 day
for k in sorted_keys {
assert_eq!(k % day, 0);
}

// Cleanup test db
test_env.cleanup().await;
// Test analytics with last 900 days to include all data
// keep resolution at default
let analytics = api
.get_analytics_revenue_deserialized(
vec![&alpha_project_id],
Some(Utc::now() - Duration::days(801)),
None,
None,
USER_USER_PAT,
)
.await;
let project_analytics = analytics.get(&alpha_project_id).unwrap();
assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
.iter()
.sorted_by_key(|(k, _)| *k)
.rev()
.unzip();
assert_eq!(
vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0],
to_f64_vec_rounded_up(sorted_by_key)
);
for k in sorted_keys {
assert_eq!(k % day, 0);
}
})
.await;
}
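
The test body now runs inside `with_test_environment`, which owns setup and teardown instead of each test calling `TestEnvironment::build` and `cleanup` by hand. Assuming the helper in `common::environment` has roughly this shape (a sketch, not the actual implementation; the parameter name and `db` handle are guesses):

```rust
use std::future::Future;

// Sketch: build the environment, hand it to the test closure, and clean
// up the database afterwards even though the closure consumed the
// environment itself.
pub async fn with_test_environment<F, Fut>(max_connections: Option<u32>, f: F)
where
    F: FnOnce(TestEnvironment<ApiV3>) -> Fut,
    Fut: Future<Output = ()>,
{
    let test_env = TestEnvironment::build(max_connections).await;
    let db = test_env.db.clone();
    f(test_env).await;
    db.cleanup().await;
}
```

The payoff is visible in the diff: the old test's explicit `test_env.cleanup().await;` at the end of the body is gone.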

fn to_f64_rounded_up(d: Decimal) -> f64 {