Skip to content
This repository has been archived by the owner on Oct 19, 2024. It is now read-only.

Commit

Permalink
merge issues
Browse files Browse the repository at this point in the history
  • Loading branch information
thesuzerain committed Nov 17, 2023
1 parent f286cb1 commit a6a410a
Show file tree
Hide file tree
Showing 10 changed files with 143 additions and 153 deletions.
5 changes: 1 addition & 4 deletions src/database/models/image_item.rs
Original file line number Diff line number Diff line change
Expand Up @@ -192,10 +192,7 @@ impl Image {

if !image_ids.is_empty() {
let images = redis
.multi_get::<String>(
IMAGES_NAMESPACE,
image_ids.iter().map(|x| x.to_string()),
)
.multi_get::<String>(IMAGES_NAMESPACE, image_ids.iter().map(|x| x.to_string()))
.await?;
for image in images {
if let Some(image) = image.and_then(|x| serde_json::from_str::<Image>(&x).ok()) {
Expand Down
12 changes: 10 additions & 2 deletions src/database/models/loader_fields.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ impl Game {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let mut redis = redis.connect().await?;
let cached_games: Option<Vec<Game>> = redis
.get_deserialized_from_json(GAMES_LIST_NAMESPACE, "games")
.await?;
Expand Down Expand Up @@ -95,6 +96,7 @@ impl Loader {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let mut redis = redis.connect().await?;
let cached_id: Option<i32> = redis.get_deserialized_from_json(LOADER_ID, name).await?;
if let Some(cached_id) = cached_id {
return Ok(Some(LoaderId(cached_id)));
Expand Down Expand Up @@ -124,6 +126,7 @@ impl Loader {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let mut redis = redis.connect().await?;
let cached_loaders: Option<Vec<Loader>> = redis
.get_deserialized_from_json(LOADERS_LIST_NAMESPACE, "all")
.await?;
Expand Down Expand Up @@ -318,9 +321,11 @@ impl LoaderField {
{
type RedisLoaderFieldTuple = (LoaderId, Vec<LoaderField>);

let mut redis = redis.connect().await?;

let mut loader_ids = loader_ids.to_vec();
let cached_fields: Vec<RedisLoaderFieldTuple> = redis
.multi_get::<String, _>(LOADER_FIELDS_NAMESPACE, loader_ids.iter().map(|x| x.0))
.multi_get::<String>(LOADER_FIELDS_NAMESPACE, loader_ids.iter().map(|x| x.0))
.await?
.into_iter()
.flatten()
Expand Down Expand Up @@ -399,6 +404,8 @@ impl LoaderFieldEnum {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let mut redis = redis.connect().await?;

let cached_enum = redis
.get_deserialized_from_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name)
.await?;
Expand Down Expand Up @@ -488,12 +495,13 @@ impl LoaderFieldEnumValue {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let mut redis = redis.connect().await?;
let mut found_enums = Vec::new();
let mut remaining_enums: Vec<LoaderFieldEnumId> = loader_field_enum_ids.to_vec();

if !remaining_enums.is_empty() {
let enums = redis
.multi_get::<String, _>(
.multi_get::<String>(
LOADER_FIELD_ENUM_VALUES_NAMESPACE,
loader_field_enum_ids.iter().map(|x| x.0),
)
Expand Down
5 changes: 1 addition & 4 deletions src/database/models/pat_item.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,10 +120,7 @@ impl PersonalAccessToken {

if !pat_ids.is_empty() {
let pats = redis
.multi_get::<String>(
PATS_NAMESPACE,
pat_ids.iter().map(|x| x.to_string()),
)
.multi_get::<String>(PATS_NAMESPACE, pat_ids.iter().map(|x| x.to_string()))
.await?;
for pat in pats {
if let Some(pat) =
Expand Down
4 changes: 1 addition & 3 deletions src/database/models/project_item.rs
Original file line number Diff line number Diff line change
Expand Up @@ -530,9 +530,7 @@ impl Project {
&mut redis
.multi_get::<i64>(
PROJECTS_SLUGS_NAMESPACE,
project_strings
.iter()
.map(|x| x.to_string().to_lowercase()),
project_strings.iter().map(|x| x.to_string().to_lowercase()),
)
.await?
.into_iter()
Expand Down
9 changes: 2 additions & 7 deletions src/database/models/user_item.rs
Original file line number Diff line number Diff line change
Expand Up @@ -155,9 +155,7 @@ impl User {
&mut redis
.multi_get::<i64>(
USER_USERNAMES_NAMESPACE,
users_strings
.iter()
.map(|x| x.to_string().to_lowercase()),
users_strings.iter().map(|x| x.to_string().to_lowercase()),
)
.await?
.into_iter()
Expand All @@ -167,10 +165,7 @@ impl User {

if !user_ids.is_empty() {
let users = redis
.multi_get::<String>(
USERS_NAMESPACE,
user_ids.iter().map(|x| x.to_string()),
)
.multi_get::<String>(USERS_NAMESPACE, user_ids.iter().map(|x| x.to_string()))
.await?;
for user in users {
if let Some(user) = user.and_then(|x| serde_json::from_str::<User>(&x).ok()) {
Expand Down
7 changes: 4 additions & 3 deletions src/database/redis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -124,8 +124,8 @@ impl RedisConnection {
&mut self,
namespace: &str,
ids: impl IntoIterator<Item = impl Display>,
) -> Result<Vec<Option<R>>, DatabaseError>
where
) -> Result<Vec<Option<R>>, DatabaseError>
where
R: for<'a> serde::Deserialize<'a>,
{
let mut cmd = cmd("MGET");
Expand All @@ -137,7 +137,8 @@ impl RedisConnection {
.collect_vec(),
);
let res: Vec<Option<String>> = redis_execute(&mut cmd, &mut self.connection).await?;
Ok(res.into_iter()
Ok(res
.into_iter()
.map(|x| x.and_then(|x| serde_json::from_str(&x).ok()))
.collect())
}
Expand Down
7 changes: 0 additions & 7 deletions src/models/v3/images.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,6 @@ use serde::{Deserialize, Serialize};
#[serde(into = "Base62Id")]
pub struct ImageId(pub u64);








#[derive(Serialize, Deserialize)]
pub struct Image {
pub id: ImageId,
Expand Down
218 changes: 109 additions & 109 deletions tests/analytics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,124 +12,124 @@ pub async fn analytics_revenue() {
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v3;

// let alpha_project_id = test_env
// .dummy
// .as_ref()
// .unwrap()
// .project_alpha
// .project_id
// .clone();
let alpha_project_id = test_env
.dummy
.as_ref()
.unwrap()
.project_alpha
.project_id
.clone();

// let pool = test_env.db.pool.clone();
let pool = test_env.db.pool.clone();

// // Generate sample revenue data- directly insert into sql
// let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) =
// (Vec::new(), Vec::new(), Vec::new(), Vec::new());
// Generate sample revenue data- directly insert into sql
let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) =
(Vec::new(), Vec::new(), Vec::new(), Vec::new());

// // Note: these go from most recent to least recent
// let money_time_pairs: [(f64, DateTime<Utc>); 10] = [
// (50.0, Utc::now() - Duration::minutes(5)),
// (50.1, Utc::now() - Duration::minutes(10)),
// (101.0, Utc::now() - Duration::days(1)),
// (200.0, Utc::now() - Duration::days(2)),
// (311.0, Utc::now() - Duration::days(3)),
// (400.0, Utc::now() - Duration::days(4)),
// (526.0, Utc::now() - Duration::days(5)),
// (633.0, Utc::now() - Duration::days(6)),
// (800.0, Utc::now() - Duration::days(14)),
// (800.0, Utc::now() - Duration::days(800)),
// ];
// Note: these go from most recent to least recent
let money_time_pairs: [(f64, DateTime<Utc>); 10] = [
(50.0, Utc::now() - Duration::minutes(5)),
(50.1, Utc::now() - Duration::minutes(10)),
(101.0, Utc::now() - Duration::days(1)),
(200.0, Utc::now() - Duration::days(2)),
(311.0, Utc::now() - Duration::days(3)),
(400.0, Utc::now() - Duration::days(4)),
(526.0, Utc::now() - Duration::days(5)),
(633.0, Utc::now() - Duration::days(6)),
(800.0, Utc::now() - Duration::days(14)),
(800.0, Utc::now() - Duration::days(800)),
];

// let project_id = parse_base62(&alpha_project_id).unwrap() as i64;
// for (money, time) in money_time_pairs.iter() {
// insert_user_ids.push(USER_USER_ID_PARSED);
// insert_project_ids.push(project_id);
// insert_payouts.push(Decimal::from_f64_retain(*money).unwrap());
// insert_starts.push(*time);
// }
let project_id = parse_base62(&alpha_project_id).unwrap() as i64;
for (money, time) in money_time_pairs.iter() {
insert_user_ids.push(USER_USER_ID_PARSED);
insert_project_ids.push(project_id);
insert_payouts.push(Decimal::from_f64_retain(*money).unwrap());
insert_starts.push(*time);
}

// sqlx::query!(
// "
// INSERT INTO payouts_values (user_id, mod_id, amount, created)
// SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[])
// ",
// &insert_user_ids[..],
// &insert_project_ids[..],
// &insert_payouts[..],
// &insert_starts[..]
// )
// .execute(&pool)
// .await
// .unwrap();
sqlx::query!(
"
INSERT INTO payouts_values (user_id, mod_id, amount, created)
SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[])
",
&insert_user_ids[..],
&insert_project_ids[..],
&insert_payouts[..],
&insert_starts[..]
)
.execute(&pool)
.await
.unwrap();

// let day = 86400;
let day = 86400;

// // Test analytics endpoint with default values
// // - all time points in the last 2 weeks
// // - 1 day resolution
// let analytics = api
// .get_analytics_revenue_deserialized(
// vec![&alpha_project_id],
// None,
// None,
// None,
// USER_USER_PAT,
// )
// .await;
// assert_eq!(analytics.len(), 1); // 1 project
// let project_analytics = analytics.get(&alpha_project_id).unwrap();
// assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included
// // sorted_by_key, values in the order of smallest to largest key
// let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
// .iter()
// .sorted_by_key(|(k, _)| *k)
// .rev()
// .unzip();
// assert_eq!(
// vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0],
// to_f64_vec_rounded_up(sorted_by_key)
// );
// // Ensure that the keys are in multiples of 1 day
// for k in sorted_keys {
// assert_eq!(k % day, 0);
// }
// Test analytics endpoint with default values
// - all time points in the last 2 weeks
// - 1 day resolution
let analytics = api
.get_analytics_revenue_deserialized(
vec![&alpha_project_id],
None,
None,
None,
USER_USER_PAT,
)
.await;
assert_eq!(analytics.len(), 1); // 1 project
let project_analytics = analytics.get(&alpha_project_id).unwrap();
assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included
// sorted_by_key, values in the order of smallest to largest key
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
.iter()
.sorted_by_key(|(k, _)| *k)
.rev()
.unzip();
assert_eq!(
vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0],
to_f64_vec_rounded_up(sorted_by_key)
);
// Ensure that the keys are in multiples of 1 day
for k in sorted_keys {
assert_eq!(k % day, 0);
}

// // Test analytics with last 900 days to include all data
// // keep resolution at default
// let analytics = api
// .get_analytics_revenue_deserialized(
// vec![&alpha_project_id],
// Some(Utc::now() - Duration::days(801)),
// None,
// None,
// USER_USER_PAT,
// )
// .await;
// let project_analytics = analytics.get(&alpha_project_id).unwrap();
// assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day
// let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
// .iter()
// .sorted_by_key(|(k, _)| *k)
// .rev()
// .unzip();
// assert_eq!(
// vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0],
// to_f64_vec_rounded_up(sorted_by_key)
// );
// for k in sorted_keys {
// assert_eq!(k % day, 0);
// }
// Test analytics with last 900 days to include all data
// keep resolution at default
let analytics = api
.get_analytics_revenue_deserialized(
vec![&alpha_project_id],
Some(Utc::now() - Duration::days(801)),
None,
None,
USER_USER_PAT,
)
.await;
let project_analytics = analytics.get(&alpha_project_id).unwrap();
assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
.iter()
.sorted_by_key(|(k, _)| *k)
.rev()
.unzip();
assert_eq!(
vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0],
to_f64_vec_rounded_up(sorted_by_key)
);
for k in sorted_keys {
assert_eq!(k % day, 0);
}

// // Cleanup test db
// test_env.cleanup().await;
// Cleanup test db
test_env.cleanup().await;
}

// fn to_f64_rounded_up(d: Decimal) -> f64 {
// d.round_dp_with_strategy(1, rust_decimal::RoundingStrategy::MidpointAwayFromZero)
// .to_f64()
// .unwrap()
// }
/// Rounds a `Decimal` to one decimal place — midpoints round away from
/// zero — then converts the result to an `f64` for test assertions.
fn to_f64_rounded_up(d: Decimal) -> f64 {
    let rounded =
        d.round_dp_with_strategy(1, rust_decimal::RoundingStrategy::MidpointAwayFromZero);
    rounded.to_f64().unwrap()
}

// fn to_f64_vec_rounded_up(d: Vec<Decimal>) -> Vec<f64> {
// d.into_iter().map(to_f64_rounded_up).collect_vec()
// }
/// Applies `to_f64_rounded_up` to every element of the vector,
/// preserving order.
fn to_f64_vec_rounded_up(d: Vec<Decimal>) -> Vec<f64> {
    d.into_iter().map(to_f64_rounded_up).collect()
}
Loading

0 comments on commit a6a410a

Please sign in to comment.