diff --git a/.sqlx/query-65b5acdce6675d9c2abe636793dafef8ec915ddcc11a2735c66a49a48f314dd6.json b/.sqlx/query-06bf1b34b70f5e61bf619c4d7706d07d6db413751ecab86896a708c8539e38b6.json similarity index 65% rename from .sqlx/query-65b5acdce6675d9c2abe636793dafef8ec915ddcc11a2735c66a49a48f314dd6.json rename to .sqlx/query-06bf1b34b70f5e61bf619c4d7706d07d6db413751ecab86896a708c8539e38b6.json index 89dd90f0..24575023 100644 --- a/.sqlx/query-65b5acdce6675d9c2abe636793dafef8ec915ddcc11a2735c66a49a48f314dd6.json +++ b/.sqlx/query-06bf1b34b70f5e61bf619c4d7706d07d6db413751ecab86896a708c8539e38b6.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering\n FROM organizations o\n INNER JOIN team_members tm ON tm.team_id = o.team_id AND user_id = $2 AND accepted = TRUE\n WHERE o.id = $1\n ", + "query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id \n FROM versions v\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 AND tm.accepted = TRUE\n WHERE v.id = $1\n ", "describe": { "columns": [ { @@ -25,28 +25,38 @@ }, { "ordinal": 4, + "name": "is_owner", + "type_info": "Bool" + }, + { + "ordinal": 5, "name": "permissions", "type_info": "Int8" }, { - "ordinal": 5, + "ordinal": 6, "name": "organization_permissions", "type_info": "Int8" }, { - "ordinal": 6, + "ordinal": 7, "name": "accepted", "type_info": "Bool" }, { - "ordinal": 7, + "ordinal": 8, "name": "payouts_split", "type_info": "Numeric" }, { - "ordinal": 8, + "ordinal": 9, "name": "ordering", "type_info": "Int8" + }, + { + "ordinal": 10, + "name": "mod_id", + "type_info": "Int8" } ], "parameters": { @@ -61,11 +71,13 @@ false, false, false, + false, true, false, false, + false, false ] }, - "hash": "65b5acdce6675d9c2abe636793dafef8ec915ddcc11a2735c66a49a48f314dd6" + "hash": "06bf1b34b70f5e61bf619c4d7706d07d6db413751ecab86896a708c8539e38b6" } diff --git a/.sqlx/query-108fe88fbd116817e48719c370a63b7d0d56b6de443db0e4466237ee3e93fbde.json b/.sqlx/query-108fe88fbd116817e48719c370a63b7d0d56b6de443db0e4466237ee3e93fbde.json deleted file mode 100644 index 6428c1f7..00000000 --- a/.sqlx/query-108fe88fbd116817e48719c370a63b7d0d56b6de443db0e4466237ee3e93fbde.json +++ /dev/null @@ -1,120 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT m.id id, m.title title, m.description description, m.color color,\n m.icon_url icon_url, m.slug slug,\n u.username username, u.avatar_url avatar_url,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'version_id', 0, -- TODO: When webhook is updated to match others, 
this should match version\n 'lf_id', lf.id,\n 'loader_name', lo.loader,\n 'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id\n WHERE m.id = $1\n GROUP BY m.id, u.id;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "title", - "type_info": "Varchar" - }, - { - "ordinal": 2, - "name": "description", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "color", - "type_info": "Int4" - }, - { - "ordinal": 4, - "name": "icon_url", - "type_info": "Varchar" - }, - { - "ordinal": 5, - "name": "slug", - "type_info": "Varchar" - }, - { - "ordinal": 6, - "name": "username", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "avatar_url", - "type_info": "Varchar" - }, - { - "ordinal": 8, - "name": "categories", - "type_info": "VarcharArray" - }, - { - "ordinal": 9, - "name": "loaders", - "type_info": "VarcharArray" - }, - { - "ordinal": 10, - "name": "project_types", - "type_info": "VarcharArray" - }, - { - "ordinal": 11, - "name": "games", - "type_info": "VarcharArray" - }, - { - "ordinal": 12, - "name": "gallery", - "type_info": "VarcharArray" - }, - { - "ordinal": 13, - "name": "featured_gallery", - "type_info": "VarcharArray" - }, - { - "ordinal": 14, - "name": "version_fields", - "type_info": "Jsonb" - }, - { - "ordinal": 15, - "name": "loader_fields", - "type_info": "Jsonb" - }, - { - "ordinal": 16, - "name": "loader_field_enum_values", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Int8", - "TextArray", - "Text" - ] - }, - "nullable": [ - false, - false, - false, - true, - true, - true, - false, - true, - null, - null, - null, - null, - null, - null, - null, - null, - null - ] - }, - "hash": "108fe88fbd116817e48719c370a63b7d0d56b6de443db0e4466237ee3e93fbde" -} diff --git a/.sqlx/query-186d0e933ece20163915926293a01754ff571de4f06e521bb4f7c0207268e03b.json b/.sqlx/query-186d0e933ece20163915926293a01754ff571de4f06e521bb4f7c0207268e03b.json new file mode 100644 index 00000000..e31392b4 --- /dev/null +++ 
b/.sqlx/query-186d0e933ece20163915926293a01754ff571de4f06e521bb4f7c0207268e03b.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM mods_links\n WHERE joining_mod_id = $1 AND joining_platform_id IN (\n SELECT id FROM link_platforms WHERE name = ANY($2)\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "TextArray" + ] + }, + "nullable": [] + }, + "hash": "186d0e933ece20163915926293a01754ff571de4f06e521bb4f7c0207268e03b" +} diff --git a/.sqlx/query-9608a95084c55d939d3f908f3dd7e53cb1c9455b5d53868993147bf6abc42ffb.json b/.sqlx/query-26a6271a6d365e64c68ea5855109f1597a121b2e0075b20e2bc34659a269294b.json similarity index 68% rename from .sqlx/query-9608a95084c55d939d3f908f3dd7e53cb1c9455b5d53868993147bf6abc42ffb.json rename to .sqlx/query-26a6271a6d365e64c68ea5855109f1597a121b2e0075b20e2bc34659a269294b.json index 11c686fa..c6545832 100644 --- a/.sqlx/query-9608a95084c55d939d3f908f3dd7e53cb1c9455b5d53868993147bf6abc42ffb.json +++ b/.sqlx/query-26a6271a6d365e64c68ea5855109f1597a121b2e0075b20e2bc34659a269294b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT o.id, o.title, o.team_id, o.description, o.icon_url, o.color\n FROM organizations o\n LEFT JOIN mods m ON m.organization_id = o.id\n WHERE m.id = $1\n GROUP BY o.id;\n ", + "query": "\n SELECT o.id, o.name, o.team_id, o.description, o.icon_url, o.color\n FROM organizations o\n LEFT JOIN mods m ON m.organization_id = o.id\n WHERE m.id = $1\n GROUP BY o.id;\n ", "describe": { "columns": [ { @@ -10,7 +10,7 @@ }, { "ordinal": 1, - "name": "title", + "name": "name", "type_info": "Varchar" }, { @@ -48,5 +48,5 @@ true ] }, - "hash": "9608a95084c55d939d3f908f3dd7e53cb1c9455b5d53868993147bf6abc42ffb" + "hash": "26a6271a6d365e64c68ea5855109f1597a121b2e0075b20e2bc34659a269294b" } diff --git a/.sqlx/query-8c93ad7aa81a0502494ff98dd6120c34d583d1a205b4c97ac54a7230b8c23765.json b/.sqlx/query-2e3ce3eafee2cd110085a94b122884fe25591aa6f48256abbb6c8d973efe932e.json similarity index 73% rename from .sqlx/query-8c93ad7aa81a0502494ff98dd6120c34d583d1a205b4c97ac54a7230b8c23765.json rename to .sqlx/query-2e3ce3eafee2cd110085a94b122884fe25591aa6f48256abbb6c8d973efe932e.json index 261a49cd..3580feb9 100644 --- a/.sqlx/query-8c93ad7aa81a0502494ff98dd6120c34d583d1a205b4c97ac54a7230b8c23765.json +++ b/.sqlx/query-2e3ce3eafee2cd110085a94b122884fe25591aa6f48256abbb6c8d973efe932e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id \n FROM versions v\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 AND tm.accepted = TRUE\n WHERE v.id = $1\n ", + "query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering\n FROM organizations o\n INNER JOIN team_members tm ON tm.team_id = o.team_id AND user_id = $2 AND accepted = TRUE\n WHERE o.id = $1\n ", "describe": { "columns": [ { @@ -25,32 +25,32 @@ }, { "ordinal": 4, + "name": "is_owner", + "type_info": "Bool" + }, + { + "ordinal": 5, "name": "permissions", "type_info": "Int8" }, { - "ordinal": 5, + "ordinal": 6, "name": "organization_permissions", "type_info": "Int8" }, { - "ordinal": 6, + "ordinal": 7, "name": "accepted", "type_info": "Bool" }, { - "ordinal": 7, + "ordinal": 8, "name": "payouts_split", "type_info": "Numeric" }, - { - "ordinal": 
8, - "name": "ordering", - "type_info": "Int8" - }, { "ordinal": 9, - "name": "mod_id", + "name": "ordering", "type_info": "Int8" } ], @@ -66,12 +66,12 @@ false, false, false, - true, false, + true, false, false, false ] }, - "hash": "8c93ad7aa81a0502494ff98dd6120c34d583d1a205b4c97ac54a7230b8c23765" + "hash": "2e3ce3eafee2cd110085a94b122884fe25591aa6f48256abbb6c8d973efe932e" } diff --git a/.sqlx/query-3c061c1888cb14655288cdbb2dad22f6cb51d6be3736e8d8206f918a9a64aec7.json b/.sqlx/query-3c061c1888cb14655288cdbb2dad22f6cb51d6be3736e8d8206f918a9a64aec7.json deleted file mode 100644 index bc34af5b..00000000 --- a/.sqlx/query-3c061c1888cb14655288cdbb2dad22f6cb51d6be3736e8d8206f918a9a64aec7.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE mods_gallery\n SET title = $2\n WHERE id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "3c061c1888cb14655288cdbb2dad22f6cb51d6be3736e8d8206f918a9a64aec7" -} diff --git a/.sqlx/query-ce2e7642142f79bdce78ba3316fe402e18ae203cc65fe79f724d37a7076df2dd.json b/.sqlx/query-4b089a5d9408febe64af1cf5f78cc11c33f6702637c03c1ed9d24df8a847f91a.json similarity index 61% rename from .sqlx/query-ce2e7642142f79bdce78ba3316fe402e18ae203cc65fe79f724d37a7076df2dd.json rename to .sqlx/query-4b089a5d9408febe64af1cf5f78cc11c33f6702637c03c1ed9d24df8a847f91a.json index 7ed62345..0d70b3f7 100644 --- a/.sqlx/query-ce2e7642142f79bdce78ba3316fe402e18ae203cc65fe79f724d37a7076df2dd.json +++ b/.sqlx/query-4b089a5d9408febe64af1cf5f78cc11c33f6702637c03c1ed9d24df8a847f91a.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE collections\n SET title = $1\n WHERE (id = $2)\n ", + "query": "\n UPDATE collections\n SET name = $1\n WHERE (id = $2)\n ", "describe": { "columns": [], "parameters": { @@ -11,5 +11,5 @@ }, "nullable": [] }, - "hash": "ce2e7642142f79bdce78ba3316fe402e18ae203cc65fe79f724d37a7076df2dd" + "hash": "4b089a5d9408febe64af1cf5f78cc11c33f6702637c03c1ed9d24df8a847f91a" } diff --git a/.sqlx/query-4b9e5d78245ac083c167be708c196170c543a2157dbfa9d6249d98dc13bfaf72.json b/.sqlx/query-4b9e5d78245ac083c167be708c196170c543a2157dbfa9d6249d98dc13bfaf72.json new file mode 100644 index 00000000..e9224fce --- /dev/null +++ b/.sqlx/query-4b9e5d78245ac083c167be708c196170c543a2157dbfa9d6249d98dc13bfaf72.json @@ -0,0 +1,119 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT m.id id, m.name name, m.description description, m.color color,\n m.icon_url icon_url, m.slug slug,\n u.username username, u.avatar_url avatar_url,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'version_id', 0, -- TODO: When webhook is updated to match others, this should match version\n 'lf_id', lf.id,\n 'loader_name', lo.loader,\n 
'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.is_owner = TRUE AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id\n WHERE m.id = $1\n GROUP BY m.id, u.id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "description", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "color", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "icon_url", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "username", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "avatar_url", + "type_info": "Varchar" + }, + { + "ordinal": 8, + "name": "categories", + "type_info": "VarcharArray" + }, + { + "ordinal": 9, + "name": "loaders", + "type_info": "VarcharArray" + }, + { + "ordinal": 10, + "name": "project_types", + "type_info": "VarcharArray" + }, + { + "ordinal": 11, + "name": "games", + "type_info": "VarcharArray" + }, + { + "ordinal": 12, + "name": "gallery", + "type_info": "VarcharArray" + }, + { + "ordinal": 13, + "name": "featured_gallery", + "type_info": "VarcharArray" + }, + { + "ordinal": 14, + "name": "version_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 15, + "name": "loader_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 16, + "name": "loader_field_enum_values", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8", + "TextArray" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + true, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + }, + "hash": "4b9e5d78245ac083c167be708c196170c543a2157dbfa9d6249d98dc13bfaf72" +} diff --git a/.sqlx/query-4d0e2d4345aeab5ee7eed847c03c913073eeb43caaf299cddcac6e41351661fd.json b/.sqlx/query-4d0e2d4345aeab5ee7eed847c03c913073eeb43caaf299cddcac6e41351661fd.json deleted file mode 100644 index 801c4326..00000000 --- a/.sqlx/query-4d0e2d4345aeab5ee7eed847c03c913073eeb43caaf299cddcac6e41351661fd.json +++ 
/dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM mods_links\n WHERE joining_mod_id = $1 AND joining_platform_id IN (\n SELECT id FROM link_platforms WHERE name = ANY($2)\n )\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "TextArray" - ] - }, - "nullable": [] - }, - "hash": "4d0e2d4345aeab5ee7eed847c03c913073eeb43caaf299cddcac6e41351661fd" -} diff --git a/.sqlx/query-568ca221aaacb7222bf5099f59ae6bc3d96fbffaf91394115c29029ae9ea4108.json b/.sqlx/query-568ca221aaacb7222bf5099f59ae6bc3d96fbffaf91394115c29029ae9ea4108.json new file mode 100644 index 00000000..3db0dd3f --- /dev/null +++ b/.sqlx/query-568ca221aaacb7222bf5099f59ae6bc3d96fbffaf91394115c29029ae9ea4108.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE mods_gallery\n SET name = $2\n WHERE id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "568ca221aaacb7222bf5099f59ae6bc3d96fbffaf91394115c29029ae9ea4108" +} diff --git a/.sqlx/query-536f628092168eead27519db013ec8a1510a06f27e699839bac9dc85d16d99c2.json b/.sqlx/query-5ba9860050d19de8fe81482cbbdc68b32092609cc7150c7fcf491f342c5d9770.json similarity index 53% rename from .sqlx/query-536f628092168eead27519db013ec8a1510a06f27e699839bac9dc85d16d99c2.json rename to .sqlx/query-5ba9860050d19de8fe81482cbbdc68b32092609cc7150c7fcf491f342c5d9770.json index 09391ead..5bca7221 100644 --- a/.sqlx/query-536f628092168eead27519db013ec8a1510a06f27e699839bac9dc85d16d99c2.json +++ b/.sqlx/query-5ba9860050d19de8fe81482cbbdc68b32092609cc7150c7fcf491f342c5d9770.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO collections (\n id, user_id, title, description, \n created, icon_url, status\n )\n VALUES (\n $1, $2, $3, $4, \n $5, $6, $7\n )\n ", + "query": "\n INSERT INTO collections (\n id, user_id, name, description, \n created, icon_url, status\n )\n VALUES (\n $1, $2, $3, $4, \n $5, $6, $7\n )\n ", "describe": { "columns": [], "parameters": { @@ -16,5 +16,5 @@ }, "nullable": [] }, - "hash": "536f628092168eead27519db013ec8a1510a06f27e699839bac9dc85d16d99c2" + "hash": "5ba9860050d19de8fe81482cbbdc68b32092609cc7150c7fcf491f342c5d9770" } diff --git a/.sqlx/query-5e7146bc9dc9145cf3d01875ee599ada89e28c63fd10b3f23680d6660d0e57a2.json b/.sqlx/query-5e7146bc9dc9145cf3d01875ee599ada89e28c63fd10b3f23680d6660d0e57a2.json deleted file mode 100644 index 689e6395..00000000 --- a/.sqlx/query-5e7146bc9dc9145cf3d01875ee599ada89e28c63fd10b3f23680d6660d0e57a2.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)\n VALUES ($1, $2, $3)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int4", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "5e7146bc9dc9145cf3d01875ee599ada89e28c63fd10b3f23680d6660d0e57a2" -} diff --git a/.sqlx/query-6366891bb34a14278f1ae857b8d6f68dff44badae9ae5c5aceba3c32e8d00356.json b/.sqlx/query-6366891bb34a14278f1ae857b8d6f68dff44badae9ae5c5aceba3c32e8d00356.json new file mode 100644 index 00000000..b262237e --- /dev/null +++ b/.sqlx/query-6366891bb34a14278f1ae857b8d6f68dff44badae9ae5c5aceba3c32e8d00356.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)\n VALUES ($1, $2, $3)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Varchar" + ] + }, + "nullable": [] + }, + 
"hash": "6366891bb34a14278f1ae857b8d6f68dff44badae9ae5c5aceba3c32e8d00356" +} diff --git a/.sqlx/query-0244926b35b964da2b50ccf82aff001250a3751d2314707c4884066432aa4753.json b/.sqlx/query-740c4343d7357af6820e28a3e1f165cbbc3f967c4dfbeeb13a0c63f78e072895.json similarity index 72% rename from .sqlx/query-0244926b35b964da2b50ccf82aff001250a3751d2314707c4884066432aa4753.json rename to .sqlx/query-740c4343d7357af6820e28a3e1f165cbbc3f967c4dfbeeb13a0c63f78e072895.json index 746c68ba..82e5ec33 100644 --- a/.sqlx/query-0244926b35b964da2b50ccf82aff001250a3751d2314707c4884066432aa4753.json +++ b/.sqlx/query-740c4343d7357af6820e28a3e1f165cbbc3f967c4dfbeeb13a0c63f78e072895.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering\n FROM mods m\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 AND accepted = TRUE\n WHERE m.id = $1\n ", + "query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering\n FROM mods m\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 AND accepted = TRUE\n WHERE m.id = $1\n ", "describe": { "columns": [ { @@ -25,26 +25,31 @@ }, { "ordinal": 4, + "name": "is_owner", + "type_info": "Bool" + }, + { + "ordinal": 5, "name": "permissions", "type_info": "Int8" }, { - "ordinal": 5, + "ordinal": 6, "name": "organization_permissions", "type_info": "Int8" }, { - "ordinal": 6, + "ordinal": 7, "name": "accepted", "type_info": "Bool" }, { - "ordinal": 7, + "ordinal": 8, "name": "payouts_split", "type_info": "Numeric" }, { - "ordinal": 8, + "ordinal": 9, "name": "ordering", "type_info": "Int8" } @@ -61,11 +66,12 @@ false, false, false, + false, true, false, false, false ] }, - "hash": "0244926b35b964da2b50ccf82aff001250a3751d2314707c4884066432aa4753" + "hash": "740c4343d7357af6820e28a3e1f165cbbc3f967c4dfbeeb13a0c63f78e072895" } diff --git a/.sqlx/query-2f4a620f954c7488e8bdb94a3d6968cec6d1332942b9e9f60925d14a8c2040f7.json b/.sqlx/query-834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937.json similarity index 65% rename from .sqlx/query-2f4a620f954c7488e8bdb94a3d6968cec6d1332942b9e9f60925d14a8c2040f7.json rename to .sqlx/query-834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937.json index 37424c24..e1cc97f1 100644 --- a/.sqlx/query-2f4a620f954c7488e8bdb94a3d6968cec6d1332942b9e9f60925d14a8c2040f7.json +++ b/.sqlx/query-834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT m.id FROM mods m\n INNER JOIN team_members tm ON tm.team_id = m.team_id\n WHERE tm.user_id = $1 AND tm.role = $2\n ", + "query": "\n SELECT m.id FROM mods m\n INNER JOIN team_members tm ON tm.team_id = m.team_id\n WHERE tm.user_id = $1 AND tm.is_owner = TRUE\n ", "describe": { "columns": [ { @@ -11,13 +11,12 @@ ], "parameters": { "Left": [ - "Int8", - "Text" + "Int8" ] }, "nullable": [ false ] }, - "hash": "2f4a620f954c7488e8bdb94a3d6968cec6d1332942b9e9f60925d14a8c2040f7" + "hash": "834be4337c2dcc2a5f38c0f4ae0a2065b5a30fc43bb32ccfe8d58e9f3da24937" } diff --git a/.sqlx/query-83dc16cc7a0f4507e308a06f4924065e4ea25de0210be222ceae8eb645f888e3.json b/.sqlx/query-83dc16cc7a0f4507e308a06f4924065e4ea25de0210be222ceae8eb645f888e3.json deleted file mode 100644 index 015a47d4..00000000 --- 
a/.sqlx/query-83dc16cc7a0f4507e308a06f4924065e4ea25de0210be222ceae8eb645f888e3.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n published, downloads, icon_url, status, requested_status,\n license_url, license,\n slug, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5, $6, \n $7, $8, $9, $10, \n $11, $12, \n LOWER($13), $14, $15\n )\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int8", - "Varchar", - "Varchar", - "Varchar", - "Timestamptz", - "Int4", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Text", - "Int4", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "83dc16cc7a0f4507e308a06f4924065e4ea25de0210be222ceae8eb645f888e3" -} diff --git a/.sqlx/query-c81df8078d6cba7f43a3d47a0b9cd365c7321c9ed304d6dd137cb2198780ff3a.json b/.sqlx/query-94de8109ff9f95be5e9f70c629fa9b1cfb2e9a1c094bc5e0d529a314a77fb4d7.json similarity index 65% rename from .sqlx/query-c81df8078d6cba7f43a3d47a0b9cd365c7321c9ed304d6dd137cb2198780ff3a.json rename to .sqlx/query-94de8109ff9f95be5e9f70c629fa9b1cfb2e9a1c094bc5e0d529a314a77fb4d7.json index 2a4ab4b5..e3f37c5f 100644 --- a/.sqlx/query-c81df8078d6cba7f43a3d47a0b9cd365c7321c9ed304d6dd137cb2198780ff3a.json +++ b/.sqlx/query-94de8109ff9f95be5e9f70c629fa9b1cfb2e9a1c094bc5e0d529a314a77fb4d7.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n WITH version_fields_cte AS (\n SELECT version_id, field_id, int_value, enum_value, string_value\n FROM version_fields\n ),\n version_fields_json AS (\n SELECT DISTINCT version_id,\n JSONB_AGG( \n DISTINCT jsonb_build_object('field_id', field_id, 'int_value', int_value, 'enum_value', enum_value, 'string_value', string_value)\n ) version_fields_json\n FROM version_fields_cte\n GROUP BY version_id\n ),\n loader_fields_cte AS (\n SELECT DISTINCT vf.version_id, lf.*, l.loader\n FROM loader_fields lf\n INNER JOIN version_fields_cte vf ON lf.id = vf.field_id\n LEFT JOIN loaders_versions lv ON vf.version_id = lv.version_id\n LEFT JOIN loaders l ON lv.loader_id = l.id\n GROUP BY vf.version_id, lf.enum_type, lf.id, l.loader\n ),\n loader_fields_json AS (\n SELECT DISTINCT version_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'version_id', lf.version_id,\n 'lf_id', id, 'loader_name', loader, 'field', field, 'field_type', field_type, 'enum_type', enum_type, 'min_val', min_val, 'max_val', max_val, 'optional', optional\n )\n ) filter (where lf.id is not null) loader_fields_json\n FROM loader_fields_cte lf\n GROUP BY version_id\n ),\n loader_field_enum_values_json AS (\n SELECT DISTINCT version_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id, 'enum_id', lfev.enum_id, 'value', lfev.value, 'ordering', lfev.ordering, 'created', lfev.created, 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values_json\n FROM loader_field_enum_values lfev\n INNER JOIN loader_fields_cte lf on lf.enum_type = lfev.enum_id\n GROUP BY version_id\n )\n\n SELECT m.id id, v.id version_id, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,\n m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color,\n u.username username,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null 
and mc.is_additional is true) additional_categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n vf.version_fields_json version_fields,\n lf.loader_fields_json loader_fields,\n lfev.loader_field_enum_values_json loader_field_enum_values\n FROM versions v\n INNER JOIN mods m ON v.mod_id = m.id AND m.status = ANY($2)\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields_json vf ON v.id = vf.version_id\n LEFT OUTER JOIN loader_fields_json lf ON v.id = lf.version_id\n LEFT OUTER JOIN loader_field_enum_values_json lfev ON v.id = lfev.version_id\n WHERE v.status != ANY($1)\n GROUP BY v.id, vf.version_fields_json, lf.loader_fields_json, lfev.loader_field_enum_values_json, m.id, u.id;\n ", + "query": "\n WITH version_fields_cte AS (\n SELECT version_id, field_id, int_value, enum_value, string_value\n FROM version_fields\n ),\n version_fields_json AS (\n SELECT DISTINCT version_id,\n JSONB_AGG( \n DISTINCT jsonb_build_object('field_id', field_id, 'int_value', int_value, 'enum_value', enum_value, 'string_value', string_value)\n ) version_fields_json\n FROM version_fields_cte\n GROUP BY version_id\n ),\n loader_fields_cte AS (\n SELECT DISTINCT vf.version_id, lf.*, l.loader\n FROM loader_fields lf\n INNER JOIN version_fields_cte vf ON lf.id = vf.field_id\n LEFT JOIN loaders_versions lv ON vf.version_id = lv.version_id\n LEFT JOIN loaders l ON lv.loader_id = l.id\n GROUP BY vf.version_id, lf.enum_type, lf.id, l.loader\n ),\n loader_fields_json AS (\n SELECT DISTINCT version_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'version_id', lf.version_id,\n 'lf_id', id, 'loader_name', loader, 'field', field, 'field_type', field_type, 'enum_type', enum_type, 'min_val', min_val, 'max_val', max_val, 'optional', optional\n )\n ) filter (where lf.id is not null) loader_fields_json\n FROM loader_fields_cte lf\n GROUP BY version_id\n ),\n loader_field_enum_values_json AS (\n SELECT DISTINCT version_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id, 'enum_id', lfev.enum_id, 'value', lfev.value, 'ordering', lfev.ordering, 'created', lfev.created, 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values_json\n FROM loader_field_enum_values lfev\n INNER JOIN loader_fields_cte lf on lf.enum_type = lfev.enum_id\n GROUP BY version_id\n )\n\n SELECT m.id id, v.id version_id, m.name name, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, 
m.published published, m.approved approved, m.updated updated,\n m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color,\n u.username username,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n vf.version_fields_json version_fields,\n lf.loader_fields_json loader_fields,\n lfev.loader_field_enum_values_json loader_field_enum_values\n FROM versions v\n INNER JOIN mods m ON v.mod_id = m.id AND m.status = ANY($2)\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.is_owner = TRUE AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields_json vf ON v.id = vf.version_id\n LEFT OUTER JOIN loader_fields_json lf ON v.id = lf.version_id\n LEFT OUTER JOIN loader_field_enum_values_json lfev ON v.id = lfev.version_id\n WHERE v.status != ANY($1)\n GROUP BY v.id, vf.version_fields_json, lf.loader_fields_json, lfev.loader_field_enum_values_json, m.id, u.id;\n ", "describe": { "columns": [ { @@ -15,7 +15,7 @@ }, { "ordinal": 2, - "name": "title", + "name": "name", "type_info": "Varchar" }, { @@ -137,8 +137,7 @@ "parameters": { "Left": [ "TextArray", - "TextArray", - "Text" + "TextArray" ] }, "nullable": [ @@ -170,5 +169,5 @@ null ] }, - "hash": "c81df8078d6cba7f43a3d47a0b9cd365c7321c9ed304d6dd137cb2198780ff3a" + "hash": "94de8109ff9f95be5e9f70c629fa9b1cfb2e9a1c094bc5e0d529a314a77fb4d7" } diff --git a/.sqlx/query-ce20a9c53249e255be7312819f505d935d3ab2ee3c21a6422e5b12155c159bd7.json b/.sqlx/query-9abdd9a2018e7bfe26836dd5463ba0923ef0a76c32ca258faf55fc3301c567bf.json similarity index 74% rename from .sqlx/query-ce20a9c53249e255be7312819f505d935d3ab2ee3c21a6422e5b12155c159bd7.json rename to .sqlx/query-9abdd9a2018e7bfe26836dd5463ba0923ef0a76c32ca258faf55fc3301c567bf.json index e2531195..92195b68 100644 --- a/.sqlx/query-ce20a9c53249e255be7312819f505d935d3ab2ee3c21a6422e5b12155c159bd7.json +++ b/.sqlx/query-9abdd9a2018e7bfe26836dd5463ba0923ef0a76c32ca258faf55fc3301c567bf.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, team_id, role AS member_role, permissions, organization_permissions,\n accepted, payouts_split, role,\n ordering, user_id\n \n FROM team_members\n WHERE (team_id = $1 AND user_id = $2)\n ORDER BY ordering\n ", + "query": "\n SELECT id, team_id, role AS member_role, is_owner, permissions, 
organization_permissions,\n accepted, payouts_split, role,\n ordering, user_id\n \n FROM team_members\n WHERE (team_id = $1 AND user_id = $2)\n ORDER BY ordering\n ", "describe": { "columns": [ { @@ -20,36 +20,41 @@ }, { "ordinal": 3, + "name": "is_owner", + "type_info": "Bool" + }, + { + "ordinal": 4, "name": "permissions", "type_info": "Int8" }, { - "ordinal": 4, + "ordinal": 5, "name": "organization_permissions", "type_info": "Int8" }, { - "ordinal": 5, + "ordinal": 6, "name": "accepted", "type_info": "Bool" }, { - "ordinal": 6, + "ordinal": 7, "name": "payouts_split", "type_info": "Numeric" }, { - "ordinal": 7, + "ordinal": 8, "name": "role", "type_info": "Varchar" }, { - "ordinal": 8, + "ordinal": 9, "name": "ordering", "type_info": "Int8" }, { - "ordinal": 9, + "ordinal": 10, "name": "user_id", "type_info": "Int8" } @@ -65,6 +70,7 @@ false, false, false, + false, true, false, false, @@ -73,5 +79,5 @@ false ] }, - "hash": "ce20a9c53249e255be7312819f505d935d3ab2ee3c21a6422e5b12155c159bd7" + "hash": "9abdd9a2018e7bfe26836dd5463ba0923ef0a76c32ca258faf55fc3301c567bf" } diff --git a/.sqlx/query-9b0c04bc7d44a60c175259cfe86b8e7ba0340ea9c89be30a89cc224d0f7e9727.json b/.sqlx/query-9b0c04bc7d44a60c175259cfe86b8e7ba0340ea9c89be30a89cc224d0f7e9727.json new file mode 100644 index 00000000..801e637e --- /dev/null +++ b/.sqlx/query-9b0c04bc7d44a60c175259cfe86b8e7ba0340ea9c89be30a89cc224d0f7e9727.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO mods (\n id, team_id, name, summary, description,\n published, downloads, icon_url, status, requested_status,\n license_url, license,\n slug, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5, $6, \n $7, $8, $9, $10, \n $11, $12, \n LOWER($13), $14, $15\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Varchar", + "Varchar", + "Varchar", + "Timestamptz", + "Int4", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Text", + "Int4", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "9b0c04bc7d44a60c175259cfe86b8e7ba0340ea9c89be30a89cc224d0f7e9727" +} diff --git a/.sqlx/query-dc6aa2e7bfd5d5004620ddd4cd6a47ecc56159e1489054e0652d56df802fb5e5.json b/.sqlx/query-9c1b6ba7cbe2619ff767ee7bbfb01725dc3324d284b2f20cf393574ab3bc655f.json similarity index 64% rename from .sqlx/query-dc6aa2e7bfd5d5004620ddd4cd6a47ecc56159e1489054e0652d56df802fb5e5.json rename to .sqlx/query-9c1b6ba7cbe2619ff767ee7bbfb01725dc3324d284b2f20cf393574ab3bc655f.json index f4949452..5152ba22 100644 --- a/.sqlx/query-dc6aa2e7bfd5d5004620ddd4cd6a47ecc56159e1489054e0652d56df802fb5e5.json +++ b/.sqlx/query-9c1b6ba7cbe2619ff767ee7bbfb01725dc3324d284b2f20cf393574ab3bc655f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE mods\n SET body = $1\n WHERE (id = $2)\n ", + "query": "\n UPDATE mods\n SET name = $1\n WHERE (id = $2)\n ", "describe": { "columns": [], "parameters": { @@ -11,5 +11,5 @@ }, "nullable": [] }, - "hash": "dc6aa2e7bfd5d5004620ddd4cd6a47ecc56159e1489054e0652d56df802fb5e5" + "hash": "9c1b6ba7cbe2619ff767ee7bbfb01725dc3324d284b2f20cf393574ab3bc655f" } diff --git a/.sqlx/query-a25b09712476fa4b12d98e08a4d260260e250e46fc68d806bf6372130cc65e1b.json b/.sqlx/query-a25b09712476fa4b12d98e08a4d260260e250e46fc68d806bf6372130cc65e1b.json new file mode 100644 index 00000000..d491a0ab --- /dev/null +++ b/.sqlx/query-a25b09712476fa4b12d98e08a4d260260e250e46fc68d806bf6372130cc65e1b.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE team_members\n SET is_owner 
= $1\n WHERE (team_id = $2 AND user_id = $3)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bool", + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "a25b09712476fa4b12d98e08a4d260260e250e46fc68d806bf6372130cc65e1b" +} diff --git a/.sqlx/query-3d700aaeb0d5129ac8c297ee0542757435a50a35ec94582d9d6ce67aa5302291.json b/.sqlx/query-b677e66031752e66d2219079a559e368c6cea1800da8a5f9d50ba5b1ac3a15fc.json similarity index 60% rename from .sqlx/query-3d700aaeb0d5129ac8c297ee0542757435a50a35ec94582d9d6ce67aa5302291.json rename to .sqlx/query-b677e66031752e66d2219079a559e368c6cea1800da8a5f9d50ba5b1ac3a15fc.json index 1ff0ea78..a6654700 100644 --- a/.sqlx/query-3d700aaeb0d5129ac8c297ee0542757435a50a35ec94582d9d6ce67aa5302291.json +++ b/.sqlx/query-b677e66031752e66d2219079a559e368c6cea1800da8a5f9d50ba5b1ac3a15fc.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE mods\n SET title = $1\n WHERE (id = $2)\n ", + "query": "\n UPDATE mods\n SET summary = $1\n WHERE (id = $2)\n ", "describe": { "columns": [], "parameters": { @@ -11,5 +11,5 @@ }, "nullable": [] }, - "hash": "3d700aaeb0d5129ac8c297ee0542757435a50a35ec94582d9d6ce67aa5302291" + "hash": "b677e66031752e66d2219079a559e368c6cea1800da8a5f9d50ba5b1ac3a15fc" } diff --git a/.sqlx/query-77be410d0687b65b3554a35740fcf3c02418c5897856000716a35c02eed43d5a.json b/.sqlx/query-bd48b18b9bef07185d2d050c7c978904cfbdf4ec765b7d3568f930939e236cbe.json similarity index 50% rename from .sqlx/query-77be410d0687b65b3554a35740fcf3c02418c5897856000716a35c02eed43d5a.json rename to .sqlx/query-bd48b18b9bef07185d2d050c7c978904cfbdf4ec765b7d3568f930939e236cbe.json index 7499e58d..7cf68d08 100644 --- a/.sqlx/query-77be410d0687b65b3554a35740fcf3c02418c5897856000716a35c02eed43d5a.json +++ b/.sqlx/query-bd48b18b9bef07185d2d050c7c978904cfbdf4ec765b7d3568f930939e236cbe.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO organizations (id, title, team_id, description, icon_url, color)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", + "query": "\n INSERT INTO organizations (id, name, team_id, description, icon_url, color)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", "describe": { "columns": [], "parameters": { @@ -15,5 +15,5 @@ }, "nullable": [] }, - "hash": "77be410d0687b65b3554a35740fcf3c02418c5897856000716a35c02eed43d5a" + "hash": "bd48b18b9bef07185d2d050c7c978904cfbdf4ec765b7d3568f930939e236cbe" } diff --git a/.sqlx/query-8ba2b2c38958f1c542e514fc62ab4682f58b0b442ac1842d20625420698e34ec.json b/.sqlx/query-c0bd8a50915398377b6e8a6c046a2d406c3d9e7721647c8a6f4fcf9e7c72bc25.json similarity index 55% rename from .sqlx/query-8ba2b2c38958f1c542e514fc62ab4682f58b0b442ac1842d20625420698e34ec.json rename to .sqlx/query-c0bd8a50915398377b6e8a6c046a2d406c3d9e7721647c8a6f4fcf9e7c72bc25.json index b8824c58..4a7f901e 100644 --- a/.sqlx/query-8ba2b2c38958f1c542e514fc62ab4682f58b0b442ac1842d20625420698e34ec.json +++ b/.sqlx/query-c0bd8a50915398377b6e8a6c046a2d406c3d9e7721647c8a6f4fcf9e7c72bc25.json @@ -1,16 +1,15 @@ { "db_name": "PostgreSQL", - "query": "\n DELETE FROM team_members\n WHERE (team_id = $1 AND user_id = $2 AND NOT role = $3)\n ", + "query": "\n DELETE FROM team_members\n WHERE (team_id = $1 AND user_id = $2 AND NOT is_owner = TRUE)\n ", "describe": { "columns": [], "parameters": { "Left": [ "Int8", - "Int8", - "Text" + "Int8" ] }, "nullable": [] }, - "hash": "8ba2b2c38958f1c542e514fc62ab4682f58b0b442ac1842d20625420698e34ec" + "hash": "c0bd8a50915398377b6e8a6c046a2d406c3d9e7721647c8a6f4fcf9e7c72bc25" } diff --git 
a/.sqlx/query-e6db02891be261e61a25716b83c1298482eb9a04f0c026532030aeb374405f13.json b/.sqlx/query-c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345.json similarity index 72% rename from .sqlx/query-e6db02891be261e61a25716b83c1298482eb9a04f0c026532030aeb374405f13.json rename to .sqlx/query-c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345.json index bce95c7c..dd76374d 100644 --- a/.sqlx/query-e6db02891be261e61a25716b83c1298482eb9a04f0c026532030aeb374405f13.json +++ b/.sqlx/query-c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, team_id, role AS member_role, permissions, organization_permissions,\n accepted, payouts_split, \n ordering, user_id\n FROM team_members\n WHERE team_id = ANY($1)\n ORDER BY team_id, ordering;\n ", + "query": "\n SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,\n accepted, payouts_split, \n ordering, user_id\n FROM team_members\n WHERE team_id = ANY($1)\n ORDER BY team_id, ordering;\n ", "describe": { "columns": [ { @@ -20,31 +20,36 @@ }, { "ordinal": 3, + "name": "is_owner", + "type_info": "Bool" + }, + { + "ordinal": 4, "name": "permissions", "type_info": "Int8" }, { - "ordinal": 4, + "ordinal": 5, "name": "organization_permissions", "type_info": "Int8" }, { - "ordinal": 5, + "ordinal": 6, "name": "accepted", "type_info": "Bool" }, { - "ordinal": 6, + "ordinal": 7, "name": "payouts_split", "type_info": "Numeric" }, { - "ordinal": 7, + "ordinal": 8, "name": "ordering", "type_info": "Int8" }, { - "ordinal": 8, + "ordinal": 9, "name": "user_id", "type_info": "Int8" } @@ -59,6 +64,7 @@ false, false, false, + false, true, false, false, @@ -66,5 +72,5 @@ false ] }, - "hash": "e6db02891be261e61a25716b83c1298482eb9a04f0c026532030aeb374405f13" + "hash": "c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345" } diff --git a/.sqlx/query-30307fb92fd2d8e1f03f21f8ad76f285ef8cb2bf8f40f9facafaae3f8c75d587.json b/.sqlx/query-ca9f3298ff92051412f5096690b3314fe91fe0b7c79ab2f7d09396af47b85ee6.json similarity index 69% rename from .sqlx/query-30307fb92fd2d8e1f03f21f8ad76f285ef8cb2bf8f40f9facafaae3f8c75d587.json rename to .sqlx/query-ca9f3298ff92051412f5096690b3314fe91fe0b7c79ab2f7d09396af47b85ee6.json index 0532f326..885bcd56 100644 --- a/.sqlx/query-30307fb92fd2d8e1f03f21f8ad76f285ef8cb2bf8f40f9facafaae3f8c75d587.json +++ b/.sqlx/query-ca9f3298ff92051412f5096690b3314fe91fe0b7c79ab2f7d09396af47b85ee6.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT o.id, o.title, o.team_id, o.description, o.icon_url, o.color\n FROM organizations o\n WHERE o.id = ANY($1) OR LOWER(o.title) = ANY($2)\n GROUP BY o.id;\n ", + "query": "\n SELECT o.id, o.name, o.team_id, o.description, o.icon_url, o.color\n FROM organizations o\n WHERE o.id = ANY($1) OR LOWER(o.name) = ANY($2)\n GROUP BY o.id;\n ", "describe": { "columns": [ { @@ -10,7 +10,7 @@ }, { "ordinal": 1, - "name": "title", + "name": "name", "type_info": "Varchar" }, { @@ -49,5 +49,5 @@ true ] }, - "hash": "30307fb92fd2d8e1f03f21f8ad76f285ef8cb2bf8f40f9facafaae3f8c75d587" + "hash": "ca9f3298ff92051412f5096690b3314fe91fe0b7c79ab2f7d09396af47b85ee6" } diff --git a/.sqlx/query-7075dc0343dab7c4dd4469b4af095232dcdd056a15d928a6d93556daf6fd327c.json b/.sqlx/query-d137055262526c5e9295a712430c528b9d0f37aacbb53aeb530d3a64fc49365e.json similarity index 51% rename from .sqlx/query-7075dc0343dab7c4dd4469b4af095232dcdd056a15d928a6d93556daf6fd327c.json rename to 
.sqlx/query-d137055262526c5e9295a712430c528b9d0f37aacbb53aeb530d3a64fc49365e.json index 15b75ba3..13088202 100644 --- a/.sqlx/query-7075dc0343dab7c4dd4469b4af095232dcdd056a15d928a6d93556daf6fd327c.json +++ b/.sqlx/query-d137055262526c5e9295a712430c528b9d0f37aacbb53aeb530d3a64fc49365e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO team_members (id, team_id, user_id, role, permissions, organization_permissions, accepted, payouts_split, ordering)\n SELECT * FROM UNNEST ($1::int8[], $2::int8[], $3::int8[], $4::varchar[], $5::int8[], $6::int8[], $7::bool[], $8::numeric[], $9::int8[])\n ", + "query": "\n INSERT INTO team_members (id, team_id, user_id, role, is_owner, permissions, organization_permissions, accepted, payouts_split, ordering)\n SELECT * FROM UNNEST ($1::int8[], $2::int8[], $3::int8[], $4::varchar[], $5::bool[], $6::int8[], $7::int8[], $8::bool[], $9::numeric[], $10::int8[])\n ", "describe": { "columns": [], "parameters": { @@ -9,6 +9,7 @@ "Int8Array", "Int8Array", "VarcharArray", + "BoolArray", "Int8Array", "Int8Array", "BoolArray", @@ -18,5 +19,5 @@ }, "nullable": [] }, - "hash": "7075dc0343dab7c4dd4469b4af095232dcdd056a15d928a6d93556daf6fd327c" + "hash": "d137055262526c5e9295a712430c528b9d0f37aacbb53aeb530d3a64fc49365e" } diff --git a/.sqlx/query-5d65f89c020ae032f26d742c37afe47876911eb3a16a6852299b98f2a8251fb4.json b/.sqlx/query-d5b2cfec04f4a74b5a3767047732e67c3107dcf4a386a4af552191460216f45d.json similarity index 53% rename from .sqlx/query-5d65f89c020ae032f26d742c37afe47876911eb3a16a6852299b98f2a8251fb4.json rename to .sqlx/query-d5b2cfec04f4a74b5a3767047732e67c3107dcf4a386a4af552191460216f45d.json index fdfe0439..e0f596ac 100644 --- a/.sqlx/query-5d65f89c020ae032f26d742c37afe47876911eb3a16a6852299b98f2a8251fb4.json +++ b/.sqlx/query-d5b2cfec04f4a74b5a3767047732e67c3107dcf4a386a4af552191460216f45d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO mods_gallery (\n mod_id, image_url, featured, title, description, ordering\n )\n SELECT * FROM UNNEST ($1::bigint[], $2::varchar[], $3::bool[], $4::varchar[], $5::varchar[], $6::bigint[])\n ", + "query": "\n INSERT INTO mods_gallery (\n mod_id, image_url, featured, name, description, ordering\n )\n SELECT * FROM UNNEST ($1::bigint[], $2::varchar[], $3::bool[], $4::varchar[], $5::varchar[], $6::bigint[])\n ", "describe": { "columns": [], "parameters": { @@ -15,5 +15,5 @@ }, "nullable": [] }, - "hash": "5d65f89c020ae032f26d742c37afe47876911eb3a16a6852299b98f2a8251fb4" + "hash": "d5b2cfec04f4a74b5a3767047732e67c3107dcf4a386a4af552191460216f45d" } diff --git a/.sqlx/query-e3389d233c75649e95456d504d1b716d520a03a8a3e0cc5311a4a753f1f04614.json b/.sqlx/query-da30019590b9d0f7e21668997e780044c67a7c5d225e556c7ec2a4d7709db5ea.json similarity index 70% rename from .sqlx/query-e3389d233c75649e95456d504d1b716d520a03a8a3e0cc5311a4a753f1f04614.json rename to .sqlx/query-da30019590b9d0f7e21668997e780044c67a7c5d225e556c7ec2a4d7709db5ea.json index c02cfc21..0b572b2d 100644 --- a/.sqlx/query-e3389d233c75649e95456d504d1b716d520a03a8a3e0cc5311a4a753f1f04614.json +++ b/.sqlx/query-da30019590b9d0f7e21668997e780044c67a7c5d225e556c7ec2a4d7709db5ea.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT EXISTS(SELECT 1 FROM organizations WHERE title = LOWER($1))\n ", + "query": "\n SELECT EXISTS(SELECT 1 FROM organizations WHERE name = LOWER($1))\n ", "describe": { "columns": [ { @@ -18,5 +18,5 @@ null ] }, - "hash": "e3389d233c75649e95456d504d1b716d520a03a8a3e0cc5311a4a753f1f04614" + "hash": 
"da30019590b9d0f7e21668997e780044c67a7c5d225e556c7ec2a4d7709db5ea" } diff --git a/.sqlx/query-009bce5eee6ed65d9dc0899a4e24da528507a3f00b7ec997fa9ccdd7599655b1.json b/.sqlx/query-da962cbb02919ea79e1106e6e5de39224d240d9b8afb5cead28578ca65e281ae.json similarity index 72% rename from .sqlx/query-009bce5eee6ed65d9dc0899a4e24da528507a3f00b7ec997fa9ccdd7599655b1.json rename to .sqlx/query-da962cbb02919ea79e1106e6e5de39224d240d9b8afb5cead28578ca65e281ae.json index a27a9196..4a529a0b 100644 --- a/.sqlx/query-009bce5eee6ed65d9dc0899a4e24da528507a3f00b7ec997fa9ccdd7599655b1.json +++ b/.sqlx/query-da962cbb02919ea79e1106e6e5de39224d240d9b8afb5cead28578ca65e281ae.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT m.id FROM organizations o\n INNER JOIN mods m ON m.organization_id = o.id\n WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL)\n ", + "query": "\n SELECT m.id FROM organizations o\n INNER JOIN mods m ON m.organization_id = o.id\n WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.name = $2 AND $2 IS NOT NULL)\n ", "describe": { "columns": [ { @@ -19,5 +19,5 @@ false ] }, - "hash": "009bce5eee6ed65d9dc0899a4e24da528507a3f00b7ec997fa9ccdd7599655b1" + "hash": "da962cbb02919ea79e1106e6e5de39224d240d9b8afb5cead28578ca65e281ae" } diff --git a/.sqlx/query-c49cda8215982b699d7aee14614763c9b5b997489581293fc2ae3604697867fe.json b/.sqlx/query-dc05295852b5a1d49be7906cd248566ffdfe790d7b61bd69969b00d558b41804.json similarity index 64% rename from .sqlx/query-c49cda8215982b699d7aee14614763c9b5b997489581293fc2ae3604697867fe.json rename to .sqlx/query-dc05295852b5a1d49be7906cd248566ffdfe790d7b61bd69969b00d558b41804.json index b159b9d0..a6e27474 100644 --- a/.sqlx/query-c49cda8215982b699d7aee14614763c9b5b997489581293fc2ae3604697867fe.json +++ b/.sqlx/query-dc05295852b5a1d49be7906cd248566ffdfe790d7b61bd69969b00d558b41804.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type, n.body,\n JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'title', na.title, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions\n FROM notifications n\n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id\n WHERE n.user_id = $1\n GROUP BY n.id, n.user_id;\n ", + "query": "\n SELECT n.id, n.user_id, n.name, n.text, n.link, n.created, n.read, n.type notification_type, n.body,\n JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'name', na.name, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions\n FROM notifications n\n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id\n WHERE n.user_id = $1\n GROUP BY n.id, n.user_id;\n ", "describe": { "columns": [ { @@ -15,7 +15,7 @@ }, { "ordinal": 2, - "name": "title", + "name": "name", "type_info": "Varchar" }, { @@ -72,5 +72,5 @@ null ] }, - "hash": "c49cda8215982b699d7aee14614763c9b5b997489581293fc2ae3604697867fe" + "hash": "dc05295852b5a1d49be7906cd248566ffdfe790d7b61bd69969b00d558b41804" } diff --git a/.sqlx/query-d55bdef50adf0b8a547022d0a041bec8618da02d82a1138da77d8885c0d9cfb9.json b/.sqlx/query-ec8f310133cef187e8a6d101105210d6fcc194f67f671a8c4021ac23e0fb5dfc.json similarity index 74% rename from .sqlx/query-d55bdef50adf0b8a547022d0a041bec8618da02d82a1138da77d8885c0d9cfb9.json rename to 
.sqlx/query-ec8f310133cef187e8a6d101105210d6fcc194f67f671a8c4021ac23e0fb5dfc.json index 4277a343..70169c5f 100644 --- a/.sqlx/query-d55bdef50adf0b8a547022d0a041bec8618da02d82a1138da77d8885c0d9cfb9.json +++ b/.sqlx/query-ec8f310133cef187e8a6d101105210d6fcc194f67f671a8c4021ac23e0fb5dfc.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, team_id, role AS member_role, permissions, organization_permissions,\n accepted, payouts_split, role,\n ordering, user_id\n FROM team_members\n WHERE (team_id = ANY($1) AND user_id = $2 AND accepted = TRUE)\n ORDER BY ordering\n ", + "query": "\n SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,\n accepted, payouts_split, role,\n ordering, user_id\n FROM team_members\n WHERE (team_id = ANY($1) AND user_id = $2 AND accepted = TRUE)\n ORDER BY ordering\n ", "describe": { "columns": [ { @@ -20,36 +20,41 @@ }, { "ordinal": 3, + "name": "is_owner", + "type_info": "Bool" + }, + { + "ordinal": 4, "name": "permissions", "type_info": "Int8" }, { - "ordinal": 4, + "ordinal": 5, "name": "organization_permissions", "type_info": "Int8" }, { - "ordinal": 5, + "ordinal": 6, "name": "accepted", "type_info": "Bool" }, { - "ordinal": 6, + "ordinal": 7, "name": "payouts_split", "type_info": "Numeric" }, { - "ordinal": 7, + "ordinal": 8, "name": "role", "type_info": "Varchar" }, { - "ordinal": 8, + "ordinal": 9, "name": "ordering", "type_info": "Int8" }, { - "ordinal": 9, + "ordinal": 10, "name": "user_id", "type_info": "Int8" } @@ -65,6 +70,7 @@ false, false, false, + false, true, false, false, @@ -73,5 +79,5 @@ false ] }, - "hash": "d55bdef50adf0b8a547022d0a041bec8618da02d82a1138da77d8885c0d9cfb9" + "hash": "ec8f310133cef187e8a6d101105210d6fcc194f67f671a8c4021ac23e0fb5dfc" } diff --git a/.sqlx/query-07b692d2f89cdcc66da4e1a834f6fefe6a24c13c287490662585749b2b8baae3.json b/.sqlx/query-eefe0f3e40273da9adea96cdef5fd5cff917a864a701408455cc6b02cd005cf7.json similarity index 57% rename from .sqlx/query-07b692d2f89cdcc66da4e1a834f6fefe6a24c13c287490662585749b2b8baae3.json rename to .sqlx/query-eefe0f3e40273da9adea96cdef5fd5cff917a864a701408455cc6b02cd005cf7.json index 7a8c0388..6cbfaf44 100644 --- a/.sqlx/query-07b692d2f89cdcc66da4e1a834f6fefe6a24c13c287490662585749b2b8baae3.json +++ b/.sqlx/query-eefe0f3e40273da9adea96cdef5fd5cff917a864a701408455cc6b02cd005cf7.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE organizations\n SET title = LOWER($1)\n WHERE (id = $2)\n ", + "query": "\n UPDATE organizations\n SET name = LOWER($1)\n WHERE (id = $2)\n ", "describe": { "columns": [], "parameters": { @@ -11,5 +11,5 @@ }, "nullable": [] }, - "hash": "07b692d2f89cdcc66da4e1a834f6fefe6a24c13c287490662585749b2b8baae3" + "hash": "eefe0f3e40273da9adea96cdef5fd5cff917a864a701408455cc6b02cd005cf7" } diff --git a/.sqlx/query-fce67ce3d0c27c64af85fb7d36661513bc5ea2e96fcf12f3a51c97999b01b83c.json b/.sqlx/query-f0068d4e1303bfa69bf1c8d536e74395de5d6b6f7ba7389e8c934eeb8c10286f.json similarity index 62% rename from .sqlx/query-fce67ce3d0c27c64af85fb7d36661513bc5ea2e96fcf12f3a51c97999b01b83c.json rename to .sqlx/query-f0068d4e1303bfa69bf1c8d536e74395de5d6b6f7ba7389e8c934eeb8c10286f.json index b2477254..b50eebe2 100644 --- a/.sqlx/query-fce67ce3d0c27c64af85fb7d36661513bc5ea2e96fcf12f3a51c97999b01b83c.json +++ b/.sqlx/query-f0068d4e1303bfa69bf1c8d536e74395de5d6b6f7ba7389e8c934eeb8c10286f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type 
notification_type, n.body,\n JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'title', na.title, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions\n FROM notifications n\n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id\n WHERE n.id = ANY($1)\n GROUP BY n.id, n.user_id\n ORDER BY n.created DESC;\n ", + "query": "\n SELECT n.id, n.user_id, n.name, n.text, n.link, n.created, n.read, n.type notification_type, n.body,\n JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'name', na.name, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions\n FROM notifications n\n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id\n WHERE n.id = ANY($1)\n GROUP BY n.id, n.user_id\n ORDER BY n.created DESC;\n ", "describe": { "columns": [ { @@ -15,7 +15,7 @@ }, { "ordinal": 2, - "name": "title", + "name": "name", "type_info": "Varchar" }, { @@ -72,5 +72,5 @@ null ] }, - "hash": "fce67ce3d0c27c64af85fb7d36661513bc5ea2e96fcf12f3a51c97999b01b83c" + "hash": "f0068d4e1303bfa69bf1c8d536e74395de5d6b6f7ba7389e8c934eeb8c10286f" } diff --git a/.sqlx/query-0b9f174d86badae0d30e34b32130c7cee69926e37db95494ab08f025d19cdb7c.json b/.sqlx/query-f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0.json similarity index 66% rename from .sqlx/query-0b9f174d86badae0d30e34b32130c7cee69926e37db95494ab08f025d19cdb7c.json rename to .sqlx/query-f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0.json index 1285356a..f117b90b 100644 --- a/.sqlx/query-0b9f174d86badae0d30e34b32130c7cee69926e37db95494ab08f025d19cdb7c.json +++ b/.sqlx/query-f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT c.id id, c.title title, c.description description,\n c.icon_url icon_url, c.color color, c.created created, c.user_id user_id,\n c.updated updated, c.status status,\n ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods\n FROM collections c\n LEFT JOIN collections_mods cm ON cm.collection_id = c.id\n WHERE c.id = ANY($1)\n GROUP BY c.id;\n ", + "query": "\n SELECT c.id id, c.name name, c.description description,\n c.icon_url icon_url, c.color color, c.created created, c.user_id user_id,\n c.updated updated, c.status status,\n ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods\n FROM collections c\n LEFT JOIN collections_mods cm ON cm.collection_id = c.id\n WHERE c.id = ANY($1)\n GROUP BY c.id;\n ", "describe": { "columns": [ { @@ -10,7 +10,7 @@ }, { "ordinal": 1, - "name": "title", + "name": "name", "type_info": "Varchar" }, { @@ -72,5 +72,5 @@ null ] }, - "hash": "0b9f174d86badae0d30e34b32130c7cee69926e37db95494ab08f025d19cdb7c" + "hash": "f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0" } diff --git a/.sqlx/query-877927a1310a1c835e1ca24b6995b7d95c62664629d7a59fbfe1404b4aff14d6.json b/.sqlx/query-f46c0ba514d4fa192b5d740b0ba6111ecaec51a0a23ac390d25214e2f3fb5cca.json similarity index 65% rename from .sqlx/query-877927a1310a1c835e1ca24b6995b7d95c62664629d7a59fbfe1404b4aff14d6.json rename to .sqlx/query-f46c0ba514d4fa192b5d740b0ba6111ecaec51a0a23ac390d25214e2f3fb5cca.json index af82884f..091fa160 100644 --- a/.sqlx/query-877927a1310a1c835e1ca24b6995b7d95c62664629d7a59fbfe1404b4aff14d6.json +++ 
b/.sqlx/query-f46c0ba514d4fa192b5d740b0ba6111ecaec51a0a23ac390d25214e2f3fb5cca.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n WITH version_fields_cte AS (\n SELECT mod_id, version_id, field_id, int_value, enum_value, string_value\n FROM mods m\n INNER JOIN versions v ON m.id = v.mod_id\n INNER JOIN version_fields vf ON v.id = vf.version_id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n ),\n\t\t\t\tversion_fields_json AS (\n\t\t\t\t\tSELECT DISTINCT mod_id,\n JSONB_AGG( \n DISTINCT jsonb_build_object('version_id', version_id, 'field_id', field_id, 'int_value', int_value, 'enum_value', enum_value, 'string_value', string_value)\n ) version_fields_json\n FROM version_fields_cte\n GROUP BY mod_id\n\t\t\t\t),\n\t\t\t\tloader_fields_cte AS (\n\t\t\t\t\tSELECT DISTINCT vf.mod_id, vf.version_id, lf.*, l.loader\n\t\t\t\t\tFROM loader_fields lf\n INNER JOIN version_fields_cte vf ON lf.id = vf.field_id\n\t\t\t\t\tLEFT JOIN loaders_versions lv ON vf.version_id = lv.version_id\n\t\t\t\t\tLEFT JOIN loaders l ON lv.loader_id = l.id\n GROUP BY vf.mod_id, vf.version_id, lf.enum_type, lf.id, l.loader\n\t\t\t\t),\n loader_fields_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'version_id', lf.version_id,\n 'lf_id', id, 'loader_name', loader, 'field', field, 'field_type', field_type, 'enum_type', enum_type, 'min_val', min_val, 'max_val', max_val, 'optional', optional\n )\n ) filter (where lf.id is not null) loader_fields_json\n FROM loader_fields_cte lf\n GROUP BY mod_id\n ),\n loader_field_enum_values_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id, 'enum_id', lfev.enum_id, 'value', lfev.value, 'ordering', lfev.ordering, 'created', lfev.created, 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values_json\n FROM loader_field_enum_values lfev\n INNER JOIN loader_fields_cte lf on lf.enum_type = lfev.enum_id\n GROUP BY mod_id\n ),\n versions_cte AS (\n SELECT DISTINCT mod_id, v.id as id, date_published\n FROM mods m\n INNER JOIN versions v ON m.id = v.mod_id AND v.status = ANY($3)\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n ),\n versions_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', id, 'date_published', date_published\n )\n ) filter (where id is not null) versions_json\n FROM versions_cte\n GROUP BY mod_id\n ),\n loaders_cte AS (\n SELECT DISTINCT mod_id, l.id as id, l.loader\n FROM versions_cte\n INNER JOIN loaders_versions lv ON versions_cte.id = lv.version_id\n INNER JOIN loaders l ON lv.loader_id = l.id \n ),\n mods_gallery_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering\n )\n ) filter (where image_url is not null) mods_gallery_json\n FROM mods_gallery mg\n INNER JOIN mods m ON mg.mod_id = m.id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY mod_id\n ),\n links_json AS (\n SELECT DISTINCT joining_mod_id as mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'platform_id', ml.joining_platform_id, 'platform_name', lp.name,'url', ml.url, 'donation', lp.donation\n )\n ) filter (where ml.joining_platform_id is not null) links_json\n FROM mods_links ml\n INNER JOIN mods m ON ml.joining_mod_id = m.id AND m.id = ANY($1) OR m.slug = ANY($2)\n INNER JOIN link_platforms lp ON ml.joining_platform_id = lp.id\n GROUP BY mod_id\n )\n \n SELECT m.id id, m.title 
title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.body body, m.published published,\n m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,\n m.license_url license_url,\n m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,\n m.webhook_sent, m.color,\n t.id thread_id, m.monetization_status monetization_status,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n v.versions_json versions,\n mg.mods_gallery_json gallery,\n ml.links_json links,\n vf.version_fields_json version_fields,\n lf.loader_fields_json loader_fields,\n lfev.loader_field_enum_values_json loader_field_enum_values\n FROM mods m \n INNER JOIN threads t ON t.mod_id = m.id\n LEFT JOIN mods_gallery_json mg ON mg.mod_id = m.id\n LEFT JOIN links_json ml ON ml.mod_id = m.id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON mc.joining_category_id = c.id\n LEFT JOIN versions_json v ON v.mod_id = m.id\n LEFT JOIN loaders_cte l on l.mod_id = m.id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN version_fields_json vf ON m.id = vf.mod_id\n LEFT OUTER JOIN loader_fields_json lf ON m.id = lf.mod_id\n LEFT OUTER JOIN loader_field_enum_values_json lfev ON m.id = lfev.mod_id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY t.id, m.id, version_fields_json, loader_fields_json, loader_field_enum_values_json, versions_json, mods_gallery_json, links_json;\n ", + "query": "\n WITH version_fields_cte AS (\n SELECT mod_id, version_id, field_id, int_value, enum_value, string_value\n FROM mods m\n INNER JOIN versions v ON m.id = v.mod_id\n INNER JOIN version_fields vf ON v.id = vf.version_id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n ),\n\t\t\t\tversion_fields_json AS (\n\t\t\t\t\tSELECT DISTINCT mod_id,\n JSONB_AGG( \n DISTINCT jsonb_build_object('version_id', version_id, 'field_id', field_id, 'int_value', int_value, 'enum_value', enum_value, 'string_value', string_value)\n ) version_fields_json\n FROM version_fields_cte\n GROUP BY mod_id\n\t\t\t\t),\n\t\t\t\tloader_fields_cte AS (\n\t\t\t\t\tSELECT DISTINCT vf.mod_id, vf.version_id, lf.*, l.loader\n\t\t\t\t\tFROM loader_fields lf\n INNER JOIN version_fields_cte vf ON lf.id = vf.field_id\n\t\t\t\t\tLEFT JOIN loaders_versions lv ON vf.version_id = lv.version_id\n\t\t\t\t\tLEFT JOIN loaders l ON lv.loader_id = l.id\n GROUP BY vf.mod_id, vf.version_id, lf.enum_type, lf.id, l.loader\n\t\t\t\t),\n loader_fields_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'version_id', lf.version_id,\n 'lf_id', id, 'loader_name', loader, 'field', field, 'field_type', field_type, 'enum_type', enum_type, 'min_val', min_val, 'max_val', max_val, 'optional', optional\n )\n 
) filter (where lf.id is not null) loader_fields_json\n FROM loader_fields_cte lf\n GROUP BY mod_id\n ),\n loader_field_enum_values_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id, 'enum_id', lfev.enum_id, 'value', lfev.value, 'ordering', lfev.ordering, 'created', lfev.created, 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values_json\n FROM loader_field_enum_values lfev\n INNER JOIN loader_fields_cte lf on lf.enum_type = lfev.enum_id\n GROUP BY mod_id\n ),\n versions_cte AS (\n SELECT DISTINCT mod_id, v.id as id, date_published\n FROM mods m\n INNER JOIN versions v ON m.id = v.mod_id AND v.status = ANY($3)\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n ),\n versions_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', id, 'date_published', date_published\n )\n ) filter (where id is not null) versions_json\n FROM versions_cte\n GROUP BY mod_id\n ),\n loaders_cte AS (\n SELECT DISTINCT mod_id, l.id as id, l.loader\n FROM versions_cte\n INNER JOIN loaders_versions lv ON versions_cte.id = lv.version_id\n INNER JOIN loaders l ON lv.loader_id = l.id \n ),\n mods_gallery_json AS (\n SELECT DISTINCT mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'image_url', mg.image_url, 'featured', mg.featured, 'name', mg.name, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering\n )\n ) filter (where image_url is not null) mods_gallery_json\n FROM mods_gallery mg\n INNER JOIN mods m ON mg.mod_id = m.id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY mod_id\n ),\n links_json AS (\n SELECT DISTINCT joining_mod_id as mod_id,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'platform_id', ml.joining_platform_id, 'platform_name', lp.name,'url', ml.url, 'donation', lp.donation\n )\n ) filter (where ml.joining_platform_id is not null) links_json\n FROM mods_links ml\n INNER JOIN mods m ON ml.joining_mod_id = m.id AND m.id = ANY($1) OR m.slug = ANY($2)\n INNER JOIN link_platforms lp ON ml.joining_platform_id = lp.id\n GROUP BY mod_id\n )\n \n SELECT m.id id, m.name name, m.summary summary, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.description description, m.published published,\n m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,\n m.license_url license_url,\n m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,\n m.webhook_sent, m.color,\n t.id thread_id, m.monetization_status monetization_status,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n v.versions_json versions,\n mg.mods_gallery_json gallery,\n ml.links_json links,\n vf.version_fields_json version_fields,\n lf.loader_fields_json loader_fields,\n lfev.loader_field_enum_values_json loader_field_enum_values\n FROM mods m \n INNER JOIN threads t ON t.mod_id = m.id\n LEFT JOIN mods_gallery_json mg ON mg.mod_id = m.id\n LEFT JOIN links_json ml ON ml.mod_id = m.id\n LEFT JOIN mods_categories mc ON 
mc.joining_mod_id = m.id\n LEFT JOIN categories c ON mc.joining_category_id = c.id\n LEFT JOIN versions_json v ON v.mod_id = m.id\n LEFT JOIN loaders_cte l on l.mod_id = m.id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN version_fields_json vf ON m.id = vf.mod_id\n LEFT OUTER JOIN loader_fields_json lf ON m.id = lf.mod_id\n LEFT OUTER JOIN loader_field_enum_values_json lfev ON m.id = lfev.mod_id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY t.id, m.id, version_fields_json, loader_fields_json, loader_field_enum_values_json, versions_json, mods_gallery_json, links_json;\n ", "describe": { "columns": [ { @@ -10,12 +10,12 @@ }, { "ordinal": 1, - "name": "title", + "name": "name", "type_info": "Varchar" }, { "ordinal": 2, - "name": "description", + "name": "summary", "type_info": "Varchar" }, { @@ -35,7 +35,7 @@ }, { "ordinal": 6, - "name": "body", + "name": "description", "type_info": "Varchar" }, { @@ -224,5 +224,5 @@ null ] }, - "hash": "877927a1310a1c835e1ca24b6995b7d95c62664629d7a59fbfe1404b4aff14d6" + "hash": "f46c0ba514d4fa192b5d740b0ba6111ecaec51a0a23ac390d25214e2f3fb5cca" } diff --git a/.sqlx/query-a1a8aa7cc5d7967fbc64b979489222d9f5c154e21227f0edcbce1d96dddad3c6.json b/.sqlx/query-f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62.json similarity index 65% rename from .sqlx/query-a1a8aa7cc5d7967fbc64b979489222d9f5c154e21227f0edcbce1d96dddad3c6.json rename to .sqlx/query-f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62.json index 904bfd19..d24f2e9a 100644 --- a/.sqlx/query-a1a8aa7cc5d7967fbc64b979489222d9f5c154e21227f0edcbce1d96dddad3c6.json +++ b/.sqlx/query-f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62.json @@ -1,16 +1,15 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE team_members\n SET user_id = $1\n WHERE (user_id = $2 AND role = $3)\n ", + "query": "\n UPDATE team_members\n SET user_id = $1\n WHERE (user_id = $2 AND is_owner = TRUE)\n ", "describe": { "columns": [], "parameters": { "Left": [ "Int8", - "Int8", - "Text" + "Int8" ] }, "nullable": [] }, - "hash": "a1a8aa7cc5d7967fbc64b979489222d9f5c154e21227f0edcbce1d96dddad3c6" + "hash": "f73a0a6a79f97213477fc862101d0ced00500ab81336d129b5621581e9cd5e62" } diff --git a/migrations/20231124070100_renaming_consistency.sql b/migrations/20231124070100_renaming_consistency.sql new file mode 100644 index 00000000..272ec809 --- /dev/null +++ b/migrations/20231124070100_renaming_consistency.sql @@ -0,0 +1,16 @@ +-- rename 'title' to 'name' in all tables (collections, organizations, mods, mods_gallery, notifications, notifications_actions) +ALTER TABLE collections RENAME COLUMN title TO name; +ALTER TABLE organizations RENAME COLUMN title TO name; +ALTER TABLE mods RENAME COLUMN title TO name; +ALTER TABLE mods_gallery RENAME COLUMN title TO name; +ALTER TABLE notifications RENAME COLUMN title TO name; +ALTER TABLE notifications_actions RENAME COLUMN title TO name; + +-- rename project 'description' to 'summary' +-- rename project 'body' to 'description' +ALTER TABLE mods RENAME COLUMN description TO summary; +ALTER TABLE mods RENAME COLUMN body TO description; + +-- Adds 'is_owner' boolean to team members table- only one can be true. 
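+-- Only one row per team is expected to have is_owner = true; this migration does not add a constraint for that and relies on application logic to keep it true. +-- A partial unique index along these lines could enforce it at the database level if desired (illustrative only, not applied here): +-- CREATE UNIQUE INDEX team_members_one_owner_per_team ON team_members (team_id) WHERE is_owner = TRUE;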
+ALTER TABLE team_members ADD COLUMN is_owner boolean NOT NULL DEFAULT false; +UPDATE team_members SET is_owner = true WHERE role = 'Owner'; \ No newline at end of file diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index 4a4f7424..035fad6b 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -12,7 +12,7 @@ const COLLECTIONS_NAMESPACE: &str = "collections"; pub struct CollectionBuilder { pub collection_id: CollectionId, pub user_id: UserId, - pub title: String, + pub name: String, pub description: String, pub status: CollectionStatus, pub projects: Vec, @@ -25,7 +25,7 @@ impl CollectionBuilder { ) -> Result { let collection_struct = Collection { id: self.collection_id, - title: self.title, + name: self.name, user_id: self.user_id, description: self.description, created: Utc::now(), @@ -44,7 +44,7 @@ impl CollectionBuilder { pub struct Collection { pub id: CollectionId, pub user_id: UserId, - pub title: String, + pub name: String, pub description: String, pub created: DateTime, pub updated: DateTime, @@ -62,7 +62,7 @@ impl Collection { sqlx::query!( " INSERT INTO collections ( - id, user_id, title, description, + id, user_id, name, description, created, icon_url, status ) VALUES ( @@ -72,7 +72,7 @@ impl Collection { ", self.id as CollectionId, self.user_id as UserId, - &self.title, + &self.name, &self.description, self.created, self.icon_url.as_ref(), @@ -190,7 +190,7 @@ impl Collection { remaining_collections.iter().map(|x| x.0).collect(); let db_collections: Vec = sqlx::query!( " - SELECT c.id id, c.title title, c.description description, + SELECT c.id id, c.name name, c.description description, c.icon_url icon_url, c.color color, c.created created, c.user_id user_id, c.updated updated, c.status status, ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods @@ -209,7 +209,7 @@ impl Collection { Collection { id: CollectionId(id), user_id: UserId(m.user_id), - title: m.title.clone(), + name: m.name.clone(), description: m.description.clone(), icon_url: m.icon_url.clone(), color: m.color.map(|x| x as u32), diff --git a/src/database/models/notification_item.rs b/src/database/models/notification_item.rs index 2bc89fec..206c5373 100644 --- a/src/database/models/notification_item.rs +++ b/src/database/models/notification_item.rs @@ -25,7 +25,7 @@ pub struct Notification { pub struct NotificationAction { pub id: NotificationActionId, pub notification_id: NotificationId, - pub title: String, + pub name: String, pub action_route_method: String, pub action_route: String, } @@ -122,8 +122,8 @@ impl Notification { let notification_ids_parsed: Vec = notification_ids.iter().map(|x| x.0).collect(); sqlx::query!( " - SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type, n.body, - JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'title', na.title, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions + SELECT n.id, n.user_id, n.name, n.text, n.link, n.created, n.read, n.type notification_type, n.body, + JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'name', na.name, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions FROM notifications n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id WHERE n.id = ANY($1) @@ -143,10 
+143,10 @@ impl Notification { read: row.read, created: row.created, body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| { - if let Some(title) = row.title { + if let Some(name) = row.name { NotificationBody::LegacyMarkdown { notification_type: row.notification_type, - title, + name, text: row.text.unwrap_or_default(), link: row.link.unwrap_or_default(), actions: serde_json::from_value( @@ -186,8 +186,8 @@ impl Notification { let db_notifications = sqlx::query!( " - SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type, n.body, - JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'title', na.title, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions + SELECT n.id, n.user_id, n.name, n.text, n.link, n.created, n.read, n.type notification_type, n.body, + JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'name', na.name, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions FROM notifications n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id WHERE n.user_id = $1 @@ -206,10 +206,10 @@ impl Notification { read: row.read, created: row.created, body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| { - if let Some(title) = row.title { + if let Some(name) = row.name { NotificationBody::LegacyMarkdown { notification_type: row.notification_type, - title, + name, text: row.text.unwrap_or_default(), link: row.link.unwrap_or_default(), actions: serde_json::from_value( diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs index 137d7ae0..a2d47eac 100644 --- a/src/database/models/organization_item.rs +++ b/src/database/models/organization_item.rs @@ -16,7 +16,7 @@ pub struct Organization { pub id: OrganizationId, /// The title (and slug) of the organization - pub title: String, + pub name: String, /// The associated team of the organization pub team_id: TeamId, @@ -36,11 +36,11 @@ impl Organization { ) -> Result<(), super::DatabaseError> { sqlx::query!( " - INSERT INTO organizations (id, title, team_id, description, icon_url, color) + INSERT INTO organizations (id, name, team_id, description, icon_url, color) VALUES ($1, $2, $3, $4, $5, $6) ", self.id.0, - self.title, + self.name, self.team_id as TeamId, self.description, self.icon_url, @@ -149,7 +149,7 @@ impl Organization { { remaining_strings.retain(|x| { &to_base62(organization.id.0 as u64) != x - && organization.title.to_lowercase() != x.to_lowercase() + && organization.name.to_lowercase() != x.to_lowercase() }); found_organizations.push(organization); continue; @@ -166,9 +166,9 @@ impl Organization { let organizations: Vec = sqlx::query!( " - SELECT o.id, o.title, o.team_id, o.description, o.icon_url, o.color + SELECT o.id, o.name, o.team_id, o.description, o.icon_url, o.color FROM organizations o - WHERE o.id = ANY($1) OR LOWER(o.title) = ANY($2) + WHERE o.id = ANY($1) OR LOWER(o.name) = ANY($2) GROUP BY o.id; ", &organization_ids_parsed, @@ -181,7 +181,7 @@ impl Organization { .try_filter_map(|e| async { Ok(e.right().map(|m| Organization { id: OrganizationId(m.id), - title: m.title, + name: m.name, team_id: TeamId(m.team_id), description: m.description, icon_url: m.icon_url, @@ -203,7 +203,7 @@ impl Organization { redis .set( ORGANIZATIONS_TITLES_NAMESPACE, - 
&organization.title.to_lowercase(), + &organization.name.to_lowercase(), &organization.id.0.to_string(), None, ) @@ -226,7 +226,7 @@ impl Organization { { let result = sqlx::query!( " - SELECT o.id, o.title, o.team_id, o.description, o.icon_url, o.color + SELECT o.id, o.name, o.team_id, o.description, o.icon_url, o.color FROM organizations o LEFT JOIN mods m ON m.organization_id = o.id WHERE m.id = $1 @@ -240,7 +240,7 @@ impl Organization { if let Some(result) = result { Ok(Some(Organization { id: OrganizationId(result.id), - title: result.title, + name: result.name, team_id: TeamId(result.team_id), description: result.description, icon_url: result.icon_url, @@ -279,7 +279,7 @@ impl Organization { super::project_item::Project::remove(project_id, transaction, redis).await?; } - Organization::clear_cache(id, Some(organization.title), redis).await?; + Organization::clear_cache(id, Some(organization.name), redis).await?; sqlx::query!( " diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 4f8d60d6..b051d765 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -54,7 +54,7 @@ impl LinkUrl { pub struct GalleryItem { pub image_url: String, pub featured: bool, - pub title: Option, + pub name: Option, pub description: Option, pub created: DateTime, pub ordering: i64, @@ -66,7 +66,7 @@ impl GalleryItem { project_id: ProjectId, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, ) -> Result<(), sqlx::error::Error> { - let (project_ids, image_urls, featureds, titles, descriptions, orderings): ( + let (project_ids, image_urls, featureds, names, descriptions, orderings): ( Vec<_>, Vec<_>, Vec<_>, @@ -80,7 +80,7 @@ impl GalleryItem { project_id.0, gi.image_url, gi.featured, - gi.title, + gi.name, gi.description, gi.ordering, ) @@ -89,14 +89,14 @@ impl GalleryItem { sqlx::query!( " INSERT INTO mods_gallery ( - mod_id, image_url, featured, title, description, ordering + mod_id, image_url, featured, name, description, ordering ) SELECT * FROM UNNEST ($1::bigint[], $2::varchar[], $3::bool[], $4::varchar[], $5::varchar[], $6::bigint[]) ", &project_ids[..], &image_urls[..], &featureds[..], - &titles[..] as &[Option], + &names[..] as &[Option], &descriptions[..] as &[Option], &orderings[..] 
) @@ -144,9 +144,9 @@ pub struct ProjectBuilder { pub project_id: ProjectId, pub team_id: TeamId, pub organization_id: Option, - pub title: String, + pub name: String, + pub summary: String, pub description: String, - pub body: String, pub icon_url: Option, pub license_url: Option, pub categories: Vec, @@ -171,10 +171,9 @@ impl ProjectBuilder { id: self.project_id, team_id: self.team_id, organization_id: self.organization_id, - title: self.title, + name: self.name, + summary: self.summary, description: self.description, - body: self.body, - body_url: None, published: Utc::now(), updated: Utc::now(), approved: None, @@ -237,10 +236,9 @@ pub struct Project { pub id: ProjectId, pub team_id: TeamId, pub organization_id: Option, - pub title: String, + pub name: String, + pub summary: String, pub description: String, - pub body: String, - pub body_url: Option, pub published: DateTime, pub updated: DateTime, pub approved: Option>, @@ -269,7 +267,7 @@ impl Project { sqlx::query!( " INSERT INTO mods ( - id, team_id, title, description, body, + id, team_id, name, summary, description, published, downloads, icon_url, status, requested_status, license_url, license, slug, color, monetization_status @@ -283,9 +281,9 @@ impl Project { ", self.id as ProjectId, self.team_id as TeamId, - &self.title, + &self.name, + &self.summary, &self.description, - &self.body, self.published, self.downloads, self.icon_url.as_ref(), @@ -623,7 +621,7 @@ impl Project { SELECT DISTINCT mod_id, JSONB_AGG( DISTINCT jsonb_build_object( - 'image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering + 'image_url', mg.image_url, 'featured', mg.featured, 'name', mg.name, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering ) ) filter (where image_url is not null) mods_gallery_json FROM mods_gallery mg @@ -644,8 +642,8 @@ impl Project { GROUP BY mod_id ) - SELECT m.id id, m.title title, m.description description, m.downloads downloads, m.follows follows, - m.icon_url icon_url, m.body body, m.published published, + SELECT m.id id, m.name name, m.summary summary, m.downloads downloads, m.follows follows, + m.icon_url icon_url, m.description description, m.published published, m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status, m.license_url license_url, m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body, @@ -693,10 +691,9 @@ impl Project { id: ProjectId(id), team_id: TeamId(m.team_id), organization_id: m.organization_id.map(OrganizationId), - title: m.title.clone(), - description: m.description.clone(), + name: m.name.clone(), + summary: m.summary.clone(), downloads: m.downloads, - body_url: None, icon_url: m.icon_url.clone(), published: m.published, updated: m.updated, @@ -709,7 +706,7 @@ impl Project { )), license: m.license.clone(), slug: m.slug.clone(), - body: m.body.clone(), + description: m.description.clone(), follows: m.follows, moderation_message: m.moderation_message, moderation_message_body: m.moderation_message_body, diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index a0a92f70..704d229a 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -15,6 +15,7 @@ pub struct TeamBuilder { pub struct TeamMemberBuilder { pub user_id: UserId, pub role: String, + pub is_owner: bool, 
pub permissions: ProjectPermissions, pub organization_permissions: Option, pub accepted: bool, @@ -50,6 +51,7 @@ impl TeamBuilder { team_ids, user_ids, roles, + is_owners, permissions, organization_permissions, accepteds, @@ -64,6 +66,7 @@ impl TeamBuilder { Vec<_>, Vec<_>, Vec<_>, + Vec<_>, ) = members .into_iter() .map(|m| { @@ -71,6 +74,7 @@ impl TeamBuilder { team.id.0, m.user_id.0, m.role, + m.is_owner, m.permissions.bits() as i64, m.organization_permissions.map(|p| p.bits() as i64), m.accepted, @@ -81,13 +85,14 @@ impl TeamBuilder { .multiunzip(); sqlx::query!( " - INSERT INTO team_members (id, team_id, user_id, role, permissions, organization_permissions, accepted, payouts_split, ordering) - SELECT * FROM UNNEST ($1::int8[], $2::int8[], $3::int8[], $4::varchar[], $5::int8[], $6::int8[], $7::bool[], $8::numeric[], $9::int8[]) + INSERT INTO team_members (id, team_id, user_id, role, is_owner, permissions, organization_permissions, accepted, payouts_split, ordering) + SELECT * FROM UNNEST ($1::int8[], $2::int8[], $3::int8[], $4::varchar[], $5::bool[], $6::int8[], $7::int8[], $8::bool[], $9::numeric[], $10::int8[]) ", &team_member_ids[..], &team_ids[..], &user_ids[..], &roles[..], + &is_owners[..], &permissions[..], &organization_permissions[..] as &[Option], &accepteds[..], @@ -162,6 +167,7 @@ pub struct TeamMember { /// The ID of the user associated with the member pub user_id: UserId, pub role: String, + pub is_owner: bool, // The permissions of the user in this project team // For an organization team, these are the fallback permissions for any project in the organization @@ -233,7 +239,7 @@ impl TeamMember { if !team_ids_parsed.is_empty() { let teams: Vec = sqlx::query!( " - SELECT id, team_id, role AS member_role, permissions, organization_permissions, + SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions, accepted, payouts_split, ordering, user_id FROM team_members @@ -248,6 +254,7 @@ impl TeamMember { id: TeamMemberId(m.id), team_id: TeamId(m.team_id), role: m.member_role, + is_owner: m.is_owner, permissions: ProjectPermissions::from_bits(m.permissions as u64) .unwrap_or_default(), organization_permissions: m @@ -310,7 +317,7 @@ impl TeamMember { let team_members = sqlx::query!( " - SELECT id, team_id, role AS member_role, permissions, organization_permissions, + SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions, accepted, payouts_split, role, ordering, user_id FROM team_members @@ -328,6 +335,7 @@ impl TeamMember { team_id: TeamId(m.team_id), user_id, role: m.role, + is_owner: m.is_owner, permissions: ProjectPermissions::from_bits(m.permissions as u64) .unwrap_or_default(), organization_permissions: m @@ -362,7 +370,7 @@ impl TeamMember { { let result = sqlx::query!( " - SELECT id, team_id, role AS member_role, permissions, organization_permissions, + SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions, accepted, payouts_split, role, ordering, user_id @@ -382,6 +390,7 @@ impl TeamMember { team_id: id, user_id, role: m.role, + is_owner: m.is_owner, permissions: ProjectPermissions::from_bits(m.permissions as u64) .unwrap_or_default(), organization_permissions: m @@ -431,11 +440,10 @@ impl TeamMember { sqlx::query!( " DELETE FROM team_members - WHERE (team_id = $1 AND user_id = $2 AND NOT role = $3) + WHERE (team_id = $1 AND user_id = $2 AND NOT is_owner = TRUE) ", id as TeamId, user_id as UserId, - crate::models::teams::OWNER_ROLE, ) .execute(&mut **transaction) 
.await?; @@ -453,6 +461,7 @@ impl TeamMember { new_accepted: Option, new_payouts_split: Option, new_ordering: Option, + new_is_owner: Option, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, ) -> Result<(), super::DatabaseError> { if let Some(permissions) = new_permissions { @@ -546,6 +555,21 @@ impl TeamMember { .await?; } + if let Some(is_owner) = new_is_owner { + sqlx::query!( + " + UPDATE team_members + SET is_owner = $1 + WHERE (team_id = $2 AND user_id = $3) + ", + is_owner, + id as TeamId, + user_id as UserId, + ) + .execute(&mut **transaction) + .await?; + } + Ok(()) } @@ -559,7 +583,7 @@ impl TeamMember { { let result = sqlx::query!( " - SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering + SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 AND accepted = TRUE WHERE m.id = $1 @@ -576,6 +600,7 @@ impl TeamMember { team_id: TeamId(m.team_id), user_id, role: m.role, + is_owner: m.is_owner, permissions: ProjectPermissions::from_bits(m.permissions as u64) .unwrap_or_default(), organization_permissions: m @@ -600,7 +625,7 @@ impl TeamMember { { let result = sqlx::query!( " - SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering + SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering FROM organizations o INNER JOIN team_members tm ON tm.team_id = o.team_id AND user_id = $2 AND accepted = TRUE WHERE o.id = $1 @@ -617,6 +642,7 @@ impl TeamMember { team_id: TeamId(m.team_id), user_id, role: m.role, + is_owner: m.is_owner, permissions: ProjectPermissions::from_bits(m.permissions as u64) .unwrap_or_default(), organization_permissions: m @@ -641,7 +667,7 @@ impl TeamMember { { let result = sqlx::query!( " - SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id + SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id FROM versions v INNER JOIN mods m ON m.id = v.mod_id INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 AND tm.accepted = TRUE @@ -659,6 +685,7 @@ impl TeamMember { team_id: TeamId(m.team_id), user_id, role: m.role, + is_owner: m.is_owner, permissions: ProjectPermissions::from_bits(m.permissions as u64) .unwrap_or_default(), organization_permissions: m diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 6df542f6..799fd027 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -450,10 +450,9 @@ impl User { " SELECT m.id FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id - WHERE tm.user_id = $1 AND tm.role = $2 + WHERE tm.user_id = $1 AND tm.is_owner = TRUE ", id as UserId, - crate::models::teams::OWNER_ROLE ) .fetch_many(&mut **transaction) .try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) }) @@ -470,11 +469,10 @@ impl User { " UPDATE team_members SET user_id = $1 - WHERE (user_id = $2 AND role = $3) + WHERE (user_id = $2 AND is_owner = TRUE) ", deleted_user as UserId, id as UserId, - crate::models::teams::OWNER_ROLE ) .execute(&mut 
**transaction) .await?; diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index 96f9fe92..7d9bd9e3 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -209,7 +209,6 @@ impl VersionBuilder { name: self.name, version_number: self.version_number, changelog: self.changelog, - changelog_url: None, date_published: Utc::now(), downloads: 0, featured: self.featured, @@ -293,7 +292,6 @@ pub struct Version { pub name: String, pub version_number: String, pub changelog: String, - pub changelog_url: Option, pub date_published: DateTime, pub downloads: i32, pub version_type: String, @@ -644,7 +642,6 @@ impl Version { name: v.version_name, version_number: v.version_number, changelog: v.changelog, - changelog_url: None, date_published: v.date_published, downloads: v.downloads, version_type: v.version_type, @@ -1015,7 +1012,6 @@ mod tests { name: Default::default(), version_number: Default::default(), changelog: Default::default(), - changelog_url: Default::default(), downloads: Default::default(), version_type: Default::default(), featured: Default::default(), diff --git a/src/models/v2/mod.rs b/src/models/v2/mod.rs index 291432b9..58ea73ba 100644 --- a/src/models/v2/mod.rs +++ b/src/models/v2/mod.rs @@ -1,3 +1,5 @@ // Legacy models from V2, where its useful to keep the struct for rerouting/conversion +pub mod notifications; pub mod projects; pub mod search; +pub mod teams; diff --git a/src/models/v2/notifications.rs b/src/models/v2/notifications.rs new file mode 100644 index 00000000..a81a5b81 --- /dev/null +++ b/src/models/v2/notifications.rs @@ -0,0 +1,183 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +use crate::models::{ + ids::{ + NotificationId, OrganizationId, ProjectId, ReportId, TeamId, ThreadId, ThreadMessageId, + UserId, VersionId, + }, + notifications::{Notification, NotificationAction, NotificationBody}, + projects::ProjectStatus, +}; + +#[derive(Serialize, Deserialize)] +pub struct LegacyNotification { + pub id: NotificationId, + pub user_id: UserId, + pub read: bool, + pub created: DateTime, + pub body: LegacyNotificationBody, + + // DEPRECATED: use body field instead + #[serde(rename = "type")] + pub type_: Option, + pub title: String, + pub text: String, + pub link: String, + pub actions: Vec, +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct LegacyNotificationAction { + pub title: String, + /// The route to call when this notification action is called. Formatted HTTP Method, route + pub action_route: (String, String), +} + +#[derive(Serialize, Deserialize)] +pub enum LegacyNotificationBody { + ProjectUpdate { + project_id: ProjectId, + version_id: VersionId, + }, + TeamInvite { + project_id: ProjectId, + team_id: TeamId, + invited_by: UserId, + role: String, + }, + OrganizationInvite { + organization_id: OrganizationId, + invited_by: UserId, + team_id: TeamId, + role: String, + }, + StatusChange { + project_id: ProjectId, + old_status: ProjectStatus, + new_status: ProjectStatus, + }, + ModeratorMessage { + thread_id: ThreadId, + message_id: ThreadMessageId, + + project_id: Option, + report_id: Option, + }, + LegacyMarkdown { + notification_type: Option, + title: String, + text: String, + link: String, + actions: Vec, + }, + Unknown, +} + +impl LegacyNotification { + pub fn from(notification: Notification) -> Self { + let type_ = match ¬ification.body { + NotificationBody::ProjectUpdate { .. } => Some("project_update".to_string()), + NotificationBody::TeamInvite { .. 
} => Some("team_invite".to_string()), + NotificationBody::OrganizationInvite { .. } => Some("organization_invite".to_string()), + NotificationBody::StatusChange { .. } => Some("status_change".to_string()), + NotificationBody::ModeratorMessage { .. } => Some("moderator_message".to_string()), + NotificationBody::LegacyMarkdown { + notification_type, .. + } => notification_type.clone(), + NotificationBody::Unknown => None, + }; + + let legacy_body = match notification.body { + NotificationBody::ProjectUpdate { + project_id, + version_id, + } => LegacyNotificationBody::ProjectUpdate { + project_id, + version_id, + }, + NotificationBody::TeamInvite { + project_id, + team_id, + invited_by, + role, + } => LegacyNotificationBody::TeamInvite { + project_id, + team_id, + invited_by, + role, + }, + NotificationBody::OrganizationInvite { + organization_id, + invited_by, + team_id, + role, + } => LegacyNotificationBody::OrganizationInvite { + organization_id, + invited_by, + team_id, + role, + }, + NotificationBody::StatusChange { + project_id, + old_status, + new_status, + } => LegacyNotificationBody::StatusChange { + project_id, + old_status, + new_status, + }, + NotificationBody::ModeratorMessage { + thread_id, + message_id, + project_id, + report_id, + } => LegacyNotificationBody::ModeratorMessage { + thread_id, + message_id, + project_id, + report_id, + }, + NotificationBody::LegacyMarkdown { + notification_type, + name, + text, + link, + actions, + } => LegacyNotificationBody::LegacyMarkdown { + notification_type, + title: name, + text, + link, + actions, + }, + NotificationBody::Unknown => LegacyNotificationBody::Unknown, + }; + + Self { + id: notification.id, + user_id: notification.user_id, + read: notification.read, + created: notification.created, + body: legacy_body, + type_, + title: notification.name, + text: notification.text, + link: notification.link, + actions: notification + .actions + .into_iter() + .map(LegacyNotificationAction::from) + .collect(), + } + } +} + +impl LegacyNotificationAction { + pub fn from(notification_action: NotificationAction) -> Self { + Self { + title: notification_action.name, + action_route: notification_action.action_route, + } + } +} diff --git a/src/models/v2/projects.rs b/src/models/v2/projects.rs index 456c2aef..38409796 100644 --- a/src/models/v2/projects.rs +++ b/src/models/v2/projects.rs @@ -10,7 +10,7 @@ use crate::database::models::{version_item, DatabaseError}; use crate::database::redis::RedisPool; use crate::models::ids::{ProjectId, VersionId}; use crate::models::projects::{ - Dependency, GalleryItem, License, Link, Loader, ModeratorMessage, MonetizationStatus, Project, + Dependency, License, Link, Loader, ModeratorMessage, MonetizationStatus, Project, ProjectStatus, Version, VersionFile, VersionStatus, VersionType, }; use crate::models::threads::ThreadId; @@ -63,7 +63,7 @@ pub struct LegacyProject { pub wiki_url: Option, pub discord_url: Option, pub donation_urls: Option>, - pub gallery: Vec, + pub gallery: Vec, pub color: Option, pub thread_id: ThreadId, pub monetization_status: MonetizationStatus, @@ -151,12 +151,12 @@ impl LegacyProject { id: data.id, slug: data.slug, project_type, - team: data.team, + team: data.team_id, organization: data.organization, - title: data.title, - description: data.description, - body: data.body, - body_url: data.body_url, + title: data.name, + description: data.summary, // V2 description is V3 summary + body: data.description, // V2 body is V3 description + body_url: None, // Always None even in V2 published: 
data.published, updated: data.updated, approved: data.approved, @@ -177,7 +177,11 @@ impl LegacyProject { wiki_url, discord_url, donation_urls, - gallery: data.gallery, + gallery: data + .gallery + .into_iter() + .map(LegacyGalleryItem::from) + .collect(), color: data.color, thread_id: data.thread_id, monetization_status: data.monetization_status, @@ -317,7 +321,7 @@ impl From for LegacyVersion { name: data.name, version_number: data.version_number, changelog: data.changelog, - changelog_url: data.changelog_url, + changelog_url: None, // Always None even in V2 date_published: data.date_published, downloads: data.downloads, version_type: data.version_type, @@ -332,6 +336,29 @@ impl From for LegacyVersion { } } +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct LegacyGalleryItem { + pub url: String, + pub featured: bool, + pub name: Option, + pub description: Option, + pub created: DateTime, + pub ordering: i64, +} + +impl LegacyGalleryItem { + fn from(data: crate::models::projects::GalleryItem) -> Self { + Self { + url: data.url, + featured: data.featured, + name: data.name, + description: data.description, + created: data.created, + ordering: data.ordering, + } + } +} + #[derive(Serialize, Deserialize, Validate, Clone, Eq, PartialEq)] pub struct DonationLink { pub id: String, diff --git a/src/models/v2/search.rs b/src/models/v2/search.rs index 3c922f68..5f11ec43 100644 --- a/src/models/v2/search.rs +++ b/src/models/v2/search.rs @@ -109,7 +109,7 @@ impl LegacyResultSearchProject { project_id: result_search_project.project_id, slug: result_search_project.slug, author: result_search_project.author, - title: result_search_project.title, + title: result_search_project.name, description: result_search_project.description, display_categories, downloads: result_search_project.downloads, diff --git a/src/models/v2/teams.rs b/src/models/v2/teams.rs new file mode 100644 index 00000000..f265b770 --- /dev/null +++ b/src/models/v2/teams.rs @@ -0,0 +1,41 @@ +use rust_decimal::Decimal; +use serde::{Deserialize, Serialize}; + +use crate::models::{ + ids::TeamId, + teams::{ProjectPermissions, TeamMember}, + users::User, +}; + +/// A member of a team +#[derive(Serialize, Deserialize, Clone)] +pub struct LegacyTeamMember { + pub role: String, + // is_owner removed, and role hardcoded to Owner if true, + pub team_id: TeamId, + pub user: User, + pub permissions: Option, + pub accepted: bool, + + #[serde(with = "rust_decimal::serde::float_option")] + pub payouts_split: Option, + pub ordering: i64, +} + +impl LegacyTeamMember { + pub fn from(team_member: TeamMember) -> Self { + LegacyTeamMember { + role: match (team_member.is_owner, team_member.role.as_str()) { + (true, _) => "Owner".to_string(), + (false, "Owner") => "Member".to_string(), // The odd case of a non-owner with the owner role should show as 'Member' + (false, role) => role.to_string(), + }, + team_id: team_member.team_id, + user: team_member.user, + permissions: team_member.permissions, + accepted: team_member.accepted, + payouts_split: team_member.payouts_split, + ordering: team_member.ordering, + } + } +} diff --git a/src/models/v3/collections.rs b/src/models/v3/collections.rs index 63188183..77c74d30 100644 --- a/src/models/v3/collections.rs +++ b/src/models/v3/collections.rs @@ -20,7 +20,7 @@ pub struct Collection { /// The person that has ownership of this collection. pub user: UserId, /// The title or name of the collection. - pub title: String, + pub name: String, /// A short description of the collection. 
pub description: String, @@ -48,7 +48,7 @@ impl From for Collection { id: c.id.into(), user: c.user_id.into(), created: c.created, - title: c.title, + name: c.name, description: c.description, updated: c.updated, projects: c.projects.into_iter().map(|x| x.into()).collect(), diff --git a/src/models/v3/notifications.rs b/src/models/v3/notifications.rs index eda1c05c..4d3f6ccd 100644 --- a/src/models/v3/notifications.rs +++ b/src/models/v3/notifications.rs @@ -21,10 +21,7 @@ pub struct Notification { pub created: DateTime, pub body: NotificationBody, - // DEPRECATED: use body field instead - #[serde(rename = "type")] - pub type_: Option, - pub title: String, + pub name: String, pub text: String, pub link: String, pub actions: Vec, @@ -63,7 +60,7 @@ pub enum NotificationBody { }, LegacyMarkdown { notification_type: Option, - title: String, + name: String, text: String, link: String, actions: Vec, @@ -73,13 +70,12 @@ pub enum NotificationBody { impl From for Notification { fn from(notif: DBNotification) -> Self { - let (type_, title, text, link, actions) = { + let (name, text, link, actions) = { match ¬if.body { NotificationBody::ProjectUpdate { project_id, version_id, } => ( - Some("project_update".to_string()), "A project you follow has been updated!".to_string(), format!( "The project {} has released a new version: {}", @@ -94,17 +90,16 @@ impl From for Notification { team_id, .. } => ( - Some("team_invite".to_string()), "You have been invited to join a team!".to_string(), format!("An invite has been sent for you to be {} of a team", role), format!("/project/{}", project_id), vec![ NotificationAction { - title: "Accept".to_string(), + name: "Accept".to_string(), action_route: ("POST".to_string(), format!("team/{team_id}/join")), }, NotificationAction { - title: "Deny".to_string(), + name: "Deny".to_string(), action_route: ( "DELETE".to_string(), format!("team/{team_id}/members/{}", UserId::from(notif.user_id)), @@ -118,7 +113,6 @@ impl From for Notification { team_id, .. } => ( - Some("organization_invite".to_string()), "You have been invited to join an organization!".to_string(), format!( "An invite has been sent for you to be {} of an organization", @@ -127,11 +121,11 @@ impl From for Notification { format!("/organization/{}", organization_id), vec![ NotificationAction { - title: "Accept".to_string(), + name: "Accept".to_string(), action_route: ("POST".to_string(), format!("team/{team_id}/join")), }, NotificationAction { - title: "Deny".to_string(), + name: "Deny".to_string(), action_route: ( "DELETE".to_string(), format!( @@ -147,7 +141,6 @@ impl From for Notification { new_status, project_id, } => ( - Some("status_change".to_string()), "Project status has changed".to_string(), format!( "Status has changed from {} to {}", @@ -162,7 +155,6 @@ impl From for Notification { report_id, .. } => ( - Some("moderator_message".to_string()), "A moderator has sent you a message!".to_string(), "Click on the link to read more.".to_string(), if let Some(project_id) = project_id { @@ -175,25 +167,20 @@ impl From for Notification { vec![], ), NotificationBody::LegacyMarkdown { - notification_type, - title, + name, text, link, actions, + .. 
} => ( - notification_type.clone(), - title.clone(), + name.clone(), text.clone(), link.clone(), actions.clone().into_iter().map(Into::into).collect(), ), - NotificationBody::Unknown => ( - None, - "".to_string(), - "".to_string(), - "#".to_string(), - vec![], - ), + NotificationBody::Unknown => { + ("".to_string(), "".to_string(), "#".to_string(), vec![]) + } } }; @@ -204,9 +191,7 @@ impl From for Notification { read: notif.read, created: notif.created, - // DEPRECATED - type_, - title, + name, text, link, actions, @@ -216,7 +201,7 @@ impl From for Notification { #[derive(Serialize, Deserialize, Clone)] pub struct NotificationAction { - pub title: String, + pub name: String, /// The route to call when this notification action is called. Formatted HTTP Method, route pub action_route: (String, String), } @@ -224,7 +209,7 @@ pub struct NotificationAction { impl From for NotificationAction { fn from(act: DBNotificationAction) -> Self { Self { - title: act.title, + name: act.name, action_route: (act.action_route_method, act.action_route), } } diff --git a/src/models/v3/organizations.rs b/src/models/v3/organizations.rs index 6163ddee..984be233 100644 --- a/src/models/v3/organizations.rs +++ b/src/models/v3/organizations.rs @@ -16,7 +16,7 @@ pub struct Organization { /// The id of the organization pub id: OrganizationId, /// The title (and slug) of the organization - pub title: String, + pub name: String, /// The associated team of the organization pub team_id: TeamId, /// The description of the organization @@ -38,7 +38,7 @@ impl Organization { ) -> Self { Self { id: data.id.into(), - title: data.title, + name: data.name, team_id: data.team_id.into(), description: data.description, members: team_members, diff --git a/src/models/v3/projects.rs b/src/models/v3/projects.rs index ed01dbfd..b9bfe50f 100644 --- a/src/models/v3/projects.rs +++ b/src/models/v3/projects.rs @@ -34,17 +34,15 @@ pub struct Project { /// The aggregated games of the versions of this project pub games: Vec, /// The team of people that has ownership of this project. - pub team: TeamId, + pub team_id: TeamId, /// The optional organization of people that have ownership of this project. pub organization: Option, /// The title or name of the project. - pub title: String, + pub name: String, /// A short description of the project. - pub description: String, + pub summary: String, /// A long form description of the project. - pub body: String, - /// The link to the long description of the project. Deprecated, always None - pub body_url: Option, + pub description: String, /// The date at which the project was first published. 
pub published: DateTime, @@ -151,12 +149,11 @@ impl From for Project { slug: m.slug, project_types: data.project_types, games: data.games, - team: m.team_id.into(), + team_id: m.team_id.into(), organization: m.organization_id.map(|i| i.into()), - title: m.title, + name: m.name, + summary: m.summary, description: m.description, - body: m.body, - body_url: None, published: m.published, updated: m.updated, approved: m.approved, @@ -210,7 +207,7 @@ impl From for Project { .map(|x| GalleryItem { url: x.image_url, featured: x.featured, - title: x.title, + name: x.name, description: x.description, created: x.created, ordering: x.ordering, @@ -228,7 +225,7 @@ impl From for Project { pub struct GalleryItem { pub url: String, pub featured: bool, - pub title: Option, + pub name: Option, pub description: Option, pub created: DateTime, pub ordering: i64, @@ -464,8 +461,6 @@ pub struct Version { pub games: Vec, /// The changelog for this version of the project. pub changelog: String, - /// A link to the changelog for this version of the project. Deprecated, always None - pub changelog_url: Option, /// The date that this version was published. pub date_published: DateTime, @@ -517,7 +512,6 @@ impl From for Version { project_types: data.project_types, games: data.games, changelog: v.changelog, - changelog_url: None, date_published: v.date_published, downloads: v.downloads as u32, version_type: match v.version_type.as_str() { diff --git a/src/models/v3/teams.rs b/src/models/v3/teams.rs index ad3c6e70..814705e1 100644 --- a/src/models/v3/teams.rs +++ b/src/models/v3/teams.rs @@ -135,6 +135,8 @@ pub struct TeamMember { pub user: User, /// The role of the user in the team pub role: String, + /// Is the user the owner of the team? + pub is_owner: bool, /// A bitset containing the user's permissions in this team. /// In an organization-controlled project, these are the unique overriding permissions for the user's role for any project in the organization, if they exist. /// In an organization, these are the default project permissions for any project in the organization. @@ -178,6 +180,7 @@ impl TeamMember { team_id: data.team_id.into(), user, role: data.role, + is_owner: data.is_owner, permissions: if override_permissions { None } else { diff --git a/src/routes/maven.rs b/src/routes/maven.rs index 6ef35c94..cb8fd45f 100644 --- a/src/routes/maven.rs +++ b/src/routes/maven.rs @@ -80,7 +80,7 @@ pub async fn maven_metadata( ) -> Result { let project_id = params.into_inner().0; let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); }; let user_option = get_user_from_headers( @@ -95,7 +95,7 @@ pub async fn maven_metadata( .ok(); if !is_authorized(&project.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let version_names = sqlx::query!( @@ -274,7 +274,7 @@ pub async fn version_file( ) -> Result { let (project_id, vnum, file) = params.into_inner(); let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); }; let user_option = get_user_from_headers( @@ -289,15 +289,15 @@ pub async fn version_file( .ok(); if !is_authorized(&project.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); }; if !is_authorized_version(&version.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } if file == format!("{}-{}.pom", &project_id, &vnum) { @@ -310,7 +310,7 @@ pub async fn version_file( group_id: "maven.modrinth".to_string(), artifact_id: project_id, version: vnum, - name: project.inner.title, + name: project.inner.name, description: project.inner.description, }; return Ok(HttpResponse::Ok() @@ -322,7 +322,7 @@ pub async fn version_file( .body("")); } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } #[get("maven/modrinth/{id}/{versionnum}/{file}.sha1")] @@ -335,7 +335,7 @@ pub async fn version_file_sha1( ) -> Result { let (project_id, vnum, file) = params.into_inner(); let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); }; let user_option = get_user_from_headers( @@ -350,15 +350,15 @@ pub async fn version_file_sha1( .ok(); if !is_authorized(&project.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); }; if !is_authorized_version(&version.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } Ok(find_file(&project_id, &vnum, &version, &file) @@ -377,7 +377,7 @@ pub async fn version_file_sha512( ) -> Result { let (project_id, vnum, file) = params.into_inner(); let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); }; let user_option = get_user_from_headers( @@ -392,15 +392,15 @@ pub async fn version_file_sha512( .ok(); if !is_authorized(&project.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); }; if !is_authorized_version(&version.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } Ok(find_file(&project_id, &vnum, &version, &file) diff --git a/src/routes/mod.rs b/src/routes/mod.rs index c68ccfbb..917e015a 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -123,6 +123,8 @@ pub enum ApiError { Mail(#[from] crate::auth::email::MailError), #[error("Error while rerouting request: {0}")] Reroute(#[from] reqwest::Error), + #[error("Resource not found")] + NotFound, } impl actix_web::ResponseError for ApiError { @@ -150,6 +152,7 @@ impl actix_web::ResponseError for ApiError { ApiError::PasswordStrengthCheck(..) => StatusCode::BAD_REQUEST, ApiError::Mail(..) => StatusCode::INTERNAL_SERVER_ERROR, ApiError::Reroute(..) => StatusCode::INTERNAL_SERVER_ERROR, + ApiError::NotFound => StatusCode::NOT_FOUND, } } @@ -178,6 +181,7 @@ impl actix_web::ResponseError for ApiError { ApiError::Mail(..) => "mail_error", ApiError::Clickhouse(..) 
=> "clickhouse_error", ApiError::Reroute(..) => "reroute_error", + ApiError::NotFound => "not_found", }, description: &self.to_string(), }) diff --git a/src/routes/v2/analytics_get.rs b/src/routes/v2/analytics_get.rs new file mode 100644 index 00000000..842abb9d --- /dev/null +++ b/src/routes/v2/analytics_get.rs @@ -0,0 +1,272 @@ +use super::ApiError; +use crate::database::redis::RedisPool; +use crate::routes::{v2_reroute, v3}; +use crate::{models::ids::VersionId, queue::session::AuthQueue}; +use actix_web::{get, web, HttpRequest, HttpResponse}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use std::collections::HashMap; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("analytics") + .service(playtimes_get) + .service(views_get) + .service(downloads_get) + .service(revenue_get) + .service(countries_downloads_get) + .service(countries_views_get), + ); +} + +/// The json data to be passed to fetch analytic data +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +/// start_date and end_date are optional, and default to two weeks ago, and the maximum date respectively +/// start_date and end_date are inclusive +/// resolution_minutes is optional. This refers to the window by which we are looking (every day, every minute, etc) and defaults to 1440 (1 day) +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct GetData { + // only one of project_ids or version_ids should be used + // if neither are provided, all projects the user has access to will be used + pub project_ids: Option, + pub version_ids: Option, + + pub start_date: Option>, // defaults to 2 weeks ago + pub end_date: Option>, // defaults to now + + pub resolution_minutes: Option, // defaults to 1 day. Ignored in routes that do not aggregate over a resolution (eg: /countries) +} + +/// Get playtime data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of days to playtime data +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 23 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +#[derive(Serialize, Deserialize, Clone)] +pub struct FetchedPlaytime { + pub time: u64, + pub total_seconds: u64, + pub loader_seconds: HashMap, + pub game_version_seconds: HashMap, + pub parent_seconds: HashMap, +} +#[get("playtime")] +pub async fn playtimes_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let data = data.into_inner(); + v3::analytics_get::playtimes_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, + ) + .await +} + +/// Get view data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of days to views +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 1090 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. 
+#[get("views")] +pub async fn views_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let data = data.into_inner(); + v3::analytics_get::views_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, + ) + .await + .or_else(v2_reroute::flatten_404_error) +} + +/// Get download data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of days to downloads +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 32 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +#[get("downloads")] +pub async fn downloads_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let data = data.into_inner(); + v3::analytics_get::downloads_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, + ) + .await + .or_else(v2_reroute::flatten_404_error) +} + +/// Get payout data for a set of projects +/// Data is returned as a hashmap of project ids to a hashmap of days to amount earned per day +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 0.001 +/// } +///} +/// ONLY project IDs can be used. Unauthorized projects will be filtered out. +#[get("revenue")] +pub async fn revenue_get( + req: HttpRequest, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let data = data.into_inner(); + v3::analytics_get::revenue_get( + req, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: None, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, + ) + .await + .or_else(v2_reroute::flatten_404_error) +} + +/// Get country data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to downloads. +/// Unknown countries are labeled "". +/// This is usuable to see significant performing countries per project +/// eg: +/// { +/// "4N1tEhnO": { +/// "CAN": 22 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. 
+/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch +#[get("countries/downloads")] +pub async fn countries_downloads_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let data = data.into_inner(); + v3::analytics_get::countries_downloads_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, + ) + .await + .or_else(v2_reroute::flatten_404_error) +} + +/// Get country data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of country to views. +/// Unknown countries are labeled "". +/// This is usable for seeing the top-performing countries per project +/// eg: +/// { +/// "4N1tEhnO": { +/// "CAN": 56165 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch +#[get("countries/views")] +pub async fn countries_views_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let data = data.into_inner(); + v3::analytics_get::countries_views_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, + ) + .await + .or_else(v2_reroute::flatten_404_error) +} diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs new file mode 100644 index 00000000..15be66ef --- /dev/null +++ b/src/routes/v2/collections.rs @@ -0,0 +1,191 @@ +use crate::database::redis::RedisPool; +use crate::file_hosting::FileHost; +use crate::models::collections::CollectionStatus; +use crate::queue::session::AuthQueue; +use crate::routes::v3::project_creation::CreateError; +use crate::routes::{v3, ApiError}; +use actix_web::web::Data; +use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use std::sync::Arc; +use validator::Validate; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service(collections_get); + cfg.service(collection_create); + cfg.service( + web::scope("collection") + .service(collection_get) + .service(collection_delete) + .service(collection_edit) + .service(collection_icon_edit) + .service(delete_collection_icon), + ); +} + +#[derive(Serialize, Deserialize, Validate, Clone)] +pub struct CollectionCreateData { + #[validate( + length(min = 3, max = 64), + custom(function = "crate::util::validate::validate_name") + )] + /// The title or name of the project. + pub title: String, + #[validate(length(min = 3, max = 255))] + /// A short description of the collection.
+ pub description: String, + #[validate(length(max = 32))] + #[serde(default = "Vec::new")] + /// A list of initial projects to use with the created collection + pub projects: Vec, +} + +#[post("collection")] +pub async fn collection_create( + req: HttpRequest, + collection_create_data: web::Json, + client: Data, + redis: Data, + session_queue: Data, +) -> Result { + let collection_create_data = collection_create_data.into_inner(); + v3::collections::collection_create( + req, + web::Json(v3::collections::CollectionCreateData { + name: collection_create_data.title, + description: collection_create_data.description, + projects: collection_create_data.projects, + }), + client, + redis, + session_queue, + ) + .await +} + +#[derive(Serialize, Deserialize)] +pub struct CollectionIds { + pub ids: String, +} +#[get("collections")] +pub async fn collections_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + v3::collections::collections_get( + req, + web::Query(v3::collections::CollectionIds { ids: ids.ids }), + pool, + redis, + session_queue, + ) + .await +} + +#[get("{id}")] +pub async fn collection_get( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + v3::collections::collection_get(req, info, pool, redis, session_queue).await +} + +#[derive(Deserialize, Validate)] +pub struct EditCollection { + #[validate( + length(min = 3, max = 64), + custom(function = "crate::util::validate::validate_name") + )] + pub title: Option, + #[validate(length(min = 3, max = 256))] + pub description: Option, + pub status: Option, + #[validate(length(max = 64))] + pub new_projects: Option>, +} + +#[patch("{id}")] +pub async fn collection_edit( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + new_collection: web::Json, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let new_collection = new_collection.into_inner(); + v3::collections::collection_edit( + req, + info, + pool, + web::Json(v3::collections::EditCollection { + name: new_collection.title, + description: new_collection.description, + status: new_collection.status, + new_projects: new_collection.new_projects, + }), + redis, + session_queue, + ) + .await +} + +#[derive(Serialize, Deserialize)] +pub struct Extension { + pub ext: String, +} + +#[patch("{id}/icon")] +#[allow(clippy::too_many_arguments)] +pub async fn collection_icon_edit( + web::Query(ext): web::Query, + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + payload: web::Payload, + session_queue: web::Data, +) -> Result { + v3::collections::collection_icon_edit( + web::Query(v3::collections::Extension { ext: ext.ext }), + req, + info, + pool, + redis, + file_host, + payload, + session_queue, + ) + .await +} + +#[delete("{id}/icon")] +pub async fn delete_collection_icon( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + session_queue: web::Data, +) -> Result { + v3::collections::delete_collection_icon(req, info, pool, redis, file_host, session_queue).await +} + +#[delete("{id}")] +pub async fn collection_delete( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + v3::collections::collection_delete(req, info, pool, redis, session_queue).await +} diff --git a/src/routes/v2/moderation.rs 
b/src/routes/v2/moderation.rs index f44214de..b9ac981a 100644 --- a/src/routes/v2/moderation.rs +++ b/src/routes/v2/moderation.rs @@ -1,7 +1,7 @@ use super::ApiError; -use crate::database::redis::RedisPool; use crate::queue::session::AuthQueue; use crate::routes::v3; +use crate::{database::redis::RedisPool, routes::v2_reroute}; use actix_web::{get, web, HttpRequest, HttpResponse}; use serde::Deserialize; use sqlx::PgPool; @@ -36,4 +36,5 @@ pub async fn get_projects( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2/notifications.rs b/src/routes/v2/notifications.rs index af04cafb..ab58ee1a 100644 --- a/src/routes/v2/notifications.rs +++ b/src/routes/v2/notifications.rs @@ -1,6 +1,9 @@ use crate::database::redis::RedisPool; use crate::models::ids::NotificationId; +use crate::models::notifications::Notification; +use crate::models::v2::notifications::LegacyNotification; use crate::queue::session::AuthQueue; +use crate::routes::v2_reroute; use crate::routes::v3; use crate::routes::ApiError; use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse}; @@ -33,7 +36,7 @@ pub async fn notifications_get( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::notifications::notifications_get( + let resp = v3::notifications::notifications_get( req, web::Query(v3::notifications::NotificationIds { ids: ids.ids }), pool, @@ -41,6 +44,17 @@ pub async fn notifications_get( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error); + match v2_reroute::extract_ok_json::>(resp?).await { + Ok(notifications) => { + let notifications: Vec = notifications + .into_iter() + .map(LegacyNotification::from) + .collect(); + Ok(HttpResponse::Ok().json(notifications)) + } + Err(response) => Ok(response), + } } #[get("{id}")] @@ -51,7 +65,16 @@ pub async fn notification_get( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::notifications::notification_get(req, info, pool, redis, session_queue).await + let response = v3::notifications::notification_get(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; + match v2_reroute::extract_ok_json::(response).await { + Ok(notification) => { + let notification = LegacyNotification::from(notification); + Ok(HttpResponse::Ok().json(notification)) + } + Err(response) => Ok(response), + } } #[patch("{id}")] @@ -62,7 +85,9 @@ pub async fn notification_read( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::notifications::notification_read(req, info, pool, redis, session_queue).await + v3::notifications::notification_read(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[delete("{id}")] @@ -73,7 +98,9 @@ pub async fn notification_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::notifications::notification_delete(req, info, pool, redis, session_queue).await + v3::notifications::notification_delete(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[patch("notifications")] @@ -92,6 +119,7 @@ pub async fn notifications_read( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[delete("notifications")] @@ -110,4 +138,5 @@ pub async fn notifications_delete( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2/organizations.rs b/src/routes/v2/organizations.rs new file mode 100644 index 00000000..29f1f467 --- /dev/null +++ b/src/routes/v2/organizations.rs @@ -0,0 +1,265 @@ +use 
crate::database::redis::RedisPool; +use crate::file_hosting::FileHost; +use crate::models::projects::Project; +use crate::models::v2::projects::LegacyProject; +use crate::queue::session::AuthQueue; +use crate::routes::v3::project_creation::CreateError; +use crate::routes::{v2_reroute, v3, ApiError}; +use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use std::sync::Arc; +use validator::Validate; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service(organizations_get).service(organization_create); + cfg.service( + web::scope("organization") + .service(organization_get) + .service(organizations_edit) + .service(organization_delete) + .service(organization_projects_get) + .service(organization_projects_add) + .service(organization_projects_remove) + .service(organization_icon_edit) + .service(delete_organization_icon) + .service(super::teams::team_members_get_organization), + ); +} + +#[derive(Deserialize, Validate)] +pub struct NewOrganization { + #[validate( + length(min = 3, max = 64), + regex = "crate::util::validate::RE_URL_SAFE" + )] + // Title of the organization, also used as slug + pub title: String, + #[validate(length(min = 3, max = 256))] + pub description: String, +} + +#[post("organization")] +pub async fn organization_create( + req: HttpRequest, + new_organization: web::Json, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let new_organization = new_organization.into_inner(); + v3::organizations::organization_create( + req, + web::Json(v3::organizations::NewOrganization { + name: new_organization.title, + description: new_organization.description, + }), + pool.clone(), + redis.clone(), + session_queue, + ) + .await +} + +#[get("{id}")] +pub async fn organization_get( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + v3::organizations::organization_get(req, info, pool.clone(), redis.clone(), session_queue).await +} + +#[derive(Deserialize)] +pub struct OrganizationIds { + pub ids: String, +} +#[get("organizations")] +pub async fn organizations_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + v3::organizations::organizations_get( + req, + web::Query(v3::organizations::OrganizationIds { ids: ids.ids }), + pool, + redis, + session_queue, + ) + .await +} + +#[derive(Serialize, Deserialize, Validate)] +pub struct OrganizationEdit { + #[validate(length(min = 3, max = 256))] + pub description: Option, + #[validate( + length(min = 3, max = 64), + regex = "crate::util::validate::RE_URL_SAFE" + )] + // Title of the organization, also used as slug + pub title: Option, +} + +#[patch("{id}")] +pub async fn organizations_edit( + req: HttpRequest, + info: web::Path<(String,)>, + new_organization: web::Json, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let new_organization = new_organization.into_inner(); + v3::organizations::organizations_edit( + req, + info, + web::Json(v3::organizations::OrganizationEdit { + description: new_organization.description, + name: new_organization.title, + }), + pool.clone(), + redis.clone(), + session_queue, + ) + .await +} + +#[delete("{id}")] +pub async fn organization_delete( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + 
v3::organizations::organization_delete(req, info, pool.clone(), redis.clone(), session_queue) + .await +} + +#[get("{id}/projects")] +pub async fn organization_projects_get( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let response = v3::organizations::organization_projects_get( + req, + info, + pool.clone(), + redis.clone(), + session_queue, + ) + .await?; + + // Convert v3 projects to v2 + match v2_reroute::extract_ok_json::>(response).await { + Ok(project) => { + let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?; + Ok(HttpResponse::Ok().json(legacy_projects)) + } + Err(response) => Ok(response), + } +} + +#[derive(Deserialize)] +pub struct OrganizationProjectAdd { + pub project_id: String, // Also allow title/slug +} +#[post("{id}/projects")] +pub async fn organization_projects_add( + req: HttpRequest, + info: web::Path<(String,)>, + project_info: web::Json, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let project_info = project_info.into_inner(); + v3::organizations::organization_projects_add( + req, + info, + web::Json(v3::organizations::OrganizationProjectAdd { + project_id: project_info.project_id, + }), + pool.clone(), + redis.clone(), + session_queue, + ) + .await +} + +#[delete("{organization_id}/projects/{project_id}")] +pub async fn organization_projects_remove( + req: HttpRequest, + info: web::Path<(String, String)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + v3::organizations::organization_projects_remove( + req, + info, + pool.clone(), + redis.clone(), + session_queue, + ) + .await +} + +#[derive(Serialize, Deserialize)] +pub struct Extension { + pub ext: String, +} + +#[patch("{id}/icon")] +#[allow(clippy::too_many_arguments)] +pub async fn organization_icon_edit( + web::Query(ext): web::Query, + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + payload: web::Payload, + session_queue: web::Data, +) -> Result { + v3::organizations::organization_icon_edit( + web::Query(v3::organizations::Extension { ext: ext.ext }), + req, + info, + pool.clone(), + redis.clone(), + file_host, + payload, + session_queue, + ) + .await +} + +#[delete("{id}/icon")] +pub async fn delete_organization_icon( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + session_queue: web::Data, +) -> Result { + v3::organizations::delete_organization_icon( + req, + info, + pool.clone(), + redis.clone(), + file_host, + session_queue, + ) + .await +} diff --git a/src/routes/v2/project_creation.rs b/src/routes/v2/project_creation.rs index e56d5159..7e1c5053 100644 --- a/src/routes/v2/project_creation.rs +++ b/src/routes/v2/project_creation.rs @@ -213,10 +213,10 @@ pub async fn project_create( } Ok(v3::project_creation::ProjectCreateData { - title: legacy_create.title, + name: legacy_create.title, slug: legacy_create.slug, - description: legacy_create.description, - body: legacy_create.body, + summary: legacy_create.description, // Description becomes summary + description: legacy_create.body, // Body becomes description initial_versions, categories: legacy_create.categories, additional_categories: legacy_create.additional_categories, diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs index 48895e3a..80c43c8c 100644 --- a/src/routes/v2/projects.rs +++ b/src/routes/v2/projects.rs 
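The first projects.rs hunk that follows extends project_search's facet rewriting: alongside the existing project_type: mapping, legacy title: facets are now rewritten to the v3 name: field before the query reaches the search backend. A minimal stand-alone sketch of that rewrite, reduced to the two branches visible in the hunk (the rewrite_facet helper here is illustrative, not a function from the codebase):

    fn rewrite_facet(facet: &str) -> String {
        // `val` is the portion after the first colon, mirroring the handler's closure.
        let val = facet.splitn(2, ':').nth(1).unwrap_or("");
        if facet.starts_with("project_type:") {
            format!("project_types:{}", val)
        } else if facet.starts_with("title:") {
            format!("name:{}", val) // legacy `title` facets now target the v3 `name` field
        } else {
            facet.to_string()
        }
    }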
@@ -102,6 +102,8 @@ pub async fn project_search( format!("game_versions:{}", val) } else if facet.starts_with("project_type:") { format!("project_types:{}", val) + } else if facet.starts_with("title:") { + format!("name:{}", val) } else { facet.to_string() } @@ -143,7 +145,10 @@ pub async fn random_projects_get( let count = v3::projects::RandomProjects { count: count.count }; let response = - v3::projects::random_projects_get(web::Query(count), pool.clone(), redis.clone()).await?; + v3::projects::random_projects_get(web::Query(count), pool.clone(), redis.clone()) + .await + .or_else(v2_reroute::flatten_404_error) + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::>(response).await { Ok(project) => { @@ -170,7 +175,9 @@ pub async fn projects_get( redis.clone(), session_queue, ) - .await?; + .await + .or_else(v2_reroute::flatten_404_error) + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::>(response).await { @@ -191,10 +198,11 @@ pub async fn project_get( session_queue: web::Data, ) -> Result { // Convert V2 data to V3 data - // Call V3 project creation - let response = - v3::projects::project_get(req, info, pool.clone(), redis.clone(), session_queue).await?; + let response = v3::projects::project_get(req, info, pool.clone(), redis.clone(), session_queue) + .await + .or_else(v2_reroute::flatten_404_error) + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::(response).await { @@ -217,7 +225,10 @@ pub async fn project_get_check( pool: web::Data, redis: web::Data, ) -> Result { - v3::projects::project_get_check(info, pool, redis).await + v3::projects::project_get_check(info, pool, redis) + .await + .or_else(v2_reroute::flatten_404_error) + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize)] @@ -235,7 +246,10 @@ pub async fn dependency_list( session_queue: web::Data, ) -> Result { // TODO: requires V2 conversion and tests, probably - v3::projects::dependency_list(req, info, pool, redis, session_queue).await + v3::projects::dependency_list(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize, Validate)] @@ -426,9 +440,9 @@ pub async fn project_edit( } let new_project = v3::projects::EditProject { - title: v2_new_project.title, - description: v2_new_project.description, - body: v2_new_project.body, + name: v2_new_project.title, + summary: v2_new_project.description, // Description becomes summary + description: v2_new_project.body, // Body becomes description categories: v2_new_project.categories, additional_categories: v2_new_project.additional_categories, license_url: v2_new_project.license_url, @@ -453,7 +467,8 @@ pub async fn project_edit( redis.clone(), session_queue.clone(), ) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; // If client and server side were set, we will call // the version setting route for each version to set the side types for each of them. 
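The project_create and project_edit hunks above apply the same three renames whenever a v2 request is bridged to v3: title becomes name, description becomes summary, and body becomes description. A self-contained sketch of that mapping using simplified stand-in structs (the real request types carry many more fields plus validation attributes):

    struct V2ProjectFields {
        title: Option<String>,
        description: Option<String>,
        body: Option<String>,
    }

    struct V3ProjectFields {
        name: Option<String>,
        summary: Option<String>,
        description: Option<String>,
    }

    fn map_v2_to_v3(v2: V2ProjectFields) -> V3ProjectFields {
        V3ProjectFields {
            name: v2.title,          // v2 `title`       -> v3 `name`
            summary: v2.description, // v2 `description` -> v3 `summary`
            description: v2.body,    // v2 `body`        -> v3 `description`
        }
    }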
@@ -642,6 +657,7 @@ pub async fn projects_edit( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize)] @@ -672,6 +688,7 @@ pub async fn project_icon_edit( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[delete("{id}/icon")] @@ -683,7 +700,9 @@ pub async fn delete_project_icon( file_host: web::Data>, session_queue: web::Data, ) -> Result { - v3::projects::delete_project_icon(req, info, pool, redis, file_host, session_queue).await + v3::projects::delete_project_icon(req, info, pool, redis, file_host, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize, Validate)] @@ -714,7 +733,7 @@ pub async fn add_gallery_item( req, web::Query(v3::projects::GalleryCreateQuery { featured: item.featured, - title: item.title, + name: item.title, description: item.description, ordering: item.ordering, }), @@ -726,6 +745,7 @@ pub async fn add_gallery_item( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize, Validate)] @@ -764,7 +784,7 @@ pub async fn edit_gallery_item( web::Query(v3::projects::GalleryEditQuery { url: item.url, featured: item.featured, - title: item.title, + name: item.title, description: item.description, ordering: item.ordering, }), @@ -774,6 +794,7 @@ pub async fn edit_gallery_item( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize)] @@ -801,6 +822,7 @@ pub async fn delete_gallery_item( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[delete("{id}")] @@ -812,7 +834,9 @@ pub async fn project_delete( config: web::Data, session_queue: web::Data, ) -> Result { - v3::projects::project_delete(req, info, pool, redis, config, session_queue).await + v3::projects::project_delete(req, info, pool, redis, config, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[post("{id}/follow")] @@ -823,7 +847,9 @@ pub async fn project_follow( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::projects::project_follow(req, info, pool, redis, session_queue).await + v3::projects::project_follow(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[delete("{id}/follow")] @@ -834,5 +860,7 @@ pub async fn project_unfollow( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::projects::project_unfollow(req, info, pool, redis, session_queue).await + v3::projects::project_unfollow(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2/reports.rs b/src/routes/v2/reports.rs index f167eceb..80e481c8 100644 --- a/src/routes/v2/reports.rs +++ b/src/routes/v2/reports.rs @@ -2,7 +2,7 @@ use crate::database::redis::RedisPool; use crate::models::ids::ImageId; use crate::models::reports::ItemType; use crate::queue::session::AuthQueue; -use crate::routes::{v3, ApiError}; +use crate::routes::{v2_reroute, v3, ApiError}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use serde::Deserialize; use sqlx::PgPool; @@ -37,7 +37,9 @@ pub async fn report_create( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::reports::report_create(req, pool, body, redis, session_queue).await + v3::reports::report_create(req, pool, body, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Deserialize)] @@ -74,6 +76,7 @@ pub async fn reports( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } 
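projects_get and project_get above, like the notifications routes earlier and the teams, users, and versions routes later in this diff, follow the same convert-and-rewrap shape: call the v3 handler, flatten a structured 404 if one occurs, pull the JSON payload out of a successful response with v2_reroute::extract_ok_json, convert it into the legacy model, and re-serialize it, while any other response is passed back to the client untouched. Sketched below with stand-in types rather than the real actix response machinery (the real code pairs e.g. Project with LegacyProject and Notification with LegacyNotification):

    struct V3Thing {
        name: String,
    }

    struct LegacyThing {
        title: String,
    }

    impl From<V3Thing> for LegacyThing {
        fn from(t: V3Thing) -> Self {
            LegacyThing { title: t.name } // the v3 `name` resurfaces as the v2 `title`
        }
    }

    // `Ok` models extract_ok_json succeeding on a 2xx JSON body; `Err` stands in for
    // any other response, which the v2 wrappers forward unchanged.
    fn rewrap(extracted: Result<V3Thing, String>) -> Result<LegacyThing, String> {
        extracted.map(LegacyThing::from)
    }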
#[derive(Deserialize)] @@ -97,6 +100,7 @@ pub async fn reports_get( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[get("report/{id}")] @@ -107,7 +111,9 @@ pub async fn report_get( info: web::Path<(crate::models::reports::ReportId,)>, session_queue: web::Data, ) -> Result { - v3::reports::report_get(req, pool, redis, info, session_queue).await + v3::reports::report_get(req, pool, redis, info, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Deserialize, Validate)] @@ -139,6 +145,7 @@ pub async fn report_edit( }), ) .await + .or_else(v2_reroute::flatten_404_error) } #[delete("report/{id}")] @@ -149,5 +156,7 @@ pub async fn report_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::reports::report_delete(req, pool, info, redis, session_queue).await + v3::reports::report_delete(req, pool, info, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2/statistics.rs b/src/routes/v2/statistics.rs index 962bc39f..514dc5a5 100644 --- a/src/routes/v2/statistics.rs +++ b/src/routes/v2/statistics.rs @@ -1,4 +1,4 @@ -use crate::routes::{v3, ApiError}; +use crate::routes::{v2_reroute, v3, ApiError}; use actix_web::{get, web, HttpResponse}; use sqlx::PgPool; @@ -8,5 +8,7 @@ pub fn config(cfg: &mut web::ServiceConfig) { #[get("statistics")] pub async fn get_stats(pool: web::Data) -> Result { - v3::statistics::get_stats(pool).await + v3::statistics::get_stats(pool) + .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2/tags.rs b/src/routes/v2/tags.rs index 71bbadcf..00ba0726 100644 --- a/src/routes/v2/tags.rs +++ b/src/routes/v2/tags.rs @@ -161,7 +161,9 @@ pub struct LicenseText { #[get("license/{id}")] pub async fn license_text(params: web::Path<(String,)>) -> Result { - v3::tags::license_text(params).await + v3::tags::license_text(params) + .await + .or_else(v2_reroute::flatten_404_error) } #[derive(serde::Serialize)] @@ -192,6 +194,7 @@ pub async fn donation_platform_list( Err(response) => response, }, ) + .or_else(v2_reroute::flatten_404_error) } #[get("report_type")] @@ -199,7 +202,9 @@ pub async fn report_type_list( pool: web::Data, redis: web::Data, ) -> Result { - v3::tags::report_type_list(pool, redis).await + v3::tags::report_type_list(pool, redis) + .await + .or_else(v2_reroute::flatten_404_error) } #[get("project_type")] @@ -207,7 +212,9 @@ pub async fn project_type_list( pool: web::Data, redis: web::Data, ) -> Result { - v3::tags::project_type_list(pool, redis).await + v3::tags::project_type_list(pool, redis) + .await + .or_else(v2_reroute::flatten_404_error) } #[get("side_type")] diff --git a/src/routes/v2/teams.rs b/src/routes/v2/teams.rs index 74a64a6c..5ff2a9e0 100644 --- a/src/routes/v2/teams.rs +++ b/src/routes/v2/teams.rs @@ -1,8 +1,9 @@ use crate::database::redis::RedisPool; -use crate::models::teams::{OrganizationPermissions, ProjectPermissions, TeamId}; +use crate::models::teams::{OrganizationPermissions, ProjectPermissions, TeamId, TeamMember}; use crate::models::users::UserId; +use crate::models::v2::teams::LegacyTeamMember; use crate::queue::session::AuthQueue; -use crate::routes::{v3, ApiError}; +use crate::routes::{v2_reroute, v3, ApiError}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; @@ -34,7 +35,20 @@ pub async fn team_members_get_project( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::teams::team_members_get_project(req, info, 
pool, redis, session_queue).await + let response = v3::teams::team_members_get_project(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(members) => { + let members = members + .into_iter() + .map(LegacyTeamMember::from) + .collect::>(); + Ok(HttpResponse::Ok().json(members)) + } + Err(response) => Ok(response), + } } // Returns all members of a team, but not necessarily those of a project-team's organization (unlike team_members_get_project) @@ -46,7 +60,20 @@ pub async fn team_members_get( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::teams::team_members_get(req, info, pool, redis, session_queue).await + let response = v3::teams::team_members_get(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(members) => { + let members = members + .into_iter() + .map(LegacyTeamMember::from) + .collect::>(); + Ok(HttpResponse::Ok().json(members)) + } + Err(response) => Ok(response), + } } #[derive(Serialize, Deserialize)] @@ -62,7 +89,7 @@ pub async fn teams_get( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::teams::teams_get( + let response = v3::teams::teams_get( req, web::Query(v3::teams::TeamIds { ids: ids.ids }), pool, @@ -70,6 +97,23 @@ pub async fn teams_get( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error); + // Convert response to V2 format + match v2_reroute::extract_ok_json::>>(response?).await { + Ok(members) => { + let members = members + .into_iter() + .map(|members| { + members + .into_iter() + .map(LegacyTeamMember::from) + .collect::>() + }) + .collect::>(); + Ok(HttpResponse::Ok().json(members)) + } + Err(response) => Ok(response), + } } #[post("{id}/join")] @@ -80,7 +124,9 @@ pub async fn join_team( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::teams::join_team(req, info, pool, redis, session_queue).await + v3::teams::join_team(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } fn default_role() -> String { @@ -132,6 +178,7 @@ pub async fn add_team_member( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize, Clone)] @@ -167,6 +214,7 @@ pub async fn edit_team_member( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Deserialize)] @@ -194,6 +242,7 @@ pub async fn transfer_ownership( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[delete("{id}/members/{user_id}")] @@ -204,5 +253,7 @@ pub async fn remove_team_member( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::teams::remove_team_member(req, info, pool, redis, session_queue).await + v3::teams::remove_team_member(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs index 6b7e8c2b..deb3e240 100644 --- a/src/routes/v2/threads.rs +++ b/src/routes/v2/threads.rs @@ -5,7 +5,7 @@ use crate::file_hosting::FileHost; use crate::models::ids::ThreadMessageId; use crate::models::threads::{MessageBody, ThreadId}; use crate::queue::session::AuthQueue; -use crate::routes::{v3, ApiError}; +use crate::routes::{v2_reroute, v3, ApiError}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; use serde::Deserialize; use sqlx::PgPool; @@ -30,7 
+30,9 @@ pub async fn thread_get( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::threads::thread_get(req, info, pool, redis, session_queue).await + v3::threads::thread_get(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Deserialize)] @@ -54,6 +56,7 @@ pub async fn threads_get( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Deserialize)] @@ -82,6 +85,7 @@ pub async fn thread_send_message( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[get("inbox")] @@ -91,7 +95,9 @@ pub async fn moderation_inbox( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::threads::moderation_inbox(req, pool, redis, session_queue).await + v3::threads::moderation_inbox(req, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[post("{id}/read")] @@ -102,7 +108,9 @@ pub async fn thread_read( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::threads::thread_read(req, info, pool, redis, session_queue).await + v3::threads::thread_read(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[delete("{id}")] @@ -114,5 +122,7 @@ pub async fn message_delete( session_queue: web::Data, file_host: web::Data>, ) -> Result { - v3::threads::message_delete(req, info, pool, redis, session_queue, file_host).await + v3::threads::message_delete(req, info, pool, redis, session_queue, file_host) + .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2/users.rs b/src/routes/v2/users.rs index 13055187..37da2f9e 100644 --- a/src/routes/v2/users.rs +++ b/src/routes/v2/users.rs @@ -1,7 +1,9 @@ use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; +use crate::models::notifications::Notification; use crate::models::projects::Project; use crate::models::users::{Badges, Role}; +use crate::models::v2::notifications::LegacyNotification; use crate::models::v2::projects::LegacyProject; use crate::queue::session::AuthQueue; use crate::routes::{v2_reroute, v3, ApiError}; @@ -36,7 +38,9 @@ pub async fn user_auth_get( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::users::user_auth_get(req, pool, redis, session_queue).await + v3::users::user_auth_get(req, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize)] @@ -50,7 +54,9 @@ pub async fn users_get( pool: web::Data, redis: web::Data, ) -> Result { - v3::users::users_get(web::Query(v3::users::UserIds { ids: ids.ids }), pool, redis).await + v3::users::users_get(web::Query(v3::users::UserIds { ids: ids.ids }), pool, redis) + .await + .or_else(v2_reroute::flatten_404_error) } #[get("{id}")] @@ -59,7 +65,9 @@ pub async fn user_get( pool: web::Data, redis: web::Data, ) -> Result { - v3::users::user_get(info, pool, redis).await + v3::users::user_get(info, pool, redis) + .await + .or_else(v2_reroute::flatten_404_error) } #[get("{user_id}/projects")] @@ -70,8 +78,9 @@ pub async fn projects_list( redis: web::Data, session_queue: web::Data, ) -> Result { - let response = - v3::users::projects_list(req, info, pool.clone(), redis.clone(), session_queue).await?; + let response = v3::users::projects_list(req, info, pool.clone(), redis.clone(), session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert to V2 projects match v2_reroute::extract_ok_json::>(response).await { @@ -135,6 +144,7 @@ pub async fn user_edit( session_queue, ) .await + 
.or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize)] @@ -165,6 +175,7 @@ pub async fn user_icon_edit( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Deserialize)] @@ -198,6 +209,7 @@ pub async fn user_delete( session_queue, ) .await + .or_else(v2_reroute::flatten_404_error) } #[get("{id}/follows")] @@ -208,7 +220,9 @@ pub async fn user_follows( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::users::user_follows(req, info, pool, redis, session_queue).await + v3::users::user_follows(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[get("{id}/notifications")] @@ -219,5 +233,18 @@ pub async fn user_notifications( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::users::user_notifications(req, info, pool, redis, session_queue).await + let response = v3::users::user_notifications(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(notifications) => { + let legacy_notifications: Vec = notifications + .into_iter() + .map(LegacyNotification::from) + .collect(); + Ok(HttpResponse::Ok().json(legacy_notifications)) + } + Err(response) => Ok(response), + } } diff --git a/src/routes/v2/version_file.rs b/src/routes/v2/version_file.rs index 36906347..a50d25b4 100644 --- a/src/routes/v2/version_file.rs +++ b/src/routes/v2/version_file.rs @@ -3,7 +3,7 @@ use crate::database::redis::RedisPool; use crate::models::projects::{Project, Version, VersionType}; use crate::models::v2::projects::{LegacyProject, LegacyVersion}; use crate::queue::session::AuthQueue; -use crate::routes::v3::version_file::{default_algorithm, HashQuery}; +use crate::routes::v3::version_file::HashQuery; use crate::routes::{v2_reroute, v3}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; use serde::{Deserialize, Serialize}; @@ -40,10 +40,11 @@ pub async fn get_version_from_hash( ) -> Result { let response = v3::version_file::get_version_from_hash(req, info, pool, redis, hash_query, session_queue) - .await; + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format - match v2_reroute::extract_ok_json::(response?).await { + match v2_reroute::extract_ok_json::(response).await { Ok(version) => { let v2_version = LegacyVersion::from(version); Ok(HttpResponse::Ok().json(v2_version)) @@ -62,7 +63,9 @@ pub async fn download_version( hash_query: web::Query, session_queue: web::Data, ) -> Result { - v3::version_file::download_version(req, info, pool, redis, hash_query, session_queue).await + v3::version_file::download_version(req, info, pool, redis, hash_query, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } // under /api/v1/version_file/{hash} @@ -75,7 +78,9 @@ pub async fn delete_file( hash_query: web::Query, session_queue: web::Data, ) -> Result { - v3::version_file::delete_file(req, info, pool, redis, hash_query, session_queue).await + v3::version_file::delete_file(req, info, pool, redis, hash_query, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } #[derive(Serialize, Deserialize)] @@ -119,7 +124,8 @@ pub async fn get_update_from_hash( web::Json(update_data), session_queue, ) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::(response).await { @@ -134,8 +140,7 @@ pub async fn get_update_from_hash( // Requests above with 
multiple versions below #[derive(Deserialize)] pub struct FileHashes { - #[serde(default = "default_algorithm")] - pub algorithm: String, + pub algorithm: Option, pub hashes: Vec, } @@ -160,7 +165,8 @@ pub async fn get_versions_from_hashes( web::Json(file_data), session_queue, ) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert to V2 match v2_reroute::extract_ok_json::>(response).await { @@ -198,7 +204,8 @@ pub async fn get_projects_from_hashes( web::Json(file_data), session_queue, ) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert to V2 match v2_reroute::extract_ok_json::>(response).await { @@ -230,8 +237,7 @@ pub async fn get_projects_from_hashes( #[derive(Deserialize)] pub struct ManyUpdateData { - #[serde(default = "default_algorithm")] - pub algorithm: String, + pub algorithm: Option, // Defaults to calculation based on size of hash pub hashes: Vec, pub loaders: Option>, pub game_versions: Option>, @@ -265,7 +271,8 @@ pub async fn update_files( let response = v3::version_file::update_files(req, pool, redis, web::Json(update_data), session_queue) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::>(response).await { @@ -293,8 +300,7 @@ pub struct FileUpdateData { #[derive(Deserialize)] pub struct ManyFileUpdateData { - #[serde(default = "default_algorithm")] - pub algorithm: String, + pub algorithm: Option, // Defaults to calculation based on size of hash pub hashes: Vec, } @@ -338,7 +344,8 @@ pub async fn update_individual_files( web::Json(update_data), session_queue, ) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::>(response).await { diff --git a/src/routes/v2/versions.rs b/src/routes/v2/versions.rs index 5236bf0c..3ff63e80 100644 --- a/src/routes/v2/versions.rs +++ b/src/routes/v2/versions.rs @@ -73,7 +73,8 @@ pub async fn version_list( let response = v3::versions::version_list(req, info, web::Query(filters), pool, redis, session_queue) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::>(response).await { @@ -98,8 +99,9 @@ pub async fn version_project_get( session_queue: web::Data, ) -> Result { let id = info.into_inner(); - let response = - v3::versions::version_project_get_helper(req, id, pool, redis, session_queue).await?; + let response = v3::versions::version_project_get_helper(req, id, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::(response).await { Ok(version) => { @@ -124,8 +126,9 @@ pub async fn versions_get( session_queue: web::Data, ) -> Result { let ids = v3::versions::VersionIds { ids: ids.ids }; - let response = - v3::versions::versions_get(req, web::Query(ids), pool, redis, session_queue).await?; + let response = v3::versions::versions_get(req, web::Query(ids), pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; // Convert response to V2 format match v2_reroute::extract_ok_json::>(response).await { @@ -149,7 +152,9 @@ pub async fn version_get( session_queue: web::Data, ) -> Result { let id = info.into_inner().0; - let response = v3::versions::version_get_helper(req, id, pool, redis, session_queue).await?; + let response = v3::versions::version_get_helper(req, id, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error)?; 
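FileHashes, ManyUpdateData, and ManyFileUpdateData above all make algorithm optional, with the comment noting that the default is calculated from the size of the supplied hash. A plausible stand-alone sketch of such an inference, keyed on hex digest length (a SHA-1 digest is 40 hex characters, a SHA-512 digest is 128); the actual defaulting logic lives in the v3 handlers and may differ in detail:

    fn infer_algorithm(hash: &str) -> Option<&'static str> {
        match hash.len() {
            40 => Some("sha1"),    // length of a SHA-1 hex digest
            128 => Some("sha512"), // length of a SHA-512 hex digest
            _ => None,             // unrecognized length: leave the choice to the caller
        }
    }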
// Convert response to V2 format match v2_reroute::extract_ok_json::(response).await { Ok(version) => { @@ -248,7 +253,8 @@ pub async fn version_edit( web::Json(serde_json::to_value(new_version)?), session_queue, ) - .await?; + .await + .or_else(v2_reroute::flatten_404_error)?; Ok(response) } @@ -260,5 +266,7 @@ pub async fn version_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - v3::versions::version_delete(req, info, pool, redis, session_queue).await + v3::versions::version_delete(req, info, pool, redis, session_queue) + .await + .or_else(v2_reroute::flatten_404_error) } diff --git a/src/routes/v2_reroute.rs b/src/routes/v2_reroute.rs index 6672425b..e5cfdbcd 100644 --- a/src/routes/v2_reroute.rs +++ b/src/routes/v2_reroute.rs @@ -1,6 +1,7 @@ use std::collections::HashMap; use super::v3::project_creation::CreateError; +use super::ApiError; use crate::models::v2::projects::LegacySideType; use crate::util::actix::{generate_multipart, MultipartSegment, MultipartSegmentData}; use actix_multipart::Multipart; @@ -14,6 +15,7 @@ pub async fn extract_ok_json(response: HttpResponse) -> Result Result { + match res { + ApiError::NotFound => Ok(HttpResponse::NotFound().body("")), + _ => Err(res), + } +} + pub async fn alter_actix_multipart( mut multipart: Multipart, mut headers: HeaderMap, diff --git a/src/routes/v3/collections.rs b/src/routes/v3/collections.rs index 5ee0d823..0fb4020a 100644 --- a/src/routes/v3/collections.rs +++ b/src/routes/v3/collections.rs @@ -43,7 +43,7 @@ pub struct CollectionCreateData { custom(function = "crate::util::validate::validate_name") )] /// The title or name of the project. - pub title: String, + pub name: String, #[validate(length(min = 3, max = 255))] /// A short description of the collection. pub description: String, @@ -94,7 +94,7 @@ pub async fn collection_create( let collection_builder_actual = collection_item::CollectionBuilder { collection_id: collection_id.into(), user_id: current_user.id.into(), - title: collection_create_data.title, + name: collection_create_data.name, description: collection_create_data.description, status: CollectionStatus::Listed, projects: initial_project_ids @@ -111,7 +111,7 @@ pub async fn collection_create( let response = crate::models::collections::Collection { id: collection_id, user: collection_builder.user_id.into(), - title: collection_builder.title.clone(), + name: collection_builder.name.clone(), description: collection_builder.description.clone(), created: now, updated: now, @@ -187,7 +187,7 @@ pub async fn collection_get( return Ok(HttpResponse::Ok().json(Collection::from(data))); } } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } #[derive(Deserialize, Validate)] @@ -196,7 +196,7 @@ pub struct EditCollection { length(min = 3, max = 64), custom(function = "crate::util::validate::validate_name") )] - pub title: Option, + pub name: Option, #[validate(length(min = 3, max = 256))] pub description: Option, pub status: Option, @@ -239,14 +239,14 @@ pub async fn collection_edit( let mut transaction = pool.begin().await?; - if let Some(title) = &new_collection.title { + if let Some(name) = &new_collection.name { sqlx::query!( " UPDATE collections - SET title = $1 + SET name = $1 WHERE (id = $2) ", - title.trim(), + name.trim(), id as database::models::ids::CollectionId, ) .execute(&mut *transaction) @@ -335,7 +335,7 @@ pub async fn collection_edit( transaction.commit().await?; Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } 
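These collections hunks show the v3 side of the 404 switch: handlers now return Err(ApiError::NotFound) instead of hand-building an empty 404 response, and the flatten_404_error helper added to v2_reroute above converts that error back whenever a v2 wrapper chains .or_else(v2_reroute::flatten_404_error). The net effect on the wire, as a small illustrative program (the JSON field names are assumed from the ResponseError impl earlier in this diff, not verified against it):

    fn main() {
        // What a v3 client now receives for a missing resource (assumed field names):
        let v3_body = serde_json::json!({
            "error": "not_found",
            "description": "Resource not found",
        });
        // What a v2 client still receives after flatten_404_error: a bare 404.
        let v2_body = "";
        println!("v3 404 body: {v3_body}");
        println!("v2 404 body: {v2_body:?}");
    }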
} @@ -526,7 +526,7 @@ pub async fn collection_delete( if result.is_some() { Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/notifications.rs b/src/routes/v3/notifications.rs index 3eda349e..d2445735 100644 --- a/src/routes/v3/notifications.rs +++ b/src/routes/v3/notifications.rs @@ -93,10 +93,10 @@ pub async fn notification_get( if user.id == data.user_id.into() || user.role.is_admin() { Ok(HttpResponse::Ok().json(Notification::from(data))) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -142,7 +142,7 @@ pub async fn notification_read( )) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -188,7 +188,7 @@ pub async fn notification_delete( )) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/oauth_clients.rs b/src/routes/v3/oauth_clients.rs index 87d7222d..03e50b9c 100644 --- a/src/routes/v3/oauth_clients.rs +++ b/src/routes/v3/oauth_clients.rs @@ -92,7 +92,7 @@ pub async fn get_user_clients( Ok(HttpResponse::Ok().json(response)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -108,7 +108,7 @@ pub async fn get_client( if let Some(client) = clients.into_iter().next() { Ok(HttpResponse::Ok().json(client)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -241,7 +241,7 @@ pub async fn oauth_client_delete<'a>( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -349,7 +349,7 @@ pub async fn oauth_client_edit( Ok(HttpResponse::Ok().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/organizations.rs b/src/routes/v3/organizations.rs index d48cd2a9..61400bed 100644 --- a/src/routes/v3/organizations.rs +++ b/src/routes/v3/organizations.rs @@ -71,7 +71,7 @@ pub async fn organization_projects_get( " SELECT m.id FROM organizations o INNER JOIN mods m ON m.organization_id = o.id - WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL) + WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.name = $2 AND $2 IS NOT NULL) ", possible_organization_id.map(|x| x as i64), info @@ -95,7 +95,7 @@ pub struct NewOrganization { regex = "crate::util::validate::RE_URL_SAFE" )] // Title of the organization, also used as slug - pub title: String, + pub name: String, #[validate(length(min = 3, max = 256))] pub description: String, } @@ -124,13 +124,13 @@ pub async fn organization_create( let mut transaction = pool.begin().await?; // Try title - let title_organization_id_option: Option = - serde_json::from_str(&format!("\"{}\"", new_organization.title)).ok(); + let name_organization_id_option: Option = + serde_json::from_str(&format!("\"{}\"", new_organization.name)).ok(); let mut organization_strings = vec![]; - if let Some(title_organization_id) = title_organization_id_option { - organization_strings.push(title_organization_id.to_string()); + if let Some(name_organization_id) = name_organization_id_option { + organization_strings.push(name_organization_id.to_string()); } - organization_strings.push(new_organization.title.clone()); + organization_strings.push(new_organization.name.clone()); let results = Organization::get_many(&organization_strings, &mut *transaction, &redis).await?; if !results.is_empty() { return 
Err(CreateError::SlugCollision); @@ -143,6 +143,7 @@ pub async fn organization_create( members: vec![team_item::TeamMemberBuilder { user_id: current_user.id.into(), role: crate::models::teams::OWNER_ROLE.to_owned(), + is_owner: true, permissions: ProjectPermissions::all(), organization_permissions: Some(OrganizationPermissions::all()), accepted: true, @@ -155,7 +156,7 @@ pub async fn organization_create( // Create organization let organization = Organization { id: organization_id, - title: new_organization.title.clone(), + name: new_organization.name.clone(), description: new_organization.description.clone(), team_id, icon_url: None, @@ -243,7 +244,7 @@ pub async fn organization_get( let organization = models::organizations::Organization::from(data, team_members); return Ok(HttpResponse::Ok().json(organization)); } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } #[derive(Deserialize)] @@ -335,7 +336,7 @@ pub struct OrganizationEdit { regex = "crate::util::validate::RE_URL_SAFE" )] // Title of the organization, also used as slug - pub title: Option, + pub name: Option, } pub async fn organizations_edit( @@ -397,47 +398,47 @@ pub async fn organizations_edit( .await?; } - if let Some(title) = &new_organization.title { + if let Some(name) = &new_organization.name { if !perms.contains(OrganizationPermissions::EDIT_DETAILS) { return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the title of this organization!" + "You do not have the permissions to edit the name of this organization!" .to_string(), )); } - let title_organization_id_option: Option = parse_base62(title).ok(); - if let Some(title_organization_id) = title_organization_id_option { + let name_organization_id_option: Option = parse_base62(name).ok(); + if let Some(name_organization_id) = name_organization_id_option { let results = sqlx::query!( " SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1) ", - title_organization_id as i64 + name_organization_id as i64 ) .fetch_one(&mut *transaction) .await?; if results.exists.unwrap_or(true) { return Err(ApiError::InvalidInput( - "Title collides with other organization's id!".to_string(), + "name collides with other organization's id!".to_string(), )); } } - // Make sure the new title is different from the old one - // We are able to unwrap here because the title is always set - if !title.eq(&organization_item.title.clone()) { + // Make sure the new name is different from the old one + // We are able to unwrap here because the name is always set + if !name.eq(&organization_item.name.clone()) { let results = sqlx::query!( " - SELECT EXISTS(SELECT 1 FROM organizations WHERE title = LOWER($1)) + SELECT EXISTS(SELECT 1 FROM organizations WHERE name = LOWER($1)) ", - title + name ) .fetch_one(&mut *transaction) .await?; if results.exists.unwrap_or(true) { return Err(ApiError::InvalidInput( - "Title collides with other organization's id!".to_string(), + "Name collides with other organization's id!".to_string(), )); } } @@ -445,10 +446,10 @@ pub async fn organizations_edit( sqlx::query!( " UPDATE organizations - SET title = LOWER($1) + SET name = LOWER($1) WHERE (id = $2) ", - Some(title), + Some(name), id as database::models::ids::OrganizationId, ) .execute(&mut *transaction) @@ -457,7 +458,7 @@ pub async fn organizations_edit( database::models::Organization::clear_cache( organization_item.id, - Some(organization_item.title), + Some(organization_item.name), &redis, ) .await?; @@ -470,7 +471,7 @@ pub async fn organizations_edit( )) } } else { - 
Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -527,19 +528,19 @@ pub async fn organization_delete( transaction.commit().await?; - database::models::Organization::clear_cache(organization.id, Some(organization.title), &redis) + database::models::Organization::clear_cache(organization.id, Some(organization.name), &redis) .await?; if result.is_some() { Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } #[derive(Deserialize)] pub struct OrganizationProjectAdd { - pub project_id: String, // Also allow title/slug + pub project_id: String, // Also allow name/slug } pub async fn organization_projects_add( req: HttpRequest, @@ -596,11 +597,7 @@ pub async fn organization_projects_add( })?; // Require ownership of a project to add it to an organization - if !current_user.role.is_admin() - && !project_team_member - .role - .eq(crate::models::teams::OWNER_ROLE) - { + if !current_user.role.is_admin() && !project_team_member.is_owner { return Err(ApiError::CustomAuthentication( "You need to be an owner of a project to add it to an organization!".to_string(), )); @@ -824,7 +821,7 @@ pub async fn organization_icon_edit( database::models::Organization::clear_cache( organization_item.id, - Some(organization_item.title), + Some(organization_item.name), &redis, ) .await?; @@ -909,7 +906,7 @@ pub async fn delete_organization_icon( database::models::Organization::clear_cache( organization_item.id, - Some(organization_item.title), + Some(organization_item.name), &redis, ) .await?; diff --git a/src/routes/v3/project_creation.rs b/src/routes/v3/project_creation.rs index f5295c23..0c999ad9 100644 --- a/src/routes/v3/project_creation.rs +++ b/src/routes/v3/project_creation.rs @@ -158,7 +158,7 @@ pub struct ProjectCreateData { )] #[serde(alias = "mod_name")] /// The title or name of the project. - pub title: String, + pub name: String, #[validate( length(min = 3, max = 64), regex = "crate::util::validate::RE_URL_SAFE" @@ -169,11 +169,11 @@ pub struct ProjectCreateData { #[validate(length(min = 3, max = 255))] #[serde(alias = "mod_description")] /// A short description of the project. - pub description: String, + pub summary: String, #[validate(length(max = 65536))] #[serde(alias = "mod_body")] /// A long description of the project, in markdown. 
- pub body: String, + pub description: String, #[validate(length(max = 32))] #[validate] @@ -225,7 +225,7 @@ pub struct NewGalleryItem { pub featured: bool, #[validate(length(min = 1, max = 2048))] /// The title of the gallery item - pub title: Option, + pub name: Option, #[validate(length(min = 1, max = 2048))] /// The description of the gallery item pub description: Option, @@ -518,7 +518,7 @@ async fn project_create_inner( gallery_urls.push(crate::models::projects::GalleryItem { url: format!("{cdn_url}/{url}"), featured: item.featured, - title: item.title.clone(), + name: item.name.clone(), description: item.description.clone(), created: Utc::now(), ordering: item.ordering, @@ -616,6 +616,7 @@ async fn project_create_inner( members: vec![models::team_item::TeamMemberBuilder { user_id: current_user.id.into(), role: crate::models::teams::OWNER_ROLE.to_owned(), + is_owner: true, // Allow all permissions for project creator, even if attached to a project permissions: ProjectPermissions::all(), organization_permissions: None, @@ -679,9 +680,9 @@ async fn project_create_inner( project_id: project_id.into(), team_id, organization_id: project_create_data.organization_id.map(|x| x.into()), - title: project_create_data.title, + name: project_create_data.name, + summary: project_create_data.summary, description: project_create_data.description, - body: project_create_data.body, icon_url: icon_data.clone().map(|x| x.0), license_url: project_create_data.license_url, @@ -698,7 +699,7 @@ async fn project_create_inner( .map(|x| models::project_item::GalleryItem { image_url: x.url.clone(), featured: x.featured, - title: x.title.clone(), + name: x.name.clone(), description: x.description.clone(), created: x.created, ordering: x.ordering, @@ -783,12 +784,11 @@ async fn project_create_inner( slug: project_builder.slug.clone(), project_types, games, - team: team_id.into(), + team_id: team_id.into(), organization: project_create_data.organization_id, - title: project_builder.title.clone(), + name: project_builder.name.clone(), + summary: project_builder.summary.clone(), description: project_builder.description.clone(), - body: project_builder.body.clone(), - body_url: None, published: now, updated: now, approved: None, diff --git a/src/routes/v3/projects.rs b/src/routes/v3/projects.rs index ae91fcaf..54a0127d 100644 --- a/src/routes/v3/projects.rs +++ b/src/routes/v3/projects.rs @@ -166,7 +166,7 @@ pub async fn project_get( return Ok(HttpResponse::Ok().json(Project::from(data))); } } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } #[derive(Serialize, Deserialize, Validate)] @@ -175,11 +175,11 @@ pub struct EditProject { length(min = 3, max = 64), custom(function = "crate::util::validate::validate_name") )] - pub title: Option, + pub name: Option, #[validate(length(min = 3, max = 256))] - pub description: Option, + pub summary: Option, #[validate(length(max = 65536))] - pub body: Option, + pub description: Option, #[validate(length(max = 3))] pub categories: Option>, #[validate(length(max = 256))] @@ -272,10 +272,10 @@ pub async fn project_edit( if let Some(perms) = permissions { let mut transaction = pool.begin().await?; - if let Some(title) = &new_project.title { + if let Some(name) = &new_project.name { if !perms.contains(ProjectPermissions::EDIT_DETAILS) { return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the title of this project!" + "You do not have the permissions to edit the name of this project!" 
.to_string(), )); } @@ -283,20 +283,20 @@ pub async fn project_edit( sqlx::query!( " UPDATE mods - SET title = $1 + SET name = $1 WHERE (id = $2) ", - title.trim(), + name.trim(), id as db_ids::ProjectId, ) .execute(&mut *transaction) .await?; } - if let Some(description) = &new_project.description { + if let Some(summary) = &new_project.summary { if !perms.contains(ProjectPermissions::EDIT_DETAILS) { return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the description of this project!" + "You do not have the permissions to edit the summary of this project!" .to_string(), )); } @@ -304,10 +304,10 @@ pub async fn project_edit( sqlx::query!( " UPDATE mods - SET description = $1 + SET summary = $1 WHERE (id = $2) ", - description, + summary, id as db_ids::ProjectId, ) .execute(&mut *transaction) @@ -664,55 +664,57 @@ pub async fn project_edit( .await?; } if let Some(links) = &new_project.link_urls { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the links of this project!" - .to_string(), - )); - } + if !links.is_empty() { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the links of this project!" + .to_string(), + )); + } - let ids_to_delete = links - .iter() - .map(|(name, _)| name.clone()) - .collect::>(); - // Deletes all links from hashmap- either will be deleted or be replaced - sqlx::query!( - " - DELETE FROM mods_links - WHERE joining_mod_id = $1 AND joining_platform_id IN ( - SELECT id FROM link_platforms WHERE name = ANY($2) + let ids_to_delete = links + .iter() + .map(|(name, _)| name.clone()) + .collect::>(); + // Deletes all links from hashmap- either will be deleted or be replaced + sqlx::query!( + " + DELETE FROM mods_links + WHERE joining_mod_id = $1 AND joining_platform_id IN ( + SELECT id FROM link_platforms WHERE name = ANY($2) + ) + ", + id as db_ids::ProjectId, + &ids_to_delete ) - ", - id as db_ids::ProjectId, - &ids_to_delete - ) - .execute(&mut *transaction) - .await?; + .execute(&mut *transaction) + .await?; - for (platform, url) in links { - if let Some(url) = url { - let platform_id = db_models::categories::LinkPlatform::get_id( - platform, - &mut *transaction, - ) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Platform {} does not exist.", - platform.clone() - )) - })?; - sqlx::query!( - " - INSERT INTO mods_links (joining_mod_id, joining_platform_id, url) - VALUES ($1, $2, $3) - ", - id as db_ids::ProjectId, - platform_id as db_ids::LinkPlatformId, - url - ) - .execute(&mut *transaction) - .await?; + for (platform, url) in links { + if let Some(url) = url { + let platform_id = db_models::categories::LinkPlatform::get_id( + platform, + &mut *transaction, + ) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Platform {} does not exist.", + platform.clone() + )) + })?; + sqlx::query!( + " + INSERT INTO mods_links (joining_mod_id, joining_platform_id, url) + VALUES ($1, $2, $3) + ", + id as db_ids::ProjectId, + platform_id as db_ids::LinkPlatformId, + url + ) + .execute(&mut *transaction) + .await?; + } } } } @@ -763,10 +765,10 @@ pub async fn project_edit( .await?; } - if let Some(body) = &new_project.body { + if let Some(description) = &new_project.description { if !perms.contains(ProjectPermissions::EDIT_BODY) { return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the body of this project!" + "You do not have the permissions to edit the description (body) of this project!" .to_string(), )); } @@ -774,10 +776,10 @@ pub async fn project_edit( sqlx::query!( " UPDATE mods - SET body = $1 + SET description = $1 WHERE (id = $2) ", - body, + description, id as db_ids::ProjectId, ) .execute(&mut *transaction) @@ -818,7 +820,7 @@ pub async fn project_edit( // check new description and body for links to associated images // if they no longer exist in the description or body, delete them - let checkable_strings: Vec<&str> = vec![&new_project.description, &new_project.body] + let checkable_strings: Vec<&str> = vec![&new_project.description, &new_project.summary] .into_iter() .filter_map(|x| x.as_ref().map(|y| y.as_str())) .collect(); @@ -844,7 +846,7 @@ pub async fn project_edit( )) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -918,7 +920,7 @@ pub async fn project_get_check( "id": models::ids::ProjectId::from(project.inner.id) }))) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -952,7 +954,7 @@ pub async fn dependency_list( if let Some(project) = result { if !is_authorized(&project.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let dependencies = @@ -1000,7 +1002,7 @@ pub async fn dependency_list( Ok(HttpResponse::Ok().json(DependencyInfo { projects, versions })) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -1125,7 +1127,7 @@ pub async fn projects_edit( if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { return Err(ApiError::CustomAuthentication(format!( "You do not have the permissions to bulk edit project {}!", - project.inner.title + project.inner.name ))); } } else if project.inner.status.is_hidden() { @@ -1136,7 +1138,7 @@ pub async fn projects_edit( } else { return Err(ApiError::CustomAuthentication(format!( "You are not a member of project {}!", - project.inner.title + project.inner.name ))); }; } @@ -1377,7 +1379,7 @@ pub async fn project_schedule( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -1591,7 +1593,7 @@ pub async fn delete_project_icon( pub struct GalleryCreateQuery { pub featured: bool, #[validate(length(min = 1, max = 255))] - pub title: Option, + pub name: Option, #[validate(length(min = 1, max = 2048))] pub description: Option, pub ordering: Option, @@ -1712,7 +1714,7 @@ pub async fn add_gallery_item( let gallery_item = vec![db_models::project_item::GalleryItem { image_url: file_url, featured: item.featured, - title: item.title, + name: item.name, description: item.description, created: Utc::now(), ordering: item.ordering.unwrap_or(0), @@ -1749,7 +1751,7 @@ pub struct GalleryEditQuery { with = "::serde_with::rust::double_option" )] #[validate(length(min = 1, max = 255))] - pub title: Option>, + pub name: Option>, #[serde( default, skip_serializing_if = "Option::is_none", @@ -1864,15 +1866,15 @@ pub async fn edit_gallery_item( .execute(&mut *transaction) .await?; } - if let Some(title) = item.title { + if let Some(name) = item.name { sqlx::query!( " UPDATE mods_gallery - SET title = $2 + SET name = $2 WHERE id = $1 ", id, - title + name ) .execute(&mut *transaction) .await?; @@ -2101,7 +2103,7 @@ pub async fn project_delete( if result.is_some() { Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -2133,7 +2135,7 @@ pub async fn project_follow( let project_id: db_ids::ProjectId = result.inner.id; if !is_authorized(&result.inner, &Some(user), &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let following = sqlx::query!( diff --git a/src/routes/v3/reports.rs b/src/routes/v3/reports.rs index 04e69c8c..3e8aa20d 100644 --- a/src/routes/v3/reports.rs +++ b/src/routes/v3/reports.rs @@ -361,13 +361,13 @@ pub async fn report_get( if let Some(report) = report { if !user.role.is_mod() && report.reporter != user.id.into() { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let report: Report = report.into(); Ok(HttpResponse::Ok().json(report)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -401,7 +401,7 @@ pub async fn report_edit( if let Some(report) = report { if !user.role.is_mod() && report.reporter != user.id.into() { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let mut transaction = pool.begin().await?; @@ -479,7 +479,7 @@ pub async fn report_edit( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -519,6 +519,6 @@ pub async fn report_delete( if result.is_some() { Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/teams.rs b/src/routes/v3/teams.rs index fef51474..81c4d601 100644 --- a/src/routes/v3/teams.rs +++ b/src/routes/v3/teams.rs @@ -59,7 +59,7 @@ pub async fn team_members_get_project( .ok(); if !is_authorized(&project.inner, &current_user, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let mut members_data = TeamMember::get_from_team_full(project.inner.team_id, &**pool, &redis).await?; @@ -110,7 +110,7 @@ pub async fn team_members_get_project( .collect(); Ok(HttpResponse::Ok().json(team_members)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -174,7 +174,7 @@ pub async fn team_members_get_organization( Ok(HttpResponse::Ok().json(team_members)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -343,6 +343,7 @@ pub async fn join_team( Some(true), None, None, + None, &mut transaction, ) .await?; @@ -474,12 +475,6 @@ pub async fn add_team_member( } } - if new_member.role == crate::models::teams::OWNER_ROLE { - return Err(ApiError::InvalidInput( - "The `Owner` role is restricted to one person".to_string(), - )); - } - if new_member.payouts_split < Decimal::ZERO || new_member.payouts_split > Decimal::from(5000) { return Err(ApiError::InvalidInput( "Payouts split must be between 0 and 5000!".to_string(), @@ -510,6 +505,7 @@ pub async fn add_team_member( team_id, user_id: new_member.user_id.into(), role: new_member.role.clone(), + is_owner: false, // Cannot just create an owner permissions: new_member.permissions, organization_permissions: new_member.organization_permissions, accepted: false, @@ -598,11 +594,9 @@ pub async fn edit_team_member( let mut transaction = pool.begin().await?; - if &*edit_member_db.role == crate::models::teams::OWNER_ROLE - && (edit_member.role.is_some() || edit_member.permissions.is_some()) - { + if edit_member_db.is_owner && edit_member.permissions.is_some() { return Err(ApiError::InvalidInput( - "The owner's permission and role of a team cannot be edited".to_string(), + "The owner's permissions in a team cannot be edited".to_string(), )); } @@ -683,12 +677,6 @@ pub async fn edit_team_member( } } - if edit_member.role.as_deref() == Some(crate::models::teams::OWNER_ROLE) { - return 
Err(ApiError::InvalidInput( - "The `Owner` role is restricted to one person".to_string(), - )); - } - TeamMember::edit_team_member( id, user_id, @@ -698,6 +686,7 @@ pub async fn edit_team_member( None, edit_member.payouts_split, edit_member.ordering, + None, &mut transaction, ) .await?; @@ -758,7 +747,7 @@ pub async fn transfer_ownership( ) })?; - if member.role != crate::models::teams::OWNER_ROLE { + if !member.is_owner { return Err(ApiError::CustomAuthentication( "You don't have permission to edit the ownership of this team".to_string(), )); @@ -779,15 +768,17 @@ pub async fn transfer_ownership( let mut transaction = pool.begin().await?; + // The following are the only places new_is_owner is modified. TeamMember::edit_team_member( id.into(), current_user.id.into(), None, None, - Some(crate::models::teams::DEFAULT_ROLE.to_string()), None, None, None, + None, + Some(false), &mut transaction, ) .await?; @@ -797,10 +788,11 @@ pub async fn transfer_ownership( new_owner.user_id.into(), Some(ProjectPermissions::all()), Some(OrganizationPermissions::all()), - Some(crate::models::teams::OWNER_ROLE.to_string()), None, None, None, + None, + Some(true), &mut transaction, ) .await?; @@ -841,7 +833,7 @@ pub async fn remove_team_member( let delete_member = TeamMember::get_from_user_id_pending(id, user_id, &**pool).await?; if let Some(delete_member) = delete_member { - if delete_member.role == crate::models::teams::OWNER_ROLE { + if delete_member.is_owner { // The owner cannot be removed from a team return Err(ApiError::CustomAuthentication( "The owner can't be removed from a team".to_string(), @@ -939,6 +931,6 @@ pub async fn remove_team_member( transaction.commit().await?; Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/threads.rs b/src/routes/v3/threads.rs index aab83aed..9dcc0022 100644 --- a/src/routes/v3/threads.rs +++ b/src/routes/v3/threads.rs @@ -263,7 +263,7 @@ pub async fn thread_get( return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user))); } } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } #[derive(Deserialize)] @@ -371,7 +371,7 @@ pub async fn thread_send_message( if let Some(thread) = result { if !is_authorized_thread(&thread, &user, &pool).await? 
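// A rough summary of the invariant the two edit_team_member calls in transfer_ownership above aim
// to establish (a reading sketch only; it assumes is_owner is now the sole ownership marker on
// TeamMember, which is what the "only places new_is_owner is modified" comment suggests):
//   - the previous owner keeps their role string but ends up with is_owner == false;
//   - the new owner gets is_owner == true plus ProjectPermissions::all() and
//     OrganizationPermissions::all(), again with the role string left untouched.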
{ - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let mut transaction = pool.begin().await?; @@ -499,7 +499,7 @@ pub async fn thread_send_message( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -616,6 +616,6 @@ pub async fn message_delete( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/users.rs b/src/routes/v3/users.rs index 1afbc1b7..7d14279c 100644 --- a/src/routes/v3/users.rs +++ b/src/routes/v3/users.rs @@ -83,7 +83,7 @@ pub async fn projects_list( Ok(HttpResponse::Ok().json(response)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -143,7 +143,7 @@ pub async fn user_get( let response: crate::models::users::User = data.into(); Ok(HttpResponse::Ok().json(response)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -186,7 +186,7 @@ pub async fn collections_list( Ok(HttpResponse::Ok().json(response)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -268,7 +268,7 @@ pub async fn orgs_list( Ok(HttpResponse::Ok().json(organizations)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -458,7 +458,7 @@ pub async fn user_edit( )) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -536,7 +536,7 @@ pub async fn user_icon_edit( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } else { Err(ApiError::InvalidInput(format!( @@ -597,10 +597,10 @@ pub async fn user_delete( if result.is_some() { Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -655,7 +655,7 @@ pub async fn user_follows( Ok(HttpResponse::Ok().json(projects)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -696,6 +696,6 @@ pub async fn user_notifications( notifications.sort_by(|a, b| b.created.cmp(&a.created)); Ok(HttpResponse::Ok().json(notifications)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/version_creation.rs b/src/routes/v3/version_creation.rs index 9caad08f..62661bf9 100644 --- a/src/routes/v3/version_creation.rs +++ b/src/routes/v3/version_creation.rs @@ -421,7 +421,6 @@ async fn version_create_inner( project_types: all_project_types, games: all_games, changelog: builder.changelog.clone(), - changelog_url: None, date_published: Utc::now(), downloads: 0, version_type: version_data.release_channel, diff --git a/src/routes/v3/version_file.rs b/src/routes/v3/version_file.rs index 4820362f..2006d481 100644 --- a/src/routes/v3/version_file.rs +++ b/src/routes/v3/version_file.rs @@ -52,8 +52,12 @@ pub async fn get_version_from_hash( .map(|x| x.1) .ok(); let hash = info.into_inner().0.to_lowercase(); + let algorithm = hash_query + .algorithm + .clone() + .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])); let file = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), + algorithm, hash, hash_query.version_id.map(|x| x.into()), &**pool, @@ -64,26 +68,36 @@ pub async fn get_version_from_hash( let version = database::models::Version::get(file.version_id, &**pool, &redis).await?; if let Some(version) = version { if 
!is_authorized_version(&version.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } Ok(HttpResponse::Ok().json(models::projects::Version::from(version))) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } #[derive(Serialize, Deserialize)] pub struct HashQuery { - #[serde(default = "default_algorithm")] - pub algorithm: String, + pub algorithm: Option, // Defaults to calculation based on size of hash pub version_id: Option, } -pub fn default_algorithm() -> String { +// Calculates whether or not to use sha1 or sha512 based on the size of the hash +pub fn default_algorithm_from_hashes(hashes: &[String]) -> String { + // Gets first hash, optionally + let empty_string = "".into(); + let hash = hashes.first().unwrap_or(&empty_string); + let hash_len = hash.len(); + // Sha1 = 40 characters + // Sha512 = 128 characters + // Favour sha1 as default, unless the hash is longer or equal to 128 characters + if hash_len >= 128 { + return "sha512".into(); + } "sha1".into() } @@ -122,7 +136,10 @@ pub async fn get_update_from_hash( let hash = info.into_inner().0.to_lowercase(); if let Some(file) = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), + hash_query + .algorithm + .clone() + .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])), hash, hash_query.version_id.map(|x| x.into()), &**pool, @@ -163,7 +180,7 @@ pub async fn get_update_from_hash( if let Some(first) = versions.last() { if !is_authorized_version(&first.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } return Ok(HttpResponse::Ok().json(models::projects::Version::from(first))); @@ -171,14 +188,13 @@ pub async fn get_update_from_hash( } } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } // Requests above with multiple versions below #[derive(Deserialize)] pub struct FileHashes { - #[serde(default = "default_algorithm")] - pub algorithm: String, + pub algorithm: Option, // Defaults to calculation based on size of hash pub hashes: Vec, } @@ -200,8 +216,13 @@ pub async fn get_versions_from_hashes( .map(|x| x.1) .ok(); + let algorithm = file_data + .algorithm + .clone() + .unwrap_or_else(|| default_algorithm_from_hashes(&file_data.hashes)); + let files = database::models::Version::get_files_from_hash( - file_data.algorithm.clone(), + algorithm.clone(), &file_data.hashes, &**pool, &redis, @@ -220,7 +241,7 @@ pub async fn get_versions_from_hashes( for version in versions_data { for file in files.iter().filter(|x| x.version_id == version.id.into()) { - if let Some(hash) = file.hashes.get(&file_data.algorithm) { + if let Some(hash) = file.hashes.get(&algorithm) { response.insert(hash.clone(), version.clone()); } } @@ -247,8 +268,12 @@ pub async fn get_projects_from_hashes( .map(|x| x.1) .ok(); + let algorithm = file_data + .algorithm + .clone() + .unwrap_or_else(|| default_algorithm_from_hashes(&file_data.hashes)); let files = database::models::Version::get_files_from_hash( - file_data.algorithm.clone(), + algorithm.clone(), &file_data.hashes, &**pool, &redis, @@ -268,7 +293,7 @@ pub async fn get_projects_from_hashes( for project in projects_data { for file in files.iter().filter(|x| x.project_id == project.id.into()) { - if let Some(hash) = file.hashes.get(&file_data.algorithm) { + if let Some(hash) = file.hashes.get(&algorithm) { 
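// A minimal check of the hash-length heuristic introduced above — an illustrative sketch only,
// assuming default_algorithm_from_hashes is visible to a test module inside version_file.rs:
#[cfg(test)]
mod default_algorithm_tests {
    use super::default_algorithm_from_hashes;

    #[test]
    fn picks_algorithm_from_hash_length() {
        // 40 hex characters is a sha1 digest, so the default stays sha1.
        assert_eq!(default_algorithm_from_hashes(&["a".repeat(40)]), "sha1");
        // 128 hex characters is a sha512 digest, which flips the default.
        assert_eq!(default_algorithm_from_hashes(&["b".repeat(128)]), "sha512");
        // With no hashes at all the helper falls back to sha1.
        assert_eq!(default_algorithm_from_hashes(&[]), "sha1");
    }
}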
response.insert(hash.clone(), project.clone()); } } @@ -279,8 +304,7 @@ pub async fn get_projects_from_hashes( #[derive(Deserialize)] pub struct ManyUpdateData { - #[serde(default = "default_algorithm")] - pub algorithm: String, + pub algorithm: Option, // Defaults to calculation based on size of hash pub hashes: Vec, pub loaders: Option>, pub loader_fields: Option>>, @@ -304,8 +328,12 @@ pub async fn update_files( .map(|x| x.1) .ok(); + let algorithm = update_data + .algorithm + .clone() + .unwrap_or_else(|| default_algorithm_from_hashes(&update_data.hashes)); let files = database::models::Version::get_files_from_hash( - update_data.algorithm.clone(), + algorithm.clone(), &update_data.hashes, &**pool, &redis, @@ -366,7 +394,7 @@ pub async fn update_files( if let Some(version) = version { if is_authorized_version(&version.inner, &user_option, &pool).await? { - if let Some(hash) = file.hashes.get(&update_data.algorithm) { + if let Some(hash) = file.hashes.get(&algorithm) { response.insert( hash.clone(), models::projects::Version::from(version.clone()), @@ -390,8 +418,7 @@ pub struct FileUpdateData { #[derive(Serialize, Deserialize)] pub struct ManyFileUpdateData { - #[serde(default = "default_algorithm")] - pub algorithm: String, + pub algorithm: Option, // Defaults to calculation based on size of hash pub hashes: Vec, } @@ -413,8 +440,17 @@ pub async fn update_individual_files( .map(|x| x.1) .ok(); + let algorithm = update_data.algorithm.clone().unwrap_or_else(|| { + default_algorithm_from_hashes( + &update_data + .hashes + .iter() + .map(|x| x.hash.clone()) + .collect::>(), + ) + }); let files = database::models::Version::get_files_from_hash( - update_data.algorithm.clone(), + algorithm.clone(), &update_data .hashes .iter() @@ -445,7 +481,7 @@ pub async fn update_individual_files( for project in projects { for file in files.iter().filter(|x| x.project_id == project.inner.id) { - if let Some(hash) = file.hashes.get(&update_data.algorithm) { + if let Some(hash) = file.hashes.get(&algorithm) { if let Some(query_file) = update_data.hashes.iter().find(|x| &x.hash == hash) { let version = all_versions .iter() @@ -514,9 +550,12 @@ pub async fn delete_file( .1; let hash = info.into_inner().0.to_lowercase(); - + let algorithm = hash_query + .algorithm + .clone() + .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])); let file = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), + algorithm.clone(), hash, hash_query.version_id.map(|x| x.into()), &**pool, @@ -605,7 +644,7 @@ pub async fn delete_file( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -635,8 +674,12 @@ pub async fn download_version( .ok(); let hash = info.into_inner().0.to_lowercase(); + let algorithm = hash_query + .algorithm + .clone() + .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])); let file = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), + algorithm.clone(), hash, hash_query.version_id.map(|x| x.into()), &**pool, @@ -649,16 +692,16 @@ pub async fn download_version( if let Some(version) = version { if !is_authorized_version(&version.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } Ok(HttpResponse::TemporaryRedirect() .append_header(("Location", &*file.url)) .json(DownloadRedirect { url: file.url })) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/routes/v3/versions.rs b/src/routes/v3/versions.rs index 1e5a7f14..0b55c865 100644 --- a/src/routes/v3/versions.rs +++ b/src/routes/v3/versions.rs @@ -82,7 +82,7 @@ pub async fn version_project_get_helper( if let Some(project) = result { if !is_authorized(&project.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let versions = @@ -100,7 +100,7 @@ pub async fn version_project_get_helper( } } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } #[derive(Serialize, Deserialize)] @@ -174,7 +174,7 @@ pub async fn version_get_helper( } } - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } #[derive(Serialize, Deserialize, Validate, Default, Debug)] @@ -678,7 +678,7 @@ pub async fn version_edit_helper( )) } } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -723,7 +723,7 @@ pub async fn version_list( if let Some(project) = result { if !is_authorized(&project.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); + return Err(ApiError::NotFound); } let loader_field_filters = filters.loader_fields.as_ref().map(|x| { @@ -822,7 +822,7 @@ pub async fn version_list( Ok(HttpResponse::Ok().json(response)) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -924,7 +924,7 @@ pub async fn version_schedule( Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } @@ -1010,6 +1010,6 @@ pub async fn version_delete( if result.is_some() { Ok(HttpResponse::NoContent().body("")) } else { - Ok(HttpResponse::NotFound().body("")) + Err(ApiError::NotFound) } } diff --git a/src/search/indexing/local_import.rs b/src/search/indexing/local_import.rs index 0d84d932..d2a91503 100644 --- a/src/search/indexing/local_import.rs +++ b/src/search/indexing/local_import.rs @@ -62,7 +62,7 @@ pub async fn index_local( GROUP BY version_id ) - SELECT m.id id, v.id version_id, m.title title, m.description description, m.downloads downloads, m.follows follows, + SELECT m.id id, v.id version_id, m.name name, m.description description, m.downloads downloads, m.follows follows, m.icon_url icon_url, m.published published, m.approved approved, m.updated updated, m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color, u.username username, @@ -87,7 +87,7 @@ pub async fn index_local( LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id LEFT JOIN games g ON lptg.game_id = g.id LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id - INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE + INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.is_owner = TRUE AND tm.accepted = TRUE INNER JOIN users u ON tm.user_id = u.id LEFT OUTER JOIN version_fields_json vf ON v.id = vf.version_id LEFT OUTER JOIN loader_fields_json lf ON v.id = lf.version_id @@ -97,7 +97,6 @@ pub async fn index_local( ", &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::>(), 
&*crate::models::projects::ProjectStatus::iterator().filter(|x| x.is_searchable()).map(|x| x.to_string()).collect::>(), - crate::models::teams::OWNER_ROLE, ) .fetch_many(&pool) .try_filter_map(|e| { @@ -147,7 +146,7 @@ pub async fn index_local( UploadSearchProject { version_id: version_id.to_string(), project_id: project_id.to_string(), - title: m.title, + name: m.name, description: m.description, categories, follows: m.follows, diff --git a/src/search/indexing/mod.rs b/src/search/indexing/mod.rs index 675b499b..2f367fdc 100644 --- a/src/search/indexing/mod.rs +++ b/src/search/indexing/mod.rs @@ -185,7 +185,7 @@ const DEFAULT_DISPLAYED_ATTRIBUTES: &[&str] = &[ "project_types", "slug", "author", - "title", + "name", "description", "categories", "display_categories", @@ -201,7 +201,7 @@ const DEFAULT_DISPLAYED_ATTRIBUTES: &[&str] = &[ "color", ]; -const DEFAULT_SEARCHABLE_ATTRIBUTES: &[&str] = &["title", "description", "author", "slug"]; +const DEFAULT_SEARCHABLE_ATTRIBUTES: &[&str] = &["name", "description", "author", "slug"]; const DEFAULT_ATTRIBUTES_FOR_FACETING: &[&str] = &[ "categories", @@ -210,7 +210,7 @@ const DEFAULT_ATTRIBUTES_FOR_FACETING: &[&str] = &[ "downloads", "follows", "author", - "title", + "name", "date_created", "created_timestamp", "date_modified", diff --git a/src/search/mod.rs b/src/search/mod.rs index 6f9a655d..09c6767c 100644 --- a/src/search/mod.rs +++ b/src/search/mod.rs @@ -79,7 +79,7 @@ pub struct UploadSearchProject { pub project_types: Vec, pub slug: Option, pub author: String, - pub title: String, + pub name: String, pub description: String, pub categories: Vec, pub display_categories: Vec, @@ -119,7 +119,7 @@ pub struct ResultSearchProject { pub project_types: Vec, pub slug: Option, pub author: String, - pub title: String, + pub name: String, pub description: String, pub categories: Vec, pub display_categories: Vec, diff --git a/src/util/webhook.rs b/src/util/webhook.rs index ff3cd9f7..06370933 100644 --- a/src/util/webhook.rs +++ b/src/util/webhook.rs @@ -85,7 +85,7 @@ pub async fn send_discord_webhook( let row = sqlx::query!( " - SELECT m.id id, m.title title, m.description description, m.color color, + SELECT m.id id, m.name name, m.description description, m.color color, m.icon_url icon_url, m.slug slug, u.username username, u.avatar_url avatar_url, ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories, @@ -136,7 +136,7 @@ pub async fn send_discord_webhook( LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id LEFT JOIN games g ON lptg.game_id = g.id LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id - INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE + INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.is_owner = TRUE AND tm.accepted = TRUE INNER JOIN users u ON tm.user_id = u.id LEFT OUTER JOIN version_fields vf on v.id = vf.version_id LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id @@ -147,7 +147,6 @@ pub async fn send_discord_webhook( ", project_id.0 as i64, &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::>(), - crate::models::teams::OWNER_ROLE, ) .fetch_optional(pool) .await?; @@ -279,7 +278,7 @@ pub async fn send_discord_webhook( project_type, project.slug.unwrap_or_else(|| project_id.to_string()) ), - title: project.title, + title: project.name, // Do not change DiscordEmbed description: project.description, timestamp: Utc::now(), color: 
project.color.unwrap_or(0x1bd96a) as u32, diff --git a/tests/common/api_common/generic.rs b/tests/common/api_common/generic.rs index 76b84650..1e991898 100644 --- a/tests/common/api_common/generic.rs +++ b/tests/common/api_common/generic.rs @@ -11,6 +11,7 @@ use crate::common::{api_v2::ApiV2, api_v3::ApiV3, dummy_data::TestFile}; use super::{ models::{CommonImageData, CommonProject, CommonVersion}, + request_data::ProjectCreationRequestData, Api, ApiProject, ApiTags, ApiTeams, ApiVersion, }; @@ -65,6 +66,8 @@ delegate_api_variant!( #[async_trait(?Send)] impl ApiProject for GenericApi { [add_public_project, (CommonProject, Vec), slug: &str, version_jar: Option, modify_json: Option, pat: &str], + [get_public_project_creation_data_json, serde_json::Value, slug: &str, version_jar: Option<&TestFile>], + [create_project, ServiceResponse, creation_data: ProjectCreationRequestData, pat: &str], [remove_project, ServiceResponse, project_slug_or_id: &str, pat: &str], [get_project, ServiceResponse, id_or_slug: &str, pat: &str], [get_project_deserialized_common, CommonProject, id_or_slug: &str, pat: &str], diff --git a/tests/common/api_common/mod.rs b/tests/common/api_common/mod.rs index 48fa9d02..9ce71c01 100644 --- a/tests/common/api_common/mod.rs +++ b/tests/common/api_common/mod.rs @@ -4,6 +4,7 @@ use self::models::{ CommonCategoryData, CommonImageData, CommonLoaderData, CommonNotification, CommonProject, CommonTeamMember, CommonVersion, }; +use self::request_data::ProjectCreationRequestData; use actix_web::dev::ServiceResponse; use async_trait::async_trait; use labrinth::{ @@ -18,6 +19,7 @@ use super::dummy_data::TestFile; pub mod generic; pub mod models; +pub mod request_data; #[async_trait(?Send)] pub trait ApiBuildable: Api { async fn build(labrinth_config: LabrinthConfig) -> Self; @@ -38,6 +40,17 @@ pub trait ApiProject { modify_json: Option, pat: &str, ) -> (CommonProject, Vec); + async fn create_project( + &self, + creation_data: ProjectCreationRequestData, + pat: &str, + ) -> ServiceResponse; + async fn get_public_project_creation_data_json( + &self, + slug: &str, + version_jar: Option<&TestFile>, + ) -> serde_json::Value; + async fn remove_project(&self, id_or_slug: &str, pat: &str) -> ServiceResponse; async fn get_project(&self, id_or_slug: &str, pat: &str) -> ServiceResponse; async fn get_project_deserialized_common(&self, id_or_slug: &str, pat: &str) -> CommonProject; diff --git a/tests/common/api_common/models.rs b/tests/common/api_common/models.rs index a186b76d..cbf7ea96 100644 --- a/tests/common/api_common/models.rs +++ b/tests/common/api_common/models.rs @@ -1,12 +1,12 @@ use chrono::{DateTime, Utc}; use labrinth::models::{ - notifications::{NotificationAction, NotificationBody, NotificationId}, + notifications::NotificationId, organizations::OrganizationId, projects::{ Dependency, GalleryItem, License, ModeratorMessage, MonetizationStatus, ProjectId, ProjectStatus, VersionFile, VersionId, VersionStatus, VersionType, }, - teams::{OrganizationPermissions, ProjectPermissions, TeamId}, + teams::{ProjectPermissions, TeamId}, threads::ThreadId, users::{User, UserId}, }; @@ -31,12 +31,7 @@ pub struct CommonProject { // For any tests that require those fields, we make a separate test with separate API functions tht do not use Common models. 
pub id: ProjectId, pub slug: Option, - pub team: TeamId, pub organization: Option, - pub title: String, - pub description: String, - pub body: String, - pub body_url: Option, pub published: DateTime, pub updated: DateTime, pub approved: Option>, @@ -67,7 +62,6 @@ pub struct CommonVersion { pub name: String, pub version_number: String, pub changelog: String, - pub changelog_url: Option, pub date_published: DateTime, pub downloads: u32, pub version_type: VersionType, @@ -109,9 +103,7 @@ pub struct CommonTeamMember { pub user: User, pub role: String, - // TODO: Should these be removed from the Common? pub permissions: Option, - pub organization_permissions: Option, pub accepted: bool, pub payouts_split: Option, @@ -124,13 +116,13 @@ pub struct CommonNotification { pub user_id: UserId, pub read: bool, pub created: DateTime, - pub body: NotificationBody, - - // DEPRECATED: use body field instead - #[serde(rename = "type")] - pub type_: Option, - pub title: String, + // Body is absent as one of the variants differs pub text: String, pub link: String, - pub actions: Vec, + pub actions: Vec, +} + +#[derive(Deserialize)] +pub struct CommonNotificationAction { + pub action_route: (String, String), } diff --git a/tests/common/api_common/request_data.rs b/tests/common/api_common/request_data.rs new file mode 100644 index 00000000..3bfc886a --- /dev/null +++ b/tests/common/api_common/request_data.rs @@ -0,0 +1,24 @@ +// The structures for project/version creation. +// These are created differently, but are essentially the same between versions. + +use labrinth::util::actix::MultipartSegment; + +use crate::common::dummy_data::TestFile; + +pub struct ProjectCreationRequestData { + pub slug: String, + pub jar: Option, + pub segment_data: Vec, +} + +pub struct VersionCreationRequestData { + pub version: String, + pub jar: Option, + pub segment_data: Vec, +} + +pub struct ImageData { + pub filename: String, + pub extension: String, + pub icon: Vec, +} diff --git a/tests/common/api_v2/project.rs b/tests/common/api_v2/project.rs index 9e6e1702..990e5f29 100644 --- a/tests/common/api_v2/project.rs +++ b/tests/common/api_v2/project.rs @@ -1,6 +1,7 @@ use crate::common::{ api_common::{ models::{CommonImageData, CommonProject, CommonVersion}, + request_data::ProjectCreationRequestData, Api, ApiProject, }, dummy_data::TestFile, @@ -20,7 +21,10 @@ use serde_json::json; use crate::common::{asserts::assert_status, database::MOD_USER_PAT}; -use super::{request_data::get_public_project_creation_data, ApiV2}; +use super::{ + request_data::{self, get_public_project_creation_data}, + ApiV2, +}; impl ApiV2 { pub async fn get_project_deserialized(&self, id_or_slug: &str, pat: &str) -> LegacyProject { @@ -80,17 +84,13 @@ impl ApiProject for ApiV2 { let creation_data = get_public_project_creation_data(slug, version_jar, modify_json); // Add a project. - let req = TestRequest::post() - .uri("/v2/project") - .append_header(("Authorization", pat)) - .set_multipart(creation_data.segment_data) - .to_request(); - let resp = self.call(req).await; + let slug = creation_data.slug.clone(); + let resp = self.create_project(creation_data, pat).await; assert_status(&resp, StatusCode::OK); // Approve as a moderator. 
let req = TestRequest::patch() - .uri(&format!("/v2/project/{}", creation_data.slug)) + .uri(&format!("/v2/project/{}", slug)) .append_header(("Authorization", MOD_USER_PAT)) .set_json(json!( { @@ -101,13 +101,11 @@ impl ApiProject for ApiV2 { let resp = self.call(req).await; assert_status(&resp, StatusCode::NO_CONTENT); - let project = self - .get_project_deserialized_common(&creation_data.slug, pat) - .await; + let project = self.get_project_deserialized_common(&slug, pat).await; // Get project's versions let req = TestRequest::get() - .uri(&format!("/v2/project/{}/version", creation_data.slug)) + .uri(&format!("/v2/project/{}/version", slug)) .append_header(("Authorization", pat)) .to_request(); let resp = self.call(req).await; @@ -116,6 +114,27 @@ impl ApiProject for ApiV2 { (project, versions) } + async fn get_public_project_creation_data_json( + &self, + slug: &str, + version_jar: Option<&TestFile>, + ) -> serde_json::Value { + request_data::get_public_project_creation_data_json(slug, version_jar) + } + + async fn create_project( + &self, + creation_data: ProjectCreationRequestData, + pat: &str, + ) -> ServiceResponse { + let req = TestRequest::post() + .uri("/v2/project") + .append_header(("Authorization", pat)) + .set_multipart(creation_data.segment_data) + .to_request(); + self.call(req).await + } + async fn remove_project(&self, project_slug_or_id: &str, pat: &str) -> ServiceResponse { let req = test::TestRequest::delete() .uri(&format!("/v2/project/{project_slug_or_id}")) @@ -137,7 +156,11 @@ impl ApiProject for ApiV2 { async fn get_project_deserialized_common(&self, id_or_slug: &str, pat: &str) -> CommonProject { let resp = self.get_project(id_or_slug, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let project: LegacyProject = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(project).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_user_projects(&self, user_id_or_username: &str, pat: &str) -> ServiceResponse { @@ -155,7 +178,11 @@ impl ApiProject for ApiV2 { ) -> Vec { let resp = self.get_user_projects(user_id_or_username, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let projects: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(projects).unwrap(); + serde_json::from_value(value).unwrap() } async fn edit_project( diff --git a/tests/common/api_v2/request_data.rs b/tests/common/api_v2/request_data.rs index e0175d90..90bac7e2 100644 --- a/tests/common/api_v2/request_data.rs +++ b/tests/common/api_v2/request_data.rs @@ -1,30 +1,15 @@ #![allow(dead_code)] use serde_json::json; -use crate::common::dummy_data::{DummyImage, TestFile}; +use crate::common::{ + api_common::request_data::{ImageData, ProjectCreationRequestData, VersionCreationRequestData}, + dummy_data::{DummyImage, TestFile}, +}; use labrinth::{ models::projects::ProjectId, util::actix::{MultipartSegment, MultipartSegmentData}, }; -pub struct ProjectCreationRequestData { - pub slug: String, - pub jar: Option, - pub segment_data: Vec, -} - -pub struct VersionCreationRequestData { - pub version: String, - pub jar: Option, - pub segment_data: Vec, -} - -pub struct ImageData { - pub filename: String, - pub 
extension: String, - pub icon: Vec, -} - pub fn get_public_project_creation_data( slug: &str, version_jar: Option, diff --git a/tests/common/api_v2/tags.rs b/tests/common/api_v2/tags.rs index 6b41930b..c67e9594 100644 --- a/tests/common/api_v2/tags.rs +++ b/tests/common/api_v2/tags.rs @@ -72,7 +72,11 @@ impl ApiTags for ApiV2 { async fn get_loaders_deserialized_common(&self) -> Vec { let resp = self.get_loaders().await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_categories(&self) -> ServiceResponse { @@ -86,6 +90,10 @@ impl ApiTags for ApiV2 { async fn get_categories_deserialized_common(&self) -> Vec { let resp = self.get_categories().await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } } diff --git a/tests/common/api_v2/team.rs b/tests/common/api_v2/team.rs index 0b16f510..de35f206 100644 --- a/tests/common/api_v2/team.rs +++ b/tests/common/api_v2/team.rs @@ -1,7 +1,10 @@ use actix_http::StatusCode; use actix_web::{dev::ServiceResponse, test}; use async_trait::async_trait; -use labrinth::models::teams::{OrganizationPermissions, ProjectPermissions}; +use labrinth::models::{ + teams::{OrganizationPermissions, ProjectPermissions}, + v2::{notifications::LegacyNotification, teams::LegacyTeamMember}, +}; use serde_json::json; use crate::common::{ @@ -14,6 +17,38 @@ use crate::common::{ use super::ApiV2; +impl ApiV2 { + pub async fn get_organization_members_deserialized( + &self, + id_or_title: &str, + pat: &str, + ) -> Vec { + let resp = self.get_organization_members(id_or_title, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_team_members_deserialized( + &self, + team_id: &str, + pat: &str, + ) -> Vec { + let resp = self.get_team_members(team_id, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_user_notifications_deserialized( + &self, + user_id: &str, + pat: &str, + ) -> Vec { + let resp = self.get_user_notifications(user_id, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } +} + #[async_trait(?Send)] impl ApiTeams for ApiV2 { async fn get_team_members(&self, id_or_title: &str, pat: &str) -> ServiceResponse { @@ -31,6 +66,9 @@ impl ApiTeams for ApiV2 { ) -> Vec { let resp = self.get_team_members(id_or_title, pat).await; assert_eq!(resp.status(), 200); + // TODO: Note, this does NOT deserialize to any other struct first, as currently TeamMember is the same in v2 and v3. + // CommonTeamMember = TeamMember (v3) + // This may yet change, so we should keep common struct. test::read_body_json(resp).await } @@ -49,6 +87,9 @@ impl ApiTeams for ApiV2 { ) -> Vec { let resp = self.get_project_members(id_or_title, pat).await; assert_eq!(resp.status(), 200); + // TODO: Note, this does NOT deserialize to any other struct first, as currently TeamMember is the same in v2 and v3. 
+ // CommonTeamMember = TeamMember (v3) + // This may yet change, so we should keep common struct. test::read_body_json(resp).await } @@ -67,6 +108,9 @@ impl ApiTeams for ApiV2 { ) -> Vec { let resp = self.get_organization_members(id_or_title, pat).await; assert_eq!(resp.status(), 200); + // TODO: Note, this does NOT deserialize to any other struct first, as currently TeamMember is the same in v2 and v3. + // CommonTeamMember = TeamMember (v3) + // This may yet change, so we should keep common struct. test::read_body_json(resp).await } @@ -132,7 +176,11 @@ impl ApiTeams for ApiV2 { ) -> Vec { let resp = self.get_user_notifications(user_id, pat).await; assert_status(&resp, StatusCode::OK); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn mark_notification_read(&self, notification_id: &str, pat: &str) -> ServiceResponse { diff --git a/tests/common/api_v2/version.rs b/tests/common/api_v2/version.rs index 5e94c8e4..cbfcde0b 100644 --- a/tests/common/api_v2/version.rs +++ b/tests/common/api_v2/version.rs @@ -133,7 +133,11 @@ impl ApiVersion for ApiV2 { ) .await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: LegacyVersion = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_version(&self, id: &str, pat: &str) -> ServiceResponse { @@ -147,7 +151,11 @@ impl ApiVersion for ApiV2 { async fn get_version_deserialized_common(&self, id: &str, pat: &str) -> CommonVersion { let resp = self.get_version(id, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: LegacyVersion = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn edit_version( @@ -186,7 +194,11 @@ impl ApiVersion for ApiV2 { ) -> CommonVersion { let resp = self.get_version_from_hash(hash, algorithm, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: LegacyVersion = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_versions_from_hashes( @@ -214,7 +226,11 @@ impl ApiVersion for ApiV2 { ) -> HashMap { let resp = self.get_versions_from_hashes(hashes, algorithm, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: HashMap = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_update_from_hash( @@ -253,7 +269,11 @@ impl ApiVersion for ApiV2 { .get_update_from_hash(hash, algorithm, loaders, game_versions, version_types, pat) .await; 
assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: LegacyVersion = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn update_files( @@ -299,7 +319,11 @@ impl ApiVersion for ApiV2 { ) .await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: HashMap = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } // TODO: Not all fields are tested currently in the V2 tests, only the v2-v3 relevant ones are @@ -378,7 +402,11 @@ impl ApiVersion for ApiV2 { ) .await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn edit_version_ordering( @@ -415,6 +443,10 @@ impl ApiVersion for ApiV2 { ) -> Vec { let resp = self.get_versions(version_ids, pat).await; assert_status(&resp, StatusCode::OK); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } } diff --git a/tests/common/api_v3/organization.rs b/tests/common/api_v3/organization.rs index 4e2eb0a6..268c9a1c 100644 --- a/tests/common/api_v3/organization.rs +++ b/tests/common/api_v3/organization.rs @@ -6,9 +6,9 @@ use bytes::Bytes; use labrinth::models::{organizations::Organization, v3::projects::Project}; use serde_json::json; -use crate::common::api_common::Api; +use crate::common::api_common::{request_data::ImageData, Api}; -use super::{request_data::ImageData, ApiV3}; +use super::ApiV3; impl ApiV3 { pub async fn create_organization( @@ -21,7 +21,7 @@ impl ApiV3 { .uri("/v3/organization") .append_header(("Authorization", pat)) .set_json(json!({ - "title": organization_title, + "name": organization_title, "description": description, })) .to_request(); diff --git a/tests/common/api_v3/project.rs b/tests/common/api_v3/project.rs index d2bd49ea..048b95a1 100644 --- a/tests/common/api_v3/project.rs +++ b/tests/common/api_v3/project.rs @@ -15,6 +15,7 @@ use serde_json::json; use crate::common::{ api_common::{ models::{CommonImageData, CommonProject, CommonVersion}, + request_data::ProjectCreationRequestData, Api, ApiProject, }, asserts::assert_status, @@ -22,7 +23,10 @@ use crate::common::{ dummy_data::TestFile, }; -use super::{request_data::get_public_project_creation_data, ApiV3}; +use super::{ + request_data::{self, get_public_project_creation_data}, + ApiV3, +}; #[async_trait(?Send)] impl ApiProject for ApiV3 { @@ -36,17 +40,13 @@ impl ApiProject for ApiV3 { let creation_data = get_public_project_creation_data(slug, version_jar, modify_json); // Add a project. 
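// The legacy-to-common conversion repeated throughout these test helpers is the same
// to_value/from_value round trip each time; it could be factored into one shared helper.
// A sketch only — the name to_common is hypothetical and not part of this patch:
fn to_common<T, C>(value: T) -> C
where
    T: serde::Serialize,
    C: serde::de::DeserializeOwned,
{
    // Re-serialize the version-specific struct, then read it back as the common test struct.
    serde_json::from_value(serde_json::to_value(value).unwrap()).unwrap()
}
// e.g. let project: CommonProject = to_common::<LegacyProject, _>(legacy_project);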
- let req = TestRequest::post() - .uri("/v3/project") - .append_header(("Authorization", pat)) - .set_multipart(creation_data.segment_data) - .to_request(); - let resp = self.call(req).await; + let slug = creation_data.slug.clone(); + let resp = self.create_project(creation_data, pat).await; assert_status(&resp, StatusCode::OK); // Approve as a moderator. let req = TestRequest::patch() - .uri(&format!("/v3/project/{}", creation_data.slug)) + .uri(&format!("/v3/project/{}", slug)) .append_header(("Authorization", MOD_USER_PAT)) .set_json(json!( { @@ -57,12 +57,12 @@ impl ApiProject for ApiV3 { let resp = self.call(req).await; assert_status(&resp, StatusCode::NO_CONTENT); - let project = self.get_project(&creation_data.slug, pat).await; + let project = self.get_project(&slug, pat).await; let project = test::read_body_json(project).await; // Get project's versions let req = TestRequest::get() - .uri(&format!("/v3/project/{}/version", creation_data.slug)) + .uri(&format!("/v3/project/{}/version", slug)) .append_header(("Authorization", pat)) .to_request(); let resp = self.call(req).await; @@ -71,6 +71,27 @@ impl ApiProject for ApiV3 { (project, versions) } + async fn get_public_project_creation_data_json( + &self, + slug: &str, + version_jar: Option<&TestFile>, + ) -> serde_json::Value { + request_data::get_public_project_creation_data_json(slug, version_jar) + } + + async fn create_project( + &self, + creation_data: ProjectCreationRequestData, + pat: &str, + ) -> ServiceResponse { + let req = TestRequest::post() + .uri("/v3/project") + .append_header(("Authorization", pat)) + .set_multipart(creation_data.segment_data) + .to_request(); + self.call(req).await + } + async fn remove_project(&self, project_slug_or_id: &str, pat: &str) -> ServiceResponse { let req = test::TestRequest::delete() .uri(&format!("/v3/project/{project_slug_or_id}")) @@ -92,7 +113,11 @@ impl ApiProject for ApiV3 { async fn get_project_deserialized_common(&self, id_or_slug: &str, pat: &str) -> CommonProject { let resp = self.get_project(id_or_slug, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let project: Project = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(project).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_user_projects(&self, user_id_or_username: &str, pat: &str) -> ServiceResponse { @@ -110,7 +135,11 @@ impl ApiProject for ApiV3 { ) -> Vec { let resp = self.get_user_projects(user_id_or_username, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let projects: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(projects).unwrap(); + serde_json::from_value(value).unwrap() } async fn edit_project( diff --git a/tests/common/api_v3/request_data.rs b/tests/common/api_v3/request_data.rs index fdbc5500..4acc9bfe 100644 --- a/tests/common/api_v3/request_data.rs +++ b/tests/common/api_v3/request_data.rs @@ -1,30 +1,15 @@ #![allow(dead_code)] use serde_json::json; -use crate::common::dummy_data::{DummyImage, TestFile}; +use crate::common::{ + api_common::request_data::{ImageData, ProjectCreationRequestData, VersionCreationRequestData}, + dummy_data::{DummyImage, TestFile}, +}; use labrinth::{ 
models::projects::ProjectId, util::actix::{MultipartSegment, MultipartSegmentData}, }; -pub struct ProjectCreationRequestData { - pub slug: String, - pub jar: Option, - pub segment_data: Vec, -} - -pub struct VersionCreationRequestData { - pub version: String, - pub jar: Option, - pub segment_data: Vec, -} - -pub struct ImageData { - pub filename: String, - pub extension: String, - pub icon: Vec, -} - pub fn get_public_project_creation_data( slug: &str, version_jar: Option, @@ -110,10 +95,10 @@ pub fn get_public_project_creation_data_json( let is_draft = version_jar.is_none(); json!( { - "title": format!("Test Project {slug}"), + "name": format!("Test Project {slug}"), "slug": slug, - "description": "A dummy project for testing with.", - "body": "This project is approved, and versions are listed.", + "summary": "A dummy project for testing with.", + "description": "This project is approved, and versions are listed.", "initial_versions": initial_versions, "is_draft": is_draft, "categories": [], diff --git a/tests/common/api_v3/tags.rs b/tests/common/api_v3/tags.rs index 54d436cc..f513e8ea 100644 --- a/tests/common/api_v3/tags.rs +++ b/tests/common/api_v3/tags.rs @@ -3,8 +3,10 @@ use actix_web::{ test::{self, TestRequest}, }; use async_trait::async_trait; -use labrinth::database::models::loader_fields::LoaderFieldEnumValue; use labrinth::routes::v3::tags::{GameData, LoaderData}; +use labrinth::{ + database::models::loader_fields::LoaderFieldEnumValue, routes::v3::tags::CategoryData, +}; use crate::common::{ api_common::{ @@ -29,7 +31,11 @@ impl ApiTags for ApiV3 { async fn get_loaders_deserialized_common(&self) -> Vec { let resp = self.get_loaders().await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_categories(&self) -> ServiceResponse { @@ -43,7 +49,11 @@ impl ApiTags for ApiV3 { async fn get_categories_deserialized_common(&self) -> Vec { let resp = self.get_categories().await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } } diff --git a/tests/common/api_v3/team.rs b/tests/common/api_v3/team.rs index f32772aa..bb29e932 100644 --- a/tests/common/api_v3/team.rs +++ b/tests/common/api_v3/team.rs @@ -1,7 +1,10 @@ use actix_http::StatusCode; use actix_web::{dev::ServiceResponse, test}; use async_trait::async_trait; -use labrinth::models::teams::{OrganizationPermissions, ProjectPermissions}; +use labrinth::models::{ + notifications::Notification, + teams::{OrganizationPermissions, ProjectPermissions, TeamMember}, +}; use serde_json::json; use crate::common::{ @@ -14,6 +17,24 @@ use crate::common::{ use super::ApiV3; +impl ApiV3 { + pub async fn get_organization_members_deserialized( + &self, + id_or_title: &str, + pat: &str, + ) -> Vec { + let resp = self.get_organization_members(id_or_title, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_team_members_deserialized(&self, team_id: &str, pat: &str) -> Vec { + let resp = 
self.get_team_members(team_id, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } +} + #[async_trait(?Send)] impl ApiTeams for ApiV3 { async fn get_team_members(&self, id_or_title: &str, pat: &str) -> ServiceResponse { @@ -31,7 +52,11 @@ impl ApiTeams for ApiV3 { ) -> Vec { let resp = self.get_team_members(id_or_title, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_project_members(&self, id_or_title: &str, pat: &str) -> ServiceResponse { @@ -49,7 +74,11 @@ impl ApiTeams for ApiV3 { ) -> Vec { let resp = self.get_project_members(id_or_title, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_organization_members(&self, id_or_title: &str, pat: &str) -> ServiceResponse { @@ -67,7 +96,11 @@ impl ApiTeams for ApiV3 { ) -> Vec { let resp = self.get_organization_members(id_or_title, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn join_team(&self, team_id: &str, pat: &str) -> ServiceResponse { @@ -132,7 +165,11 @@ impl ApiTeams for ApiV3 { ) -> Vec { let resp = self.get_user_notifications(user_id, pat).await; assert_status(&resp, StatusCode::OK); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn mark_notification_read(&self, notification_id: &str, pat: &str) -> ServiceResponse { diff --git a/tests/common/api_v3/version.rs b/tests/common/api_v3/version.rs index 5d2463d3..43226684 100644 --- a/tests/common/api_v3/version.rs +++ b/tests/common/api_v3/version.rs @@ -138,7 +138,11 @@ impl ApiVersion for ApiV3 { ) .await; assert_status(&resp, StatusCode::OK); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Version = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_version(&self, id: &str, pat: &str) -> ServiceResponse { @@ -152,7 +156,11 @@ impl ApiVersion for ApiV3 { async fn get_version_deserialized_common(&self, id: &str, pat: &str) -> CommonVersion { let resp = self.get_version(id, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Version = 
test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn edit_version( @@ -191,7 +199,11 @@ impl ApiVersion for ApiV3 { ) -> CommonVersion { let resp = self.get_version_from_hash(hash, algorithm, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Version = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_versions_from_hashes( @@ -219,7 +231,11 @@ impl ApiVersion for ApiV3 { ) -> HashMap { let resp = self.get_versions_from_hashes(hashes, algorithm, pat).await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: HashMap = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn get_update_from_hash( @@ -267,7 +283,11 @@ impl ApiVersion for ApiV3 { .get_update_from_hash(hash, algorithm, loaders, game_versions, version_types, pat) .await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Version = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } async fn update_files( @@ -323,7 +343,11 @@ impl ApiVersion for ApiV3 { ) .await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: HashMap = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } // TODO: Not all fields are tested currently in the v3 tests, only the v2-v3 relevant ones are @@ -402,7 +426,11 @@ impl ApiVersion for ApiV3 { ) .await; assert_eq!(resp.status(), 200); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } // TODO: remove redundancy in these functions @@ -440,6 +468,10 @@ impl ApiVersion for ApiV3 { ) -> Vec { let resp = self.get_versions(version_ids, pat).await; assert_status(&resp, StatusCode::OK); - test::read_body_json(resp).await + // First, deserialize to the non-common format (to test the response is valid for this api version) + let v: Vec = test::read_body_json(resp).await; + // Then, deserialize to the common format + let value = serde_json::to_value(v).unwrap(); + serde_json::from_value(value).unwrap() } } diff --git a/tests/common/dummy_data.rs b/tests/common/dummy_data.rs index ccb912a1..44d9e238 100644 --- a/tests/common/dummy_data.rs +++ b/tests/common/dummy_data.rs @@ -4,7 +4,10 @@ use std::io::{Cursor, Write}; use actix_http::StatusCode; use actix_web::test::{self, TestRequest}; use labrinth::models::{ - oauth_clients::OAuthClient, 
organizations::Organization, pats::Scopes, projects::ProjectId, + oauth_clients::OAuthClient, + organizations::Organization, + pats::Scopes, + projects::{Project, ProjectId, Version}, }; use serde_json::json; use sqlx::Executor; @@ -13,14 +16,7 @@ use zip::{write::FileOptions, CompressionMethod, ZipWriter}; use crate::common::{api_common::Api, database::USER_USER_PAT}; use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData}; -use super::{ - api_common::{ - models::{CommonProject, CommonVersion}, - ApiProject, - }, - api_v3::ApiV3, - database::TemporaryDatabase, -}; +use super::{api_common::ApiProject, api_v3::ApiV3, database::TemporaryDatabase}; use super::{asserts::assert_status, database::USER_USER_ID, get_json_val_str}; @@ -174,16 +170,16 @@ pub struct DummyData { impl DummyData { pub fn new( - project_alpha: CommonProject, - project_alpha_version: CommonVersion, - project_beta: CommonProject, - project_beta_version: CommonVersion, + project_alpha: Project, + project_alpha_version: Version, + project_beta: Project, + project_beta_version: Version, organization_zeta: Organization, oauth_client_alpha: OAuthClient, ) -> Self { DummyData { project_alpha: DummyProjectAlpha { - team_id: project_alpha.team.to_string(), + team_id: project_alpha.team_id.to_string(), project_id: project_alpha.id.to_string(), project_slug: project_alpha.slug.unwrap(), project_id_parsed: project_alpha.id, @@ -193,7 +189,7 @@ impl DummyData { }, project_beta: DummyProjectBeta { - team_id: project_beta.team.to_string(), + team_id: project_beta.team_id.to_string(), project_id: project_beta.id.to_string(), project_slug: project_beta.slug.unwrap(), project_id_parsed: project_beta.id, @@ -205,7 +201,7 @@ impl DummyData { organization_zeta: DummyOrganizationZeta { organization_id: organization_zeta.id.to_string(), team_id: organization_zeta.team_id.to_string(), - organization_title: organization_zeta.title, + organization_name: organization_zeta.name, }, oauth_client_alpha: DummyOAuthClientAlpha { @@ -247,7 +243,7 @@ pub struct DummyProjectBeta { #[derive(Clone)] pub struct DummyOrganizationZeta { pub organization_id: String, - pub organization_title: String, + pub organization_name: String, pub team_id: String, } @@ -311,7 +307,7 @@ pub async fn get_dummy_data(api: &ApiV3) -> DummyData { ) } -pub async fn add_project_alpha(api: &ApiV3) -> (CommonProject, CommonVersion) { +pub async fn add_project_alpha(api: &ApiV3) -> (Project, Version) { let (project, versions) = api .add_public_project( "alpha", @@ -320,20 +316,29 @@ pub async fn add_project_alpha(api: &ApiV3) -> (CommonProject, CommonVersion) { USER_USER_PAT, ) .await; - (project, versions.into_iter().next().unwrap()) + let alpha_project = api + .get_project_deserialized(project.id.to_string().as_str(), USER_USER_PAT) + .await; + let alpha_version = api + .get_version_deserialized( + &versions.into_iter().next().unwrap().id.to_string(), + USER_USER_PAT, + ) + .await; + (alpha_project, alpha_version) } -pub async fn add_project_beta(api: &ApiV3) -> (CommonProject, CommonVersion) { +pub async fn add_project_beta(api: &ApiV3) -> (Project, Version) { // Adds dummy data to the database with sqlx (projects, versions, threads) // Generate test project data. 
let jar = TestFile::DummyProjectBeta; // TODO: this shouldnt be hardcoded (nor should other similar ones be) let json_data = json!( { - "title": "Test Project Beta", + "name": "Test Project Beta", "slug": "beta", - "description": "A dummy project for testing with.", - "body": "This project is not-yet-approved, and versions are draft.", + "summary": "A dummy project for testing with.", + "description": "This project is not-yet-approved, and versions are draft.", "initial_versions": [{ "file_parts": [jar.filename()], "version_number": "1.2.3", @@ -390,7 +395,7 @@ pub async fn add_organization_zeta(api: &ApiV3) -> Organization { .uri("/v3/organization") .append_header(("Authorization", USER_USER_PAT)) .set_json(json!({ - "title": "zeta", + "name": "zeta", "description": "A dummy organization for testing with." })) .to_request(); @@ -401,14 +406,14 @@ pub async fn add_organization_zeta(api: &ApiV3) -> Organization { get_organization_zeta(api).await } -pub async fn get_project_alpha(api: &ApiV3) -> (CommonProject, CommonVersion) { +pub async fn get_project_alpha(api: &ApiV3) -> (Project, Version) { // Get project let req = TestRequest::get() .uri("/v3/project/alpha") .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = api.call(req).await; - let project: CommonProject = test::read_body_json(resp).await; + let project: Project = test::read_body_json(resp).await; // Get project's versions let req = TestRequest::get() @@ -416,13 +421,13 @@ pub async fn get_project_alpha(api: &ApiV3) -> (CommonProject, CommonVersion) { .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = api.call(req).await; - let versions: Vec = test::read_body_json(resp).await; + let versions: Vec = test::read_body_json(resp).await; let version = versions.into_iter().next().unwrap(); (project, version) } -pub async fn get_project_beta(api: &ApiV3) -> (CommonProject, CommonVersion) { +pub async fn get_project_beta(api: &ApiV3) -> (Project, Version) { // Get project let req = TestRequest::get() .uri("/v3/project/beta") @@ -431,7 +436,7 @@ pub async fn get_project_beta(api: &ApiV3) -> (CommonProject, CommonVersion) { let resp = api.call(req).await; assert_status(&resp, StatusCode::OK); let project: serde_json::Value = test::read_body_json(resp).await; - let project: CommonProject = serde_json::from_value(project).unwrap(); + let project: Project = serde_json::from_value(project).unwrap(); // Get project's versions let req = TestRequest::get() @@ -440,7 +445,7 @@ pub async fn get_project_beta(api: &ApiV3) -> (CommonProject, CommonVersion) { .to_request(); let resp = api.call(req).await; assert_status(&resp, StatusCode::OK); - let versions: Vec = test::read_body_json(resp).await; + let versions: Vec = test::read_body_json(resp).await; let version = versions.into_iter().next().unwrap(); (project, version) diff --git a/tests/common/permissions.rs b/tests/common/permissions.rs index 7e27def7..4b55b8b9 100644 --- a/tests/common/permissions.rs +++ b/tests/common/permissions.rs @@ -1018,7 +1018,11 @@ async fn create_dummy_project(setup_api: &ApiV3) -> (String, String) { .add_public_project(&slug, None, None, ADMIN_USER_PAT) .await; let project_id = project.id.to_string(); - let team_id = project.team.to_string(); + + let project = setup_api + .get_project_deserialized(&project_id, ADMIN_USER_PAT) + .await; + let team_id = project.team_id.to_string(); (project_id, team_id) } diff --git a/tests/error.rs b/tests/error.rs new file mode 100644 index 00000000..21d422b5 --- /dev/null +++ b/tests/error.rs 
@@ -0,0 +1,23 @@ +use actix_web::test; +use bytes::Bytes; +use common::api_common::ApiProject; + +use common::api_v3::ApiV3; +use common::database::USER_USER_PAT; +use common::environment::{with_test_environment, TestEnvironment}; + +mod common; + +#[actix_rt::test] +pub async fn error_404_body() { + with_test_environment(None, |test_env: TestEnvironment| async move { + // 3 errors should have 404 as non-blank body, for missing resources + let api = &test_env.api; + let resp = api.get_project("does-not-exist", USER_USER_PAT).await; + assert_eq!(resp.status(), 404); + let body = test::read_body(resp).await; + let empty_bytes = Bytes::from_static(b""); + assert_ne!(body, empty_bytes); + }) + .await; +} diff --git a/tests/organizations.rs b/tests/organizations.rs index e12c1ea4..7597ef8d 100644 --- a/tests/organizations.rs +++ b/tests/organizations.rs @@ -65,13 +65,13 @@ async fn create_organization() { let theta = api .get_organization_deserialized("theta", USER_USER_PAT) .await; - assert_eq!(theta.title, "theta"); + assert_eq!(theta.name, "theta"); assert_eq!(theta.description, "not url safe%&^!#$##!@#$%^&"); assert_eq!(resp.status(), 200); // Get created team let members = api - .get_organization_members_deserialized_common("theta", USER_USER_PAT) + .get_organization_members_deserialized("theta", USER_USER_PAT) .await; // Should only be one member, which is USER_USER_ID, and is the owner with full permissions @@ -81,6 +81,7 @@ async fn create_organization() { Some(OrganizationPermissions::all()) ); assert_eq!(members[0].role, "Owner"); + assert!(members[0].is_owner); }) .await; } @@ -118,7 +119,7 @@ async fn patch_organization() { .edit_organization( zeta_organization_id, json!({ - "title": title, + "name": title, "description": "theta_description" }), USER_USER_PAT, @@ -148,7 +149,7 @@ async fn patch_organization() { .edit_organization( zeta_organization_id, json!({ - "title": "new_title", + "name": "new_title", "description": "not url safe%&^!#$##!@#$%^&" // not-URL-safe description should still work }), USER_USER_PAT, @@ -160,7 +161,7 @@ async fn patch_organization() { let new_title = api .get_organization_deserialized("new_title", USER_USER_PAT) .await; - assert_eq!(new_title.title, "new_title"); + assert_eq!(new_title.name, "new_title"); assert_eq!(new_title.description, "not url safe%&^!#$##!@#$%^&"); }) .await; @@ -297,7 +298,7 @@ async fn permissions_patch_organization() { // For each permission covered by EDIT_DETAILS, ensure the permission is required let edit_details = OrganizationPermissions::EDIT_DETAILS; let test_pairs = [ - ("title", json!("")), // generated in the test to not collide slugs + ("name", json!("")), // generated in the test to not collide slugs ("description", json!("New description")), ]; @@ -309,7 +310,7 @@ async fn permissions_patch_organization() { ctx.organization_id.unwrap() )) .set_json(json!({ - key: if key == "title" { + key: if key == "name" { json!(generate_random_name("randomslug")) } else { value.clone() diff --git a/tests/pats.rs b/tests/pats.rs index c32708fe..68dfacad 100644 --- a/tests/pats.rs +++ b/tests/pats.rs @@ -62,7 +62,7 @@ pub async fn pat_full_test() { .uri("/v3/collection") .append_header(("Authorization", token)) .set_json(json!({ - "title": "Test Collection 1", + "name": "Test Collection 1", "description": "Test Collection Description" })) .to_request(); diff --git a/tests/project.rs b/tests/project.rs index 238e5f43..bb35761d 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -17,10 +17,9 @@ use 
labrinth::models::teams::ProjectPermissions; use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData}; use serde_json::json; +use crate::common::api_common::request_data::ProjectCreationRequestData; use crate::common::api_common::{ApiProject, ApiVersion}; -use crate::common::api_v3::request_data::get_public_project_creation_data_json; use crate::common::dummy_data::TestFile; - mod common; #[actix_rt::test] @@ -109,8 +108,10 @@ async fn test_add_remove_project() { with_test_environment(None, |test_env: TestEnvironment| async move { let api = &test_env.api; - let mut json_data = - get_public_project_creation_data_json("demo", Some(&TestFile::BasicMod)); + // Generate test project data. + let mut json_data = api + .get_public_project_creation_data_json("demo", Some(&TestFile::BasicMod)) + .await; // Basic json let json_segment = MultipartSegment { @@ -167,12 +168,16 @@ async fn test_add_remove_project() { }; // Add a project- simple, should work. - let req = test::TestRequest::post() - .uri("/v3/project") - .append_header(("Authorization", USER_USER_PAT)) - .set_multipart(vec![json_segment.clone(), file_segment.clone()]) - .to_request(); - let resp = test_env.call(req).await; + let resp = api + .create_project( + ProjectCreationRequestData { + slug: "demo".to_string(), + segment_data: vec![json_segment.clone(), file_segment.clone()], + jar: None, // File not needed at this point + }, + USER_USER_PAT, + ) + .await; let status = resp.status(); assert_eq!(status, 200); @@ -195,42 +200,51 @@ async fn test_add_remove_project() { // Reusing with a different slug and the same file should fail // Even if that file is named differently - let req = test::TestRequest::post() - .uri("/v3/project") - .append_header(("Authorization", USER_USER_PAT)) - .set_multipart(vec![ - json_diff_slug_file_segment.clone(), // Different slug, different file name - file_diff_name_segment.clone(), // Different file name, same content - ]) - .to_request(); - - let resp = test_env.call(req).await; + let resp = api + .create_project( + ProjectCreationRequestData { + slug: "demo".to_string(), + segment_data: vec![ + json_diff_slug_file_segment.clone(), + file_diff_name_segment.clone(), + ], + jar: None, // File not needed at this point + }, + USER_USER_PAT, + ) + .await; assert_eq!(resp.status(), 400); // Reusing with the same slug and a different file should fail - let req = test::TestRequest::post() - .uri("/v3/project") - .append_header(("Authorization", USER_USER_PAT)) - .set_multipart(vec![ - json_diff_file_segment.clone(), // Same slug, different file name - file_diff_name_content_segment.clone(), // Different file name, different content - ]) - .to_request(); - - let resp = test_env.call(req).await; + let resp = api + .create_project( + ProjectCreationRequestData { + slug: "demo".to_string(), + segment_data: vec![ + json_diff_file_segment.clone(), + file_diff_name_content_segment.clone(), + ], + jar: None, // File not needed at this point + }, + USER_USER_PAT, + ) + .await; assert_eq!(resp.status(), 400); // Different slug, different file should succeed - let req = test::TestRequest::post() - .uri("/v3/project") - .append_header(("Authorization", USER_USER_PAT)) - .set_multipart(vec![ - json_diff_slug_file_segment.clone(), // Different slug, different file name - file_diff_name_content_segment.clone(), // Different file name, same content - ]) - .to_request(); - - let resp = test_env.call(req).await; + let resp = api + .create_project( + ProjectCreationRequestData { + slug: 
"demo".to_string(), + segment_data: vec![ + json_diff_slug_file_segment.clone(), + file_diff_name_content_segment.clone(), + ], + jar: None, // File not needed at this point + }, + USER_USER_PAT, + ) + .await; assert_eq!(resp.status(), 200); // Get @@ -283,7 +297,7 @@ pub async fn test_patch_project() { .edit_project( alpha_project_slug, json!({ - "title": "Test_Add_Project project - test 1", + "name": "Test_Add_Project project - test 1", }), ENEMY_USER_PAT, ) @@ -388,9 +402,6 @@ pub async fn test_patch_project() { alpha_project_slug, json!({ "slug": "newslug", - "title": "New successful title", - "description": "New successful description", - "body": "New successful body", "categories": [DUMMY_CATEGORIES[0]], "license_id": "MIT", "link_urls": @@ -404,7 +415,6 @@ pub async fn test_patch_project() { USER_USER_PAT, ) .await; - println!("{:?}", resp.response().body()); assert_eq!(resp.status(), 204); // Old slug no longer works @@ -415,9 +425,6 @@ pub async fn test_patch_project() { let project = api.get_project_deserialized("newslug", USER_USER_PAT).await; assert_eq!(project.slug.unwrap(), "newslug"); - assert_eq!(project.title, "New successful title"); - assert_eq!(project.description, "New successful description"); - assert_eq!(project.body, "New successful body"); assert_eq!(project.categories, vec![DUMMY_CATEGORIES[0]]); assert_eq!(project.license.id, "MIT"); @@ -449,7 +456,6 @@ pub async fn test_patch_project() { USER_USER_PAT, ) .await; - println!("{:?}", resp.response().body()); assert_eq!(resp.status(), 204); let project = api.get_project_deserialized("newslug", USER_USER_PAT).await; assert_eq!(project.link_urls.len(), 3); @@ -458,6 +464,39 @@ pub async fn test_patch_project() { .await; } +#[actix_rt::test] +pub async fn test_patch_v3() { + // Hits V3-specific patchable fields + with_test_environment(None, |test_env: TestEnvironment| async move { + let api = &test_env.api; + + let alpha_project_slug = &test_env.dummy.as_ref().unwrap().project_alpha.project_slug; + + // Sucessful request to patch many fields. 
+ let resp = api + .edit_project( + alpha_project_slug, + json!({ + "name": "New successful title", + "summary": "New successful summary", + "description": "New successful description", + }), + USER_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 204); + + let project = api + .get_project_deserialized(alpha_project_slug, USER_USER_PAT) + .await; + + assert_eq!(project.name, "New successful title"); + assert_eq!(project.summary, "New successful summary"); + assert_eq!(project.description, "New successful description"); + }) + .await; +} + #[actix_rt::test] pub async fn test_bulk_edit_categories() { with_test_environment_all(None, |test_env| async move { @@ -551,32 +590,31 @@ pub async fn test_bulk_edit_links() { } #[actix_rt::test] -async fn permissions_patch_project() { - with_test_environment_all(Some(8), |test_env| async move { +async fn permissions_patch_project_v3() { + with_test_environment(Some(8), |test_env: TestEnvironment| async move { let alpha_project_id = &test_env.dummy.as_ref().unwrap().project_alpha.project_id; let alpha_team_id = &test_env.dummy.as_ref().unwrap().project_alpha.team_id; + // TODO: This should be a separate test from v3 + // - only a couple of these fields are v3-specific + // once we have permissions/scope tests setup to not just take closures, we can split this up + // For each permission covered by EDIT_DETAILS, ensure the permission is required let edit_details = ProjectPermissions::EDIT_DETAILS; let test_pairs = [ // Body, status, requested_status tested separately ("slug", json!("")), // generated in the test to not collide slugs - ("title", json!("randomname")), + ("name", json!("randomname")), ("description", json!("randomdescription")), ("categories", json!(["combat", "economy"])), ("additional_categories", json!(["decoration"])), - ("issues_url", json!("https://issues.com")), - ("source_url", json!("https://source.com")), - ("wiki_url", json!("https://wiki.com")), ( - "donation_urls", - json!([{ - "id": "paypal", - "platform": "Paypal", - "url": "https://paypal.com" - }]), + "links", + json!({ + "issues": "https://issues.com", + "source": "https://source.com", + }), ), - ("discord_url", json!("https://discord.com")), ("license_id", json!("MIT")), ]; @@ -645,7 +683,7 @@ async fn permissions_patch_project() { test::TestRequest::patch() .uri(&format!("/v3/project/{}", ctx.project_id.unwrap())) .set_json(json!({ - "body": "new body!", + "description": "new description!", })) }; PermissionsTest::new(&test_env) @@ -1027,7 +1065,7 @@ async fn project_permissions_consistency_test() { test::TestRequest::patch() .uri(&format!("/v3/project/{}", ctx.project_id.unwrap())) .set_json(json!({ - "title": "Example title - changed.", + "name": "Example title - changed.", })) }; PermissionsTest::new(&test_env) @@ -1044,7 +1082,7 @@ async fn project_permissions_consistency_test() { test::TestRequest::patch() .uri(&format!("/v3/project/{}", ctx.project_id.unwrap())) .set_json(json!({ - "title": "Example title - changed.", + "name": "Example title - changed.", })) }; PermissionsTest::new(&test_env) diff --git a/tests/scopes.rs b/tests/scopes.rs index 391b657b..335621ce 100644 --- a/tests/scopes.rs +++ b/tests/scopes.rs @@ -1,7 +1,6 @@ use actix_web::test::{self, TestRequest}; use bytes::Bytes; use chrono::{Duration, Utc}; - use common::api_v3::request_data::{ get_public_project_creation_data, get_public_version_creation_data, }; @@ -207,8 +206,11 @@ pub async fn notifications_scopes() { // Project version creation scopes #[actix_rt::test] -pub async fn 
project_version_create_scopes() { +pub async fn project_version_create_scopes_v3() { with_test_environment(None, |test_env: TestEnvironment| async move { + // TODO: If possible, find a way to use generic api functions with the Permissions/Scopes test, then this can be recombined with the V2 version of this test + // let api = &test_env.api; + // Create project let create_project = Scopes::PROJECT_CREATE; let req_gen = || { @@ -510,7 +512,7 @@ pub async fn project_write_scopes() { .uri(&format!("/v3/project/{beta_project_id}")) .set_json(json!( { - "title": "test_project_version_write_scopes Title", + "name": "test_project_version_write_scopes Title", } )) }; @@ -1081,7 +1083,7 @@ pub async fn collections_scopes() { test::TestRequest::post() .uri("/v3/collection") .set_json(json!({ - "title": "Test Collection", + "name": "Test Collection", "description": "Test Collection Description", "projects": [alpha_project_id] })) @@ -1099,7 +1101,7 @@ pub async fn collections_scopes() { test::TestRequest::patch() .uri(&format!("/v3/collection/{collection_id}")) .set_json(json!({ - "title": "Test Collection patch", + "name": "Test Collection patch", "status": "private", })) }; @@ -1182,7 +1184,7 @@ pub async fn organization_scopes() { test::TestRequest::post() .uri("/v3/organization") .set_json(json!({ - "title": "TestOrg", + "name": "TestOrg", "description": "TestOrg Description", })) }; diff --git a/tests/search.rs b/tests/search.rs index 76698bee..949918b9 100644 --- a/tests/search.rs +++ b/tests/search.rs @@ -96,7 +96,7 @@ async fn search_projects() { let modify_json = serde_json::from_value(json!([ { "op": "add", "path": "/categories", "value": DUMMY_CATEGORIES[0..2] }, { "op": "add", "path": "/initial_versions/0/server_only", "value": true }, - { "op": "add", "path": "/title", "value": "Mysterious Project" }, + { "op": "add", "path": "/name", "value": "Mysterious Project" }, ])) .unwrap(); project_creation_futures.push(create_async_future( @@ -112,7 +112,7 @@ async fn search_projects() { { "op": "add", "path": "/categories", "value": DUMMY_CATEGORIES[0..3] }, { "op": "add", "path": "/initial_versions/0/server_only", "value": true }, { "op": "add", "path": "/initial_versions/0/game_versions", "value": ["1.20.4"] }, - { "op": "add", "path": "/title", "value": "Mysterious Project" }, + { "op": "add", "path": "/name", "value": "Mysterious Project" }, { "op": "add", "path": "/license_id", "value": "LicenseRef-All-Rights-Reserved" }, ])) .unwrap(); @@ -240,7 +240,7 @@ async fn search_projects() { (json!([["server_only:true"]]), vec![0, 2, 3, 6, 7]), (json!([["open_source:true"]]), vec![0, 1, 2, 4, 5, 6, 7]), (json!([["license:MIT"]]), vec![1, 2, 4]), - (json!([[r#"title:'Mysterious Project'"#]]), vec![2, 3]), + (json!([[r#"name:'Mysterious Project'"#]]), vec![2, 3]), (json!([["author:user"]]), vec![0, 1, 2, 4, 5, 7]), (json!([["game_versions:1.20.5"]]), vec![4, 5]), // bug fix diff --git a/tests/teams.rs b/tests/teams.rs index 929d0bec..c4bf7cda 100644 --- a/tests/teams.rs +++ b/tests/teams.rs @@ -1,7 +1,11 @@ -use crate::common::database::*; +use crate::common::{api_common::ApiTeams, database::*}; use actix_web::test; -use common::environment::with_test_environment_all; +use common::{ + api_v3::ApiV3, + environment::{with_test_environment, with_test_environment_all, TestEnvironment}, +}; use labrinth::models::teams::{OrganizationPermissions, ProjectPermissions}; +use rust_decimal::Decimal; use serde_json::json; mod common; @@ -209,152 +213,69 @@ async fn test_get_team_project_orgs() { async fn 
test_patch_project_team_member() { // Test setup and dummy data with_test_environment_all(None, |test_env| async move { + let api = &test_env.api; + let alpha_team_id = &test_env.dummy.as_ref().unwrap().project_alpha.team_id; // Edit team as admin/mod but not a part of the team should be OK - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/members/{USER_USER_ID}")) - .set_json(json!({})) - .append_header(("Authorization", ADMIN_USER_PAT)) - .to_request(); - let resp = test_env.call(req).await; + let resp = api.edit_team_member(alpha_team_id, USER_USER_ID, json!({}), ADMIN_USER_PAT).await; assert_eq!(resp.status(), 204); - // As a non-owner with full permissions, attempt to edit the owner's permissions/roles - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/members/{USER_USER_ID}")) - .append_header(("Authorization", ADMIN_USER_PAT)) - .set_json(json!({ - "role": "member" - })) - .to_request(); - let resp = test_env.call(req).await; - assert_eq!(resp.status(), 400); - - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/members/{USER_USER_ID}")) - .append_header(("Authorization", ADMIN_USER_PAT)) - .set_json(json!({ - "permissions": 0 - })) - .to_request(); - let resp = test_env.call(req).await; - + // As a non-owner with full permissions, attempt to edit the owner's permissions + let resp = api.edit_team_member(alpha_team_id, USER_USER_ID, json!({ + "permissions": 0 + }), ADMIN_USER_PAT).await; assert_eq!(resp.status(), 400); // Should not be able to edit organization permissions of a project team - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/members/{USER_USER_ID}")) - .append_header(("Authorization", USER_USER_PAT)) - .set_json(json!({ - "organization_permissions": 0 - })) - .to_request(); - let resp = test_env.call(req).await; - + let resp = api.edit_team_member(alpha_team_id, USER_USER_ID, json!({ + "organization_permissions": 0 + }), USER_USER_PAT).await; assert_eq!(resp.status(), 400); // Should not be able to add permissions to a user that the adding-user does not have // (true for both project and org) // first, invite friend - let req = test::TestRequest::post() - .uri(&format!("/v3/team/{alpha_team_id}/members")) - .append_header(("Authorization", USER_USER_PAT)) - .set_json(json!({ - "user_id": FRIEND_USER_ID, - "permissions": (ProjectPermissions::EDIT_MEMBER | ProjectPermissions::EDIT_BODY).bits(), - })) - .to_request(); - let resp = test_env.call(req).await; + let resp = api.add_user_to_team(alpha_team_id, FRIEND_USER_ID, + Some(ProjectPermissions::EDIT_MEMBER | ProjectPermissions::EDIT_BODY), + None, USER_USER_PAT).await; assert_eq!(resp.status(), 204); // accept - let req = test::TestRequest::post() - .uri(&format!("/v3/team/{alpha_team_id}/join")) - .append_header(("Authorization", FRIEND_USER_PAT)) - .to_request(); - let resp = test_env.call(req).await; + let resp = api.join_team(alpha_team_id, FRIEND_USER_PAT).await; assert_eq!(resp.status(), 204); // try to add permissions - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/members/{FRIEND_USER_ID}")) - .append_header(("Authorization", FRIEND_USER_PAT)) - .set_json(json!({ - "permissions": (ProjectPermissions::EDIT_MEMBER | ProjectPermissions::EDIT_DETAILS).bits() - })) - .to_request(); - let resp = test_env.call(req).await; - assert_eq!(resp.status(), 400); - - // Cannot set a user to Owner - let req = test::TestRequest::patch() - .uri(&format!( - 
"/v3/team/{alpha_team_id}/members/{FRIEND_USER_ID}" - )) - .append_header(("Authorization", USER_USER_PAT)) - .set_json(json!({ - "role": "Owner" - })) - .to_request(); - let resp = test_env.call(req).await; + let resp = api.edit_team_member(alpha_team_id, FRIEND_USER_ID, json!({ + "permissions": (ProjectPermissions::EDIT_MEMBER | ProjectPermissions::EDIT_DETAILS).bits() + }), FRIEND_USER_PAT).await; // should this be friend_user_pat assert_eq!(resp.status(), 400); // Cannot set payouts outside of 0 and 5000 for payout in [-1, 5001] { - let req = test::TestRequest::patch() - .uri(&format!( - "/v3/team/{alpha_team_id}/members/{FRIEND_USER_ID}" - )) - .append_header(("Authorization", USER_USER_PAT)) - .set_json(json!({ - "payouts_split": payout - })) - .to_request(); - let resp = test_env.call(req).await; - + let resp = api.edit_team_member(alpha_team_id, FRIEND_USER_ID, json!({ + "payouts_split": payout + }), USER_USER_PAT).await; assert_eq!(resp.status(), 400); } // Successful patch - let req = test::TestRequest::patch() - .uri(&format!( - "/v3/team/{alpha_team_id}/members/{FRIEND_USER_ID}" - )) - .append_header(("Authorization", FRIEND_USER_PAT)) - .set_json(json!({ + let resp = api.edit_team_member(alpha_team_id, FRIEND_USER_ID, json!({ "payouts_split": 51, "permissions": ProjectPermissions::EDIT_MEMBER.bits(), // reduces permissions - "role": "member", + "role": "membe2r", "ordering": 5 - })) - .to_request(); - let resp = test_env.call(req).await; + }), FRIEND_USER_PAT).await; assert_eq!(resp.status(), 204); // Check results - let req = test::TestRequest::get() - .uri(&format!("/v3/team/{alpha_team_id}/members")) - .append_header(("Authorization", FRIEND_USER_PAT)) - .to_request(); - let resp = test_env.call(req).await; - assert_eq!(resp.status(), 200); - let value: serde_json::Value = test::read_body_json(resp).await; - let member = value - .as_array() - .unwrap() - .iter() - .find(|x| x["user"]["id"] == FRIEND_USER_ID) - .unwrap(); - assert_eq!(member["payouts_split"], 51.0); - assert_eq!( - member["permissions"], - ProjectPermissions::EDIT_MEMBER.bits() - ); - assert_eq!(member["role"], "member"); - assert_eq!(member["ordering"], 5); - + let members = api.get_team_members_deserialized_common(alpha_team_id, FRIEND_USER_PAT).await; + let member = members.iter().find(|x| x.user.id.0 == FRIEND_USER_ID_PARSED as u64).unwrap(); + assert_eq!(member.payouts_split, Decimal::from_f64_retain(51.0)); + assert_eq!(member.permissions.unwrap(), ProjectPermissions::EDIT_MEMBER); + assert_eq!(member.role, "membe2r"); + assert_eq!(member.ordering, 5); }).await; } @@ -374,17 +295,7 @@ async fn test_patch_organization_team_member() { let resp = test_env.call(req).await; assert_eq!(resp.status(), 204); - // As a non-owner with full permissions, attempt to edit the owner's permissions/roles - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{zeta_team_id}/members/{USER_USER_ID}")) - .append_header(("Authorization", ADMIN_USER_PAT)) - .set_json(json!({ - "role": "member" - })) - .to_request(); - let resp = test_env.call(req).await; - assert_eq!(resp.status(), 400); - + // As a non-owner with full permissions, attempt to edit the owner's permissions let req = test::TestRequest::patch() .uri(&format!("/v3/team/{zeta_team_id}/members/{USER_USER_ID}")) .append_header(("Authorization", ADMIN_USER_PAT)) @@ -429,18 +340,6 @@ async fn test_patch_organization_team_member() { assert_eq!(resp.status(), 400); - // Cannot set a user to Owner - let req = test::TestRequest::patch() - 
.uri(&format!("/v3/team/{zeta_team_id}/members/{FRIEND_USER_ID}")) - .append_header(("Authorization", USER_USER_PAT)) - .set_json(json!({ - "role": "Owner" - })) - .to_request(); - let resp = test_env.call(req).await; - - assert_eq!(resp.status(), 400); - // Cannot set payouts outside of 0 and 5000 for payout in [-1, 5001] { let req = test::TestRequest::patch() @@ -462,7 +361,7 @@ async fn test_patch_organization_team_member() { "payouts_split": 51, "organization_permissions": (OrganizationPermissions::EDIT_MEMBER).bits(), // reduces permissions "permissions": (ProjectPermissions::EDIT_MEMBER).bits(), - "role": "member", + "role": "very-cool-member", "ordering": 5 })) .to_request(); @@ -493,7 +392,7 @@ async fn test_patch_organization_team_member() { member["permissions"], ProjectPermissions::EDIT_MEMBER.bits() ); - assert_eq!(member["role"], "member"); + assert_eq!(member["role"], "very-cool-member"); assert_eq!(member["ordering"], 5); }).await; @@ -501,104 +400,102 @@ async fn test_patch_organization_team_member() { // trasnfer ownership (requires being owner, etc) #[actix_rt::test] -async fn transfer_ownership() { +async fn transfer_ownership_v3() { // Test setup and dummy data - with_test_environment_all(None, |test_env| async move { + with_test_environment(None, |test_env: TestEnvironment| async move { + let api = &test_env.api; + let alpha_team_id = &test_env.dummy.as_ref().unwrap().project_alpha.team_id; // Cannot set friend as owner (not a member) - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/owner")) - .set_json(json!({ - "user_id": FRIEND_USER_ID - })) - .append_header(("Authorization", USER_USER_ID)) - .to_request(); - let resp = test_env.call(req).await; + let resp = api + .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT) + .await; + assert_eq!(resp.status(), 400); + let resp = api + .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, FRIEND_USER_PAT) + .await; assert_eq!(resp.status(), 401); // first, invite friend - let req = test::TestRequest::post() - .uri(&format!("/v3/team/{alpha_team_id}/members")) - .append_header(("Authorization", USER_USER_PAT)) - .set_json(json!({ - "user_id": FRIEND_USER_ID, - })) - .to_request(); - let resp = test_env.call(req).await; + let resp = api + .add_user_to_team(alpha_team_id, FRIEND_USER_ID, None, None, USER_USER_PAT) + .await; assert_eq!(resp.status(), 204); + // still cannot set friend as owner (not accepted) + let resp = api + .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT) + .await; + assert_eq!(resp.status(), 400); + // accept - let req = test::TestRequest::post() - .uri(&format!("/v3/team/{alpha_team_id}/join")) - .append_header(("Authorization", FRIEND_USER_PAT)) - .to_request(); - let resp = test_env.call(req).await; + let resp = api.join_team(alpha_team_id, FRIEND_USER_PAT).await; assert_eq!(resp.status(), 204); - // Cannot set ourselves as owner - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/owner")) - .set_json(json!({ - "user_id": FRIEND_USER_ID - })) - .append_header(("Authorization", FRIEND_USER_PAT)) - .to_request(); - let resp = test_env.call(req).await; + // Cannot set ourselves as owner if we are not owner + let resp = api + .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, FRIEND_USER_PAT) + .await; assert_eq!(resp.status(), 401); // Can set friend as owner - let req = test::TestRequest::patch() - .uri(&format!("/v3/team/{alpha_team_id}/owner")) - .set_json(json!({ - "user_id": FRIEND_USER_ID - })) 
- .append_header(("Authorization", USER_USER_PAT)) - .to_request(); - let resp = test_env.call(req).await; + let resp = api + .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT) + .await; assert_eq!(resp.status(), 204); // Check - let req = test::TestRequest::get() - .uri(&format!("/v3/team/{alpha_team_id}/members")) - .set_json(json!({ - "user_id": FRIEND_USER_ID - })) - .append_header(("Authorization", USER_USER_PAT)) - .to_request(); - let resp = test_env.call(req).await; - assert_eq!(resp.status(), 200); - let value: serde_json::Value = test::read_body_json(resp).await; - let friend_member = value - .as_array() - .unwrap() + let members = api + .get_team_members_deserialized(alpha_team_id, USER_USER_PAT) + .await; + let friend_member = members .iter() - .find(|x| x["user"]["id"] == FRIEND_USER_ID) + .find(|x| x.user.id.0 == FRIEND_USER_ID_PARSED as u64) .unwrap(); - assert_eq!(friend_member["role"], "Owner"); + assert_eq!(friend_member.role, "Member"); // her role does not actually change, but is_owner is set to true + assert!(friend_member.is_owner); assert_eq!( - friend_member["permissions"], - ProjectPermissions::all().bits() + friend_member.permissions.unwrap(), + ProjectPermissions::all() ); - let user_member = value - .as_array() - .unwrap() + + let user_member = members .iter() - .find(|x| x["user"]["id"] == USER_USER_ID) + .find(|x| x.user.id.0 == USER_USER_ID_PARSED as u64) .unwrap(); - assert_eq!(user_member["role"], "Member"); - assert_eq!(user_member["permissions"], ProjectPermissions::all().bits()); + assert_eq!(user_member.role, "Owner"); // We are the 'owner', but we are not actually the owner! + assert!(!user_member.is_owner); + assert_eq!(user_member.permissions.unwrap(), ProjectPermissions::all()); // Confirm that user, a user who still has full permissions, cannot then remove the owner - let req = test::TestRequest::delete() - .uri(&format!( - "/v3/team/{alpha_team_id}/members/{FRIEND_USER_ID}" - )) - .append_header(("Authorization", USER_USER_PAT)) - .to_request(); - - let resp = test_env.call(req).await; + let resp = api + .remove_from_team(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT) + .await; assert_eq!(resp.status(), 401); + + // V3 only- confirm the owner can change their role without losing ownership + let resp = api + .edit_team_member( + alpha_team_id, + FRIEND_USER_ID, + json!({ + "role": "Member" + }), + FRIEND_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 204); + + let members = api + .get_team_members_deserialized(alpha_team_id, USER_USER_PAT) + .await; + let friend_member = members + .iter() + .find(|x| x.user.id.0 == FRIEND_USER_ID_PARSED as u64) + .unwrap(); + assert_eq!(friend_member.role, "Member"); + assert!(friend_member.is_owner); }) .await; } diff --git a/tests/v2/error.rs b/tests/v2/error.rs new file mode 100644 index 00000000..e87d0887 --- /dev/null +++ b/tests/v2/error.rs @@ -0,0 +1,22 @@ +use crate::common::api_common::ApiProject; +use actix_web::test; +use bytes::Bytes; + +use crate::common::database::USER_USER_PAT; +use crate::common::{ + api_v2::ApiV2, + environment::{with_test_environment, TestEnvironment}, +}; +#[actix_rt::test] +pub async fn error_404_empty() { + with_test_environment(None, |test_env: TestEnvironment| async move { + // V2 errors should have 404 as blank body, for missing resources + let api = &test_env.api; + let resp = api.get_project("does-not-exist", USER_USER_PAT).await; + assert_eq!(resp.status(), 404); + let body = test::read_body(resp).await; + let empty_bytes = Bytes::from_static(b""); 
+ assert_eq!(body, empty_bytes); + }) + .await; +} diff --git a/tests/v2/notifications.rs b/tests/v2/notifications.rs new file mode 100644 index 00000000..d31f0ad6 --- /dev/null +++ b/tests/v2/notifications.rs @@ -0,0 +1,31 @@ +use crate::common::{ + api_common::ApiTeams, + api_v2::ApiV2, + database::{FRIEND_USER_ID, FRIEND_USER_PAT, USER_USER_PAT}, + environment::{with_test_environment, TestEnvironment}, +}; + +#[actix_rt::test] +pub async fn get_user_notifications_after_team_invitation_returns_notification() { + with_test_environment(None, |test_env: TestEnvironment| async move { + let alpha_team_id = test_env + .dummy + .as_ref() + .unwrap() + .project_alpha + .team_id + .clone(); + let api = test_env.api; + api.add_user_to_team(&alpha_team_id, FRIEND_USER_ID, None, None, USER_USER_PAT) + .await; + + let notifications = api + .get_user_notifications_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT) + .await; + assert_eq!(1, notifications.len()); + + // Check to make sure type_ is correct + assert_eq!(notifications[0].type_.as_ref().unwrap(), "team_invite"); + }) + .await; +} diff --git a/tests/v2/project.rs b/tests/v2/project.rs index 490779f6..ad2ee21f 100644 --- a/tests/v2/project.rs +++ b/tests/v2/project.rs @@ -5,15 +5,15 @@ use crate::common::{ ApiV2, }, database::{ - ADMIN_USER_PAT, ENEMY_USER_PAT, FRIEND_USER_ID, FRIEND_USER_PAT, MOD_USER_PAT, - USER_USER_PAT, + generate_random_name, ADMIN_USER_PAT, FRIEND_USER_ID, FRIEND_USER_PAT, USER_USER_PAT, }, - dummy_data::{TestFile, DUMMY_CATEGORIES}, + dummy_data::TestFile, environment::{with_test_environment, TestEnvironment}, permissions::{PermissionsTest, PermissionsTestContext}, }; use actix_http::StatusCode; use actix_web::test; +use futures::StreamExt; use itertools::Itertools; use labrinth::{ database::models::project_item::PROJECTS_SLUGS_NAMESPACE, @@ -336,165 +336,102 @@ async fn permissions_upload_version() { } #[actix_rt::test] -pub async fn test_patch_project() { +pub async fn test_patch_v2() { + // Hits V3-specific patchable fields + // Other fields are tested in test_patch_project (the v2 version of that test) with_test_environment(None, |test_env: TestEnvironment| async move { let api = &test_env.api; let alpha_project_slug = &test_env.dummy.as_ref().unwrap().project_alpha.project_slug; - let beta_project_slug = &test_env.dummy.as_ref().unwrap().project_beta.project_slug; - - // First, we do some patch requests that should fail. - // Failure because the user is not authorized. - let resp = api - .edit_project( - alpha_project_slug, - json!({ - "title": "Test_Add_Project project - test 1", - }), - ENEMY_USER_PAT, - ) - .await; - assert_eq!(resp.status(), 401); - - // Failure because we are setting URL fields to invalid urls. - for url_type in ["issues_url", "source_url", "wiki_url", "discord_url"] { - let resp = api - .edit_project( - alpha_project_slug, - json!({ - url_type: "w.fake.url", - }), - USER_USER_PAT, - ) - .await; - assert_eq!(resp.status(), 400); - } - - // Failure because these are illegal requested statuses for a normal user. 
- for req in ["unknown", "processing", "withheld", "scheduled"] { - let resp = api - .edit_project( - alpha_project_slug, - json!({ - "requested_status": req, - }), - USER_USER_PAT, - ) - .await; - assert_eq!(resp.status(), 400); - } - - // Failure because these should not be able to be set by a non-mod - for key in ["moderation_message", "moderation_message_body"] { - let resp = api - .edit_project( - alpha_project_slug, - json!({ - key: "test", - }), - USER_USER_PAT, - ) - .await; - assert_eq!(resp.status(), 401); - - // (should work for a mod, though) - let resp = api - .edit_project( - alpha_project_slug, - json!({ - key: "test", - }), - MOD_USER_PAT, - ) - .await; - assert_eq!(resp.status(), 204); - } - - // Failed patch to alpha slug: - // - slug collision with beta - // - too short slug - // - too long slug - // - not url safe slug - // - not url safe slug - for slug in [ - beta_project_slug, - "a", - &"a".repeat(100), - "not url safe%&^!#$##!@#$%^&*()", - ] { - let resp = api - .edit_project( - alpha_project_slug, - json!({ - "slug": slug, // the other dummy project has this slug - }), - USER_USER_PAT, - ) - .await; - assert_eq!(resp.status(), 400); - } - - // Not allowed to directly set status, as 'beta_project_slug' (the other project) is "processing" and cannot have its status changed like this. - let resp = api - .edit_project( - beta_project_slug, - json!({ - "status": "private" - }), - USER_USER_PAT, - ) - .await; - assert_eq!(resp.status(), 401); // Sucessful request to patch many fields. let resp = api .edit_project( alpha_project_slug, json!({ - "slug": "newslug", - "title": "New successful title", - "description": "New successful description", - "body": "New successful body", - "categories": [DUMMY_CATEGORIES[0]], - "license_id": "MIT", - "issues_url": "https://github.com", - "discord_url": "https://discord.gg", - "wiki_url": "https://wiki.com", "client_side": "unsupported", "server_side": "required", - "donation_urls": [{ - "id": "patreon", - "platform": "Patreon", - "url": "https://patreon.com" - }] }), USER_USER_PAT, ) .await; assert_eq!(resp.status(), 204); - // Old slug no longer works - let resp = api.get_project(alpha_project_slug, USER_USER_PAT).await; - assert_eq!(resp.status(), 404); + let project = api + .get_project_deserialized(alpha_project_slug, USER_USER_PAT) + .await; - // New slug does work - let project = api.get_project_deserialized("newslug", USER_USER_PAT).await; - assert_eq!(project.slug.unwrap(), "newslug"); - assert_eq!(project.title, "New successful title"); - assert_eq!(project.description, "New successful description"); - assert_eq!(project.body, "New successful body"); - assert_eq!(project.categories, vec![DUMMY_CATEGORIES[0]]); - assert_eq!(project.license.id, "MIT"); - assert_eq!(project.issues_url, Some("https://github.com".to_string())); - assert_eq!(project.discord_url, Some("https://discord.gg".to_string())); - assert_eq!(project.wiki_url, Some("https://wiki.com".to_string())); // Note: the original V2 value of this was "optional", // but Required/Optional is no longer a carried combination in v3, as the changes made were lossy. // Now, the test Required/Unsupported combination is tested instead. // Setting Required/Optional in v2 will not work, this is known and accepteed. 
         assert_eq!(project.client_side.as_str(), "unsupported");
         assert_eq!(project.server_side.as_str(), "required");
-        assert_eq!(project.donation_urls.unwrap()[0].url, "https://patreon.com");
+    })
+    .await;
+}
+
+#[actix_rt::test]
+async fn permissions_patch_project_v2() {
+    with_test_environment(Some(8), |test_env: TestEnvironment| async move {
+        // TODO: This only includes v2 ones (as it should. See v3)
+        // For each permission covered by EDIT_DETAILS, ensure the permission is required
+        let edit_details = ProjectPermissions::EDIT_DETAILS;
+        let test_pairs = [
+            ("description", json!("description")),
+            ("issues_url", json!("https://issues.com")),
+            ("source_url", json!("https://source.com")),
+            ("wiki_url", json!("https://wiki.com")),
+            (
+                "donation_urls",
+                json!([{
+                    "id": "paypal",
+                    "platform": "Paypal",
+                    "url": "https://paypal.com"
+                }]),
+            ),
+            ("discord_url", json!("https://discord.com")),
+        ];
+
+        futures::stream::iter(test_pairs)
+            .map(|(key, value)| {
+                let test_env = test_env.clone();
+                async move {
+                    let req_gen = |ctx: &PermissionsTestContext| {
+                        test::TestRequest::patch()
+                            .uri(&format!("/v2/project/{}", ctx.project_id.unwrap()))
+                            .set_json(json!({
+                                key: if key == "slug" {
+                                    json!(generate_random_name("randomslug"))
+                                } else {
+                                    value.clone()
+                                },
+                            }))
+                    };
+                    PermissionsTest::new(&test_env)
+                        .simple_project_permissions_test(edit_details, req_gen)
+                        .await
+                        .into_iter();
+                }
+            })
+            .buffer_unordered(4)
+            .collect::<Vec<_>>()
+            .await;
+
+        // Edit body
+        // Cannot bulk edit body
+        let edit_body = ProjectPermissions::EDIT_BODY;
+        let req_gen = |ctx: &PermissionsTestContext| {
+            test::TestRequest::patch()
+                .uri(&format!("/v2/project/{}", ctx.project_id.unwrap()))
+                .set_json(json!({
+                    "body": "new body!", // new body
+                }))
+        };
+        PermissionsTest::new(&test_env)
+            .simple_project_permissions_test(edit_body, req_gen)
+            .await
+            .unwrap();
     })
     .await;
 }
diff --git a/tests/v2/teams.rs b/tests/v2/teams.rs
new file mode 100644
index 00000000..3d664594
--- /dev/null
+++ b/tests/v2/teams.rs
@@ -0,0 +1,105 @@
+use labrinth::models::teams::ProjectPermissions;
+use serde_json::json;
+
+use crate::common::{
+    api_common::ApiTeams,
+    api_v2::ApiV2,
+    database::{
+        FRIEND_USER_ID, FRIEND_USER_ID_PARSED, FRIEND_USER_PAT, USER_USER_ID_PARSED, USER_USER_PAT,
+    },
+    environment::{with_test_environment, TestEnvironment},
+};
+
+// transfer ownership (requires being owner, etc)
+#[actix_rt::test]
+async fn transfer_ownership_v2() {
+    // Test setup and dummy data
+    with_test_environment(None, |test_env: TestEnvironment| async move {
+        let api = &test_env.api;
+
+        let alpha_team_id = &test_env.dummy.as_ref().unwrap().project_alpha.team_id;
+
+        // Cannot set friend as owner (not a member)
+        let resp = api
+            .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
+            .await;
+        assert_eq!(resp.status(), 400);
+
+        // first, invite friend
+        let resp = api
+            .add_user_to_team(alpha_team_id, FRIEND_USER_ID, None, None, USER_USER_PAT)
+            .await;
+        assert_eq!(resp.status(), 204);
+
+        // still cannot set friend as owner (not accepted)
+        let resp = api
+            .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
+            .await;
+        assert_eq!(resp.status(), 400);
+
+        // accept
+        let resp = api.join_team(alpha_team_id, FRIEND_USER_PAT).await;
+        assert_eq!(resp.status(), 204);
+
+        // Cannot set ourselves as owner if we are not owner
+        let resp = api
+            .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, FRIEND_USER_PAT)
+            .await;
+        assert_eq!(resp.status(), 401);
+
+        // Can set friend as owner
+        let resp = api
+            .transfer_team_ownership(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
+            .await;
+        assert_eq!(resp.status(), 204);
+
+        // Check
+        let members = api
+            .get_team_members_deserialized(alpha_team_id, USER_USER_PAT)
+            .await;
+        let friend_member = members
+            .iter()
+            .find(|x| x.user.id.0 == FRIEND_USER_ID_PARSED as u64)
+            .unwrap();
+        assert_eq!(friend_member.role, "Owner");
+        assert_eq!(
+            friend_member.permissions.unwrap(),
+            ProjectPermissions::all()
+        );
+
+        let user_member = members
+            .iter()
+            .find(|x| x.user.id.0 == USER_USER_ID_PARSED as u64)
+            .unwrap();
+        assert_eq!(user_member.role, "Member");
+        assert_eq!(user_member.permissions.unwrap(), ProjectPermissions::all());
+
+        // Confirm that user, a user who still has full permissions, cannot then remove the owner
+        let resp = api
+            .remove_from_team(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
+            .await;
+        assert_eq!(resp.status(), 401);
+
+        // V2 only - confirm the owner changing the role to member does nothing
+        let resp = api
+            .edit_team_member(
+                alpha_team_id,
+                FRIEND_USER_ID,
+                json!({
+                    "role": "Member"
+                }),
+                FRIEND_USER_PAT,
+            )
+            .await;
+        assert_eq!(resp.status(), 204);
+        let members = api
+            .get_team_members_deserialized(alpha_team_id, USER_USER_PAT)
+            .await;
+        let friend_member = members
+            .iter()
+            .find(|x| x.user.id.0 == FRIEND_USER_ID_PARSED as u64)
+            .unwrap();
+        assert_eq!(friend_member.role, "Owner");
+    })
+    .await;
+}
diff --git a/tests/v2_tests.rs b/tests/v2_tests.rs
index 839cc303..808bcb1b 100644
--- a/tests/v2_tests.rs
+++ b/tests/v2_tests.rs
@@ -8,9 +8,12 @@ mod common;
 
 // Such V2 tests are exported here
 mod v2 {
+    mod error;
+    mod notifications;
     mod project;
     mod scopes;
     mod search;
     mod tags;
+    mod teams;
     mod version;
 }