Search test + v3 (#731)

* search patch for accurate loader/gv filtering
* backup
* basic search test
* finished test
* incomplete commit; backing up
* Working multipat reroute backup
* working rough draft v3
* most tests passing
* works
* search v2 conversion
* added some tags.rs v2 conversions
* Worked through warnings, unwraps, prints
* refactors
* new search test
* version files changes fixes
* redesign to revs
* removed old caches
* removed games
* fmt clippy
* merge conflicts
* fmt, prepare
* moved v2 routes over to v3
* fixes; tests passing
* project type changes
* moved files over
* fmt, clippy, prepare, etc
* loaders to loader_fields, added tests
* fmt, clippy, prepare
* fixed sorting bug
* reversed back- wrong order for consistency
* fmt; clippy; prepare

---------

Co-authored-by: Jai A <jaiagr+gpg@pm.me>
parent 97ccb7df94
commit ae1c5342f2
0
.editorconfig
Normal file
25
.sqlx/query-09ab64836127f6edb22a5deaa33ab77d9e8155386a5be60e01b3ad7db8541a27.json
generated
Normal file
@@ -0,0 +1,25 @@
{
  "db_name": "PostgreSQL",
"query": "\n INSERT INTO loader_field_enum_values (enum_id, value, created, metadata)\n VALUES ($1, $2, COALESCE($3, timezone('utc', now())), $4)\n ON CONFLICT (enum_id, value) DO UPDATE\n SET metadata = COALESCE($4, loader_field_enum_values.metadata),\n created = COALESCE($3, loader_field_enum_values.created)\n RETURNING id\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Int4", "Varchar", "Timestamp", "Jsonb"] },
    "nullable": [false]
  },
  "hash": "09ab64836127f6edb22a5deaa33ab77d9e8155386a5be60e01b3ad7db8541a27"
}
40
.sqlx/query-0b52dc08a903a9c82234f6e1a2c59fdb631955011988910f033dd740b6a3b79b.json
generated
Normal file
@@ -0,0 +1,40 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable \n FROM loader_field_enums lfe\n WHERE lfe.enum_name = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "enum_name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "ordering", "type_info": "Int4" },
      { "ordinal": 3, "name": "hidable", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Text"] },
    "nullable": [false, false, true, false]
  },
  "hash": "0b52dc08a903a9c82234f6e1a2c59fdb631955011988910f033dd740b6a3b79b"
}
@@ -1,15 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n UPDATE mods\n SET game_versions = (\n SELECT COALESCE(ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null), array[]::varchar[])\n FROM versions v\n INNER JOIN game_versions_versions gvv ON v.id = gvv.joining_version_id\n INNER JOIN game_versions gv on gvv.game_version_id = gv.id\n WHERE v.mod_id = mods.id AND v.status != ALL($2)\n )\n WHERE id = $1\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int8", "TextArray"] },
    "nullable": []
  },
  "hash": "177716d2b04fd2a2b63b2e14c8ffdfa554d84254b14053496c118dec24bf5049"
}
@@ -1,22 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT name FROM project_types pt\n INNER JOIN mods ON mods.project_type = pt.id\n WHERE mods.id = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "name", "type_info": "Varchar" }
    ],
    "parameters": { "Left": ["Int8"] },
    "nullable": [false]
  },
  "hash": "1d6f3e926fc4a27c5af172f672b7f825f9f5fe2d538b06337ef182ab1a553398"
}
@@ -1,22 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT id FROM side_types\n WHERE name = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Text"] },
    "nullable": [false]
  },
  "hash": "1db6be78a74ff04c52ee105e0df30acf5bbf18f1de328980bb7f3da7f5f6569e"
}
@@ -1,124 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering,\n JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int8" },
      { "ordinal": 1, "name": "mod_id", "type_info": "Int8" },
      { "ordinal": 2, "name": "author_id", "type_info": "Int8" },
      { "ordinal": 3, "name": "version_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "version_number", "type_info": "Varchar" },
      { "ordinal": 5, "name": "changelog", "type_info": "Varchar" },
      { "ordinal": 6, "name": "date_published", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "downloads", "type_info": "Int4" },
      { "ordinal": 8, "name": "version_type", "type_info": "Varchar" },
      { "ordinal": 9, "name": "featured", "type_info": "Bool" },
      { "ordinal": 10, "name": "status", "type_info": "Varchar" },
      { "ordinal": 11, "name": "requested_status", "type_info": "Varchar" },
      { "ordinal": 12, "name": "ordering", "type_info": "Int4" },
      { "ordinal": 13, "name": "game_versions", "type_info": "Jsonb" },
      { "ordinal": 14, "name": "loaders", "type_info": "VarcharArray" },
      { "ordinal": 15, "name": "files", "type_info": "Jsonb" },
      { "ordinal": 16, "name": "hashes", "type_info": "Jsonb" },
      { "ordinal": 17, "name": "dependencies", "type_info": "Jsonb" }
    ],
    "parameters": { "Left": ["Int8Array"] },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, true, null, null, null, null, null]
  },
  "hash": "1e735a003ce305624ce8bbf181c99e41fbe8fcd836e926daf3e73aa3bb5552a6"
}
@@ -1,22 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT name FROM project_types pt\n INNER JOIN mods ON mods.project_type = pt.id\n WHERE mods.id = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "name", "type_info": "Varchar" }
    ],
    "parameters": { "Left": ["Int8"] },
    "nullable": [false]
  },
  "hash": "21ef50f46b7b3e62b91e7d067c1cb33806e14c33bb76d63c2711f822c44261f6"
}
126
.sqlx/query-3afbc93a8945e7ae07e39a88752f400c06f9c8a8132fd7a05dcc55c6eab5d2e7.json
generated
Normal file
@@ -0,0 +1,126 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT m.id id, m.title title, m.description description, m.color color,\n m.icon_url icon_url, m.slug slug,\n pt.name project_type, u.username username, u.avatar_url avatar_url,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'lf_id', lf.id,\n 'loader_name', lo.loader,\n 'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id\n WHERE m.id = $1\n GROUP BY m.id, pt.id, u.id;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int8" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "description", "type_info": "Varchar" },
      { "ordinal": 3, "name": "color", "type_info": "Int4" },
      { "ordinal": 4, "name": "icon_url", "type_info": "Varchar" },
      { "ordinal": 5, "name": "slug", "type_info": "Varchar" },
      { "ordinal": 6, "name": "project_type", "type_info": "Varchar" },
      { "ordinal": 7, "name": "username", "type_info": "Varchar" },
      { "ordinal": 8, "name": "avatar_url", "type_info": "Varchar" },
      { "ordinal": 9, "name": "categories", "type_info": "VarcharArray" },
      { "ordinal": 10, "name": "loaders", "type_info": "VarcharArray" },
      { "ordinal": 11, "name": "project_types", "type_info": "VarcharArray" },
      { "ordinal": 12, "name": "games", "type_info": "VarcharArray" },
      { "ordinal": 13, "name": "gallery", "type_info": "VarcharArray" },
      { "ordinal": 14, "name": "featured_gallery", "type_info": "VarcharArray" },
      { "ordinal": 15, "name": "version_fields", "type_info": "Jsonb" },
      { "ordinal": 16, "name": "loader_fields", "type_info": "Jsonb" },
      { "ordinal": 17, "name": "loader_field_enum_values", "type_info": "Jsonb" }
    ],
    "parameters": { "Left": ["Int8", "TextArray", "Text"] },
    "nullable": [false, false, false, true, true, true, false, false, true, null, null, null, null, null, null, null, null, null]
  },
  "hash": "3afbc93a8945e7ae07e39a88752f400c06f9c8a8132fd7a05dcc55c6eab5d2e7"
}
@@ -1,44 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT gv.id id, gv.version version_, gv.type type_, gv.created created, gv.major FROM game_versions gv\n ORDER BY created DESC\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "version_", "type_info": "Varchar" },
      { "ordinal": 2, "name": "type_", "type_info": "Varchar" },
      { "ordinal": 3, "name": "created", "type_info": "Timestamptz" },
      { "ordinal": 4, "name": "major", "type_info": "Bool" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, false, false, false, false]
  },
  "hash": "3d384766d179f804c17e03d1917da65cc6043f88971ddc3fd23ba3be00717dfc"
}
@@ -1,168 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,\n m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color,\n cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, u.username username,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null) versions,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($1)\n LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n WHERE m.status = ANY($2)\n GROUP BY m.id, cs.id, ss.id, pt.id, u.id;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int8" },
      { "ordinal": 1, "name": "project_type", "type_info": "Int4" },
      { "ordinal": 2, "name": "title", "type_info": "Varchar" },
      { "ordinal": 3, "name": "description", "type_info": "Varchar" },
      { "ordinal": 4, "name": "downloads", "type_info": "Int4" },
      { "ordinal": 5, "name": "follows", "type_info": "Int4" },
      { "ordinal": 6, "name": "icon_url", "type_info": "Varchar" },
      { "ordinal": 7, "name": "published", "type_info": "Timestamptz" },
      { "ordinal": 8, "name": "approved", "type_info": "Timestamptz" },
      { "ordinal": 9, "name": "updated", "type_info": "Timestamptz" },
      { "ordinal": 10, "name": "team_id", "type_info": "Int8" },
      { "ordinal": 11, "name": "license", "type_info": "Varchar" },
      { "ordinal": 12, "name": "slug", "type_info": "Varchar" },
      { "ordinal": 13, "name": "status_name", "type_info": "Varchar" },
      { "ordinal": 14, "name": "color", "type_info": "Int4" },
      { "ordinal": 15, "name": "client_side_type", "type_info": "Varchar" },
      { "ordinal": 16, "name": "server_side_type", "type_info": "Varchar" },
      { "ordinal": 17, "name": "project_type_name", "type_info": "Varchar" },
      { "ordinal": 18, "name": "username", "type_info": "Varchar" },
      { "ordinal": 19, "name": "categories", "type_info": "VarcharArray" },
      { "ordinal": 20, "name": "additional_categories", "type_info": "VarcharArray" },
      { "ordinal": 21, "name": "loaders", "type_info": "VarcharArray" },
      { "ordinal": 22, "name": "versions", "type_info": "VarcharArray" },
      { "ordinal": 23, "name": "gallery", "type_info": "VarcharArray" },
      { "ordinal": 24, "name": "featured_gallery", "type_info": "VarcharArray" }
    ],
    "parameters": { "Left": ["TextArray", "TextArray", "Text"] },
    "nullable": [false, false, false, false, false, false, true, false, true, false, false, false, true, false, true, false, false, false, false, null, null, null, null, null, null]
  },
  "hash": "4514723bdc1eb8a781215075bec51af1cc6fabe88a469338d5a59533eabf80c5"
}
52
.sqlx/query-458630d00e46183c65f95729d2647d3635f629cfb892fc8ac1964d8ecc269576.json
generated
Normal file
@@ -0,0 +1,52 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values\n WHERE enum_id = ANY($1)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "enum_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "value", "type_info": "Varchar" },
      { "ordinal": 3, "name": "ordering", "type_info": "Int4" },
      { "ordinal": 4, "name": "metadata", "type_info": "Jsonb" },
      { "ordinal": 5, "name": "created", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": ["Int4Array"] },
    "nullable": [false, false, false, true, true, false]
  },
  "hash": "458630d00e46183c65f95729d2647d3635f629cfb892fc8ac1964d8ecc269576"
}
@@ -1,15 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n UPDATE mods\n SET server_side = $1\n WHERE (id = $2)\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int4", "Int8"] },
    "nullable": []
  },
  "hash": "4a54d350b4695c32a802675506e85b0506fc62a63ca0ee5f38890824301d6515"
}
@@ -1,14 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n DELETE FROM game_versions_versions WHERE joining_version_id = $1\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int8"] },
    "nullable": []
  },
  "hash": "507314fdcacaa3c7751738c9d0baee2b90aec719b6b203f922824eced5ea8369"
}
@@ -1,38 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT l.id id, l.loader loader, l.icon icon,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types\n FROM loaders l\n LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id\n LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id\n GROUP BY l.id;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "loader", "type_info": "Varchar" },
      { "ordinal": 2, "name": "icon", "type_info": "Varchar" },
      { "ordinal": 3, "name": "project_types", "type_info": "VarcharArray" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, false, false, null]
  },
  "hash": "5295fba2053675c8414c0b37a59943535b9a438a642ea1c68045e987f05ade13"
}
@@ -1,114 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT m.id id, m.title title, m.description description, m.color color,\n m.icon_url icon_url, m.slug slug, cs.name client_side_type, ss.name server_side_type,\n pt.name project_type, u.username username, u.avatar_url avatar_url,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', gv.id, 'version', gv.version, 'type', gv.type, 'created', gv.created, 'major', gv.major)) filter (where gv.version is not null) versions,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)\n LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n WHERE m.id = $1\n GROUP BY m.id, cs.id, ss.id, pt.id, u.id;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int8" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "description", "type_info": "Varchar" },
      { "ordinal": 3, "name": "color", "type_info": "Int4" },
      { "ordinal": 4, "name": "icon_url", "type_info": "Varchar" },
      { "ordinal": 5, "name": "slug", "type_info": "Varchar" },
      { "ordinal": 6, "name": "client_side_type", "type_info": "Varchar" },
      { "ordinal": 7, "name": "server_side_type", "type_info": "Varchar" },
      { "ordinal": 8, "name": "project_type", "type_info": "Varchar" },
      { "ordinal": 9, "name": "username", "type_info": "Varchar" },
      { "ordinal": 10, "name": "avatar_url", "type_info": "Varchar" },
      { "ordinal": 11, "name": "categories", "type_info": "VarcharArray" },
      { "ordinal": 12, "name": "loaders", "type_info": "VarcharArray" },
      { "ordinal": 13, "name": "versions", "type_info": "Jsonb" },
      { "ordinal": 14, "name": "gallery", "type_info": "VarcharArray" },
      { "ordinal": 15, "name": "featured_gallery", "type_info": "VarcharArray" }
    ],
    "parameters": { "Left": ["Int8", "TextArray", "Text"] },
    "nullable": [false, false, false, true, true, true, false, false, false, false, true, null, null, null, null, null]
  },
  "hash": "59e95e832615c375753bfc9a56b07c02d916399adfa52fb11a79b8f7b56ecf8b"
}
56
.sqlx/query-622496d06b9d1e5019d7dcb45ac768558305f1270c1c43ef767f54b9baf5b5af.json
generated
Normal file
@@ -0,0 +1,56 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type\n FROM loader_fields lf\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "field", "type_info": "Varchar" },
      { "ordinal": 2, "name": "field_type", "type_info": "Varchar" },
      { "ordinal": 3, "name": "optional", "type_info": "Bool" },
      { "ordinal": 4, "name": "min_val", "type_info": "Int4" },
      { "ordinal": 5, "name": "max_val", "type_info": "Int4" },
      { "ordinal": 6, "name": "enum_type", "type_info": "Int4" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, false, false, false, true, true, true]
  },
  "hash": "622496d06b9d1e5019d7dcb45ac768558305f1270c1c43ef767f54b9baf5b5af"
}
28
.sqlx/query-683e186dc086ef21d2f82c0d427fcee16c613fb93ea74d6eb0da684363ca7b13.json
generated
Normal file
@@ -0,0 +1,28 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT id, project_type FROM categories\n WHERE category = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "project_type", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Text"] },
    "nullable": [false, false]
  },
  "hash": "683e186dc086ef21d2f82c0d427fcee16c613fb93ea74d6eb0da684363ca7b13"
}
@@ -1,15 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n UPDATE mods\n SET loaders = (\n SELECT COALESCE(ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null), array[]::varchar[])\n FROM versions v\n INNER JOIN loaders_versions lv ON lv.version_id = v.id\n INNER JOIN loaders l on lv.loader_id = l.id\n WHERE v.mod_id = mods.id AND v.status != ALL($2)\n )\n WHERE id = $1\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int8", "TextArray"] },
    "nullable": []
  },
  "hash": "6b89c2b2557e304c2a3a02d7824327685f9be696254bf2370d0c995aafc6a2d8"
}
@@ -1,24 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n INSERT INTO game_versions (version, type, created)\n VALUES ($1, COALESCE($2, 'other'), COALESCE($3, timezone('utc', now())))\n ON CONFLICT (version) DO UPDATE\n SET type = COALESCE($2, game_versions.type),\n created = COALESCE($3, game_versions.created)\n RETURNING id\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Varchar", "Text", "Timestamp"] },
    "nullable": [false]
  },
  "hash": "72c75313688dfd88a659c5250c71b9899abd6186ab32a067a7d4b8a0846ebd18"
}
@@ -1,22 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT id FROM categories\n WHERE category = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Text"] },
    "nullable": [false]
  },
  "hash": "72d6b5f2f11d88981db82c7247c9e7e5ebfd8d34985a1a8209d6628e66490f37"
}
@@ -1,20 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT name FROM side_types\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "name", "type_info": "Varchar" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false]
  },
  "hash": "85c6de008681d9fc9dc51b17330bed09204010813111e66a7ca84bc0e603f537"
}
18
.sqlx/query-8cfa1380907e20fe18180d4f2ae929b7178f81056788ffb207a6c5e4bbcc7a7d.json
generated
Normal file
@@ -0,0 +1,18 @@
{
  "db_name": "PostgreSQL",
"query": "\n INSERT INTO version_fields (field_id, version_id, int_value, string_value, enum_value)\n SELECT * FROM UNNEST($1::integer[], $2::bigint[], $3::integer[], $4::text[], $5::integer[])\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int4Array", "Int8Array", "Int4Array", "TextArray", "Int4Array"] },
    "nullable": []
  },
  "hash": "8cfa1380907e20fe18180d4f2ae929b7178f81056788ffb207a6c5e4bbcc7a7d"
}
44
.sqlx/query-923d1d1e5e9b879479a244479952df15841d35b96fbdcadc7d5af8d6b4671f9e.json
generated
Normal file
@@ -0,0 +1,44 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT l.id id, l.loader loader, l.icon icon,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games\n FROM loaders l \n LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id\n LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id\n LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id\n LEFT OUTER JOIN games g ON lptg.game_id = g.id\n GROUP BY l.id;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "loader", "type_info": "Varchar" },
      { "ordinal": 2, "name": "icon", "type_info": "Varchar" },
      { "ordinal": 3, "name": "project_types", "type_info": "VarcharArray" },
      { "ordinal": 4, "name": "games", "type_info": "VarcharArray" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, false, false, null, null]
  },
  "hash": "923d1d1e5e9b879479a244479952df15841d35b96fbdcadc7d5af8d6b4671f9e"
}
14
.sqlx/query-a2f510708f04ad72fe36af9fa96bfb775fb088579fe23bcb87f50f5a8578f3c0.json
generated
Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
"query": "\n DELETE FROM version_fields vf\n WHERE vf.version_id = $1\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int8"] },
    "nullable": []
  },
  "hash": "a2f510708f04ad72fe36af9fa96bfb775fb088579fe23bcb87f50f5a8578f3c0"
}
15
.sqlx/query-acd2e72610008d4fe240cdfadc1c70c997443f7319a5c535df967d56d24bd54a.json
generated
Normal file
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
"query": "\n DELETE FROM version_fields \n WHERE version_id = $1\n AND field_id = ANY($2)\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int8", "Int4Array"] },
    "nullable": []
  },
  "hash": "acd2e72610008d4fe240cdfadc1c70c997443f7319a5c535df967d56d24bd54a"
}
@@ -1,14 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n DELETE FROM game_versions_versions gvv\n WHERE gvv.joining_version_id = $1\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int8"] },
    "nullable": []
  },
  "hash": "bee1abe8313d17a56d93b06a31240e338c3973bc7a7374799ced3df5e38d3134"
}
@@ -1,22 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT id FROM game_versions\n WHERE version = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Text"] },
    "nullable": [false]
  },
  "hash": "c1fddbf97350871b79cb0c235b1f7488c6616b7c1dfbde76a712fd57e91ba158"
}
@@ -1,15 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n UPDATE mods\n SET client_side = $1\n WHERE (id = $2)\n ",
  "describe": {
    "columns": [],
    "parameters": { "Left": ["Int4", "Int8"] },
    "nullable": []
  },
  "hash": "c5d44333c62223bd3e68185d1fb3f95152fafec593da8d06c9b2b665218a02be"
}
180
.sqlx/query-cab90ea34929643f9e9814150c4dbd027fc0bd427bfba5e6eb99c989af53b680.json
generated
Normal file
@@ -0,0 +1,180 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT m.id id, v.id version_id, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,\n m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color,\n pt.name project_type_name, u.username username,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'lf_id', lf.id,\n 'loader_name', lo.loader,\n 'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n\n FROM versions v\n INNER JOIN mods m ON v.mod_id = m.id AND m.status = ANY($2)\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id\n WHERE v.status != ANY($1)\n GROUP BY v.id, m.id, pt.id, u.id;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int8" },
      { "ordinal": 1, "name": "version_id", "type_info": "Int8" },
      { "ordinal": 2, "name": "title", "type_info": "Varchar" },
      { "ordinal": 3, "name": "description", "type_info": "Varchar" },
      { "ordinal": 4, "name": "downloads", "type_info": "Int4" },
      { "ordinal": 5, "name": "follows", "type_info": "Int4" },
      { "ordinal": 6, "name": "icon_url", "type_info": "Varchar" },
      { "ordinal": 7, "name": "published", "type_info": "Timestamptz" },
      { "ordinal": 8, "name": "approved", "type_info": "Timestamptz" },
      { "ordinal": 9, "name": "updated", "type_info": "Timestamptz" },
      { "ordinal": 10, "name": "team_id", "type_info": "Int8" },
      { "ordinal": 11, "name": "license", "type_info": "Varchar" },
      { "ordinal": 12, "name": "slug", "type_info": "Varchar" },
      { "ordinal": 13, "name": "status_name", "type_info": "Varchar" },
      { "ordinal": 14, "name": "color", "type_info": "Int4" },
      { "ordinal": 15, "name": "project_type_name", "type_info": "Varchar" },
      { "ordinal": 16, "name": "username", "type_info": "Varchar" },
      { "ordinal": 17, "name": "categories", "type_info": "VarcharArray" },
      { "ordinal": 18, "name": "additional_categories", "type_info": "VarcharArray" },
      { "ordinal": 19, "name": "loaders", "type_info": "VarcharArray" },
      { "ordinal": 20, "name": "project_types", "type_info": "VarcharArray" },
      { "ordinal": 21, "name": "games", "type_info": "VarcharArray" },
      { "ordinal": 22, "name": "gallery", "type_info": "VarcharArray" },
      { "ordinal": 23, "name": "featured_gallery", "type_info": "VarcharArray" },
      { "ordinal": 24, "name": "version_fields", "type_info": "Jsonb" },
      { "ordinal": 25, "name": "loader_fields", "type_info": "Jsonb" },
      { "ordinal": 26, "name": "loader_field_enum_values", "type_info": "Jsonb" }
    ],
    "parameters": { "Left": ["TextArray", "TextArray", "Text"] },
    "nullable": [false, false, false, false, false, false, true, false, true, false, false, false, true, false, true, false, false, null, null, null, null, null, null, null, null, null, null]
  },
  "hash": "cab90ea34929643f9e9814150c4dbd027fc0bd427bfba5e6eb99c989af53b680"
}
@@ -1,22 +0,0 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT name FROM project_types pt\n INNER JOIN mods ON mods.project_type = pt.id\n WHERE mods.id = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "name", "type_info": "Varchar" }
    ],
    "parameters": { "Left": ["Int8"] },
    "nullable": [false]
  },
  "hash": "ef59f99fc0ab66ff5779d0e71c4a2134e2f26eed002ff9ea5626ea3e23518594"
}
@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.body body, m.published published,\n m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,\n m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,\n m.team_id team_id, m.organization_id organization_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,\n cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, m.webhook_sent, m.color,\n t.id thread_id, m.monetization_status monetization_status, m.loaders loaders, m.game_versions game_versions,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,\n JSONB_AGG(DISTINCT jsonb_build_object('image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering)) filter (where mg.image_url is not null) gallery,\n JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations\n FROM mods m\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN threads t ON t.mod_id = m.id\n LEFT JOIN mods_gallery mg ON mg.mod_id = m.id\n LEFT JOIN mods_donations md ON md.joining_mod_id = m.id\n LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON mc.joining_category_id = c.id\n LEFT JOIN versions v ON v.mod_id = m.id AND v.status = ANY($3)\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY pt.id, cs.id, ss.id, t.id, m.id;\n ",
"query": "\n SELECT m.id id, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.body body, m.published published,\n m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,\n m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,\n m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,\n m.webhook_sent, m.color,\n t.id thread_id, m.monetization_status monetization_status,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,\n JSONB_AGG(DISTINCT jsonb_build_object('image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering)) filter (where mg.image_url is not null) gallery,\n JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations\n FROM mods m \n INNER JOIN threads t ON t.mod_id = m.id\n LEFT JOIN mods_gallery mg ON mg.mod_id = m.id\n LEFT JOIN mods_donations md ON md.joining_mod_id = m.id\n LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON mc.joining_category_id = c.id\n LEFT JOIN versions v ON v.mod_id = m.id AND v.status = ANY($3)\n LEFT JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT JOIN loaders l on lv.loader_id = l.id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY t.id, m.id;\n ",
"describe": {
"columns": [
{
@@ -10,201 +10,176 @@
},
{
"ordinal": 1,
- "name": "project_type",
- "type_info": "Int4"
- },
- {
- "ordinal": 2,
"name": "title",
"type_info": "Varchar"
},
{
- "ordinal": 3,
+ "ordinal": 2,
"name": "description",
"type_info": "Varchar"
},
{
- "ordinal": 4,
+ "ordinal": 3,
"name": "downloads",
"type_info": "Int4"
},
{
- "ordinal": 5,
+ "ordinal": 4,
"name": "follows",
"type_info": "Int4"
},
{
- "ordinal": 6,
+ "ordinal": 5,
"name": "icon_url",
"type_info": "Varchar"
},
{
- "ordinal": 7,
+ "ordinal": 6,
"name": "body",
"type_info": "Varchar"
},
{
- "ordinal": 8,
+ "ordinal": 7,
"name": "published",
"type_info": "Timestamptz"
},
{
- "ordinal": 9,
+ "ordinal": 8,
"name": "updated",
"type_info": "Timestamptz"
},
{
- "ordinal": 10,
+ "ordinal": 9,
"name": "approved",
"type_info": "Timestamptz"
},
{
- "ordinal": 11,
+ "ordinal": 10,
"name": "queued",
"type_info": "Timestamptz"
},
{
- "ordinal": 12,
+ "ordinal": 11,
"name": "status",
"type_info": "Varchar"
},
{
- "ordinal": 13,
+ "ordinal": 12,
"name": "requested_status",
"type_info": "Varchar"
},
{
- "ordinal": 14,
+ "ordinal": 13,
"name": "issues_url",
"type_info": "Varchar"
},
{
- "ordinal": 15,
+ "ordinal": 14,
"name": "source_url",
"type_info": "Varchar"
},
{
- "ordinal": 16,
+ "ordinal": 15,
"name": "wiki_url",
"type_info": "Varchar"
},
{
- "ordinal": 17,
+ "ordinal": 16,
"name": "discord_url",
"type_info": "Varchar"
},
{
- "ordinal": 18,
+ "ordinal": 17,
"name": "license_url",
"type_info": "Varchar"
},
{
- "ordinal": 19,
+ "ordinal": 18,
"name": "team_id",
"type_info": "Int8"
},
{
- "ordinal": 20,
+ "ordinal": 19,
"name": "organization_id",
"type_info": "Int8"
},
{
- "ordinal": 21,
+ "ordinal": 20,
- "name": "client_side",
- "type_info": "Int4"
- },
- {
- "ordinal": 22,
- "name": "server_side",
- "type_info": "Int4"
- },
- {
- "ordinal": 23,
"name": "license",
"type_info": "Varchar"
},
{
- "ordinal": 24,
+ "ordinal": 21,
"name": "slug",
"type_info": "Varchar"
},
{
- "ordinal": 25,
+ "ordinal": 22,
"name": "moderation_message",
"type_info": "Varchar"
},
{
- "ordinal": 26,
+ "ordinal": 23,
"name": "moderation_message_body",
"type_info": "Varchar"
},
{
- "ordinal": 27,
+ "ordinal": 24,
- "name": "client_side_type",
- "type_info": "Varchar"
- },
- {
- "ordinal": 28,
- "name": "server_side_type",
- "type_info": "Varchar"
- },
- {
- "ordinal": 29,
- "name": "project_type_name",
- "type_info": "Varchar"
- },
- {
- "ordinal": 30,
"name": "webhook_sent",
"type_info": "Bool"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 31,
|
"ordinal": 25,
|
||||||
"name": "color",
|
"name": "color",
|
||||||
"type_info": "Int4"
|
"type_info": "Int4"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 32,
|
"ordinal": 26,
|
||||||
"name": "thread_id",
|
"name": "thread_id",
|
||||||
"type_info": "Int8"
|
"type_info": "Int8"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 33,
|
"ordinal": 27,
|
||||||
"name": "monetization_status",
|
"name": "monetization_status",
|
||||||
"type_info": "Varchar"
|
"type_info": "Varchar"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 34,
|
"ordinal": 28,
|
||||||
"name": "loaders",
|
"name": "loaders",
|
||||||
"type_info": "VarcharArray"
|
"type_info": "VarcharArray"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 35,
|
"ordinal": 29,
|
||||||
"name": "game_versions",
|
"name": "project_types",
|
||||||
"type_info": "VarcharArray"
|
"type_info": "VarcharArray"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 36,
|
"ordinal": 30,
|
||||||
|
"name": "games",
|
||||||
|
"type_info": "VarcharArray"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"ordinal": 31,
|
||||||
"name": "categories",
|
"name": "categories",
|
||||||
"type_info": "VarcharArray"
|
"type_info": "VarcharArray"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 37,
|
"ordinal": 32,
|
||||||
"name": "additional_categories",
|
"name": "additional_categories",
|
||||||
"type_info": "VarcharArray"
|
"type_info": "VarcharArray"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 38,
|
"ordinal": 33,
|
||||||
"name": "versions",
|
"name": "versions",
|
||||||
"type_info": "Jsonb"
|
"type_info": "Jsonb"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 39,
|
"ordinal": 34,
|
||||||
"name": "gallery",
|
"name": "gallery",
|
||||||
"type_info": "Jsonb"
|
"type_info": "Jsonb"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"ordinal": 40,
|
"ordinal": 35,
|
||||||
"name": "donations",
|
"name": "donations",
|
||||||
"type_info": "Jsonb"
|
"type_info": "Jsonb"
|
||||||
}
|
}
|
||||||
@ -222,7 +197,6 @@
|
|||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
true,
|
true,
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
@ -239,20 +213,16 @@
|
|||||||
false,
|
false,
|
||||||
true,
|
true,
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
false,
|
|
||||||
true,
|
true,
|
||||||
true,
|
true,
|
||||||
true,
|
true,
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
false,
|
|
||||||
false,
|
|
||||||
true,
|
true,
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
false,
|
null,
|
||||||
false,
|
null,
|
||||||
|
null,
|
||||||
null,
|
null,
|
||||||
null,
|
null,
|
||||||
null,
|
null,
|
||||||
@ -260,5 +230,5 @@
|
|||||||
null
|
null
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"hash": "ffcc8c65721465514ad39a0e9bd6138eda0fa32dd3399a8e850a76beb1f1bf16"
|
"hash": "f73ffab12a96eb9480615e333d40cde031df280039cd8e435cfca5e15ed3d1c4"
|
||||||
}
|
}
|
||||||
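The new `project_types` and `games` columns are `ARRAY_AGG(...) FILTER (...)` aggregates, so they surface as nullable arrays on the Rust side. A minimal sketch of how such a row is typically unpacked (the struct and function names here are illustrative, not the PR's actual types):

```rust
// ARRAY_AGG(...) FILTER (...) yields NULL when nothing matches, so sqlx
// exposes these columns as Option<Vec<String>>.
struct ProjectRow {
    project_types: Option<Vec<String>>,
    games: Option<Vec<String>>,
}

// Collapse the nullable aggregates into plain vectors, mirroring the
// `unwrap_or_default()` pattern used elsewhere in this diff.
fn unpack(row: ProjectRow) -> (Vec<String>, Vec<String>) {
    (
        row.project_types.unwrap_or_default(),
        row.games.unwrap_or_default(),
    )
}
```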
148
.sqlx/query-f7aee6fbd3415c7819d9ae1a75a0ae5753aaa3373c3ac9bc04adb3087781b49f.json
generated
Normal file
@ -0,0 +1,148 @@
{
  "db_name": "PostgreSQL",
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies,\n \n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'lf_id', lf.id,\n 'loader_name', l.loader,\n 'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n \n FROM versions v\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN loaders_project_types lpt on l.id = lpt.joining_loader_id\n LEFT JOIN project_types pt on lpt.joining_project_type_id = pt.id\n LEFT OUTER JOIN loaders_project_types_games lptg on l.id = lptg.loader_id AND pt.id = lptg.project_type_id\n LEFT JOIN games g on lptg.game_id = g.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfe.id = lfev.enum_id\n\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC;\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int8" },
      { "ordinal": 1, "name": "mod_id", "type_info": "Int8" },
      { "ordinal": 2, "name": "author_id", "type_info": "Int8" },
      { "ordinal": 3, "name": "version_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "version_number", "type_info": "Varchar" },
      { "ordinal": 5, "name": "changelog", "type_info": "Varchar" },
      { "ordinal": 6, "name": "date_published", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "downloads", "type_info": "Int4" },
      { "ordinal": 8, "name": "version_type", "type_info": "Varchar" },
      { "ordinal": 9, "name": "featured", "type_info": "Bool" },
      { "ordinal": 10, "name": "status", "type_info": "Varchar" },
      { "ordinal": 11, "name": "requested_status", "type_info": "Varchar" },
      { "ordinal": 12, "name": "ordering", "type_info": "Int4" },
      { "ordinal": 13, "name": "loaders", "type_info": "VarcharArray" },
      { "ordinal": 14, "name": "project_types", "type_info": "VarcharArray" },
      { "ordinal": 15, "name": "games", "type_info": "VarcharArray" },
      { "ordinal": 16, "name": "files", "type_info": "Jsonb" },
      { "ordinal": 17, "name": "hashes", "type_info": "Jsonb" },
      { "ordinal": 18, "name": "dependencies", "type_info": "Jsonb" },
      { "ordinal": 19, "name": "version_fields", "type_info": "Jsonb" },
      { "ordinal": 20, "name": "loader_fields", "type_info": "Jsonb" },
      { "ordinal": 21, "name": "loader_field_enum_values", "type_info": "Jsonb" }
    ],
    "parameters": {
      "Left": [
        "Int8Array"
      ]
    },
    "nullable": [
      false, false, false, false, false, false, false, false, false, false, false,
      true, true,
      null, null, null, null, null, null, null, null, null
    ]
  },
  "hash": "f7aee6fbd3415c7819d9ae1a75a0ae5753aaa3373c3ac9bc04adb3087781b49f"
}
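The `version_fields`, `loader_fields`, and `loader_field_enum_values` columns above come back as JSONB arrays built with `JSONB_AGG(jsonb_build_object(...))`. A minimal sketch of decoding one of those aggregates with `serde_json`; the struct name and field set mirror the keys used in the query above but are illustrative, not the crate's actual types:

```rust
use serde::Deserialize;

// Illustrative mirror of the jsonb_build_object keys used in the query above;
// the real codebase deserializes into its own QueryVersionField type.
#[derive(Deserialize, Debug)]
struct RawVersionField {
    field_id: i32,
    int_value: Option<i32>,
    enum_value: Option<i32>,
    string_value: Option<String>,
}

// The aggregate is NULL for versions with no fields, hence the Option.
fn decode_version_fields(
    raw: Option<serde_json::Value>,
) -> serde_json::Result<Vec<RawVersionField>> {
    match raw {
        Some(value) => serde_json::from_value(value),
        None => Ok(Vec::new()),
    }
}
```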
@ -1,15 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO game_versions_versions (game_version_id, joining_version_id)\n SELECT * FROM UNNEST($1::integer[], $2::bigint[])\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Int4Array",
        "Int8Array"
      ]
    },
    "nullable": []
  },
  "hash": "fa54ed32004b883daa44eeb413fc2e07b45883608afc6ac91ac6f74736a12256"
}
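The query being deleted here used the `UNNEST($1::integer[], $2::bigint[])` trick to bulk-insert many join rows in a single round trip. A minimal sketch of how that pattern is usually driven from sqlx, assuming a `PgPool` handle; the function is illustrative and not code from this PR:

```rust
use sqlx::PgPool;

// Bulk-insert (game_version_id, version_id) pairs in one statement by binding
// two parallel arrays and unnesting them server-side.
async fn link_game_versions(
    pool: &PgPool,
    game_version_ids: Vec<i32>,
    version_ids: Vec<i64>,
) -> Result<(), sqlx::Error> {
    sqlx::query(
        "INSERT INTO game_versions_versions (game_version_id, joining_version_id)
         SELECT * FROM UNNEST($1::integer[], $2::bigint[])",
    )
    .bind(game_version_ids)
    .bind(version_ids)
    .execute(pool)
    .await?;
    Ok(())
}
```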
@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n published, downloads, icon_url, issues_url,\n source_url, wiki_url, status, requested_status, discord_url,\n client_side, server_side, license_url, license,\n slug, project_type, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8, $9,\n $10, $11, $12, $13, $14,\n $15, $16, $17, $18,\n LOWER($19), $20, $21, $22\n )\n ",
+  "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n published, downloads, icon_url, issues_url,\n source_url, wiki_url, status, requested_status, discord_url,\n license_url, license,\n slug, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8, $9,\n $10, $11, $12, $13, $14,\n $15, $16, \n LOWER($17), $18, $19\n )\n ",
   "describe": {
     "columns": [],
     "parameters": {
@ -19,17 +19,14 @@
       "Varchar",
       "Varchar",
       "Varchar",
-      "Int4",
-      "Int4",
       "Varchar",
       "Varchar",
       "Text",
       "Int4",
-      "Int4",
       "Varchar"
     ]
    },
    "nullable": []
  },
-  "hash": "b36877d60945eaae76680770a5d28d2cbb26cfbb0ec94ecc8f0741f48178ec1c"
+  "hash": "fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019"
 }
151
migrations/20231005230721_dynamic-fields.sql
Normal file
@ -0,0 +1,151 @@
CREATE TABLE games (
    id int PRIMARY KEY, -- Only used in db
    name varchar(64),
    CONSTRAINT unique_game_name UNIQUE (name)
);
INSERT INTO games(id, name) VALUES (1, 'minecraft-java');
INSERT INTO games(id, name) VALUES (2, 'minecraft-bedrock');

ALTER TABLE loaders ADD CONSTRAINT unique_loader_name UNIQUE (loader);

CREATE TABLE loader_field_enums (
    id serial PRIMARY KEY,
    enum_name varchar(64) NOT NULL,
    ordering int NULL,
    hidable BOOLEAN NOT NULL DEFAULT FALSE
);

CREATE TABLE loader_field_enum_values (
    id serial PRIMARY KEY,
    enum_id integer REFERENCES loader_field_enums NOT NULL,
    value varchar(64) NOT NULL,
    ordering int NULL,
    created timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
    -- metadata is json of all the extra data for this enum value
    metadata jsonb NULL,

    original_id integer, -- This is for mapping only- it is dropped before the end of the migration

    CONSTRAINT unique_variant_per_enum UNIQUE (enum_id, value)
);

CREATE TABLE loader_fields (
    id serial PRIMARY KEY,
    field varchar(64) UNIQUE NOT NULL,
    -- "integer", "text", "enum", "bool",
    -- "array_integer", "array_text", "array_enum", "array_bool"
    field_type varchar(64) NOT NULL,
    -- only for enum
    enum_type integer REFERENCES loader_field_enums NULL,
    optional BOOLEAN NOT NULL DEFAULT true,
    -- for int- min/max val, for text- min len, for enum- min items, for bool- nothing
    min_val integer NULL,
    max_val integer NULL
);

CREATE TABLE loader_fields_loaders (
    loader_id integer REFERENCES loaders NOT NULL,
    loader_field_id integer REFERENCES loader_fields NOT NULL,
    CONSTRAINT unique_loader_field UNIQUE (loader_id, loader_field_id)
);

ALTER TABLE loaders ADD COLUMN hidable boolean NOT NULL default false;

CREATE TABLE version_fields (
    version_id bigint REFERENCES versions NOT NULL,
    field_id integer REFERENCES loader_fields NOT NULL,
    -- for int/bool values
    int_value integer NULL,
    enum_value integer REFERENCES loader_field_enum_values NULL,
    string_value text NULL
);

-- Convert side_types
INSERT INTO loader_field_enums (id, enum_name, hidable) VALUES (1, 'side_types', true);
INSERT INTO loader_field_enum_values (original_id, enum_id, value) SELECT id, 1, name FROM side_types st;

INSERT INTO loader_fields (field, field_type, enum_type, optional, min_val, max_val) SELECT 'client_side', 'enum', 1, false, 1, 1;
INSERT INTO loader_fields ( field, field_type, enum_type, optional, min_val, max_val) SELECT 'server_side', 'enum', 1, false, 1, 1;

INSERT INTO loader_fields_loaders (loader_id, loader_field_id) SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'client_side' AND l.loader = ANY( ARRAY['forge', 'fabric', 'quilt', 'modloader','rift','liteloader', 'neoforge']);
INSERT INTO loader_fields_loaders (loader_id, loader_field_id) SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'server_side' AND l.loader = ANY( ARRAY['forge', 'fabric', 'quilt', 'modloader','rift','liteloader', 'neoforge']);

INSERT INTO version_fields (version_id, field_id, enum_value)
    SELECT v.id, 1, m.client_side
    FROM versions v
    INNER JOIN mods m ON v.mod_id = m.id
    INNER JOIN loader_field_enum_values lfev ON m.client_side = lfev.original_id
    WHERE client_side IS NOT NULL AND lfev.enum_id = 1;

INSERT INTO version_fields (version_id, field_id, enum_value)
    SELECT v.id, 1, m.server_side
    FROM versions v
    INNER JOIN mods m ON v.mod_id = m.id
    INNER JOIN loader_field_enum_values lfev ON m.client_side = lfev.original_id
    WHERE server_side IS NOT NULL AND lfev.enum_id = 1;

ALTER TABLE mods DROP COLUMN client_side;
ALTER TABLE mods DROP COLUMN server_side;
DROP TABLE side_types;

-- Convert game_versions
INSERT INTO loader_field_enums (id, enum_name, hidable) VALUES (2, 'game_versions', true);
INSERT INTO loader_field_enum_values (original_id, enum_id, value, created, metadata)
    SELECT id, 2, version, created, json_build_object('type', type, 'major', major) FROM game_versions;

INSERT INTO loader_fields (field, field_type, enum_type, optional, min_val) VALUES('game_versions', 'array_enum', 2, false, 0);

INSERT INTO version_fields(version_id, field_id, enum_value)
    SELECT gvv.joining_version_id, 2, lfev.id
    FROM game_versions_versions gvv INNER JOIN loader_field_enum_values lfev ON gvv.game_version_id = lfev.original_id
    WHERE lfev.enum_id = 2;

ALTER TABLE mods DROP COLUMN loaders;
ALTER TABLE mods DROP COLUMN game_versions;
DROP TABLE game_versions_versions;
DROP TABLE game_versions;

-- Convert project types
-- we are creating a new loader type- 'mrpack'- for minecraft modpacks
INSERT INTO loaders (loader) VALUES ('mrpack');

-- For the loader 'mrpack', we create loader fields for every loader
-- That way we keep information like "this modpack is a fabric modpack"
INSERT INTO loader_field_enums (id, enum_name, hidable) VALUES (3, 'mrpack_loaders', true);
INSERT INTO loader_field_enum_values (original_id, enum_id, value) SELECT id, 2, loader FROM loaders WHERE loader != 'mrpack';
INSERT INTO loader_fields (field, field_type, enum_type, optional, min_val) VALUES('mrpack_loaders', 'array_enum', 3, false, 0);
INSERT INTO loader_fields_loaders (loader_id, loader_field_id)
    SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'mrpack_loaders' AND l.loader = 'mrpack';

INSERT INTO version_fields(version_id, field_id, enum_value)
    SELECT v.id, lf.id, lfev.id
    FROM versions v
    INNER JOIN mods m ON v.mod_id = m.id
    INNER JOIN loaders_versions lv ON v.id = lv.version_id
    INNER JOIN loaders l ON lv.loader_id = l.id
    CROSS JOIN loader_fields lf
    LEFT JOIN loader_field_enum_values lfev ON lf.enum_type = lfev.enum_id
    WHERE m.project_type = (SELECT id FROM project_types WHERE name = 'modpack') AND lf.field = 'mrpack_loaders';

INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) SELECT DISTINCT l.id, pt.id FROM loaders l CROSS JOIN project_types pt WHERE pt.name = 'modpack' AND l.loader = 'mrpack';

--- Non-mrpack loaders no longer support modpacks
DELETE FROM loaders_project_types WHERE joining_loader_id != (SELECT id FROM loaders WHERE loader = 'mrpack') AND joining_project_type_id = (SELECT id FROM project_types WHERE name = 'modpack');

CREATE TABLE loaders_project_types_games (
    loader_id integer REFERENCES loaders NOT NULL,
    project_type_id integer REFERENCES project_types NOT NULL,
    game_id integer REFERENCES games NOT NULL,
    PRIMARY KEY (loader_id, project_type_id, game_id)
);

-- all past loader_project_types are minecraft-java as the only game before this migration is minecraft-java
INSERT INTO loaders_project_types_games (loader_id, project_type_id, game_id) SELECT joining_loader_id, joining_project_type_id, 1 FROM loaders_project_types;

-- Now that loaders are inferred, we can drop the project_type column from mods
ALTER TABLE mods DROP COLUMN project_type;

-- Drop original_id columns
ALTER TABLE loader_field_enum_values DROP COLUMN original_id;
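After this migration, what used to live in the dedicated `game_versions` table is only reachable through `version_fields` and `loader_field_enum_values`. A minimal sketch of reading one version's game versions under the new schema, assuming a `PgPool` handle and using runtime-checked `sqlx::query` rather than the macro; this is illustrative, not code from the PR:

```rust
use sqlx::{PgPool, Row};

// Fetch the `game_versions` values attached to one version through the
// dynamic loader-field tables introduced by this migration.
async fn game_versions_for(pool: &PgPool, version_id: i64) -> Result<Vec<String>, sqlx::Error> {
    let rows = sqlx::query(
        "SELECT lfev.value
         FROM version_fields vf
         INNER JOIN loader_fields lf ON vf.field_id = lf.id
         INNER JOIN loader_field_enum_values lfev ON vf.enum_value = lfev.id
         WHERE vf.version_id = $1 AND lf.field = 'game_versions'",
    )
    .bind(version_id)
    .fetch_all(pool)
    .await?;

    // Each row carries a single text column with the enum value, e.g. "1.20.1".
    rows.iter().map(|row| row.try_get("value")).collect()
}
```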
@ -1,9 +1,9 @@
+use std::collections::HashMap;
+
 use crate::database::redis::RedisPool;
 
 use super::ids::*;
 use super::DatabaseError;
-use chrono::DateTime;
-use chrono::Utc;
 use futures::TryStreamExt;
 use serde::{Deserialize, Serialize};
 
@ -14,29 +14,6 @@ pub struct ProjectType {
     pub name: String,
 }
 
-pub struct SideType {
-    pub id: SideTypeId,
-    pub name: String,
-}
-
-#[derive(Serialize, Deserialize)]
-pub struct Loader {
-    pub id: LoaderId,
-    pub loader: String,
-    pub icon: String,
-    pub supported_project_types: Vec<String>,
-}
-
-#[derive(Clone, Serialize, Deserialize, Debug)]
-pub struct GameVersion {
-    pub id: GameVersionId,
-    pub version: String,
-    #[serde(rename = "type")]
-    pub type_: String,
-    pub created: DateTime<Utc>,
-    pub major: bool,
-}
-
 #[derive(Serialize, Deserialize)]
 pub struct Category {
     pub id: CategoryId,
@ -59,21 +36,32 @@ pub struct DonationPlatform {
 }
 
 impl Category {
-    pub async fn get_id<'a, E>(name: &str, exec: E) -> Result<Option<CategoryId>, DatabaseError>
+    // Gets hashmap of category ids matching a name
+    // Multiple categories can have the same name, but different project types, so we need to return a hashmap
+    // ProjectTypeId -> CategoryId
+    pub async fn get_ids<'a, E>(
+        name: &str,
+        exec: E,
+    ) -> Result<HashMap<ProjectTypeId, CategoryId>, DatabaseError>
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
         let result = sqlx::query!(
             "
-            SELECT id FROM categories
+            SELECT id, project_type FROM categories
             WHERE category = $1
             ",
             name,
         )
-        .fetch_optional(exec)
+        .fetch_all(exec)
         .await?;
 
-        Ok(result.map(|r| CategoryId(r.id)))
+        let mut map = HashMap::new();
+        for r in result {
+            map.insert(ProjectTypeId(r.project_type), CategoryId(r.id));
+        }
+
+        Ok(map)
     }
 
     pub async fn get_id_project<'a, E>(
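Because a category name can now resolve to a different id per project type, callers have to pick the entry for the project type they care about. A minimal usage sketch of the new `get_ids`; the helper function, the `pool` handle, and the `"adventure"` name are assumptions for illustration, not code from this PR:

```rust
use std::collections::HashMap;

// Hypothetical helper showing how the HashMap-returning lookup is consumed;
// Category, ProjectTypeId, CategoryId and DatabaseError are the types from this diff.
async fn resolve_category(
    pool: &sqlx::PgPool,
    project_type_id: ProjectTypeId,
) -> Result<Option<CategoryId>, DatabaseError> {
    let ids: HashMap<ProjectTypeId, CategoryId> = Category::get_ids("adventure", pool).await?;
    // Pick the id registered for this particular project type, if any.
    Ok(ids.get(&project_type_id).cloned())
}
```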
@ -139,221 +127,6 @@ impl Category {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Loader {
|
|
||||||
pub async fn get_id<'a, E>(name: &str, exec: E) -> Result<Option<LoaderId>, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT id FROM loaders
|
|
||||||
WHERE loader = $1
|
|
||||||
",
|
|
||||||
name
|
|
||||||
)
|
|
||||||
.fetch_optional(exec)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(result.map(|r| LoaderId(r.id)))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<Loader>, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
let res: Option<Vec<Loader>> = redis
|
|
||||||
.get_deserialized_from_json(TAGS_NAMESPACE, "loader")
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if let Some(res) = res {
|
|
||||||
return Ok(res);
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT l.id id, l.loader loader, l.icon icon,
|
|
||||||
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types
|
|
||||||
FROM loaders l
|
|
||||||
LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id
|
|
||||||
LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id
|
|
||||||
GROUP BY l.id;
|
|
||||||
"
|
|
||||||
)
|
|
||||||
.fetch_many(exec)
|
|
||||||
.try_filter_map(|e| async {
|
|
||||||
Ok(e.right().map(|x| Loader {
|
|
||||||
id: LoaderId(x.id),
|
|
||||||
loader: x.loader,
|
|
||||||
icon: x.icon,
|
|
||||||
supported_project_types: x
|
|
||||||
.project_types
|
|
||||||
.unwrap_or_default()
|
|
||||||
.iter()
|
|
||||||
.map(|x| x.to_string())
|
|
||||||
.collect(),
|
|
||||||
}))
|
|
||||||
})
|
|
||||||
.try_collect::<Vec<_>>()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
redis
|
|
||||||
.set_serialized_to_json(TAGS_NAMESPACE, "loader", &result, None)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct GameVersionBuilder<'a> {
|
|
||||||
pub version: Option<&'a str>,
|
|
||||||
pub version_type: Option<&'a str>,
|
|
||||||
pub date: Option<&'a DateTime<Utc>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl GameVersion {
|
|
||||||
pub fn builder() -> GameVersionBuilder<'static> {
|
|
||||||
GameVersionBuilder::default()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_id<'a, E>(
|
|
||||||
version: &str,
|
|
||||||
exec: E,
|
|
||||||
) -> Result<Option<GameVersionId>, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT id FROM game_versions
|
|
||||||
WHERE version = $1
|
|
||||||
",
|
|
||||||
version
|
|
||||||
)
|
|
||||||
.fetch_optional(exec)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(result.map(|r| GameVersionId(r.id)))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<GameVersion>, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
let res: Option<Vec<GameVersion>> = redis
|
|
||||||
.get_deserialized_from_json(TAGS_NAMESPACE, "game_version")
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if let Some(res) = res {
|
|
||||||
return Ok(res);
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT gv.id id, gv.version version_, gv.type type_, gv.created created, gv.major FROM game_versions gv
|
|
||||||
ORDER BY created DESC
|
|
||||||
"
|
|
||||||
)
|
|
||||||
.fetch_many(exec)
|
|
||||||
.try_filter_map(|e| async { Ok(e.right().map(|c| GameVersion {
|
|
||||||
id: GameVersionId(c.id),
|
|
||||||
version: c.version_,
|
|
||||||
type_: c.type_,
|
|
||||||
created: c.created,
|
|
||||||
major: c.major
|
|
||||||
})) })
|
|
||||||
.try_collect::<Vec<GameVersion>>()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
redis
|
|
||||||
.set_serialized_to_json(TAGS_NAMESPACE, "game_version", &result, None)
|
|
||||||
.await?;
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn list_filter<'a, E>(
|
|
||||||
version_type_option: Option<&str>,
|
|
||||||
major_option: Option<bool>,
|
|
||||||
exec: E,
|
|
||||||
redis: &RedisPool,
|
|
||||||
) -> Result<Vec<GameVersion>, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
let result = Self::list(exec, redis)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| {
|
|
||||||
let mut bool = true;
|
|
||||||
|
|
||||||
if let Some(version_type) = version_type_option {
|
|
||||||
bool &= &*x.type_ == version_type;
|
|
||||||
}
|
|
||||||
if let Some(major) = major_option {
|
|
||||||
bool &= x.major == major;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> GameVersionBuilder<'a> {
|
|
||||||
/// The game version. Spaces must be replaced with '_' for it to be valid
|
|
||||||
pub fn version(self, version: &'a str) -> Result<GameVersionBuilder<'a>, DatabaseError> {
|
|
||||||
Ok(Self {
|
|
||||||
version: Some(version),
|
|
||||||
..self
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn version_type(
|
|
||||||
self,
|
|
||||||
version_type: &'a str,
|
|
||||||
) -> Result<GameVersionBuilder<'a>, DatabaseError> {
|
|
||||||
Ok(Self {
|
|
||||||
version_type: Some(version_type),
|
|
||||||
..self
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn created(self, created: &'a DateTime<Utc>) -> GameVersionBuilder<'a> {
|
|
||||||
Self {
|
|
||||||
date: Some(created),
|
|
||||||
..self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn insert<'b, E>(self, exec: E) -> Result<GameVersionId, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'b, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
// This looks like a mess, but it *should* work
|
|
||||||
// This allows game versions to be partially updated without
|
|
||||||
// replacing the unspecified fields with defaults.
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"
|
|
||||||
INSERT INTO game_versions (version, type, created)
|
|
||||||
VALUES ($1, COALESCE($2, 'other'), COALESCE($3, timezone('utc', now())))
|
|
||||||
ON CONFLICT (version) DO UPDATE
|
|
||||||
SET type = COALESCE($2, game_versions.type),
|
|
||||||
created = COALESCE($3, game_versions.created)
|
|
||||||
RETURNING id
|
|
||||||
",
|
|
||||||
self.version,
|
|
||||||
self.version_type,
|
|
||||||
self.date.map(chrono::DateTime::naive_utc),
|
|
||||||
)
|
|
||||||
.fetch_one(exec)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(GameVersionId(result.id))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DonationPlatform {
|
impl DonationPlatform {
|
||||||
pub async fn get_id<'a, E>(
|
pub async fn get_id<'a, E>(
|
||||||
id: &str,
|
id: &str,
|
||||||
@ -509,51 +282,3 @@ impl ProjectType {
|
|||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SideType {
|
|
||||||
pub async fn get_id<'a, E>(name: &str, exec: E) -> Result<Option<SideTypeId>, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT id FROM side_types
|
|
||||||
WHERE name = $1
|
|
||||||
",
|
|
||||||
name
|
|
||||||
)
|
|
||||||
.fetch_optional(exec)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(result.map(|r| SideTypeId(r.id)))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<String>, DatabaseError>
|
|
||||||
where
|
|
||||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
|
||||||
{
|
|
||||||
let res: Option<Vec<String>> = redis
|
|
||||||
.get_deserialized_from_json(TAGS_NAMESPACE, "side_type")
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if let Some(res) = res {
|
|
||||||
return Ok(res);
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT name FROM side_types
|
|
||||||
"
|
|
||||||
)
|
|
||||||
.fetch_many(exec)
|
|
||||||
.try_filter_map(|e| async { Ok(e.right().map(|c| c.name)) })
|
|
||||||
.try_collect::<Vec<String>>()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
redis
|
|
||||||
.set_serialized_to_json(TAGS_NAMESPACE, "side_type", &result, None)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@ -202,7 +202,7 @@ pub struct OrganizationId(pub i64);
 #[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)]
 #[sqlx(transparent)]
 pub struct ProjectId(pub i64);
-#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)]
 #[sqlx(transparent)]
 pub struct ProjectTypeId(pub i32);
 
@ -219,10 +219,7 @@ pub struct DonationPlatformId(pub i32);
 #[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)]
 #[sqlx(transparent)]
 pub struct VersionId(pub i64);
-#[derive(Copy, Clone, Debug, Type, Deserialize, Serialize)]
-#[sqlx(transparent)]
-pub struct GameVersionId(pub i32);
-#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)]
 #[sqlx(transparent)]
 pub struct LoaderId(pub i32);
 #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
@ -270,6 +267,18 @@ pub struct SessionId(pub i64);
 #[sqlx(transparent)]
 pub struct ImageId(pub i64);
 
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
+#[sqlx(transparent)]
+pub struct LoaderFieldId(pub i32);
+
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
+#[sqlx(transparent)]
+pub struct LoaderFieldEnumId(pub i32);
+
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
+#[sqlx(transparent)]
+pub struct LoaderFieldEnumValueId(pub i32);
+
 #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
 #[sqlx(transparent)]
 pub struct OAuthClientId(pub i64);
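The extra `PartialEq, Eq, Hash` derives are what let these transparent id newtypes serve directly as `HashMap` keys, as `Category::get_ids` and the loader-field lookups in this diff do. A tiny illustrative sketch, not taken from the PR:

```rust
use std::collections::HashMap;

// With Eq + Hash derived, LoaderFieldId can key a map without being
// unwrapped back into a raw i32 first.
fn index_by_field(values: Vec<(LoaderFieldId, String)>) -> HashMap<LoaderFieldId, String> {
    values.into_iter().collect()
}
```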
208
src/database/models/legacy_loader_fields.rs
Normal file
@ -0,0 +1,208 @@
|
|||||||
|
// In V3, we switched to dynamic loader fields for a better support for more loaders, games, and potential metadata.
|
||||||
|
// This file contains the legacy loader fields, which are still used by V2 projects.
|
||||||
|
// They are still useful to have in several places where minecraft-java functionality is hardcoded- for example,
|
||||||
|
// for fetching data from forge, maven, etc.
|
||||||
|
// These fields only apply to minecraft-java, and are hardcoded to the minecraft-java game.
|
||||||
|
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
loader_fields::{LoaderFieldEnum, LoaderFieldEnumValue, VersionField, VersionFieldValue},
|
||||||
|
DatabaseError, LoaderFieldEnumValueId,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Deserialize, Debug)]
|
||||||
|
pub struct MinecraftGameVersion {
|
||||||
|
pub id: LoaderFieldEnumValueId,
|
||||||
|
pub version: String,
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
pub type_: String,
|
||||||
|
pub created: DateTime<Utc>,
|
||||||
|
pub major: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MinecraftGameVersion {
|
||||||
|
// The name under which this legacy field is stored as a LoaderField
|
||||||
|
pub const FIELD_NAME: &'static str = "game_versions";
|
||||||
|
|
||||||
|
pub fn builder() -> MinecraftGameVersionBuilder<'static> {
|
||||||
|
MinecraftGameVersionBuilder::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn list<'a, E>(
|
||||||
|
exec: E,
|
||||||
|
redis: &RedisPool,
|
||||||
|
) -> Result<Vec<MinecraftGameVersion>, DatabaseError>
|
||||||
|
where
|
||||||
|
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||||
|
{
|
||||||
|
let game_version_enum = LoaderFieldEnum::get(Self::FIELD_NAME, exec, redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
DatabaseError::SchemaError("Could not find game version enum.".to_string())
|
||||||
|
})?;
|
||||||
|
let game_version_enum_values =
|
||||||
|
LoaderFieldEnumValue::list(game_version_enum.id, exec, redis).await?;
|
||||||
|
Ok(game_version_enum_values
|
||||||
|
.into_iter()
|
||||||
|
.map(MinecraftGameVersion::from_enum_value)
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: remove this
|
||||||
|
pub async fn list_transaction(
|
||||||
|
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||||
|
redis: &RedisPool,
|
||||||
|
) -> Result<Vec<MinecraftGameVersion>, DatabaseError> {
|
||||||
|
let game_version_enum = LoaderFieldEnum::get(Self::FIELD_NAME, &mut **transaction, redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
DatabaseError::SchemaError("Could not find game version enum.".to_string())
|
||||||
|
})?;
|
||||||
|
let game_version_enum_values =
|
||||||
|
LoaderFieldEnumValue::list(game_version_enum.id, &mut **transaction, redis).await?;
|
||||||
|
Ok(game_version_enum_values
|
||||||
|
.into_iter()
|
||||||
|
.map(MinecraftGameVersion::from_enum_value)
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tries to create a MinecraftGameVersion from a VersionField
|
||||||
|
// Clones on success
|
||||||
|
pub fn try_from_version_field(
|
||||||
|
version_field: &VersionField,
|
||||||
|
) -> Result<Vec<Self>, DatabaseError> {
|
||||||
|
if version_field.field_name != Self::FIELD_NAME {
|
||||||
|
return Err(DatabaseError::SchemaError(format!(
|
||||||
|
"Field name {} is not {}",
|
||||||
|
version_field.field_name,
|
||||||
|
Self::FIELD_NAME
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
let game_versions = match version_field.clone() {
|
||||||
|
VersionField {
|
||||||
|
value: VersionFieldValue::ArrayEnum(_, values),
|
||||||
|
..
|
||||||
|
} => values.into_iter().map(Self::from_enum_value).collect(),
|
||||||
|
VersionField {
|
||||||
|
value: VersionFieldValue::Enum(_, value),
|
||||||
|
..
|
||||||
|
} => {
|
||||||
|
vec![Self::from_enum_value(value)]
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(DatabaseError::SchemaError(format!(
|
||||||
|
"Game version requires field value to be an enum: {:?}",
|
||||||
|
version_field
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Ok(game_versions)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_enum_value(loader_field_enum_value: LoaderFieldEnumValue) -> MinecraftGameVersion {
|
||||||
|
MinecraftGameVersion {
|
||||||
|
id: loader_field_enum_value.id,
|
||||||
|
version: loader_field_enum_value.value,
|
||||||
|
created: loader_field_enum_value.created,
|
||||||
|
type_: loader_field_enum_value
|
||||||
|
.metadata
|
||||||
|
.get("type")
|
||||||
|
.and_then(|x| x.as_str())
|
||||||
|
.map(|x| x.to_string())
|
||||||
|
.unwrap_or_default(),
|
||||||
|
major: loader_field_enum_value
|
||||||
|
.metadata
|
||||||
|
.get("major")
|
||||||
|
.and_then(|x| x.as_bool())
|
||||||
|
.unwrap_or_default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct MinecraftGameVersionBuilder<'a> {
|
||||||
|
pub version: Option<&'a str>,
|
||||||
|
pub version_type: Option<&'a str>,
|
||||||
|
pub date: Option<&'a DateTime<Utc>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> MinecraftGameVersionBuilder<'a> {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self::default()
|
||||||
|
}
|
||||||
|
/// The game version. Spaces must be replaced with '_' for it to be valid
|
||||||
|
pub fn version(
|
||||||
|
self,
|
||||||
|
version: &'a str,
|
||||||
|
) -> Result<MinecraftGameVersionBuilder<'a>, DatabaseError> {
|
||||||
|
Ok(Self {
|
||||||
|
version: Some(version),
|
||||||
|
..self
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn version_type(
|
||||||
|
self,
|
||||||
|
version_type: &'a str,
|
||||||
|
) -> Result<MinecraftGameVersionBuilder<'a>, DatabaseError> {
|
||||||
|
Ok(Self {
|
||||||
|
version_type: Some(version_type),
|
||||||
|
..self
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn created(self, created: &'a DateTime<Utc>) -> MinecraftGameVersionBuilder<'a> {
|
||||||
|
Self {
|
||||||
|
date: Some(created),
|
||||||
|
..self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn insert<'b, E>(
|
||||||
|
self,
|
||||||
|
exec: E,
|
||||||
|
redis: &RedisPool,
|
||||||
|
) -> Result<LoaderFieldEnumValueId, DatabaseError>
|
||||||
|
where
|
||||||
|
E: sqlx::Executor<'b, Database = sqlx::Postgres> + Copy,
|
||||||
|
{
|
||||||
|
let game_versions_enum = LoaderFieldEnum::get("game_versions", exec, redis)
|
||||||
|
.await?
|
||||||
|
.ok_or(DatabaseError::SchemaError(
|
||||||
|
"Missing loaders field: 'game_versions'".to_string(),
|
||||||
|
))?;
|
||||||
|
|
||||||
|
// Get enum id for game versions
|
||||||
|
let metadata = json!({
|
||||||
|
"type": self.version_type,
|
||||||
|
"major": false
|
||||||
|
});
|
||||||
|
|
||||||
|
// This looks like a mess, but it *should* work
|
||||||
|
// This allows game versions to be partially updated without
|
||||||
|
// replacing the unspecified fields with defaults.
|
||||||
|
let result = sqlx::query!(
|
||||||
|
"
|
||||||
|
INSERT INTO loader_field_enum_values (enum_id, value, created, metadata)
|
||||||
|
VALUES ($1, $2, COALESCE($3, timezone('utc', now())), $4)
|
||||||
|
ON CONFLICT (enum_id, value) DO UPDATE
|
||||||
|
SET metadata = COALESCE($4, loader_field_enum_values.metadata),
|
||||||
|
created = COALESCE($3, loader_field_enum_values.created)
|
||||||
|
RETURNING id
|
||||||
|
",
|
||||||
|
game_versions_enum.id.0,
|
||||||
|
self.version,
|
||||||
|
self.date.map(chrono::DateTime::naive_utc),
|
||||||
|
metadata
|
||||||
|
)
|
||||||
|
.fetch_one(exec)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(LoaderFieldEnumValueId(result.id))
|
||||||
|
}
|
||||||
|
}
|
||||||
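`MinecraftGameVersion` keeps the old game-version helpers alive on top of the dynamic schema. A minimal sketch of driving the builder to upsert a new Minecraft version as a `loader_field_enum_values` row; the `pool` and `redis` handles and the version string are assumptions, and this function is illustrative rather than code from the PR:

```rust
// Upsert "1.20.2" as a game_versions enum value via the legacy-style builder.
// `pool: &sqlx::PgPool` and `redis: &RedisPool` are assumed to exist.
async fn add_game_version(
    pool: &sqlx::PgPool,
    redis: &RedisPool,
) -> Result<LoaderFieldEnumValueId, DatabaseError> {
    MinecraftGameVersion::builder()
        .version("1.20.2")?
        .version_type("release")?
        .insert(pool, redis)
        .await
}
```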
959
src/database/models/loader_fields.rs
Normal file
@ -0,0 +1,959 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::ids::*;
|
||||||
|
use super::DatabaseError;
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
use chrono::DateTime;
|
||||||
|
use chrono::Utc;
|
||||||
|
use futures::TryStreamExt;
|
||||||
|
use itertools::Itertools;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
const LOADER_ID: &str = "loader_id";
|
||||||
|
const LOADERS_LIST_NAMESPACE: &str = "loaders";
|
||||||
|
const LOADER_FIELDS_NAMESPACE: &str = "loader_fields";
|
||||||
|
const LOADER_FIELD_ENUMS_ID_NAMESPACE: &str = "loader_field_enums";
|
||||||
|
const LOADER_FIELD_ENUM_VALUES_NAMESPACE: &str = "loader_field_enum_values";
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Deserialize, Debug, Copy)]
|
||||||
|
pub enum Game {
|
||||||
|
MinecraftJava,
|
||||||
|
// MinecraftBedrock
|
||||||
|
// Future games
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Game {
|
||||||
|
pub fn name(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Game::MinecraftJava => "minecraft-java",
|
||||||
|
// Game::MinecraftBedrock => "minecraft-bedrock"
|
||||||
|
// Future games
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_name(name: &str) -> Option<Game> {
|
||||||
|
match name {
|
||||||
|
"minecraft-java" => Some(Game::MinecraftJava),
|
||||||
|
// "minecraft-bedrock" => Some(Game::MinecraftBedrock)
|
||||||
|
// Future games
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Clone)]
|
||||||
|
pub struct Loader {
|
||||||
|
pub id: LoaderId,
|
||||||
|
pub loader: String,
|
||||||
|
pub icon: String,
|
||||||
|
pub supported_project_types: Vec<String>,
|
||||||
|
pub supported_games: Vec<Game>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Loader {
|
||||||
|
pub async fn get_id<'a, E>(
|
||||||
|
name: &str,
|
||||||
|
exec: E,
|
||||||
|
redis: &RedisPool,
|
||||||
|
) -> Result<Option<LoaderId>, DatabaseError>
|
||||||
|
where
|
||||||
|
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||||
|
{
|
||||||
|
let cached_id: Option<i32> = redis.get_deserialized_from_json(LOADER_ID, name).await?;
|
||||||
|
if let Some(cached_id) = cached_id {
|
||||||
|
return Ok(Some(LoaderId(cached_id)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT id FROM loaders
|
||||||
|
WHERE loader = $1
|
||||||
|
",
|
||||||
|
name
|
||||||
|
)
|
||||||
|
.fetch_optional(exec)
|
||||||
|
.await?
|
||||||
|
.map(|r| LoaderId(r.id));
|
||||||
|
|
||||||
|
if let Some(result) = result {
|
||||||
|
redis
|
||||||
|
.set_serialized_to_json(LOADER_ID, name, &result.0, None)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<Loader>, DatabaseError>
|
||||||
|
where
|
||||||
|
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||||
|
{
|
||||||
|
let cached_loaders: Option<Vec<Loader>> = redis
|
||||||
|
.get_deserialized_from_json(LOADERS_LIST_NAMESPACE, "all")
|
||||||
|
.await?;
|
||||||
|
if let Some(cached_loaders) = cached_loaders {
|
||||||
|
return Ok(cached_loaders);
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT l.id id, l.loader loader, l.icon icon,
|
||||||
|
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
|
||||||
|
ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games
|
||||||
|
FROM loaders l
|
||||||
|
LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id
|
||||||
|
LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id
|
||||||
|
LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id
|
||||||
|
LEFT OUTER JOIN games g ON lptg.game_id = g.id
|
||||||
|
GROUP BY l.id;
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.fetch_many(exec)
|
||||||
|
.try_filter_map(|e| async {
|
||||||
|
Ok(e.right().map(|x| Loader {
|
||||||
|
id: LoaderId(x.id),
|
||||||
|
loader: x.loader,
|
||||||
|
icon: x.icon,
|
||||||
|
supported_project_types: x
|
||||||
|
.project_types
|
||||||
|
.unwrap_or_default()
|
||||||
|
.iter()
|
||||||
|
.map(|x| x.to_string())
|
||||||
|
.collect(),
|
||||||
|
supported_games: x
|
||||||
|
.games
|
||||||
|
.unwrap_or_default()
|
||||||
|
.iter()
|
||||||
|
.filter_map(|x| Game::from_name(x))
|
||||||
|
.collect(),
|
||||||
|
}))
|
||||||
|
})
|
||||||
|
.try_collect::<Vec<_>>()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
redis
|
||||||
|
.set_serialized_to_json(LOADERS_LIST_NAMESPACE, "all", &result, None)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Deserialize, Debug)]
|
||||||
|
pub struct LoaderField {
|
||||||
|
pub id: LoaderFieldId,
|
||||||
|
pub field: String,
|
||||||
|
pub field_type: LoaderFieldType,
|
||||||
|
pub optional: bool,
|
||||||
|
pub min_val: Option<i32>,
|
||||||
|
pub max_val: Option<i32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Deserialize, Debug)]
|
||||||
|
pub enum LoaderFieldType {
|
||||||
|
Integer,
|
||||||
|
Text,
|
||||||
|
Enum(LoaderFieldEnumId),
|
||||||
|
Boolean,
|
||||||
|
ArrayInteger,
|
||||||
|
ArrayText,
|
||||||
|
ArrayEnum(LoaderFieldEnumId),
|
||||||
|
ArrayBoolean,
|
||||||
|
}
|
||||||
|
impl LoaderFieldType {
|
||||||
|
pub fn build(field_type_name: &str, loader_field_enum: Option<i32>) -> Option<LoaderFieldType> {
|
||||||
|
Some(match (field_type_name, loader_field_enum) {
|
||||||
|
("integer", _) => LoaderFieldType::Integer,
|
||||||
|
("text", _) => LoaderFieldType::Text,
|
||||||
|
("boolean", _) => LoaderFieldType::Boolean,
|
||||||
|
("array_integer", _) => LoaderFieldType::ArrayInteger,
|
||||||
|
("array_text", _) => LoaderFieldType::ArrayText,
|
||||||
|
("array_boolean", _) => LoaderFieldType::ArrayBoolean,
|
||||||
|
("enum", Some(id)) => LoaderFieldType::Enum(LoaderFieldEnumId(id)),
|
||||||
|
("array_enum", Some(id)) => LoaderFieldType::ArrayEnum(LoaderFieldEnumId(id)),
|
||||||
|
_ => return None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_str(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
LoaderFieldType::Integer => "integer",
|
||||||
|
LoaderFieldType::Text => "text",
|
||||||
|
LoaderFieldType::Boolean => "boolean",
|
||||||
|
LoaderFieldType::ArrayInteger => "array_integer",
|
||||||
|
LoaderFieldType::ArrayText => "array_text",
|
||||||
|
LoaderFieldType::ArrayBoolean => "array_boolean",
|
||||||
|
LoaderFieldType::Enum(_) => "enum",
|
||||||
|
LoaderFieldType::ArrayEnum(_) => "array_enum",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Deserialize, Debug)]
|
||||||
|
pub struct LoaderFieldEnum {
|
||||||
|
pub id: LoaderFieldEnumId,
|
||||||
|
pub enum_name: String,
|
||||||
|
pub ordering: Option<i32>,
|
||||||
|
pub hidable: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
|
||||||
|
pub struct LoaderFieldEnumValue {
|
||||||
|
pub id: LoaderFieldEnumValueId,
|
||||||
|
pub enum_id: LoaderFieldEnumId,
|
||||||
|
    pub value: String,
    pub ordering: Option<i32>,
    pub created: DateTime<Utc>,
    #[serde(flatten)]
    pub metadata: serde_json::Value,
}

#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct VersionField {
    pub version_id: VersionId,
    pub field_id: LoaderFieldId,
    pub field_name: String,
    pub value: VersionFieldValue,
}
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum VersionFieldValue {
    Integer(i32),
    Text(String),
    Enum(LoaderFieldEnumId, LoaderFieldEnumValue),
    Boolean(bool),
    ArrayInteger(Vec<i32>),
    ArrayText(Vec<String>),
    ArrayEnum(LoaderFieldEnumId, Vec<LoaderFieldEnumValue>),
    ArrayBoolean(Vec<bool>),
}

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct QueryVersionField {
    pub version_id: VersionId,
    pub field_id: LoaderFieldId,
    pub int_value: Option<i32>,
    pub enum_value: Option<LoaderFieldEnumValue>,
    pub string_value: Option<String>,
}

impl QueryVersionField {
    pub fn with_int_value(mut self, int_value: i32) -> Self {
        self.int_value = Some(int_value);
        self
    }

    pub fn with_enum_value(mut self, enum_value: LoaderFieldEnumValue) -> Self {
        self.enum_value = Some(enum_value);
        self
    }

    pub fn with_string_value(mut self, string_value: String) -> Self {
        self.string_value = Some(string_value);
        self
    }
}

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct SideType {
    pub id: SideTypeId,
    pub name: String,
}

impl LoaderField {
    pub async fn get_field<'a, E>(
        field: &str,
        exec: E,
        redis: &RedisPool,
    ) -> Result<Option<LoaderField>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let fields = Self::get_fields(exec, redis).await?;
        Ok(fields.into_iter().find(|f| f.field == field))
    }

    // Gets all fields for a given loader
    // Returns all of them, as there are probably relatively few fields per loader
    // TODO: in the future, this should be to get all fields in relation to something
    // - e.g. get all fields for a given game?
    pub async fn get_fields<'a, E>(
        exec: E,
        redis: &RedisPool,
    ) -> Result<Vec<LoaderField>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let cached_fields = redis
            .get_deserialized_from_json(LOADER_FIELDS_NAMESPACE, 0) // 0 => whatever we search for fields by
            .await?;
        if let Some(cached_fields) = cached_fields {
            return Ok(cached_fields);
        }

        let result = sqlx::query!(
            "
            SELECT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type
            FROM loader_fields lf
            ",
        )
        .fetch_many(exec)
        .try_filter_map(|e| async {
            Ok(e.right().and_then(|r| {
                Some(LoaderField {
                    id: LoaderFieldId(r.id),
                    field_type: LoaderFieldType::build(&r.field_type, r.enum_type)?,
                    field: r.field,
                    optional: r.optional,
                    min_val: r.min_val,
                    max_val: r.max_val,
                })
            }))
        })
        .try_collect::<Vec<LoaderField>>()
        .await?;

        redis
            .set_serialized_to_json(LOADER_FIELDS_NAMESPACE, &0, &result, None)
            .await?;

        Ok(result)
    }
}
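
// Illustrative only (not part of this change): a minimal sketch of resolving a
// loader field through the cache-backed helpers above. The field name
// "game_versions" and the `pool`/`redis` handles are assumptions for the example.
async fn example_get_field(pool: &sqlx::PgPool, redis: &RedisPool) -> Result<(), DatabaseError> {
    // get_field goes through get_fields, which checks Redis first and only then
    // queries loader_fields, re-populating the cache on a miss.
    let field = LoaderField::get_field("game_versions", pool, redis)
        .await?
        .ok_or_else(|| DatabaseError::SchemaError("unknown loader field".to_string()))?;

    // An enum-typed field carries the id of its loader_field_enums row, which can
    // be used to look up the allowed variants (see LoaderFieldEnumValue below).
    match field.field_type {
        LoaderFieldType::Enum(enum_id) | LoaderFieldType::ArrayEnum(enum_id) => {
            let _variants = LoaderFieldEnumValue::list(enum_id, pool, redis).await?;
        }
        _ => {}
    }
    Ok(())
}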

impl LoaderFieldEnum {
    pub async fn get<'a, E>(
        enum_name: &str, // Note: NOT loader field name
        exec: E,
        redis: &RedisPool,
    ) -> Result<Option<LoaderFieldEnum>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let cached_enum = redis
            .get_deserialized_from_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name)
            .await?;
        if let Some(cached_enum) = cached_enum {
            return Ok(cached_enum);
        }

        let result = sqlx::query!(
            "
            SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable
            FROM loader_field_enums lfe
            WHERE lfe.enum_name = $1
            ",
            enum_name
        )
        .fetch_optional(exec)
        .await?
        .map(|l| LoaderFieldEnum {
            id: LoaderFieldEnumId(l.id),
            enum_name: l.enum_name,
            ordering: l.ordering,
            hidable: l.hidable,
        });

        redis
            .set_serialized_to_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name, &result, None)
            .await?;

        Ok(result)
    }
}

impl LoaderFieldEnumValue {
    pub async fn list<'a, E>(
        loader_field_enum_id: LoaderFieldEnumId,
        exec: E,
        redis: &RedisPool,
    ) -> Result<Vec<LoaderFieldEnumValue>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        Ok(Self::list_many(&[loader_field_enum_id], exec, redis)
            .await?
            .into_iter()
            .next()
            .map(|x| x.1)
            .unwrap_or_default())
    }

    pub async fn list_many_loader_fields<'a, E>(
        loader_fields: &[LoaderField],
        exec: E,
        redis: &RedisPool,
    ) -> Result<HashMap<LoaderFieldId, Vec<LoaderFieldEnumValue>>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let get_enum_id = |x: &LoaderField| match x.field_type {
            LoaderFieldType::Enum(id) | LoaderFieldType::ArrayEnum(id) => Some(id),
            _ => None,
        };

        let enum_ids = loader_fields
            .iter()
            .filter_map(|x| get_enum_id(x))
            .collect::<Vec<_>>();
        let values = Self::list_many(&enum_ids, exec, redis)
            .await?
            .into_iter()
            .collect::<HashMap<_, _>>();

        let mut res = HashMap::new();
        for lf in loader_fields {
            if let Some(id) = get_enum_id(lf) {
                res.insert(lf.id, values.get(&id).unwrap_or(&Vec::new()).to_vec());
            }
        }
        Ok(res)
    }

    pub async fn list_many<'a, E>(
        loader_field_enum_ids: &[LoaderFieldEnumId],
        exec: E,
        redis: &RedisPool,
    ) -> Result<Vec<(LoaderFieldEnumId, Vec<LoaderFieldEnumValue>)>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let mut found_enums = Vec::new();
        let mut remaining_enums: Vec<LoaderFieldEnumId> = loader_field_enum_ids.to_vec();

        if !remaining_enums.is_empty() {
            let enums = redis
                .multi_get::<String, _>(
                    LOADER_FIELD_ENUM_VALUES_NAMESPACE,
                    loader_field_enum_ids.iter().map(|x| x.0),
                )
                .await?;

            for lfe in enums {
                if let Some(lfe) = lfe.and_then(|x| {
                    serde_json::from_str::<(LoaderFieldEnumId, Vec<LoaderFieldEnumValue>)>(&x).ok()
                }) {
                    remaining_enums.retain(|x| lfe.0 .0 != x.0);
                    found_enums.push(lfe.1);
                    continue;
                }
            }
        }

        let remaining_enums = remaining_enums.iter().map(|x| x.0).collect::<Vec<_>>();
        let result = sqlx::query!(
            "
            SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values
            WHERE enum_id = ANY($1)
            ",
            &remaining_enums
        )
        .fetch_many(exec)
        .try_filter_map(|e| async {
            Ok(e.right().map(|c| LoaderFieldEnumValue {
                id: LoaderFieldEnumValueId(c.id),
                enum_id: LoaderFieldEnumId(c.enum_id),
                value: c.value,
                ordering: c.ordering,
                created: c.created,
                metadata: c.metadata.unwrap_or_default(),
            }))
        })
        .try_collect::<Vec<LoaderFieldEnumValue>>()
        .await?;

        // Convert from a Vec<LoaderFieldEnumValue> to a Vec<(LoaderFieldEnumId, Vec<LoaderFieldEnumValue>)>
        let cachable_enum_sets: Vec<(LoaderFieldEnumId, Vec<LoaderFieldEnumValue>)> = result
            .clone()
            .into_iter()
            .group_by(|x| x.enum_id)
            .into_iter()
            .map(|(k, v)| (k, v.collect::<Vec<_>>().to_vec()))
            .collect();
        for (k, v) in cachable_enum_sets.iter() {
            redis
                .set_serialized_to_json(LOADER_FIELD_ENUM_VALUES_NAMESPACE, k.0, v, None)
                .await?;
        }

        Ok(cachable_enum_sets)
    }

    // Matches filter against metadata of enum values
    pub async fn list_filter<'a, E>(
        loader_field_enum_id: LoaderFieldEnumId,
        filter: HashMap<String, serde_json::Value>,
        exec: E,
        redis: &RedisPool,
    ) -> Result<Vec<LoaderFieldEnumValue>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let result = Self::list(loader_field_enum_id, exec, redis)
            .await?
            .into_iter()
            .filter(|x| {
                let mut bool = true;
                for (key, value) in filter.iter() {
                    if let Some(metadata_value) = x.metadata.get(key) {
                        bool &= metadata_value == value;
                    } else {
                        bool = false;
                    }
                }
                bool
            })
            .collect();

        Ok(result)
    }
}
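
// Illustrative only (not part of this change): a rough sketch of filtering enum
// values by their metadata, as `list_filter` above does. The enum name
// "game_versions" and the metadata key "type" are assumptions for the example;
// the real names depend on what has been seeded into loader_field_enums.
async fn example_list_release_versions(
    pool: &sqlx::PgPool,
    redis: &RedisPool,
) -> Result<Vec<LoaderFieldEnumValue>, DatabaseError> {
    let game_versions_enum = LoaderFieldEnum::get("game_versions", pool, redis)
        .await?
        .ok_or_else(|| DatabaseError::SchemaError("missing game_versions enum".to_string()))?;

    // Keep only enum values whose flattened metadata contains {"type": "release"}.
    let mut filter = HashMap::new();
    filter.insert("type".to_string(), serde_json::json!("release"));
    LoaderFieldEnumValue::list_filter(game_versions_enum.id, filter, pool, redis).await
}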

impl VersionField {
    pub async fn insert_many(
        items: Vec<Self>,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ) -> Result<(), DatabaseError> {
        let mut query_version_fields = vec![];
        for item in items {
            let base = QueryVersionField {
                version_id: item.version_id,
                field_id: item.field_id,
                int_value: None,
                enum_value: None,
                string_value: None,
            };

            match item.value {
                VersionFieldValue::Integer(i) => {
                    query_version_fields.push(base.clone().with_int_value(i))
                }
                VersionFieldValue::Text(s) => {
                    query_version_fields.push(base.clone().with_string_value(s))
                }
                VersionFieldValue::Boolean(b) => {
                    query_version_fields.push(base.clone().with_int_value(if b { 1 } else { 0 }))
                }
                VersionFieldValue::ArrayInteger(v) => {
                    for i in v {
                        query_version_fields.push(base.clone().with_int_value(i));
                    }
                }
                VersionFieldValue::ArrayText(v) => {
                    for s in v {
                        query_version_fields.push(base.clone().with_string_value(s));
                    }
                }
                VersionFieldValue::ArrayBoolean(v) => {
                    for b in v {
                        query_version_fields.push(base.clone().with_int_value(if b {
                            1
                        } else {
                            0
                        }));
                    }
                }
                VersionFieldValue::Enum(_, v) => {
                    query_version_fields.push(base.clone().with_enum_value(v))
                }
                VersionFieldValue::ArrayEnum(_, v) => {
                    for ev in v {
                        query_version_fields.push(base.clone().with_enum_value(ev));
                    }
                }
            };
        }

        let (field_ids, version_ids, int_values, enum_values, string_values): (
            Vec<_>,
            Vec<_>,
            Vec<_>,
            Vec<_>,
            Vec<_>,
        ) = query_version_fields
            .iter()
            .map(|l| {
                (
                    l.field_id.0,
                    l.version_id.0,
                    l.int_value,
                    l.enum_value.as_ref().map(|e| e.id.0),
                    l.string_value.clone(),
                )
            })
            .multiunzip();

        sqlx::query!(
            "
            INSERT INTO version_fields (field_id, version_id, int_value, string_value, enum_value)
            SELECT * FROM UNNEST($1::integer[], $2::bigint[], $3::integer[], $4::text[], $5::integer[])
            ",
            &field_ids[..],
            &version_ids[..],
            &int_values[..] as &[Option<i32>],
            &string_values[..] as &[Option<String>],
            &enum_values[..] as &[Option<i32>]
        )
        .execute(&mut **transaction)
        .await?;

        Ok(())
    }

    pub fn check_parse(
        version_id: VersionId,
        loader_field: LoaderField,
        value: serde_json::Value,
        enum_variants: Vec<LoaderFieldEnumValue>,
    ) -> Result<VersionField, String> {
        let value = VersionFieldValue::parse(&loader_field, value, enum_variants)?;
        Ok(VersionField {
            version_id,
            field_id: loader_field.id,
            field_name: loader_field.field,
            value,
        })
    }

    pub fn from_query_json(
        version_id: i64,
        loader_fields: Option<serde_json::Value>,
        version_fields: Option<serde_json::Value>,
        loader_field_enum_values: Option<serde_json::Value>,
    ) -> Vec<VersionField> {
        #[derive(Deserialize, Debug)]
        struct JsonLoaderField {
            lf_id: i32,
            field: String,
            field_type: String,
            enum_type: Option<i32>,
            min_val: Option<i32>,
            max_val: Option<i32>,
            optional: bool,
        }

        #[derive(Deserialize, Debug)]
        struct JsonVersionField {
            field_id: i32,
            int_value: Option<i32>,
            enum_value: Option<i32>,
            string_value: Option<String>,
        }

        #[derive(Deserialize, Debug)]
        struct JsonLoaderFieldEnumValue {
            id: i32,
            enum_id: i32,
            value: String,
            ordering: Option<i32>,
            created: DateTime<Utc>,
            metadata: Option<serde_json::Value>,
        }

        let query_loader_fields: Vec<JsonLoaderField> = loader_fields
            .and_then(|x| serde_json::from_value(x).ok())
            .unwrap_or_default();
        let query_version_field_combined: Vec<JsonVersionField> = version_fields
            .and_then(|x| serde_json::from_value(x).ok())
            .unwrap_or_default();
        let query_loader_field_enum_values: Vec<JsonLoaderFieldEnumValue> =
            loader_field_enum_values
                .and_then(|x| serde_json::from_value(x).ok())
                .unwrap_or_default();
        let version_id = VersionId(version_id);
        query_loader_fields
            .into_iter()
            .filter_map(|q| {
                let loader_field_type = match LoaderFieldType::build(&q.field_type, q.enum_type) {
                    Some(lft) => lft,
                    None => return None,
                };
                let loader_field = LoaderField {
                    id: LoaderFieldId(q.lf_id),
                    field: q.field.clone(),
                    field_type: loader_field_type,
                    optional: q.optional,
                    min_val: q.min_val,
                    max_val: q.max_val,
                };
                let values = query_version_field_combined
                    .iter()
                    .filter_map(|qvf| {
                        if qvf.field_id == q.lf_id {
                            let lfev = query_loader_field_enum_values
                                .iter()
                                .find(|x| Some(x.id) == qvf.enum_value);

                            Some(QueryVersionField {
                                version_id,
                                field_id: LoaderFieldId(qvf.field_id),
                                int_value: qvf.int_value,
                                enum_value: lfev.map(|lfev| LoaderFieldEnumValue {
                                    id: LoaderFieldEnumValueId(lfev.id),
                                    enum_id: LoaderFieldEnumId(lfev.enum_id),
                                    value: lfev.value.clone(),
                                    ordering: lfev.ordering,
                                    created: lfev.created,
                                    metadata: lfev.metadata.clone().unwrap_or_default(),
                                }),
                                string_value: qvf.string_value.clone(),
                            })
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<_>>();

                VersionField::build(loader_field, version_id, values).ok()
            })
            .collect()
    }

    pub fn build(
        loader_field: LoaderField,
        version_id: VersionId,
        query_version_fields: Vec<QueryVersionField>,
    ) -> Result<VersionField, DatabaseError> {
        let value = VersionFieldValue::build(&loader_field.field_type, query_version_fields)?;
        Ok(VersionField {
            version_id,
            field_id: loader_field.id,
            field_name: loader_field.field,
            value,
        })
    }
}
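
// Illustrative only (not part of this change): a rough sketch of how a version
// route might validate a submitted field with `check_parse` and then persist it
// with `insert_many` inside a transaction. The field name "game_versions", the
// submitted JSON values, and the surrounding handles are assumptions for the example.
async fn example_store_version_field(
    version_id: VersionId,
    pool: &sqlx::PgPool,
    redis: &RedisPool,
) -> Result<(), DatabaseError> {
    let loader_field = LoaderField::get_field("game_versions", pool, redis)
        .await?
        .ok_or_else(|| DatabaseError::SchemaError("unknown loader field".to_string()))?;

    // Enum-typed fields need their allowed variants for validation; other types don't.
    let enum_variants = match &loader_field.field_type {
        LoaderFieldType::Enum(id) | LoaderFieldType::ArrayEnum(id) => {
            LoaderFieldEnumValue::list(*id, pool, redis).await?
        }
        _ => Vec::new(),
    };

    // User-submitted JSON is checked against the field type and enum variants.
    let field = VersionField::check_parse(
        version_id,
        loader_field,
        serde_json::json!(["1.20.1", "1.20.2"]),
        enum_variants,
    )
    .map_err(DatabaseError::SchemaError)?;

    let mut transaction = pool.begin().await?;
    VersionField::insert_many(vec![field], &mut transaction).await?;
    transaction.commit().await?;
    Ok(())
}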

impl VersionFieldValue {
    // Build from user-submitted JSON data
    // `value` is the submitted value of the field, which we will try to parse into the correct type
    // `enum_array` is the list of valid enum variants for the field, if it is an enum (see LoaderFieldEnumValue::list_many_loader_fields)
    pub fn parse(
        loader_field: &LoaderField,
        value: serde_json::Value,
        enum_array: Vec<LoaderFieldEnumValue>,
    ) -> Result<VersionFieldValue, String> {
        let field_name = &loader_field.field;
        let field_type = &loader_field.field_type;

        let error_value = value.clone();
        let incorrect_type_error = |field_type: &str| {
            format!(
                "Provided value '{v}' for {field_name} could not be parsed to {field_type} ",
                v = serde_json::to_string(&error_value).unwrap_or_default()
            )
        };

        Ok(match field_type {
            LoaderFieldType::Integer => VersionFieldValue::Integer(
                serde_json::from_value(value).map_err(|_| incorrect_type_error("integer"))?,
            ),
            LoaderFieldType::Text => VersionFieldValue::Text(
                value
                    .as_str()
                    .ok_or_else(|| incorrect_type_error("string"))?
                    .to_string(),
            ),
            LoaderFieldType::Boolean => VersionFieldValue::Boolean(
                value
                    .as_bool()
                    .ok_or_else(|| incorrect_type_error("boolean"))?,
            ),
            LoaderFieldType::ArrayInteger => VersionFieldValue::ArrayInteger({
                let array_values: Vec<i32> = serde_json::from_value(value)
                    .map_err(|_| incorrect_type_error("array of integers"))?;
                array_values.into_iter().collect()
            }),
            LoaderFieldType::ArrayText => VersionFieldValue::ArrayText({
                let array_values: Vec<String> = serde_json::from_value(value)
                    .map_err(|_| incorrect_type_error("array of strings"))?;
                array_values.into_iter().collect()
            }),
            LoaderFieldType::ArrayBoolean => VersionFieldValue::ArrayBoolean({
                let array_values: Vec<i64> = serde_json::from_value(value)
                    .map_err(|_| incorrect_type_error("array of booleans"))?;
                array_values.into_iter().map(|v| v != 0).collect()
            }),
            LoaderFieldType::Enum(id) => VersionFieldValue::Enum(*id, {
                let enum_value = value.as_str().ok_or_else(|| incorrect_type_error("enum"))?;
                if let Some(ev) = enum_array.into_iter().find(|v| v.value == enum_value) {
                    ev
                } else {
                    return Err(format!(
                        "Provided value '{enum_value}' is not a valid variant for {field_name}"
                    ));
                }
            }),
            LoaderFieldType::ArrayEnum(id) => VersionFieldValue::ArrayEnum(*id, {
                let array_values: Vec<String> = serde_json::from_value(value)
                    .map_err(|_| incorrect_type_error("array of enums"))?;
                let mut enum_values = vec![];
                for av in array_values {
                    if let Some(ev) = enum_array.iter().find(|v| v.value == av) {
                        enum_values.push(ev.clone());
                    } else {
                        return Err(format!(
                            "Provided value '{av}' is not a valid variant for {field_name}"
                        ));
                    }
                }
                enum_values
            }),
        })
    }

    // Build from internal query data
    // This encapsulates redundant behavior in db query -> object conversions
    pub fn build(
        field_type: &LoaderFieldType,
        qvfs: Vec<QueryVersionField>,
    ) -> Result<VersionFieldValue, DatabaseError> {
        let field_name = field_type.to_str();
        let get_first = |qvfs: Vec<QueryVersionField>| -> Result<QueryVersionField, DatabaseError> {
            if qvfs.len() > 1 {
                return Err(DatabaseError::SchemaError(format!(
                    "Multiple fields for field {}",
                    field_name
                )));
            }
            qvfs.into_iter().next().ok_or_else(|| {
                DatabaseError::SchemaError(format!("No version fields for field {}", field_name))
            })
        };

        let did_not_exist_error = |field_name: &str, desired_field: &str| {
            DatabaseError::SchemaError(format!(
                "Field name {} for field {} does not exist",
                desired_field, field_name
            ))
        };

        Ok(match field_type {
            LoaderFieldType::Integer => VersionFieldValue::Integer(
                get_first(qvfs)?
                    .int_value
                    .ok_or(did_not_exist_error(field_name, "int_value"))?,
            ),
            LoaderFieldType::Text => VersionFieldValue::Text(
                get_first(qvfs)?
                    .string_value
                    .ok_or(did_not_exist_error(field_name, "string_value"))?,
            ),
            LoaderFieldType::Boolean => VersionFieldValue::Boolean(
                get_first(qvfs)?
                    .int_value
                    .ok_or(did_not_exist_error(field_name, "int_value"))?
                    != 0,
            ),
            LoaderFieldType::ArrayInteger => VersionFieldValue::ArrayInteger(
                qvfs.into_iter()
                    .map(|qvf| {
                        qvf.int_value
                            .ok_or(did_not_exist_error(field_name, "int_value"))
                    })
                    .collect::<Result<_, _>>()?,
            ),
            LoaderFieldType::ArrayText => VersionFieldValue::ArrayText(
                qvfs.into_iter()
                    .map(|qvf| {
                        qvf.string_value
                            .ok_or(did_not_exist_error(field_name, "string_value"))
                    })
                    .collect::<Result<_, _>>()?,
            ),
            LoaderFieldType::ArrayBoolean => VersionFieldValue::ArrayBoolean(
                qvfs.into_iter()
                    .map(|qvf| {
                        Ok::<bool, DatabaseError>(
                            qvf.int_value
                                .ok_or(did_not_exist_error(field_name, "int_value"))?
                                != 0,
                        )
                    })
                    .collect::<Result<_, _>>()?,
            ),

            LoaderFieldType::Enum(id) => VersionFieldValue::Enum(
                *id,
                get_first(qvfs)?
                    .enum_value
                    .ok_or(did_not_exist_error(field_name, "enum_value"))?,
            ),
            LoaderFieldType::ArrayEnum(id) => VersionFieldValue::ArrayEnum(
                *id,
                qvfs.into_iter()
                    .map(|qvf| {
                        qvf.enum_value
                            .ok_or(did_not_exist_error(field_name, "enum_value"))
                    })
                    .collect::<Result<_, _>>()?,
            ),
        })
    }

    // Serialize to an internal value, such as for converting to user-facing JSON
    pub fn serialize_internal(&self) -> serde_json::Value {
        match self {
            VersionFieldValue::Integer(i) => serde_json::Value::Number((*i).into()),
            VersionFieldValue::Text(s) => serde_json::Value::String(s.clone()),
            VersionFieldValue::Boolean(b) => serde_json::Value::Bool(*b),
            VersionFieldValue::ArrayInteger(v) => serde_json::Value::Array(
                v.iter()
                    .map(|i| serde_json::Value::Number((*i).into()))
                    .collect(),
            ),
            VersionFieldValue::ArrayText(v) => serde_json::Value::Array(
                v.iter()
                    .map(|s| serde_json::Value::String(s.clone()))
                    .collect(),
            ),
            VersionFieldValue::ArrayBoolean(v) => {
                serde_json::Value::Array(v.iter().map(|b| serde_json::Value::Bool(*b)).collect())
            }
            VersionFieldValue::Enum(_, v) => serde_json::Value::String(v.value.clone()),
            VersionFieldValue::ArrayEnum(_, v) => serde_json::Value::Array(
                v.iter()
                    .map(|v| serde_json::Value::String(v.value.clone()))
                    .collect(),
            ),
        }
    }

    // For conversion to internal string(s), such as for search facets, filtering, or direct hardcoding
    // No matter the type, it will be converted to a Vec<String>, where the non-array types will have a single element
    pub fn as_strings(&self) -> Vec<String> {
        match self {
            VersionFieldValue::Integer(i) => vec![i.to_string()],
            VersionFieldValue::Text(s) => vec![s.clone()],
            VersionFieldValue::Boolean(b) => vec![b.to_string()],
            VersionFieldValue::ArrayInteger(v) => v.iter().map(|i| i.to_string()).collect(),
            VersionFieldValue::ArrayText(v) => v.clone(),
            VersionFieldValue::ArrayBoolean(v) => v.iter().map(|b| b.to_string()).collect(),
            VersionFieldValue::Enum(_, v) => vec![v.value.clone()],
            VersionFieldValue::ArrayEnum(_, v) => v.iter().map(|v| v.value.clone()).collect(),
        }
    }

    pub fn contains_json_value(&self, value: &serde_json::Value) -> bool {
        match self {
            VersionFieldValue::Integer(i) => value.as_i64() == Some(*i as i64),
            VersionFieldValue::Text(s) => value.as_str() == Some(s),
            VersionFieldValue::Boolean(b) => value.as_bool() == Some(*b),
            VersionFieldValue::ArrayInteger(v) => value
                .as_i64()
                .map(|i| v.contains(&(i as i32)))
                .unwrap_or(false),
            VersionFieldValue::ArrayText(v) => value
                .as_str()
                .map(|s| v.contains(&s.to_string()))
                .unwrap_or(false),
            VersionFieldValue::ArrayBoolean(v) => {
                value.as_bool().map(|b| v.contains(&b)).unwrap_or(false)
            }
            VersionFieldValue::Enum(_, v) => value.as_str() == Some(&v.value),
            VersionFieldValue::ArrayEnum(_, v) => value
                .as_str()
                .map(|s| v.iter().any(|v| v.value == s))
                .unwrap_or(false),
        }
    }
}
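
// Illustrative only (not part of this change): a minimal sketch of how a parsed
// field value flattens into facet strings for search indexing and how a raw JSON
// filter value can be matched against it. The enum id and variant below are
// fabricated placeholders, not real database rows.
fn example_facet_strings() {
    let value = VersionFieldValue::ArrayEnum(
        LoaderFieldEnumId(1),
        vec![LoaderFieldEnumValue {
            id: LoaderFieldEnumValueId(10),
            enum_id: LoaderFieldEnumId(1),
            value: "1.20.1".to_string(),
            ordering: None,
            created: Utc::now(),
            metadata: serde_json::json!({ "type": "release" }),
        }],
    );

    // Array and scalar variants alike become a Vec<String> of facet values.
    assert_eq!(value.as_strings(), vec!["1.20.1".to_string()]);
    // A user-supplied JSON filter value can be checked directly against the field.
    assert!(value.contains_json_value(&serde_json::json!("1.20.1")));
}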
@@ -5,6 +5,8 @@ pub mod collection_item;
 pub mod flow_item;
 pub mod ids;
 pub mod image_item;
+pub mod legacy_loader_fields;
+pub mod loader_fields;
 pub mod notification_item;
 pub mod oauth_client_authorization_item;
 pub mod oauth_client_item;
@@ -43,4 +45,6 @@ pub enum DatabaseError {
     RedisPool(#[from] deadpool_redis::PoolError),
     #[error("Error while serializing with the cache: {0}")]
     SerdeCacheError(#[from] serde_json::Error),
+    #[error("Schema error: {0}")]
+    SchemaError(String),
 }
@@ -141,7 +141,6 @@ impl ModCategory {
 #[derive(Clone)]
 pub struct ProjectBuilder {
     pub project_id: ProjectId,
-    pub project_type_id: ProjectTypeId,
     pub team_id: TeamId,
     pub organization_id: Option<OrganizationId>,
     pub title: String,
@@ -158,8 +157,6 @@ pub struct ProjectBuilder {
     pub initial_versions: Vec<super::version_item::VersionBuilder>,
     pub status: ProjectStatus,
     pub requested_status: Option<ProjectStatus>,
-    pub client_side: SideTypeId,
-    pub server_side: SideTypeId,
     pub license: String,
     pub slug: Option<String>,
     pub donation_urls: Vec<DonationUrl>,
@@ -175,7 +172,6 @@ impl ProjectBuilder {
     ) -> Result<ProjectId, DatabaseError> {
         let project_struct = Project {
             id: self.project_id,
-            project_type: self.project_type_id,
             team_id: self.team_id,
             organization_id: self.organization_id,
             title: self.title,
@@ -200,8 +196,6 @@ impl ProjectBuilder {
             wiki_url: self.wiki_url,
             license_url: self.license_url,
             discord_url: self.discord_url,
-            client_side: self.client_side,
-            server_side: self.server_side,
             license: self.license,
             slug: self.slug,
             moderation_message: None,
@@ -210,7 +204,6 @@ impl ProjectBuilder {
             color: self.color,
             monetization_status: self.monetization_status,
             loaders: vec![],
-            game_versions: vec![],
         };
         project_struct.insert(&mut *transaction).await?;
 
@@ -244,16 +237,12 @@ impl ProjectBuilder {
             .collect_vec();
         ModCategory::insert_many(mod_categories, &mut *transaction).await?;
 
-        Project::update_game_versions(self.project_id, &mut *transaction).await?;
-        Project::update_loaders(self.project_id, &mut *transaction).await?;
-
         Ok(self.project_id)
     }
 }
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct Project {
     pub id: ProjectId,
-    pub project_type: ProjectTypeId,
     pub team_id: TeamId,
     pub organization_id: Option<OrganizationId>,
     pub title: String,
@@ -274,8 +263,6 @@ pub struct Project {
     pub wiki_url: Option<String>,
     pub license_url: Option<String>,
     pub discord_url: Option<String>,
-    pub client_side: SideTypeId,
-    pub server_side: SideTypeId,
     pub license: String,
     pub slug: Option<String>,
     pub moderation_message: Option<String>,
@@ -284,7 +271,6 @@ pub struct Project {
     pub color: Option<u32>,
     pub monetization_status: MonetizationStatus,
     pub loaders: Vec<String>,
-    pub game_versions: Vec<String>,
 }
 
 impl Project {
@@ -298,15 +284,15 @@ impl Project {
                 id, team_id, title, description, body,
                 published, downloads, icon_url, issues_url,
                 source_url, wiki_url, status, requested_status, discord_url,
-                client_side, server_side, license_url, license,
-                slug, project_type, color, monetization_status
+                license_url, license,
+                slug, color, monetization_status
             )
             VALUES (
                 $1, $2, $3, $4, $5,
                 $6, $7, $8, $9,
                 $10, $11, $12, $13, $14,
-                $15, $16, $17, $18,
-                LOWER($19), $20, $21, $22
+                $15, $16,
+                LOWER($17), $18, $19
             )
             ",
             self.id as ProjectId,
@@ -323,12 +309,9 @@ impl Project {
             self.status.as_str(),
             self.requested_status.map(|x| x.as_str()),
             self.discord_url.as_ref(),
-            self.client_side as SideTypeId,
-            self.server_side as SideTypeId,
             self.license_url.as_ref(),
             &self.license,
             self.slug.as_ref(),
-            self.project_type as ProjectTypeId,
             self.color.map(|x| x as i32),
             self.monetization_status.as_str(),
         )
@@ -552,7 +535,6 @@ impl Project {
                 .flatten()
                 .collect(),
         );
-
         if !project_ids.is_empty() {
             let projects = redis
                 .multi_get::<String, _>(PROJECTS_NAMESPACE, project_ids)
@@ -571,31 +553,31 @@ impl Project {
                 }
             }
         }
 
         if !remaining_strings.is_empty() {
             let project_ids_parsed: Vec<i64> = remaining_strings
                 .iter()
                 .flat_map(|x| parse_base62(&x.to_string()).ok())
                 .map(|x| x as i64)
                 .collect();
 
             let db_projects: Vec<QueryProject> = sqlx::query!(
                 "
-                SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
+                SELECT m.id id, m.title title, m.description description, m.downloads downloads, m.follows follows,
                 m.icon_url icon_url, m.body body, m.published published,
                 m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,
                 m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
-                m.team_id team_id, m.organization_id organization_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
-                cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, m.webhook_sent, m.color,
-                t.id thread_id, m.monetization_status monetization_status, m.loaders loaders, m.game_versions game_versions,
+                m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
+                m.webhook_sent, m.color,
+                t.id thread_id, m.monetization_status monetization_status,
+                ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
+                ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
+                ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,
                 ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
                 ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
                 JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,
                 JSONB_AGG(DISTINCT jsonb_build_object('image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering)) filter (where mg.image_url is not null) gallery,
                 JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations
                 FROM mods m
-                INNER JOIN project_types pt ON pt.id = m.project_type
-                INNER JOIN side_types cs ON m.client_side = cs.id
-                INNER JOIN side_types ss ON m.server_side = ss.id
                 INNER JOIN threads t ON t.mod_id = m.id
                 LEFT JOIN mods_gallery mg ON mg.mod_id = m.id
                 LEFT JOIN mods_donations md ON md.joining_mod_id = m.id
@@ -603,8 +585,14 @@ impl Project {
                 LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
                 LEFT JOIN categories c ON mc.joining_category_id = c.id
                 LEFT JOIN versions v ON v.mod_id = m.id AND v.status = ANY($3)
+                LEFT JOIN loaders_versions lv ON lv.version_id = v.id
+                LEFT JOIN loaders l on lv.loader_id = l.id
+                LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
+                LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id
+                LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id
+                LEFT JOIN games g ON lptg.game_id = g.id
                 WHERE m.id = ANY($1) OR m.slug = ANY($2)
-                GROUP BY pt.id, cs.id, ss.id, t.id, m.id;
+                GROUP BY t.id, m.id;
                 ",
                 &project_ids_parsed,
                 &remaining_strings.into_iter().map(|x| x.to_string().to_lowercase()).collect::<Vec<_>>(),
@@ -614,11 +602,9 @@ impl Project {
             .try_filter_map(|e| async {
                 Ok(e.right().map(|m| {
                     let id = m.id;
-
                     QueryProject {
                         inner: Project {
                             id: ProjectId(id),
-                            project_type: ProjectTypeId(m.project_type),
                             team_id: TeamId(m.team_id),
                             organization_id: m.organization_id.map(OrganizationId),
                             title: m.title.clone(),
@@ -633,14 +619,12 @@ impl Project {
                             wiki_url: m.wiki_url.clone(),
                             license_url: m.license_url.clone(),
                             discord_url: m.discord_url.clone(),
-                            client_side: SideTypeId(m.client_side),
                             status: ProjectStatus::from_string(
                                 &m.status,
                             ),
                             requested_status: m.requested_status.map(|x| ProjectStatus::from_string(
                                 &x,
                             )),
-                            server_side: SideTypeId(m.server_side),
                             license: m.license.clone(),
                             slug: m.slug.clone(),
                             body: m.body.clone(),
@@ -654,12 +638,12 @@ impl Project {
                             monetization_status: MonetizationStatus::from_string(
                                 &m.monetization_status,
                             ),
-                            loaders: m.loaders,
-                            game_versions: m.game_versions,
+                            loaders: m.loaders.unwrap_or_default(),
                         },
-                        project_type: m.project_type_name,
                         categories: m.categories.unwrap_or_default(),
                         additional_categories: m.additional_categories.unwrap_or_default(),
+                        project_types: m.project_types.unwrap_or_default(),
+                        games: m.games.unwrap_or_default(),
                         versions: {
                             #[derive(Deserialize)]
                             struct Version {
@@ -674,7 +658,6 @@ impl Project {
                                 .unwrap_or_default();
 
                             versions.sort_by(|a, b| a.date_published.cmp(&b.date_published));
-
                             versions.into_iter().map(|x| x.id).collect()
                         },
                         gallery_items: {
@@ -689,8 +672,6 @@ impl Project {
                         donation_urls: serde_json::from_value(
                             m.donations.unwrap_or_default(),
                         ).ok().unwrap_or_default(),
-                        client_side: crate::models::projects::SideType::from_string(&m.client_side_type),
-                        server_side: crate::models::projects::SideType::from_string(&m.server_side_type),
                         thread_id: ThreadId(m.thread_id),
                     }}))
             })
@@ -768,56 +749,6 @@ impl Project {
         Ok(dependencies)
     }
 
-    pub async fn update_game_versions(
-        id: ProjectId,
-        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
-    ) -> Result<(), sqlx::error::Error> {
-        sqlx::query!(
-            "
-            UPDATE mods
-            SET game_versions = (
-                SELECT COALESCE(ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null), array[]::varchar[])
-                FROM versions v
-                INNER JOIN game_versions_versions gvv ON v.id = gvv.joining_version_id
-                INNER JOIN game_versions gv on gvv.game_version_id = gv.id
-                WHERE v.mod_id = mods.id AND v.status != ALL($2)
-            )
-            WHERE id = $1
-            ",
-            id as ProjectId,
-            &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::<Vec<String>>()
-        )
-        .execute(&mut **transaction)
-        .await?;
-
-        Ok(())
-    }
-
-    pub async fn update_loaders(
-        id: ProjectId,
-        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
-    ) -> Result<(), sqlx::error::Error> {
-        sqlx::query!(
-            "
-            UPDATE mods
-            SET loaders = (
-                SELECT COALESCE(ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null), array[]::varchar[])
-                FROM versions v
-                INNER JOIN loaders_versions lv ON lv.version_id = v.id
-                INNER JOIN loaders l on lv.loader_id = l.id
-                WHERE v.mod_id = mods.id AND v.status != ALL($2)
-            )
-            WHERE id = $1
-            ",
-            id as ProjectId,
-            &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::<Vec<String>>()
-        )
-        .execute(&mut **transaction)
-        .await?;
-
-        Ok(())
-    }
-
     pub async fn clear_cache(
         id: ProjectId,
         slug: Option<String>,
@@ -845,13 +776,12 @@ impl Project {
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct QueryProject {
     pub inner: Project,
-    pub project_type: String,
     pub categories: Vec<String>,
     pub additional_categories: Vec<String>,
     pub versions: Vec<VersionId>,
+    pub project_types: Vec<String>,
+    pub games: Vec<String>,
     pub donation_urls: Vec<DonationUrl>,
     pub gallery_items: Vec<GalleryItem>,
-    pub client_side: crate::models::projects::SideType,
-    pub server_side: crate::models::projects::SideType,
     pub thread_id: ThreadId,
 }
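
A rough sketch (illustrative only, not part of this change) of the aggregation the ARRAY_AGG(DISTINCT ...) columns above perform over the versions -> loaders -> project_types -> games join chain; the row values below are hypothetical:

fn example_aggregate(rows: &[(&str, &str, &str)]) -> (Vec<String>, Vec<String>, Vec<String>) {
    use std::collections::BTreeSet;
    // Each row is a hypothetical (loader, project_type, game) tuple produced by the joins.
    let loaders: BTreeSet<_> = rows.iter().map(|r| r.0.to_string()).collect();
    let project_types: BTreeSet<_> = rows.iter().map(|r| r.1.to_string()).collect();
    let games: BTreeSet<_> = rows.iter().map(|r| r.2.to_string()).collect();
    (
        loaders.into_iter().collect(),
        project_types.into_iter().collect(),
        games.into_iter().collect(),
    )
}

For example, rows [("fabric", "mod", "minecraft-java"), ("quilt", "mod", "minecraft-java")] would collapse to loaders ["fabric", "quilt"], project_types ["mod"], and games ["minecraft-java"], mirroring the distinct-aggregated columns consumed by QueryProject.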
@@ -1,4 +1,5 @@
 use super::ids::*;
+use super::loader_fields::VersionField;
 use super::DatabaseError;
 use crate::database::redis::RedisPool;
 use crate::models::projects::{FileType, VersionStatus};
@@ -9,7 +10,7 @@ use std::cmp::Ordering;
 use std::collections::HashMap;
 use std::iter;
 
-const VERSIONS_NAMESPACE: &str = "versions";
+pub const VERSIONS_NAMESPACE: &str = "versions";
 const VERSION_FILES_NAMESPACE: &str = "versions_files";
 
 #[derive(Clone)]
@@ -22,8 +23,8 @@ pub struct VersionBuilder {
     pub changelog: String,
     pub files: Vec<VersionFileBuilder>,
     pub dependencies: Vec<DependencyBuilder>,
-    pub game_versions: Vec<GameVersionId>,
     pub loaders: Vec<LoaderId>,
+    pub version_fields: Vec<VersionField>,
     pub version_type: String,
     pub featured: bool,
     pub status: VersionStatus,
@@ -234,7 +235,6 @@ impl VersionBuilder {
         let VersionBuilder {
             dependencies,
             loaders,
-            game_versions,
             files,
             version_id,
             ..
@@ -249,17 +249,13 @@ impl VersionBuilder {
             .collect_vec();
         LoaderVersion::insert_many(loader_versions, transaction).await?;
 
-        let game_version_versions = game_versions
-            .iter()
-            .map(|v| VersionVersion::new(*v, version_id))
-            .collect_vec();
-        VersionVersion::insert_many(game_version_versions, transaction).await?;
+        VersionField::insert_many(self.version_fields, transaction).await?;
 
         Ok(self.version_id)
     }
 }
 
-#[derive(derive_new::new)]
+#[derive(derive_new::new, Serialize, Deserialize)]
 pub struct LoaderVersion {
     pub loader_id: LoaderId,
     pub version_id: VersionId,
@@ -289,36 +285,6 @@ impl LoaderVersion {
     }
 }
 
-#[derive(derive_new::new)]
-pub struct VersionVersion {
-    pub game_version_id: GameVersionId,
-    pub joining_version_id: VersionId,
-}
-
-impl VersionVersion {
-    pub async fn insert_many(
-        items: Vec<Self>,
-        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
-    ) -> Result<(), DatabaseError> {
-        let (game_version_ids, version_ids): (Vec<_>, Vec<_>) = items
-            .into_iter()
-            .map(|i| (i.game_version_id.0, i.joining_version_id.0))
-            .unzip();
-        sqlx::query!(
-            "
-            INSERT INTO game_versions_versions (game_version_id, joining_version_id)
-            SELECT * FROM UNNEST($1::integer[], $2::bigint[])
-            ",
-            &game_version_ids[..],
-            &version_ids[..],
-        )
-        .execute(&mut **transaction)
-        .await?;
-
-        Ok(())
-    }
-}
-
 #[derive(Clone, Deserialize, Serialize, PartialEq, Eq)]
 pub struct Version {
     pub id: VersionId,
@@ -401,8 +367,8 @@ impl Version {
 
         sqlx::query!(
             "
-            DELETE FROM game_versions_versions gvv
-            WHERE gvv.joining_version_id = $1
+            DELETE FROM version_fields vf
+            WHERE vf.version_id = $1
             ",
             id as VersionId,
         )
@@ -494,14 +460,11 @@ impl Version {
         .execute(&mut **transaction)
         .await?;
 
-        crate::database::models::Project::update_game_versions(
-            ProjectId(project_id.mod_id),
-            &mut *transaction,
-        )
-        .await?;
-        crate::database::models::Project::update_loaders(
+        crate::database::models::Project::clear_cache(
             ProjectId(project_id.mod_id),
-            &mut *transaction,
+            None,
+            None,
+            redis,
         )
         .await?;
 
@@ -559,19 +522,59 @@ impl Version {
            SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
            v.changelog changelog, v.date_published date_published, v.downloads downloads,
            v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering,
-           JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
            ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
+           ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
+           ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,
            JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,
            JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
-           JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
+           JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies,
+
+           JSONB_AGG(
+               DISTINCT jsonb_build_object(
+                   'field_id', vf.field_id,
+                   'int_value', vf.int_value,
+                   'enum_value', vf.enum_value,
+                   'string_value', vf.string_value
+               )
+           ) filter (where vf.field_id is not null) version_fields,
+           JSONB_AGG(
+               DISTINCT jsonb_build_object(
+                   'lf_id', lf.id,
+                   'loader_name', l.loader,
+                   'field', lf.field,
+                   'field_type', lf.field_type,
+                   'enum_type', lf.enum_type,
+                   'min_val', lf.min_val,
+                   'max_val', lf.max_val,
+                   'optional', lf.optional
+               )
+           ) filter (where lf.id is not null) loader_fields,
+           JSONB_AGG(
+               DISTINCT jsonb_build_object(
+                   'id', lfev.id,
+                   'enum_id', lfev.enum_id,
+                   'value', lfev.value,
+                   'ordering', lfev.ordering,
+                   'created', lfev.created,
+                   'metadata', lfev.metadata
+               )
+           ) filter (where lfev.id is not null) loader_field_enum_values
+
            FROM versions v
-           LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
-           LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
           LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
           LEFT OUTER JOIN loaders l on lv.loader_id = l.id
+           LEFT OUTER JOIN loaders_project_types lpt on l.id = lpt.joining_loader_id
+           LEFT JOIN project_types pt on lpt.joining_project_type_id = pt.id
+           LEFT OUTER JOIN loaders_project_types_games lptg on l.id = lptg.loader_id AND pt.id = lptg.project_type_id
+           LEFT JOIN games g on lptg.game_id = g.id
           LEFT OUTER JOIN files f on v.id = f.version_id
           LEFT OUTER JOIN hashes h on f.id = h.file_id
           LEFT OUTER JOIN dependencies d on v.id = d.dependent_id
+           LEFT OUTER JOIN version_fields vf on v.id = vf.version_id
+           LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id
+           LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id
+           LEFT OUTER JOIN loader_field_enum_values lfev on lfe.id = lfev.enum_id
+
           WHERE v.id = ANY($1)
           GROUP BY v.id
           ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC;
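
A rough sketch (illustrative only, not part of this change) of the JSON shapes the three JSONB_AGG columns above produce and how they feed VersionField::from_query_json in the hunk below. The concrete values and the "array_enum" field_type string are assumptions for the example; the accepted field_type strings are whatever LoaderFieldType::build recognizes in loader_fields.

fn example_from_query_json() {
    // One loader field definition, one stored value row, and one enum value row,
    // mirroring the 'loader_fields', 'version_fields', and 'loader_field_enum_values' aggregates.
    let loader_fields = serde_json::json!([{
        "lf_id": 1, "loader_name": "fabric", "field": "game_versions",
        "field_type": "array_enum", "enum_type": 1,
        "min_val": null, "max_val": null, "optional": false
    }]);
    let version_fields = serde_json::json!([{
        "field_id": 1, "int_value": null, "enum_value": 10, "string_value": null
    }]);
    let loader_field_enum_values = serde_json::json!([{
        "id": 10, "enum_id": 1, "value": "1.20.1", "ordering": null,
        "created": "2023-01-01T00:00:00Z", "metadata": { "type": "release" }
    }]);

    // Extra keys such as 'loader_name' are simply ignored during deserialization.
    let _fields: Vec<VersionField> = VersionField::from_query_json(
        1,
        Some(loader_fields),
        Some(version_fields),
        Some(loader_field_enum_values),
    );
}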
@ -664,24 +667,10 @@ impl Version {
|
|||||||
|
|
||||||
files
|
files
|
||||||
},
|
},
|
||||||
game_versions: {
|
version_fields: VersionField::from_query_json(v.id, v.loader_fields, v.version_fields, v.loader_field_enum_values),
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct GameVersion {
|
|
||||||
pub version: String,
|
|
||||||
pub created: DateTime<Utc>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut game_versions: Vec<GameVersion> = serde_json::from_value(
|
|
||||||
v.game_versions.unwrap_or_default(),
|
|
||||||
)
|
|
||||||
.ok()
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
game_versions.sort_by(|a, b| a.created.cmp(&b.created));
|
|
||||||
|
|
||||||
game_versions.into_iter().map(|x| x.version).collect()
|
|
||||||
},
|
|
||||||
loaders: v.loaders.unwrap_or_default(),
|
loaders: v.loaders.unwrap_or_default(),
|
||||||
|
project_types: v.project_types.unwrap_or_default(),
|
||||||
|
games: v.games.unwrap_or_default(),
|
||||||
dependencies: serde_json::from_value(
|
dependencies: serde_json::from_value(
|
||||||
v.dependencies.unwrap_or_default(),
|
v.dependencies.unwrap_or_default(),
|
||||||
)
|
)
|
||||||
@ -751,7 +740,6 @@ impl Version {
|
|||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
for file in files {
|
for file in files {
|
||||||
if let Some(mut file) =
|
if let Some(mut file) =
|
||||||
file.and_then(|x| serde_json::from_str::<Vec<SingleFile>>(&x).ok())
|
file.and_then(|x| serde_json::from_str::<Vec<SingleFile>>(&x).ok())
|
||||||
@ -861,8 +849,10 @@ pub struct QueryVersion {
|
|||||||
pub inner: Version,
|
pub inner: Version,
|
||||||
|
|
||||||
pub files: Vec<QueryFile>,
|
pub files: Vec<QueryFile>,
|
||||||
pub game_versions: Vec<String>,
|
pub version_fields: Vec<VersionField>,
|
||||||
pub loaders: Vec<String>,
|
pub loaders: Vec<String>,
|
||||||
|
pub project_types: Vec<String>,
|
||||||
|
pub games: Vec<String>,
|
||||||
pub dependencies: Vec<QueryDependency>,
|
pub dependencies: Vec<QueryDependency>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -135,7 +135,7 @@ pub fn app_setup(
         }
     });

-    scheduler::schedule_versions(&mut scheduler, pool.clone());
+    scheduler::schedule_versions(&mut scheduler, pool.clone(), redis_pool.clone());

     let session_queue = web::Data::new(AuthQueue::new());

@@ -159,7 +159,7 @@ pub fn app_setup(

     let reader = maxmind.clone();
     {
-        let reader_ref = reader.clone();
+        let reader_ref = reader;
         scheduler.run(std::time::Duration::from_secs(60 * 60 * 24), move || {
             let reader_ref = reader_ref.clone();
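The second hunk drops a redundant clone: the local `reader` is already a fresh handle, so it can be moved into the scheduled closure directly, while the closure still clones per tick. A small self-contained sketch of that pattern with std::sync::Arc; the scheduler here is a stand-in, not the project's real one.

use std::sync::Arc;

fn run_periodically<F: FnMut() + Send + 'static>(mut task: F) {
    // Stand-in for scheduler.run(...): invoke the closure a few times.
    for _ in 0..3 {
        task();
    }
}

fn main() {
    let reader = Arc::new(String::from("maxmind database handle"));
    // Move the handle into the closure; clone again inside so every tick
    // gets its own Arc without needing the outer binding anymore.
    let reader_ref = reader;
    run_periodically(move || {
        let per_tick = reader_ref.clone();
        println!("tick with {}", per_tick);
    });
}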
@@ -1,16 +1,19 @@
-pub mod analytics;
-pub mod collections;
-pub mod error;
-pub mod ids;
-pub mod images;
-pub mod notifications;
-pub mod oauth_clients;
-pub mod organizations;
-pub mod pack;
-pub mod pats;
-pub mod projects;
-pub mod reports;
-pub mod sessions;
-pub mod teams;
-pub mod threads;
-pub mod users;
+pub mod v2;
+pub mod v3;
+
+pub use v3::analytics;
+pub use v3::collections;
+pub use v3::error;
+pub use v3::ids;
+pub use v3::images;
+pub use v3::notifications;
+pub use v3::oauth_clients;
+pub use v3::organizations;
+pub use v3::pack;
+pub use v3::pats;
+pub use v3::projects;
+pub use v3::reports;
+pub use v3::sessions;
+pub use v3::teams;
+pub use v3::threads;
+pub use v3::users;
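Because the old submodules become re-exports of the v3 module, existing paths such as crate::models::projects keep compiling unchanged. A tiny self-contained sketch of that re-export pattern (the module and struct names are illustrative only).

mod models {
    pub mod v3 {
        pub mod projects {
            pub struct Project {
                pub title: String,
            }
        }
    }
    // The old path `models::projects` still resolves, now pointing at v3.
    pub use self::v3::projects;
}

fn main() {
    let p = models::projects::Project {
        title: "example".to_string(),
    };
    println!("{}", p.title);
}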
2
src/models/v2/mod.rs
Normal file
@@ -0,0 +1,2 @@
// Legacy models from V2, where it's useful to keep the struct for rerouting/conversion
pub mod projects;
307
src/models/v2/projects.rs
Normal file
@@ -0,0 +1,307 @@
use super::super::ids::OrganizationId;
use super::super::teams::TeamId;
use super::super::users::UserId;
use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
use crate::database::models::{version_item, DatabaseError};
use crate::database::redis::RedisPool;
use crate::models::ids::{ProjectId, VersionId};
use crate::models::projects::{
    Dependency, DonationLink, GalleryItem, License, Loader, ModeratorMessage, MonetizationStatus,
    Project, ProjectStatus, Version, VersionFile, VersionStatus, VersionType,
};
use crate::models::threads::ThreadId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

/// A project returned from the API
#[derive(Serialize, Deserialize, Clone)]
pub struct LegacyProject {
    /// Relevant V2 fields- these were removed or modified in V3,
    /// and are now part of the dynamic fields system
    /// The support range for the client project
    pub client_side: LegacySideType,
    /// The support range for the server project
    pub server_side: LegacySideType,
    /// A list of game versions this project supports
    pub game_versions: Vec<String>,

    // All other fields are the same as V3.
    // If they change, or their constituent types change, we may need to
    // add a new struct for them here.
    pub id: ProjectId,
    pub slug: Option<String>,
    pub project_type: String,
    pub team: TeamId,
    pub organization: Option<OrganizationId>,
    pub title: String,
    pub description: String,
    pub body: String,
    pub body_url: Option<String>,
    pub published: DateTime<Utc>,
    pub updated: DateTime<Utc>,
    pub approved: Option<DateTime<Utc>>,
    pub queued: Option<DateTime<Utc>>,
    pub status: ProjectStatus,
    pub requested_status: Option<ProjectStatus>,
    pub moderator_message: Option<ModeratorMessage>,
    pub license: License,
    pub downloads: u32,
    pub followers: u32,
    pub categories: Vec<String>,
    pub additional_categories: Vec<String>,
    pub loaders: Vec<String>,
    pub versions: Vec<VersionId>,
    pub icon_url: Option<String>,
    pub issues_url: Option<String>,
    pub source_url: Option<String>,
    pub wiki_url: Option<String>,
    pub discord_url: Option<String>,
    pub donation_urls: Option<Vec<DonationLink>>,
    pub gallery: Vec<GalleryItem>,
    pub color: Option<u32>,
    pub thread_id: ThreadId,
    pub monetization_status: MonetizationStatus,
}

impl LegacyProject {
    // Convert from a standard V3 project to a V2 project
    // Requires any queried versions to be passed in, to get access to certain version fields contained within.
    // - This can be any version, because the fields are ones that used to be on the project itself.
    // - It's conceivable that certain V3 projects that have many different ones may not have the same fields on all of them.
    // TODO: Should this return an error instead for v2 users?
    // It's safe to use a db version_item for this as the only info is side types, game versions, and loader fields (for loaders), which used to be public on project anyway.
    pub fn from(data: Project, versions_item: Option<version_item::QueryVersion>) -> Self {
        let mut client_side = LegacySideType::Unknown;
        let mut server_side = LegacySideType::Unknown;
        let mut game_versions = Vec::new();

        // V2 versions only have one project type; v3 versions can rarely have multiple.
        // We'll just use the first one.
        let mut project_type = data.project_types.get(0).cloned().unwrap_or_default();
        let mut loaders = data.loaders;

        if let Some(versions_item) = versions_item {
            client_side = versions_item
                .version_fields
                .iter()
                .find(|f| f.field_name == "client_side")
                .and_then(|f| {
                    Some(LegacySideType::from_string(
                        f.value.serialize_internal().as_str()?,
                    ))
                })
                .unwrap_or(LegacySideType::Unknown);
            server_side = versions_item
                .version_fields
                .iter()
                .find(|f| f.field_name == "server_side")
                .and_then(|f| {
                    Some(LegacySideType::from_string(
                        f.value.serialize_internal().as_str()?,
                    ))
                })
                .unwrap_or(LegacySideType::Unknown);
            game_versions = versions_item
                .version_fields
                .iter()
                .find(|f| f.field_name == "game_versions")
                .and_then(|f| MinecraftGameVersion::try_from_version_field(f).ok())
                .map(|v| v.into_iter().map(|v| v.version).collect())
                .unwrap_or(Vec::new());

            // - if loader is mrpack, this is a modpack
            // the loaders are whatever the corresponding loader fields are
            if versions_item.loaders == vec!["mrpack".to_string()] {
                project_type = "modpack".to_string();
                if let Some(mrpack_loaders) = versions_item
                    .version_fields
                    .iter()
                    .find(|f| f.field_name == "mrpack_loaders")
                {
                    loaders = mrpack_loaders.value.as_strings();
                }
            }
        }

        Self {
            id: data.id,
            slug: data.slug,
            project_type,
            team: data.team,
            organization: data.organization,
            title: data.title,
            description: data.description,
            body: data.body,
            body_url: data.body_url,
            published: data.published,
            updated: data.updated,
            approved: data.approved,
            queued: data.queued,
            status: data.status,
            requested_status: data.requested_status,
            moderator_message: data.moderator_message,
            license: data.license,
            downloads: data.downloads,
            followers: data.followers,
            categories: data.categories,
            additional_categories: data.additional_categories,
            loaders,
            versions: data.versions,
            icon_url: data.icon_url,
            issues_url: data.issues_url,
            source_url: data.source_url,
            wiki_url: data.wiki_url,
            discord_url: data.discord_url,
            donation_urls: data.donation_urls,
            gallery: data.gallery,
            color: data.color,
            thread_id: data.thread_id,
            monetization_status: data.monetization_status,
            client_side,
            server_side,
            game_versions,
        }
    }

    // Because `from` needs a version_item, this is a helper function to get many from one db query.
    pub async fn from_many<'a, E>(
        data: Vec<Project>,
        exec: E,
        redis: &RedisPool,
    ) -> Result<Vec<Self>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let version_ids: Vec<_> = data
            .iter()
            .filter_map(|p| p.versions.get(0).map(|i| (*i).into()))
            .collect();
        let example_versions = version_item::Version::get_many(&version_ids, exec, redis).await?;
        let mut legacy_projects = Vec::new();
        for project in data {
            let version_item = example_versions
                .iter()
                .find(|v| v.inner.project_id == project.id.into())
                .cloned();
            let project = LegacyProject::from(project, version_item);
            legacy_projects.push(project);
        }
        Ok(legacy_projects)
    }
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum LegacySideType {
    Required,
    Optional,
    Unsupported,
    Unknown,
}

impl std::fmt::Display for LegacySideType {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{}", self.as_str())
    }
}

impl LegacySideType {
    // These are constant, so this can remove unnecessary allocations (`to_string`)
    pub fn as_str(&self) -> &'static str {
        match self {
            LegacySideType::Required => "required",
            LegacySideType::Optional => "optional",
            LegacySideType::Unsupported => "unsupported",
            LegacySideType::Unknown => "unknown",
        }
    }

    pub fn from_string(string: &str) -> LegacySideType {
        match string {
            "required" => LegacySideType::Required,
            "optional" => LegacySideType::Optional,
            "unsupported" => LegacySideType::Unsupported,
            _ => LegacySideType::Unknown,
        }
    }
}

/// A specific version of a project
#[derive(Serialize, Deserialize, Clone)]
pub struct LegacyVersion {
    /// Relevant V2 fields- these were removed or modified in V3,
    /// and are now part of the dynamic fields system
    /// A list of game versions this project supports
    pub game_versions: Vec<String>,
    /// A list of loaders this project supports
    pub loaders: Vec<Loader>,

    // TODO: remove this once we have v3 testing, as this is a v3 field and tests for it should be isolated to v3
    pub ordering: Option<i32>,

    pub id: VersionId,
    pub project_id: ProjectId,
    pub author_id: UserId,
    pub featured: bool,
    pub name: String,
    pub version_number: String,
    pub changelog: String,
    pub changelog_url: Option<String>,
    pub date_published: DateTime<Utc>,
    pub downloads: u32,
    pub version_type: VersionType,
    pub status: VersionStatus,
    pub requested_status: Option<VersionStatus>,
    pub files: Vec<VersionFile>,
    pub dependencies: Vec<Dependency>,
}

impl From<Version> for LegacyVersion {
    fn from(data: Version) -> Self {
        let mut game_versions = Vec::new();
        if let Some(value) = data.fields.get("game_versions").and_then(|v| v.as_array()) {
            for gv in value {
                if let Some(game_version) = gv.as_str() {
                    game_versions.push(game_version.to_string());
                }
            }
        }

        // - if loader is mrpack, this is a modpack
        // the v2 loaders are whatever the corresponding loader fields are
        let mut loaders = data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
        if loaders == vec!["mrpack".to_string()] {
            if let Some((_, mrpack_loaders)) = data
                .fields
                .into_iter()
                .find(|(key, _)| key == "mrpack_loaders")
            {
                if let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders) {
                    loaders = mrpack_loaders;
                }
            }
        }
        let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>();

        Self {
            id: data.id,
            project_id: data.project_id,
            author_id: data.author_id,
            featured: data.featured,
            name: data.name,
            version_number: data.version_number,
            changelog: data.changelog,
            changelog_url: data.changelog_url,
            date_published: data.date_published,
            downloads: data.downloads,
            version_type: data.version_type,
            status: data.status,
            requested_status: data.requested_status,
            files: data.files,
            dependencies: data.dependencies,
            game_versions,
            ordering: data.ordering,
            loaders,
        }
    }
}
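The From<Version> impl above recovers the v2 game_versions list by reading the flattened loader-field map on the v3 version. A minimal standalone sketch of that extraction, using serde_json and a plain HashMap as a stand-in for Version::fields.

use std::collections::HashMap;

fn main() {
    // Stand-in for Version::fields after the v3 flattening: loader fields
    // keyed by name, with raw JSON values.
    let mut fields: HashMap<String, serde_json::Value> = HashMap::new();
    fields.insert(
        "game_versions".to_string(),
        serde_json::json!(["1.19.4", "1.20.1"]),
    );

    // Same walk the conversion performs to rebuild the v2-shaped list.
    let mut game_versions = Vec::new();
    if let Some(value) = fields.get("game_versions").and_then(|v| v.as_array()) {
        for gv in value {
            if let Some(game_version) = gv.as_str() {
                game_versions.push(game_version.to_string());
            }
        }
    }
    assert_eq!(game_versions, vec!["1.19.4", "1.20.1"]);
}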
16
src/models/v3/mod.rs
Normal file
@@ -0,0 +1,16 @@
pub mod analytics;
pub mod collections;
pub mod error;
pub mod ids;
pub mod images;
pub mod notifications;
pub mod oauth_clients;
pub mod organizations;
pub mod pack;
pub mod pats;
pub mod projects;
pub mod reports;
pub mod sessions;
pub mod teams;
pub mod threads;
pub mod users;
@@ -132,9 +132,7 @@ impl Scopes {
     }

     pub fn parse_from_oauth_scopes(scopes: &str) -> Result<Scopes, bitflags::parser::ParseError> {
-        let scopes = scopes
-            .replace(['+', ' '], "|")
-            .replace("%20", "|");
+        let scopes = scopes.replace(['+', ' '], "|").replace("%20", "|");
         bitflags::parser::from_str(&scopes)
     }

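The hunk only collapses the replace chain onto one line; the behaviour is unchanged: OAuth scope strings may arrive separated by '+', spaces, or a literal "%20", and the bitflags parser wants '|'. A minimal sketch of just that normalization step, without the Scopes type itself.

// Sketch of the normalization performed before bitflags parsing.
fn normalize_scopes(scopes: &str) -> String {
    scopes.replace(['+', ' '], "|").replace("%20", "|")
}

fn main() {
    assert_eq!(normalize_scopes("USER_READ+PROJECT_READ"), "USER_READ|PROJECT_READ");
    assert_eq!(normalize_scopes("USER_READ%20PROJECT_READ"), "USER_READ|PROJECT_READ");
}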
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 use super::ids::{Base62Id, OrganizationId};
 use super::teams::TeamId;
 use super::users::UserId;
@@ -27,8 +29,10 @@ pub struct Project {
     pub id: ProjectId,
     /// The slug of a project, used for vanity URLs
     pub slug: Option<String>,
-    /// The project type of the project
-    pub project_type: String,
+    /// The aggregated project types of the versions of this project
+    pub project_types: Vec<String>,
+    /// The aggregated games of the versions of this project
+    pub games: Vec<String>,
     /// The team of people that has ownership of this project.
     pub team: TeamId,
     /// The optional organization of people that have ownership of this project.
@@ -66,11 +70,6 @@ pub struct Project {
     /// The license of this project
     pub license: License,
-
-    /// The support range for the client project
-    pub client_side: SideType,
-    /// The support range for the server project
-    pub server_side: SideType,

     /// The total number of downloads the project has had.
     pub downloads: u32,
     /// The total number of followers this project has accumulated
@@ -81,8 +80,6 @@ pub struct Project {

     /// A list of the categories that the project is in.
     pub additional_categories: Vec<String>,
-    /// A list of game versions this project supports
-    pub game_versions: Vec<String>,
     /// A list of loaders this project supports
     pub loaders: Vec<String>,

@@ -120,7 +117,8 @@ impl From<QueryProject> for Project {
         Self {
             id: m.id.into(),
             slug: m.slug,
-            project_type: data.project_type,
+            project_types: data.project_types,
+            games: data.games,
             team: m.team_id.into(),
             organization: m.organization_id.map(|i| i.into()),
             title: m.title,
@@ -162,13 +160,10 @@ impl From<QueryProject> for Project {
                 },
                 url: m.license_url,
             },
-            client_side: data.client_side,
-            server_side: data.server_side,
             downloads: m.downloads as u32,
             followers: m.follows as u32,
             categories: data.categories,
             additional_categories: data.additional_categories,
-            game_versions: m.game_versions,
             loaders: m.loaders,
             versions: data.versions.into_iter().map(|v| v.into()).collect(),
             icon_url: m.icon_url,
@@ -462,11 +457,14 @@ pub struct Version {
     pub author_id: UserId,
     /// Whether the version is featured or not
     pub featured: bool,

     /// The name of this version
     pub name: String,
     /// The version number. Ideally will follow semantic versioning
     pub version_number: String,
+    /// Project types that this version is compatible with, extracted from Loader
+    pub project_types: Vec<String>,
+    /// Games that this version is compatible with, extracted from Loader/Project types
+    pub games: Vec<String>,
     /// The changelog for this version of the project.
     pub changelog: String,
     /// A link to the changelog for this version of the project. Deprecated, always None
@@ -487,26 +485,40 @@ pub struct Version {
     pub files: Vec<VersionFile>,
     /// A list of projects that this version depends on.
     pub dependencies: Vec<Dependency>,
-    /// A list of versions of Minecraft that this version of the project supports.
-    pub game_versions: Vec<GameVersion>,
     /// The loaders that this version works on
     pub loaders: Vec<Loader>,
     /// Ordering override, lower is returned first
     pub ordering: Option<i32>,

+    // All other fields are loader-specific VersionFields
+    // These are flattened during serialization
+    #[serde(deserialize_with = "skip_nulls")]
+    #[serde(flatten)]
+    pub fields: HashMap<String, serde_json::Value>,
+}
+
+pub fn skip_nulls<'de, D>(deserializer: D) -> Result<HashMap<String, serde_json::Value>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    let mut map = HashMap::deserialize(deserializer)?;
+    map.retain(|_, v: &mut serde_json::Value| !v.is_null());
+    Ok(map)
 }

 impl From<QueryVersion> for Version {
     fn from(data: QueryVersion) -> Version {
         let v = data.inner;

         Version {
             id: v.id.into(),
             project_id: v.project_id.into(),
             author_id: v.author_id.into(),

             featured: v.featured,
             name: v.name,
             version_number: v.version_number,
+            project_types: data.project_types,
+            games: data.games,
             changelog: v.changelog,
             changelog_url: None,
             date_published: v.date_published,
@@ -543,8 +555,14 @@ impl From<QueryVersion> for Version {
                     dependency_type: DependencyType::from_string(d.dependency_type.as_str()),
                 })
                 .collect(),
-            game_versions: data.game_versions.into_iter().map(GameVersion).collect(),
             loaders: data.loaders.into_iter().map(Loader).collect(),
+            // Only add the internal component of the field for display,
+            // i.e. "game_versions": ["1.2.3"] instead of "game_versions": ArrayEnum(...)
+            fields: data
+                .version_fields
+                .into_iter()
+                .map(|vf| (vf.field_name, vf.value.serialize_internal()))
+                .collect(),
         }
     }
 }
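The skip_nulls helper above guards the flattened fields map: loader fields that are absent come back through serde as JSON nulls and should simply be dropped rather than kept as null entries. A small standalone sketch of the same retain step over a deserialized map, using serde_json only.

use std::collections::HashMap;

fn main() {
    let raw = serde_json::json!({
        "game_versions": ["1.20.1"],
        "client_side": "required",
        "server_side": null
    });
    let mut map: HashMap<String, serde_json::Value> =
        serde_json::from_value(raw).unwrap();
    // Same filter skip_nulls applies during deserialization.
    map.retain(|_, v| !v.is_null());
    assert!(map.contains_key("game_versions"));
    assert!(!map.contains_key("server_side"));
}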
@@ -658,7 +676,7 @@ pub struct VersionFile {

 /// A dependency which describes what versions are required, break support, or are optional to the
 /// version's functionality
-#[derive(Serialize, Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone, Debug)]
 pub struct Dependency {
     /// The specific version id that the dependency uses
     pub version_id: Option<VersionId>,
@@ -670,7 +688,7 @@ pub struct Dependency {
     pub dependency_type: DependencyType,
 }

-#[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq)]
+#[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq, Debug)]
 #[serde(rename_all = "lowercase")]
 pub enum VersionType {
     Release,
@@ -695,7 +713,7 @@ impl VersionType {
     }
 }

-#[derive(Serialize, Deserialize, Copy, Clone)]
+#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
 #[serde(rename_all = "lowercase")]
 pub enum DependencyType {
     Required,
@@ -766,19 +784,14 @@ impl FileType {
     }
 }

-/// A specific version of Minecraft
-#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
-#[serde(transparent)]
-pub struct GameVersion(pub String);
-
 /// A project loader
-#[derive(Serialize, Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
 #[serde(transparent)]
 pub struct Loader(pub String);

 // These fields must always succeed parsing; deserialize errors aren't
 // processed correctly (don't return JSON errors)
-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct SearchRequest {
     pub query: Option<String>,
     pub offset: Option<String>,
@@ -787,7 +800,7 @@ pub struct SearchRequest {

     pub new_filters: Option<String>,

-    // Deprecated values below. WILL BE REMOVED V3!
+    // TODO: Deprecated values below. WILL BE REMOVED V3!
     pub facets: Option<String>,
     pub filters: Option<String>,
     pub version: Option<String>,
@@ -1,4 +1,5 @@
-use crate::database::models::categories::Loader;
+use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
+use crate::database::models::loader_fields::Loader;
 use crate::database::models::project_item::QueryProject;
 use crate::database::models::version_item::{QueryFile, QueryVersion};
 use crate::database::redis::RedisPool;
@@ -22,6 +23,8 @@ pub fn config(cfg: &mut web::ServiceConfig) {
     cfg.service(version_file);
 }

+// TODO: These were modified in v3 and should be tested
+
 #[derive(Default, Debug, Clone, YaSerialize)]
 #[yaserde(root = "metadata", rename = "metadata")]
 pub struct Metadata {
@@ -198,8 +201,19 @@ async fn find_version(
         if !loaders.is_empty() {
             bool &= x.loaders.iter().any(|y| loaders.contains(y));
         }

+        // For maven in particular, we will hardcode it to use GameVersions rather than generic loader fields, as this is minecraft-java exclusive
         if !game_versions.is_empty() {
-            bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
+            let version_game_versions = x
+                .version_fields
+                .clone()
+                .into_iter()
+                .find_map(|v| MinecraftGameVersion::try_from_version_field(&v).ok());
+            if let Some(version_game_versions) = version_game_versions {
+                bool &= version_game_versions
+                    .iter()
+                    .any(|y| game_versions.contains(&y.version));
+            }
         }

         bool
@@ -216,7 +230,6 @@ async fn find_version(
 fn find_file<'a>(
     project_id: &str,
     vcoords: &str,
-    project: &QueryProject,
     version: &'a QueryVersion,
     file: &str,
 ) -> Option<&'a QueryFile> {
@@ -224,21 +237,27 @@ fn find_file<'a>(
         return Some(selected_file);
     }

-    let fileext = match project.project_type.as_str() {
-        "mod" => "jar",
-        "modpack" => "mrpack",
-        _ => return None,
-    };
-
-    if file == format!("{}-{}.{}", &project_id, &vcoords, fileext) {
-        version
-            .files
-            .iter()
-            .find(|x| x.primary)
-            .or_else(|| version.files.iter().last())
-    } else {
-        None
-    }
+    // Minecraft mods are not going to be both a mod and a modpack, so this minecraft-specific handling is fine
+    // As there can be multiple project types, returns the first allowable match
+    let mut fileexts = vec![];
+    for project_type in version.project_types.iter() {
+        match project_type.as_str() {
+            "mod" => fileexts.push("jar"),
+            "modpack" => fileexts.push("mrpack"),
+            _ => (),
+        }
+    }
+
+    for fileext in fileexts {
+        if file == format!("{}-{}.{}", &project_id, &vcoords, fileext) {
+            return version
+                .files
+                .iter()
+                .find(|x| x.primary)
+                .or_else(|| version.files.iter().last());
+        }
+    }
+    None
 }
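The rewritten find_file derives the candidate file extensions from the version's project types and tries each expected filename in turn. A pure, self-contained sketch of that lookup logic; the project id and coordinates below are invented example values.

fn expected_filenames(project_id: &str, vcoords: &str, project_types: &[&str]) -> Vec<String> {
    // Map each project type to the file extensions it may publish.
    let mut fileexts = vec![];
    for project_type in project_types {
        match *project_type {
            "mod" => fileexts.push("jar"),
            "modpack" => fileexts.push("mrpack"),
            _ => (),
        }
    }
    // Build the filenames the maven route would accept for this version.
    fileexts
        .into_iter()
        .map(|ext| format!("{}-{}.{}", project_id, vcoords, ext))
        .collect()
}

fn main() {
    let names = expected_filenames("sodium", "0.5.3", &["mod", "modpack"]);
    assert_eq!(names, vec!["sodium-0.5.3.jar", "sodium-0.5.3.mrpack"]);
}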
 #[route(
@@ -297,7 +316,7 @@ pub async fn version_file(
         return Ok(HttpResponse::Ok()
             .content_type("text/xml")
             .body(yaserde::ser::to_string(&respdata).map_err(ApiError::Xml)?));
-    } else if let Some(selected_file) = find_file(&project_id, &vnum, &project, &version, &file) {
+    } else if let Some(selected_file) = find_file(&project_id, &vnum, &version, &file) {
         return Ok(HttpResponse::TemporaryRedirect()
             .append_header(("location", &*selected_file.url))
             .body(""));
@@ -342,7 +361,7 @@ pub async fn version_file_sha1(
         return Ok(HttpResponse::NotFound().body(""));
     }

-    Ok(find_file(&project_id, &vnum, &project, &version, &file)
+    Ok(find_file(&project_id, &vnum, &version, &file)
         .and_then(|file| file.hashes.get("sha1"))
         .map(|hash_str| HttpResponse::Ok().body(hash_str.clone()))
         .unwrap_or_else(|| HttpResponse::NotFound().body("")))
@@ -384,7 +403,7 @@ pub async fn version_file_sha512(
         return Ok(HttpResponse::NotFound().body(""));
     }

-    Ok(find_file(&project_id, &vnum, &project, &version, &file)
+    Ok(find_file(&project_id, &vnum, &version, &file)
         .and_then(|file| file.hashes.get("sha512"))
         .map(|hash_str| HttpResponse::Ok().body(hash_str.clone()))
         .unwrap_or_else(|| HttpResponse::NotFound().body("")))
@@ -11,6 +11,8 @@ use futures::FutureExt;
 pub mod v2;
 pub mod v3;

+pub mod v2_reroute;
+
 mod analytics;
 mod index;
 mod maven;
@@ -118,6 +120,8 @@ pub enum ApiError {
     PasswordStrengthCheck(#[from] zxcvbn::ZxcvbnError),
     #[error("{0}")]
     Mail(#[from] crate::auth::email::MailError),
+    #[error("Error while rerouting request: {0}")]
+    Reroute(#[from] reqwest::Error),
 }

 impl actix_web::ResponseError for ApiError {
@@ -144,6 +148,7 @@ impl actix_web::ResponseError for ApiError {
             ApiError::PasswordHashing(..) => StatusCode::INTERNAL_SERVER_ERROR,
             ApiError::PasswordStrengthCheck(..) => StatusCode::BAD_REQUEST,
             ApiError::Mail(..) => StatusCode::INTERNAL_SERVER_ERROR,
+            ApiError::Reroute(..) => StatusCode::INTERNAL_SERVER_ERROR,
         }
     }

@@ -171,6 +176,7 @@ impl actix_web::ResponseError for ApiError {
                 ApiError::PasswordStrengthCheck(..) => "strength_check_error",
                 ApiError::Mail(..) => "mail_error",
                 ApiError::Clickhouse(..) => "clickhouse_error",
+                ApiError::Reroute(..) => "reroute_error",
             },
             description: &self.to_string(),
         })
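The new ApiError::Reroute variant follows the same pattern as the existing variants: a #[from] conversion so `?` can bubble the wrapped error up directly. A generic sketch of that pattern, assuming the thiserror crate that these #[error]/#[from] attributes come from, with std::io::Error standing in for reqwest::Error so the example stays self-contained.

// Sketch only: the enum and function names here are illustrative.
#[derive(thiserror::Error, Debug)]
enum ApiErrorSketch {
    #[error("Error while rerouting request: {0}")]
    Reroute(#[from] std::io::Error),
}

fn reroute() -> Result<(), ApiErrorSketch> {
    // The io::Error from the failed read is converted into Reroute by `?`.
    let _contents = std::fs::read_to_string("missing-file")?;
    Ok(())
}

fn main() {
    if let Err(e) = reroute() {
        println!("{}", e);
    }
}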
@@ -6,6 +6,7 @@ use sqlx::PgPool;

 use crate::auth::{filter_authorized_versions, get_user_from_headers, is_authorized};
 use crate::database;
+use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
 use crate::database::redis::RedisPool;
 use crate::models::pats::Scopes;
 use crate::models::projects::VersionType;
@@ -95,19 +96,29 @@ pub async fn forge_updates(
     };

     for version in versions {
+        // For forge in particular, we will hardcode it to use GameVersions rather than generic loader fields, as this is minecraft-java exclusive
+        // Will have duplicates between game_versions (for non-forge loaders), but that's okay as
+        // before v3 this was stored to the project and not the version
+        let game_versions: Vec<String> = version
+            .fields
+            .iter()
+            .find(|(key, _)| key.as_str() == MinecraftGameVersion::FIELD_NAME)
+            .and_then(|(_, value)| serde_json::from_value::<Vec<String>>(value.clone()).ok())
+            .unwrap_or_default();
+
         if version.version_type == VersionType::Release {
-            for game_version in &version.game_versions {
+            for game_version in &game_versions {
                 response
                     .promos
-                    .entry(format!("{}-recommended", game_version.0))
+                    .entry(format!("{}-recommended", game_version))
                     .or_insert_with(|| version.version_number.clone());
             }
         }

-        for game_version in &version.game_versions {
+        for game_version in &game_versions {
             response
                 .promos
-                .entry(format!("{}-latest", game_version.0))
+                .entry(format!("{}-latest", game_version))
                 .or_insert_with(|| version.version_number.clone());
         }
     }
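The loop above fills a promos map keyed by "<game version>-recommended" and "<game version>-latest", where the first version written for a key wins. A compact standalone sketch of that map-building step; the game versions and version number are invented example values.

use std::collections::HashMap;

fn main() {
    let game_versions = vec!["1.20.1".to_string(), "1.20.2".to_string()];
    let version_number = "2.0.0".to_string();
    let mut promos: HashMap<String, String> = HashMap::new();
    for game_version in &game_versions {
        // `entry(...).or_insert_with(...)` keeps the first version seen per key.
        promos
            .entry(format!("{}-recommended", game_version))
            .or_insert_with(|| version_number.clone());
        promos
            .entry(format!("{}-latest", game_version))
            .or_insert_with(|| version_number.clone());
    }
    assert_eq!(promos.get("1.20.1-latest"), Some(&"2.0.0".to_string()));
}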
@@ -9,6 +9,7 @@ use crate::queue::analytics::AnalyticsQueue;
 use crate::queue::maxmind::MaxMindIndexer;
 use crate::queue::session::AuthQueue;
 use crate::routes::ApiError;
+use crate::search::SearchConfig;
 use crate::util::date::get_current_tenths_of_ms;
 use crate::util::guards::admin_key_guard;
 use crate::util::routes::read_from_payload;
@@ -27,7 +28,8 @@ pub fn config(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::scope("admin")
             .service(count_download)
-            .service(trolley_webhook),
+            .service(trolley_webhook)
+            .service(force_reindex),
     );
 }

@@ -308,3 +310,13 @@ pub async fn trolley_webhook(

     Ok(HttpResponse::NoContent().finish())
 }
+
+#[post("/_force_reindex", guard = "admin_key_guard")]
+pub async fn force_reindex(
+    pool: web::Data<PgPool>,
+    config: web::Data<SearchConfig>,
+) -> Result<HttpResponse, ApiError> {
+    use crate::search::indexing::index_projects;
+    index_projects(pool.as_ref().clone(), &config).await?;
+    Ok(HttpResponse::NoContent().finish())
+}
@@ -1,24 +1,12 @@
 use super::ApiError;
 use crate::database::redis::RedisPool;
-use crate::{
-    auth::{filter_authorized_projects, filter_authorized_versions, get_user_from_headers},
-    database::models::{project_item, user_item, version_item},
-    models::{
-        ids::{
-            base62_impl::{parse_base62, to_base62},
-            ProjectId, VersionId,
-        },
-        pats::Scopes,
-    },
-    queue::session::AuthQueue,
-};
+use crate::routes::v3;
+use crate::{models::ids::VersionId, queue::session::AuthQueue};
 use actix_web::{get, web, HttpRequest, HttpResponse};
-use chrono::{DateTime, Duration, Utc};
+use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
-use sqlx::postgres::types::PgInterval;
 use sqlx::PgPool;
 use std::collections::HashMap;
-use std::convert::TryInto;

 pub fn config(cfg: &mut web::ServiceConfig) {
     cfg.service(
@@ -76,66 +64,22 @@ pub async fn playtimes_get(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ANALYTICS]),
+    let data = data.into_inner();
+    v3::analytics_get::playtimes_get(
+        req,
+        clickhouse,
+        web::Query(v3::analytics_get::GetData {
+            project_ids: data.project_ids,
+            version_ids: data.version_ids,
+            start_date: data.start_date,
+            end_date: data.end_date,
+            resolution_minutes: data.resolution_minutes,
+        }),
+        session_queue,
+        pool,
+        redis,
     )
     .await
-    .map(|x| x.1)?;
-
-    let project_ids = data
-        .project_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-    let version_ids = data
-        .version_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-
-    if project_ids.is_some() && version_ids.is_some() {
-        return Err(ApiError::InvalidInput(
-            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
-        ));
-    }
-
-    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
-    let end_date = data.end_date.unwrap_or(Utc::now());
-    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
-
-    // Convert String list to list of ProjectIds or VersionIds
-    // - Filter out unauthorized projects/versions
-    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
-    let (project_ids, version_ids) =
-        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
-
-    // Get the views
-    let playtimes = crate::clickhouse::fetch_playtimes(
-        project_ids,
-        version_ids,
-        start_date,
-        end_date,
-        resolution_minutes,
-        clickhouse.into_inner(),
-    )
-    .await?;
-
-    let mut hm = HashMap::new();
-    for playtime in playtimes {
-        let id_string = to_base62(playtime.id);
-        if !hm.contains_key(&id_string) {
-            hm.insert(id_string.clone(), HashMap::new());
-        }
-        if let Some(hm) = hm.get_mut(&id_string) {
-            hm.insert(playtime.time, playtime.total_seconds);
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(hm))
 }

 /// Get view data for a set of projects or versions
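The rewritten v2 handler above no longer does any work of its own: it repackages the v2 query parameters into the v3 GetData struct and delegates to the v3 implementation, and the remaining analytics handlers below follow the same shape. A simplified sketch of that delegation pattern; the structs and functions here are plain stand-ins, not the actual actix-web signatures.

struct V2Query {
    project_ids: Option<String>,
    start_date: Option<String>,
}

struct V3GetData {
    project_ids: Option<String>,
    version_ids: Option<String>,
    start_date: Option<String>,
}

fn v3_playtimes_get(data: V3GetData) -> String {
    format!(
        "v3 handler: project_ids={:?}, version_ids={:?}, start_date={:?}",
        data.project_ids, data.version_ids, data.start_date
    )
}

fn v2_playtimes_get(data: V2Query) -> String {
    // The v2 layer only translates parameters and forwards the call.
    v3_playtimes_get(V3GetData {
        project_ids: data.project_ids,
        version_ids: None,
        start_date: data.start_date,
    })
}

fn main() {
    let out = v2_playtimes_get(V2Query {
        project_ids: Some("[\"AABBCCDD\"]".to_string()),
        start_date: None,
    });
    println!("{}", out);
}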
@@ -156,66 +100,22 @@ pub async fn views_get(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ANALYTICS]),
+    let data = data.into_inner();
+    v3::analytics_get::views_get(
+        req,
+        clickhouse,
+        web::Query(v3::analytics_get::GetData {
+            project_ids: data.project_ids,
+            version_ids: data.version_ids,
+            start_date: data.start_date,
+            end_date: data.end_date,
+            resolution_minutes: data.resolution_minutes,
+        }),
+        session_queue,
+        pool,
+        redis,
     )
     .await
-    .map(|x| x.1)?;
-
-    let project_ids = data
-        .project_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-    let version_ids = data
-        .version_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-
-    if project_ids.is_some() && version_ids.is_some() {
-        return Err(ApiError::InvalidInput(
-            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
-        ));
-    }
-
-    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
-    let end_date = data.end_date.unwrap_or(Utc::now());
-    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
-
-    // Convert String list to list of ProjectIds or VersionIds
-    // - Filter out unauthorized projects/versions
-    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
-    let (project_ids, version_ids) =
-        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
-
-    // Get the views
-    let views = crate::clickhouse::fetch_views(
-        project_ids,
-        version_ids,
-        start_date,
-        end_date,
-        resolution_minutes,
-        clickhouse.into_inner(),
-    )
-    .await?;
-
-    let mut hm = HashMap::new();
-    for views in views {
-        let id_string = to_base62(views.id);
-        if !hm.contains_key(&id_string) {
-            hm.insert(id_string.clone(), HashMap::new());
-        }
-        if let Some(hm) = hm.get_mut(&id_string) {
-            hm.insert(views.time, views.total_views);
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(hm))
 }

 /// Get download data for a set of projects or versions
@@ -236,66 +136,22 @@ pub async fn downloads_get(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ANALYTICS]),
+    let data = data.into_inner();
+    v3::analytics_get::downloads_get(
+        req,
+        clickhouse,
+        web::Query(v3::analytics_get::GetData {
+            project_ids: data.project_ids,
+            version_ids: data.version_ids,
+            start_date: data.start_date,
+            end_date: data.end_date,
+            resolution_minutes: data.resolution_minutes,
+        }),
+        session_queue,
+        pool,
+        redis,
     )
     .await
-    .map(|x| x.1)?;
-
-    let project_ids = data
-        .project_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-    let version_ids = data
-        .version_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-
-    if project_ids.is_some() && version_ids.is_some() {
-        return Err(ApiError::InvalidInput(
-            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
-        ));
-    }
-
-    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
-    let end_date = data.end_date.unwrap_or(Utc::now());
-    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
-
-    // Convert String list to list of ProjectIds or VersionIds
-    // - Filter out unauthorized projects/versions
-    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
-    let (project_ids, version_ids) =
-        filter_allowed_ids(project_ids, version_ids, user_option, &pool, &redis).await?;
-
-    // Get the downloads
-    let downloads = crate::clickhouse::fetch_downloads(
-        project_ids,
-        version_ids,
-        start_date,
-        end_date,
-        resolution_minutes,
-        clickhouse.into_inner(),
-    )
-    .await?;
-
-    let mut hm = HashMap::new();
-    for downloads in downloads {
-        let id_string = to_base62(downloads.id);
-        if !hm.contains_key(&id_string) {
-            hm.insert(id_string.clone(), HashMap::new());
-        }
-        if let Some(hm) = hm.get_mut(&id_string) {
-            hm.insert(downloads.time, downloads.total_downloads);
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(hm))
 }

 /// Get payout data for a set of projects
@@ -315,77 +171,21 @@ pub async fn revenue_get(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::PAYOUTS_READ]),
+    let data = data.into_inner();
+    v3::analytics_get::revenue_get(
+        req,
+        web::Query(v3::analytics_get::GetData {
+            project_ids: data.project_ids,
+            version_ids: None,
+            start_date: data.start_date,
+            end_date: data.end_date,
+            resolution_minutes: data.resolution_minutes,
+        }),
+        session_queue,
+        pool,
+        redis,
     )
     .await
-    .map(|x| x.1)?;
-
-    let project_ids = data
-        .project_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-
-    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
-    let end_date = data.end_date.unwrap_or(Utc::now());
-    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
-
-    // Round up/down to nearest duration as we are using pgadmin, does not have rounding in the fetch command
-    // Round start_date down to nearest resolution
-    let diff = start_date.timestamp() % (resolution_minutes as i64 * 60);
-    let start_date = start_date - Duration::seconds(diff);
-
-    // Round end_date up to nearest resolution
-    let diff = end_date.timestamp() % (resolution_minutes as i64 * 60);
-    let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff);
-
-    // Convert String list to list of ProjectIds or VersionIds
-    // - Filter out unauthorized projects/versions
-    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
-    let (project_ids, _) = filter_allowed_ids(project_ids, None, user, &pool, &redis).await?;
-
-    let duration: PgInterval = Duration::minutes(resolution_minutes as i64)
-        .try_into()
-        .unwrap();
-    // Get the revenue data
-    let payouts_values = sqlx::query!(
-        "
-        SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start
-        FROM payouts_values
-        WHERE mod_id = ANY($1) AND created BETWEEN $2 AND $3
-        GROUP by mod_id, interval_start ORDER BY interval_start
-        ",
-        &project_ids.unwrap_or_default().into_iter().map(|x| x.0 as i64).collect::<Vec<_>>(),
-        start_date,
-        end_date,
-        duration,
-    )
-    .fetch_all(&**pool)
-    .await?;
-
-    let mut hm = HashMap::new();
-    for value in payouts_values {
-        if let Some(mod_id) = value.mod_id {
-            if let Some(amount) = value.amount_sum {
-                if let Some(interval_start) = value.interval_start {
-                    let id_string = to_base62(mod_id as u64);
-                    if !hm.contains_key(&id_string) {
-                        hm.insert(id_string.clone(), HashMap::new());
-                    }
-                    if let Some(hm) = hm.get_mut(&id_string) {
-                        hm.insert(interval_start.timestamp(), amount);
-                    }
-                }
-            }
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(hm))
 }

 /// Get country data for a set of projects or versions
@@ -409,64 +209,22 @@ pub async fn countries_downloads_get(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ANALYTICS]),
+    let data = data.into_inner();
+    v3::analytics_get::countries_downloads_get(
+        req,
+        clickhouse,
+        web::Query(v3::analytics_get::GetData {
+            project_ids: data.project_ids,
+            version_ids: data.version_ids,
+            start_date: data.start_date,
+            end_date: data.end_date,
+            resolution_minutes: data.resolution_minutes,
+        }),
+        session_queue,
+        pool,
+        redis,
    )
    .await
-    .map(|x| x.1)?;
-
-    let project_ids = data
-        .project_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-    let version_ids = data
-        .version_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-
-    if project_ids.is_some() && version_ids.is_some() {
-        return Err(ApiError::InvalidInput(
-            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
-        ));
-    }
-
-    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
-    let end_date = data.end_date.unwrap_or(Utc::now());
-
-    // Convert String list to list of ProjectIds or VersionIds
-    // - Filter out unauthorized projects/versions
-    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
-    let (project_ids, version_ids) =
-        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
-
-    // Get the countries
-    let countries = crate::clickhouse::fetch_countries(
-        project_ids,
-        version_ids,
-        start_date,
-        end_date,
-        clickhouse.into_inner(),
-    )
-    .await?;
-
-    let mut hm = HashMap::new();
-    for views in countries {
-        let id_string = to_base62(views.id);
-        if !hm.contains_key(&id_string) {
-            hm.insert(id_string.clone(), HashMap::new());
-        }
-        if let Some(hm) = hm.get_mut(&id_string) {
-            hm.insert(views.country, views.total_downloads);
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(hm))
 }

 /// Get country data for a set of projects or versions
@@ -490,126 +248,20 @@ pub async fn countries_views_get(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ANALYTICS]),
+    let data = data.into_inner();
+    v3::analytics_get::countries_views_get(
+        req,
+        clickhouse,
+        web::Query(v3::analytics_get::GetData {
+            project_ids: data.project_ids,
+            version_ids: data.version_ids,
+            start_date: data.start_date,
+            end_date: data.end_date,
+            resolution_minutes: data.resolution_minutes,
+        }),
+        session_queue,
+        pool,
+        redis,
     )
     .await
-    .map(|x| x.1)?;
-
-    let project_ids = data
-        .project_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-    let version_ids = data
-        .version_ids
-        .as_ref()
-        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
-        .transpose()?;
-
-    if project_ids.is_some() && version_ids.is_some() {
-        return Err(ApiError::InvalidInput(
-            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
-        ));
-    }
-
-    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
-    let end_date = data.end_date.unwrap_or(Utc::now());
-
-    // Convert String list to list of ProjectIds or VersionIds
-    // - Filter out unauthorized projects/versions
-    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
-    let (project_ids, version_ids) =
-        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
-
-    // Get the countries
-    let countries = crate::clickhouse::fetch_countries(
-        project_ids,
-        version_ids,
-        start_date,
-        end_date,
-        clickhouse.into_inner(),
-    )
-    .await?;
-
-    let mut hm = HashMap::new();
-    for views in countries {
-        let id_string = to_base62(views.id);
-        if !hm.contains_key(&id_string) {
-            hm.insert(id_string.clone(), HashMap::new());
-        }
-        if let Some(hm) = hm.get_mut(&id_string) {
-            hm.insert(views.country, views.total_views);
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(hm))
 }
-
-async fn filter_allowed_ids(
-    mut project_ids: Option<Vec<String>>,
-    version_ids: Option<Vec<String>>,
-    user: crate::models::users::User,
-    pool: &web::Data<PgPool>,
-    redis: &RedisPool,
-) -> Result<(Option<Vec<ProjectId>>, Option<Vec<VersionId>>), ApiError> {
-    if project_ids.is_some() && version_ids.is_some() {
-        return Err(ApiError::InvalidInput(
-            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
-        ));
-    }
-
-    // If no project_ids or version_ids are provided, we default to all projects the user has access to
-    if project_ids.is_none() && version_ids.is_none() {
-        project_ids = Some(
-            user_item::User::get_projects(user.id.into(), &***pool, redis)
-                .await?
-                .into_iter()
-                .map(|x| ProjectId::from(x).to_string())
-                .collect(),
-        );
-    }
-
-    // Convert String list to list of ProjectIds or VersionIds
-    // - Filter out unauthorized projects/versions
-
-    let project_ids = if let Some(project_ids) = project_ids {
-        // Submitted project_ids are filtered by the user's permissions
-        let ids = project_ids
-            .iter()
-            .map(|id| Ok(ProjectId(parse_base62(id)?).into()))
-            .collect::<Result<Vec<_>, ApiError>>()?;
-        let projects = project_item::Project::get_many_ids(&ids, &***pool, redis).await?;
-        let ids: Vec<ProjectId> = filter_authorized_projects(projects, &Some(user.clone()), pool)
-            .await?
-            .into_iter()
-            .map(|x| x.id)
-            .collect::<Vec<_>>();
-        Some(ids)
-    } else {
-        None
-    };
-    let version_ids = if let Some(version_ids) = version_ids {
-        // Submitted version_ids are filtered by the user's permissions
-        let ids = version_ids
-            .iter()
-            .map(|id| Ok(VersionId(parse_base62(id)?).into()))
-            .collect::<Result<Vec<_>, ApiError>>()?;
-        let versions = version_item::Version::get_many(&ids, &***pool, redis).await?;
-        let ids: Vec<VersionId> = filter_authorized_versions(versions, &Some(user), pool)
-            .await?
-            .into_iter()
-            .map(|x| x.id)
-            .collect::<Vec<_>>();
-        Some(ids)
-    } else {
-        None
-    };
-
-    // Only one of project_ids or version_ids will be Some
-    Ok((project_ids, version_ids))
 }
@@ -1,28 +1,16 @@
-use crate::auth::checks::{filter_authorized_collections, is_authorized_collection};
-use crate::auth::get_user_from_headers;
-use crate::database::models::{collection_item, generate_collection_id, project_item};
 use crate::database::redis::RedisPool;
 use crate::file_hosting::FileHost;
-use crate::models::collections::{Collection, CollectionStatus};
-use crate::models::ids::base62_impl::parse_base62;
-use crate::models::ids::{CollectionId, ProjectId};
-use crate::models::pats::Scopes;
+use crate::models::collections::CollectionStatus;
 use crate::queue::session::AuthQueue;
-use crate::routes::ApiError;
-use crate::util::routes::read_from_payload;
-use crate::util::validate::validation_errors_to_string;
-use crate::{database, models};
+use crate::routes::v3::project_creation::CreateError;
+use crate::routes::{v3, ApiError};
 use actix_web::web::Data;
 use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
-use chrono::Utc;
-use itertools::Itertools;
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
 use std::sync::Arc;
 use validator::Validate;
 
-use super::project_creation::CreateError;
-
 pub fn config(cfg: &mut web::ServiceConfig) {
     cfg.service(collections_get);
     cfg.service(collection_create);
@@ -62,68 +50,18 @@ pub async fn collection_create(
     session_queue: Data<AuthQueue>,
 ) -> Result<HttpResponse, CreateError> {
     let collection_create_data = collection_create_data.into_inner();
-    // The currently logged in user
-    let current_user = get_user_from_headers(
-        &req,
-        &**client,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::COLLECTION_CREATE]),
+    v3::collections::collection_create(
+        req,
+        web::Json(v3::collections::CollectionCreateData {
+            title: collection_create_data.title,
+            description: collection_create_data.description,
+            projects: collection_create_data.projects,
+        }),
+        client,
+        redis,
+        session_queue,
     )
-    .await?
-    .1;
-
-    collection_create_data
-        .validate()
-        .map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
-
-    let mut transaction = client.begin().await?;
-
-    let collection_id: CollectionId = generate_collection_id(&mut transaction).await?.into();
-
-    let initial_project_ids = project_item::Project::get_many(
-        &collection_create_data.projects,
-        &mut *transaction,
-        &redis,
-    )
-    .await?
-    .into_iter()
-    .map(|x| x.inner.id.into())
-    .collect::<Vec<ProjectId>>();
-
-    let collection_builder_actual = collection_item::CollectionBuilder {
-        collection_id: collection_id.into(),
-        user_id: current_user.id.into(),
-        title: collection_create_data.title,
-        description: collection_create_data.description,
-        status: CollectionStatus::Listed,
-        projects: initial_project_ids
-            .iter()
-            .copied()
-            .map(|x| x.into())
-            .collect(),
-    };
-    let collection_builder = collection_builder_actual.clone();
-
-    let now = Utc::now();
-    collection_builder_actual.insert(&mut transaction).await?;
-
-    let response = crate::models::collections::Collection {
-        id: collection_id,
-        user: collection_builder.user_id.into(),
-        title: collection_builder.title.clone(),
-        description: collection_builder.description.clone(),
-        created: now,
-        updated: now,
-        icon_url: None,
-        color: None,
-        status: collection_builder.status,
-        projects: initial_project_ids,
-    };
-    transaction.commit().await?;
-
-    Ok(HttpResponse::Ok().json(response))
+    .await
 }
 
 #[derive(Serialize, Deserialize)]
@@ -138,28 +76,14 @@ pub async fn collections_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
-    let ids = ids
-        .into_iter()
-        .map(|x| parse_base62(x).map(|x| database::models::CollectionId(x as i64)))
-        .collect::<Result<Vec<_>, _>>()?;
-
-    let collections_data = database::models::Collection::get_many(&ids, &**pool, &redis).await?;
-
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::COLLECTION_READ]),
+    v3::collections::collections_get(
+        req,
+        web::Query(v3::collections::CollectionIds { ids: ids.ids }),
+        pool,
+        redis,
+        session_queue,
     )
     .await
-    .map(|x| x.1)
-    .ok();
-
-    let collections = filter_authorized_collections(collections_data, &user_option, &pool).await?;
-
-    Ok(HttpResponse::Ok().json(collections))
 }
 
 #[get("{id}")]
@@ -170,27 +94,7 @@ pub async fn collection_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let string = info.into_inner().0;
-
-    let id = database::models::CollectionId(parse_base62(&string)? as i64);
-    let collection_data = database::models::Collection::get(id, &**pool, &redis).await?;
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::COLLECTION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    if let Some(data) = collection_data {
-        if is_authorized_collection(&data, &user_option).await? {
-            return Ok(HttpResponse::Ok().json(Collection::from(data)));
-        }
-    }
-    Ok(HttpResponse::NotFound().body(""))
+    v3::collections::collection_get(req, info, pool, redis, session_queue).await
 }
 
 #[derive(Deserialize, Validate)]
@@ -216,131 +120,21 @@ pub async fn collection_edit(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::COLLECTION_WRITE]),
+    let new_collection = new_collection.into_inner();
+    v3::collections::collection_edit(
+        req,
+        info,
+        pool,
+        web::Json(v3::collections::EditCollection {
+            title: new_collection.title,
+            description: new_collection.description,
+            status: new_collection.status,
+            new_projects: new_collection.new_projects,
+        }),
+        redis,
+        session_queue,
     )
-    .await?
-    .1;
-
-    new_collection
-        .validate()
-        .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
-
-    let string = info.into_inner().0;
-    let id = database::models::CollectionId(parse_base62(&string)? as i64);
-    let result = database::models::Collection::get(id, &**pool, &redis).await?;
-
-    if let Some(collection_item) = result {
-        if !can_modify_collection(&collection_item, &user) {
-            return Ok(HttpResponse::Unauthorized().body(""));
-        }
-
-        let id = collection_item.id;
-
-        let mut transaction = pool.begin().await?;
-
-        if let Some(title) = &new_collection.title {
-            sqlx::query!(
-                "
-                UPDATE collections
-                SET title = $1
-                WHERE (id = $2)
-                ",
-                title.trim(),
-                id as database::models::ids::CollectionId,
-            )
-            .execute(&mut *transaction)
-            .await?;
-        }
-
-        if let Some(description) = &new_collection.description {
-            sqlx::query!(
-                "
-                UPDATE collections
-                SET description = $1
-                WHERE (id = $2)
-                ",
-                description,
-                id as database::models::ids::CollectionId,
-            )
-            .execute(&mut *transaction)
-            .await?;
-        }
-
-        if let Some(status) = &new_collection.status {
-            if !(user.role.is_mod()
-                || collection_item.status.is_approved() && status.can_be_requested())
-            {
-                return Err(ApiError::CustomAuthentication(
-                    "You don't have permission to set this status!".to_string(),
-                ));
-            }
-
-            sqlx::query!(
-                "
-                UPDATE collections
-                SET status = $1
-                WHERE (id = $2)
-                ",
-                status.to_string(),
-                id as database::models::ids::CollectionId,
-            )
-            .execute(&mut *transaction)
-            .await?;
-        }
-
-        if let Some(new_project_ids) = &new_collection.new_projects {
-            // Delete all existing projects
-            sqlx::query!(
-                "
-                DELETE FROM collections_mods
-                WHERE collection_id = $1
-                ",
-                collection_item.id as database::models::ids::CollectionId,
-            )
-            .execute(&mut *transaction)
-            .await?;
-
-            let collection_item_ids = new_project_ids
-                .iter()
-                .map(|_| collection_item.id.0)
-                .collect_vec();
-            let mut validated_project_ids = Vec::new();
-            for project_id in new_project_ids {
-                let project = database::models::Project::get(project_id, &**pool, &redis)
-                    .await?
-                    .ok_or_else(|| {
-                        ApiError::InvalidInput(format!(
-                            "The specified project {project_id} does not exist!"
-                        ))
-                    })?;
-                validated_project_ids.push(project.inner.id.0);
-            }
-            // Insert- don't throw an error if it already exists
-            sqlx::query!(
-                "
-                INSERT INTO collections_mods (collection_id, mod_id)
-                SELECT * FROM UNNEST ($1::int8[], $2::int8[])
-                ON CONFLICT DO NOTHING
-                ",
-                &collection_item_ids[..],
-                &validated_project_ids[..],
-            )
-            .execute(&mut *transaction)
-            .await?;
-        }
-
-        database::models::Collection::clear_cache(collection_item.id, &redis).await?;
-
-        transaction.commit().await?;
-        Ok(HttpResponse::NoContent().body(""))
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    .await
 }
 
 #[derive(Serialize, Deserialize)]
@@ -357,82 +151,20 @@ pub async fn collection_icon_edit(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
     file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
-    mut payload: web::Payload,
+    payload: web::Payload,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) {
-        let cdn_url = dotenvy::var("CDN_URL")?;
-        let user = get_user_from_headers(
-            &req,
-            &**pool,
-            &redis,
-            &session_queue,
-            Some(&[Scopes::COLLECTION_WRITE]),
-        )
-        .await?
-        .1;
-
-        let string = info.into_inner().0;
-        let id = database::models::CollectionId(parse_base62(&string)? as i64);
-        let collection_item = database::models::Collection::get(id, &**pool, &redis)
-            .await?
-            .ok_or_else(|| {
-                ApiError::InvalidInput("The specified collection does not exist!".to_string())
-            })?;
-
-        if !can_modify_collection(&collection_item, &user) {
-            return Ok(HttpResponse::Unauthorized().body(""));
-        }
-
-        if let Some(icon) = collection_item.icon_url {
-            let name = icon.split(&format!("{cdn_url}/")).nth(1);
-
-            if let Some(icon_path) = name {
-                file_host.delete_file_version("", icon_path).await?;
-            }
-        }
-
-        let bytes =
-            read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
-
-        let color = crate::util::img::get_color_from_img(&bytes)?;
-
-        let hash = sha1::Sha1::from(&bytes).hexdigest();
-        let collection_id: CollectionId = collection_item.id.into();
-        let upload_data = file_host
-            .upload_file(
-                content_type,
-                &format!("data/{}/{}.{}", collection_id, hash, ext.ext),
-                bytes.freeze(),
-            )
-            .await?;
-
-        let mut transaction = pool.begin().await?;
-
-        sqlx::query!(
-            "
-            UPDATE collections
-            SET icon_url = $1, color = $2
-            WHERE (id = $3)
-            ",
-            format!("{}/{}", cdn_url, upload_data.file_name),
-            color.map(|x| x as i32),
-            collection_item.id as database::models::ids::CollectionId,
-        )
-        .execute(&mut *transaction)
-        .await?;
-
-        database::models::Collection::clear_cache(collection_item.id, &redis).await?;
-
-        transaction.commit().await?;
-
-        Ok(HttpResponse::NoContent().body(""))
-    } else {
-        Err(ApiError::InvalidInput(format!(
-            "Invalid format for collection icon: {}",
-            ext.ext
-        )))
-    }
+    v3::collections::collection_icon_edit(
+        web::Query(v3::collections::Extension { ext: ext.ext }),
+        req,
+        info,
+        pool,
+        redis,
+        file_host,
+        payload,
+        session_queue,
+    )
+    .await
 }
 
 #[delete("{id}/icon")]
@@ -444,54 +176,7 @@ pub async fn delete_collection_icon(
     file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::COLLECTION_WRITE]),
-    )
-    .await?
-    .1;
-
-    let string = info.into_inner().0;
-    let id = database::models::CollectionId(parse_base62(&string)? as i64);
-    let collection_item = database::models::Collection::get(id, &**pool, &redis)
-        .await?
-        .ok_or_else(|| {
-            ApiError::InvalidInput("The specified collection does not exist!".to_string())
-        })?;
-    if !can_modify_collection(&collection_item, &user) {
-        return Ok(HttpResponse::Unauthorized().body(""));
-    }
-
-    let cdn_url = dotenvy::var("CDN_URL")?;
-    if let Some(icon) = collection_item.icon_url {
-        let name = icon.split(&format!("{cdn_url}/")).nth(1);
-
-        if let Some(icon_path) = name {
-            file_host.delete_file_version("", icon_path).await?;
-        }
-    }
-
-    let mut transaction = pool.begin().await?;
-
-    sqlx::query!(
-        "
-        UPDATE collections
-        SET icon_url = NULL, color = NULL
-        WHERE (id = $1)
-        ",
-        collection_item.id as database::models::ids::CollectionId,
-    )
-    .execute(&mut *transaction)
-    .await?;
-
-    database::models::Collection::clear_cache(collection_item.id, &redis).await?;
-
-    transaction.commit().await?;
-
-    Ok(HttpResponse::NoContent().body(""))
+    v3::collections::delete_collection_icon(req, info, pool, redis, file_host, session_queue).await
 }
 
 #[delete("{id}")]
@@ -502,44 +187,5 @@ pub async fn collection_delete(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::COLLECTION_DELETE]),
-    )
-    .await?
-    .1;
-
-    let string = info.into_inner().0;
-    let id = database::models::CollectionId(parse_base62(&string)? as i64);
-    let collection = database::models::Collection::get(id, &**pool, &redis)
-        .await?
-        .ok_or_else(|| {
-            ApiError::InvalidInput("The specified collection does not exist!".to_string())
-        })?;
-    if !can_modify_collection(&collection, &user) {
-        return Ok(HttpResponse::Unauthorized().body(""));
-    }
-    let mut transaction = pool.begin().await?;
-
-    let result =
-        database::models::Collection::remove(collection.id, &mut transaction, &redis).await?;
-    database::models::Collection::clear_cache(collection.id, &redis).await?;
-
-    transaction.commit().await?;
-
-    if result.is_some() {
-        Ok(HttpResponse::NoContent().body(""))
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
-}
-
-fn can_modify_collection(
-    collection: &database::models::Collection,
-    user: &models::users::User,
-) -> bool {
-    collection.user_id == user.id.into() || user.role.is_mod()
+    v3::collections::collection_delete(req, info, pool, redis, session_queue).await
 }
@@ -1,17 +1,11 @@
 use std::sync::Arc;
 
-use crate::auth::{get_user_from_headers, is_authorized, is_authorized_version};
-use crate::database;
-use crate::database::models::{project_item, report_item, thread_item, version_item};
 use crate::database::redis::RedisPool;
 use crate::file_hosting::FileHost;
 use crate::models::ids::{ThreadMessageId, VersionId};
-use crate::models::images::{Image, ImageContext};
 use crate::models::reports::ReportId;
 use crate::queue::session::AuthQueue;
-use crate::routes::v2::threads::is_authorized_thread;
-use crate::routes::ApiError;
-use crate::util::routes::read_from_payload;
+use crate::routes::{v3, ApiError};
 use actix_web::{post, web, HttpRequest, HttpResponse};
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
@@ -40,195 +34,26 @@ pub async fn images_add(
     req: HttpRequest,
     web::Query(data): web::Query<ImageUpload>,
     file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
-    mut payload: web::Payload,
+    payload: web::Payload,
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    if let Some(content_type) = crate::util::ext::get_image_content_type(&data.ext) {
-        let mut context = ImageContext::from_str(&data.context, None);
-
-        let scopes = vec![context.relevant_scope()];
-
-        let cdn_url = dotenvy::var("CDN_URL")?;
-        let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, Some(&scopes))
-            .await?
-            .1;
-
-        // Attempt to associated a supplied id with the context
-        // If the context cannot be found, or the user is not authorized to upload images for the context, return an error
-        match &mut context {
-            ImageContext::Project { project_id } => {
-                if let Some(id) = data.project_id {
-                    let project = project_item::Project::get(&id, &**pool, &redis).await?;
-                    if let Some(project) = project {
-                        if is_authorized(&project.inner, &Some(user.clone()), &pool).await? {
-                            *project_id = Some(project.inner.id.into());
-                        } else {
-                            return Err(ApiError::CustomAuthentication(
-                                "You are not authorized to upload images for this project"
-                                    .to_string(),
-                            ));
-                        }
-                    } else {
-                        return Err(ApiError::InvalidInput(
-                            "The project could not be found.".to_string(),
-                        ));
-                    }
-                }
-            }
-            ImageContext::Version { version_id } => {
-                if let Some(id) = data.version_id {
-                    let version = version_item::Version::get(id.into(), &**pool, &redis).await?;
-                    if let Some(version) = version {
-                        if is_authorized_version(&version.inner, &Some(user.clone()), &pool).await?
-                        {
-                            *version_id = Some(version.inner.id.into());
-                        } else {
-                            return Err(ApiError::CustomAuthentication(
-                                "You are not authorized to upload images for this version"
-                                    .to_string(),
-                            ));
-                        }
-                    } else {
-                        return Err(ApiError::InvalidInput(
-                            "The version could not be found.".to_string(),
-                        ));
-                    }
-                }
-            }
-            ImageContext::ThreadMessage { thread_message_id } => {
-                if let Some(id) = data.thread_message_id {
-                    let thread_message = thread_item::ThreadMessage::get(id.into(), &**pool)
-                        .await?
-                        .ok_or_else(|| {
-                            ApiError::InvalidInput(
-                                "The thread message could not found.".to_string(),
-                            )
-                        })?;
-                    let thread = thread_item::Thread::get(thread_message.thread_id, &**pool)
-                        .await?
-                        .ok_or_else(|| {
-                            ApiError::InvalidInput(
-                                "The thread associated with the thread message could not be found"
-                                    .to_string(),
-                            )
-                        })?;
-                    if is_authorized_thread(&thread, &user, &pool).await? {
-                        *thread_message_id = Some(thread_message.id.into());
-                    } else {
-                        return Err(ApiError::CustomAuthentication(
-                            "You are not authorized to upload images for this thread message"
-                                .to_string(),
-                        ));
-                    }
-                }
-            }
-            ImageContext::Report { report_id } => {
-                if let Some(id) = data.report_id {
-                    let report = report_item::Report::get(id.into(), &**pool)
-                        .await?
-                        .ok_or_else(|| {
-                            ApiError::InvalidInput("The report could not be found.".to_string())
-                        })?;
-                    let thread = thread_item::Thread::get(report.thread_id, &**pool)
-                        .await?
-                        .ok_or_else(|| {
-                            ApiError::InvalidInput(
-                                "The thread associated with the report could not be found."
-                                    .to_string(),
-                            )
-                        })?;
-                    if is_authorized_thread(&thread, &user, &pool).await? {
-                        *report_id = Some(report.id.into());
-                    } else {
-                        return Err(ApiError::CustomAuthentication(
-                            "You are not authorized to upload images for this report".to_string(),
-                        ));
-                    }
-                }
-            }
-            ImageContext::Unknown => {
-                return Err(ApiError::InvalidInput(
-                    "Context must be one of: project, version, thread_message, report".to_string(),
-                ));
-            }
-        }
-
-        // Upload the image to the file host
-        let bytes =
-            read_from_payload(&mut payload, 1_048_576, "Icons must be smaller than 1MiB").await?;
-
-        let hash = sha1::Sha1::from(&bytes).hexdigest();
-        let upload_data = file_host
-            .upload_file(
-                content_type,
-                &format!("data/cached_images/{}.{}", hash, data.ext),
-                bytes.freeze(),
-            )
-            .await?;
-
-        let mut transaction = pool.begin().await?;
-
-        let db_image: database::models::Image = database::models::Image {
-            id: database::models::generate_image_id(&mut transaction).await?,
-            url: format!("{}/{}", cdn_url, upload_data.file_name),
-            size: upload_data.content_length as u64,
-            created: chrono::Utc::now(),
-            owner_id: database::models::UserId::from(user.id),
-            context: context.context_as_str().to_string(),
-            project_id: if let ImageContext::Project {
-                project_id: Some(id),
-            } = context
-            {
-                Some(database::models::ProjectId::from(id))
-            } else {
-                None
-            },
-            version_id: if let ImageContext::Version {
-                version_id: Some(id),
-            } = context
-            {
-                Some(database::models::VersionId::from(id))
-            } else {
-                None
-            },
-            thread_message_id: if let ImageContext::ThreadMessage {
-                thread_message_id: Some(id),
-            } = context
-            {
-                Some(database::models::ThreadMessageId::from(id))
-            } else {
-                None
-            },
-            report_id: if let ImageContext::Report {
-                report_id: Some(id),
-            } = context
-            {
-                Some(database::models::ReportId::from(id))
-            } else {
-                None
-            },
-        };
-
-        // Insert
-        db_image.insert(&mut transaction).await?;
-
-        let image = Image {
-            id: db_image.id.into(),
-            url: db_image.url,
-            size: db_image.size,
-            created: db_image.created,
-            owner_id: db_image.owner_id.into(),
-            context,
-        };
-
-        transaction.commit().await?;
-
-        Ok(HttpResponse::Ok().json(image))
-    } else {
-        Err(ApiError::InvalidInput(
-            "The specified file is not an image!".to_string(),
-        ))
-    }
+    v3::images::images_add(
+        req,
+        web::Query(v3::images::ImageUpload {
+            ext: data.ext,
+            context: data.context,
+            project_id: data.project_id,
+            version_id: data.version_id,
+            thread_message_id: data.thread_message_id,
+            report_id: data.report_id,
+        }),
+        file_host,
+        payload,
+        pool,
+        redis,
+        session_queue,
+    )
+    .await
 }
@@ -9,12 +9,12 @@ pub(crate) mod project_creation;
 mod projects;
 mod reports;
 mod statistics;
-mod tags;
+pub mod tags;
 mod teams;
 mod threads;
 mod users;
 mod version_creation;
-mod version_file;
+pub mod version_file;
 mod versions;
 
 pub use super::ApiError;
@@ -1,9 +1,7 @@
 use super::ApiError;
-use crate::database;
 use crate::database::redis::RedisPool;
-use crate::models::projects::ProjectStatus;
 use crate::queue::session::AuthQueue;
-use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes};
+use crate::routes::v3;
 use actix_web::{get, web, HttpRequest, HttpResponse};
 use serde::Deserialize;
 use sqlx::PgPool;
@@ -30,37 +28,12 @@ pub async fn get_projects(
     count: web::Query<ResultCount>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    check_is_moderator_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::PROJECT_READ]),
+    v3::moderation::get_projects(
+        req,
+        pool,
+        redis,
+        web::Query(v3::moderation::ResultCount { count: count.count }),
+        session_queue,
     )
-    .await?;
-
-    use futures::stream::TryStreamExt;
-
-    let project_ids = sqlx::query!(
-        "
-        SELECT id FROM mods
-        WHERE status = $1
-        ORDER BY queued ASC
-        LIMIT $2;
-        ",
-        ProjectStatus::Processing.as_str(),
-        count.count as i64
-    )
-    .fetch_many(&**pool)
-    .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) })
-    .try_collect::<Vec<database::models::ProjectId>>()
-    .await?;
-
-    let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &**pool, &redis)
-        .await?
-        .into_iter()
-        .map(crate::models::projects::Project::from)
-        .collect();
-
-    Ok(HttpResponse::Ok().json(projects))
+    .await
 }
@@ -1,10 +1,7 @@
-use crate::auth::get_user_from_headers;
-use crate::database;
 use crate::database::redis::RedisPool;
 use crate::models::ids::NotificationId;
-use crate::models::notifications::Notification;
-use crate::models::pats::Scopes;
 use crate::queue::session::AuthQueue;
+use crate::routes::v3;
 use crate::routes::ApiError;
 use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
 use serde::{Deserialize, Serialize};
@@ -36,36 +33,14 @@ pub async fn notifications_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::NOTIFICATION_READ]),
+    v3::notifications::notifications_get(
+        req,
+        web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
+        pool,
+        redis,
+        session_queue,
     )
-    .await?
-    .1;
-
-    use database::models::notification_item::Notification as DBNotification;
-    use database::models::NotificationId as DBNotificationId;
-
-    let notification_ids: Vec<DBNotificationId> =
-        serde_json::from_str::<Vec<NotificationId>>(ids.ids.as_str())?
-            .into_iter()
-            .map(DBNotificationId::from)
-            .collect();
-
-    let notifications_data: Vec<DBNotification> =
-        database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
-            .await?;
-
-    let notifications: Vec<Notification> = notifications_data
-        .into_iter()
-        .filter(|n| n.user_id == user.id.into() || user.role.is_admin())
-        .map(Notification::from)
-        .collect();
-
-    Ok(HttpResponse::Ok().json(notifications))
+    .await
 }
 
 #[get("{id}")]
@@ -76,30 +51,7 @@ pub async fn notification_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::NOTIFICATION_READ]),
-    )
-    .await?
-    .1;
-
-    let id = info.into_inner().0;
-
-    let notification_data =
-        database::models::notification_item::Notification::get(id.into(), &**pool).await?;
-
-    if let Some(data) = notification_data {
-        if user.id == data.user_id.into() || user.role.is_admin() {
-            Ok(HttpResponse::Ok().json(Notification::from(data)))
-        } else {
-            Ok(HttpResponse::NotFound().body(""))
-        }
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::notifications::notification_get(req, info, pool, redis, session_queue).await
 }
 
 #[patch("{id}")]
@@ -110,43 +62,7 @@ pub async fn notification_read(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::NOTIFICATION_WRITE]),
-    )
-    .await?
-    .1;
-
-    let id = info.into_inner().0;
-
-    let notification_data =
-        database::models::notification_item::Notification::get(id.into(), &**pool).await?;
-
-    if let Some(data) = notification_data {
-        if data.user_id == user.id.into() || user.role.is_admin() {
-            let mut transaction = pool.begin().await?;
-
-            database::models::notification_item::Notification::read(
-                id.into(),
-                &mut transaction,
-                &redis,
-            )
-            .await?;
-
-            transaction.commit().await?;
-
-            Ok(HttpResponse::NoContent().body(""))
-        } else {
-            Err(ApiError::CustomAuthentication(
-                "You are not authorized to read this notification!".to_string(),
-            ))
-        }
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::notifications::notification_read(req, info, pool, redis, session_queue).await
 }
 
 #[delete("{id}")]
@@ -157,43 +73,7 @@ pub async fn notification_delete(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::NOTIFICATION_WRITE]),
-    )
-    .await?
-    .1;
-
-    let id = info.into_inner().0;
-
-    let notification_data =
-        database::models::notification_item::Notification::get(id.into(), &**pool).await?;
-
-    if let Some(data) = notification_data {
-        if data.user_id == user.id.into() || user.role.is_admin() {
-            let mut transaction = pool.begin().await?;
-
-            database::models::notification_item::Notification::remove(
-                id.into(),
-                &mut transaction,
-                &redis,
-            )
-            .await?;
-
-            transaction.commit().await?;
-
-            Ok(HttpResponse::NoContent().body(""))
-        } else {
-            Err(ApiError::CustomAuthentication(
-                "You are not authorized to delete this notification!".to_string(),
-            ))
-        }
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::notifications::notification_delete(req, info, pool, redis, session_queue).await
 }
 
 #[patch("notifications")]
@@ -204,45 +84,14 @@ pub async fn notifications_read(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::NOTIFICATION_WRITE]),
+    v3::notifications::notifications_read(
+        req,
+        web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
+        pool,
+        redis,
+        session_queue,
     )
-    .await?
-    .1;
-
-    let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
-        .into_iter()
-        .map(|x| x.into())
-        .collect::<Vec<_>>();
-
-    let mut transaction = pool.begin().await?;
-
-    let notifications_data =
-        database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
-            .await?;
-
-    let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();
-
-    for notification in notifications_data {
-        if notification.user_id == user.id.into() || user.role.is_admin() {
-            notifications.push(notification.id);
-        }
-    }
-
-    database::models::notification_item::Notification::read_many(
-        &notifications,
-        &mut transaction,
-        &redis,
-    )
-    .await?;
-
-    transaction.commit().await?;
-
-    Ok(HttpResponse::NoContent().body(""))
+    .await
 }
 
 #[delete("notifications")]
@@ -253,43 +102,12 @@ pub async fn notifications_delete(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::NOTIFICATION_WRITE]),
+    v3::notifications::notifications_delete(
+        req,
+        web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
+        pool,
+        redis,
+        session_queue,
     )
-    .await?
-    .1;
-
-    let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
-        .into_iter()
-        .map(|x| x.into())
-        .collect::<Vec<_>>();
-
-    let mut transaction = pool.begin().await?;
-
-    let notifications_data =
-        database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
-            .await?;
-
-    let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();
-
-    for notification in notifications_data {
-        if notification.user_id == user.id.into() || user.role.is_admin() {
-            notifications.push(notification.id);
-        }
-    }
-
-    database::models::notification_item::Notification::remove_many(
-        &notifications,
-        &mut transaction,
-        &redis,
-    )
-    .await?;
-
-    transaction.commit().await?;
-
-    Ok(HttpResponse::NoContent().body(""))
+    .await
 }
@@ -1,25 +1,14 @@
-use std::collections::HashMap;
-use std::sync::Arc;
-
-use crate::auth::{filter_authorized_projects, get_user_from_headers};
-use crate::database::models::team_item::TeamMember;
-use crate::database::models::{generate_organization_id, team_item, Organization};
 use crate::database::redis::RedisPool;
 use crate::file_hosting::FileHost;
-use crate::models::ids::base62_impl::parse_base62;
-use crate::models::organizations::OrganizationId;
-use crate::models::pats::Scopes;
-use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
+use crate::models::projects::Project;
+use crate::models::v2::projects::LegacyProject;
 use crate::queue::session::AuthQueue;
-use crate::routes::v2::project_creation::CreateError;
-use crate::routes::ApiError;
-use crate::util::routes::read_from_payload;
-use crate::util::validate::validation_errors_to_string;
-use crate::{database, models};
+use crate::routes::v3::project_creation::CreateError;
+use crate::routes::{v2_reroute, v3, ApiError};
 use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
-use rust_decimal::Decimal;
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
+use std::sync::Arc;
 use validator::Validate;
 
 pub fn config(cfg: &mut web::ServiceConfig) {
@@ -58,82 +47,18 @@ pub async fn organization_create(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, CreateError> {
-    let current_user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ORGANIZATION_CREATE]),
+    let new_organization = new_organization.into_inner();
+    v3::organizations::organization_create(
+        req,
+        web::Json(v3::organizations::NewOrganization {
+            title: new_organization.title,
+            description: new_organization.description,
+        }),
+        pool.clone(),
+        redis.clone(),
+        session_queue,
     )
-    .await?
-    .1;
-
-    new_organization
-        .validate()
-        .map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;
-
-    let mut transaction = pool.begin().await?;
-
-    // Try title
-    let title_organization_id_option: Option<u64> = parse_base62(&new_organization.title).ok();
-    let mut organization_strings = vec![];
-    if let Some(title_organization_id) = title_organization_id_option {
-        organization_strings.push(title_organization_id.to_string());
-    }
-    organization_strings.push(new_organization.title.clone());
-    let results = Organization::get_many(&organization_strings, &mut *transaction, &redis).await?;
-    if !results.is_empty() {
-        return Err(CreateError::SlugCollision);
-    }
-
-    let organization_id = generate_organization_id(&mut transaction).await?;
-
-    // Create organization managerial team
-    let team = team_item::TeamBuilder {
-        members: vec![team_item::TeamMemberBuilder {
-            user_id: current_user.id.into(),
-            role: models::teams::OWNER_ROLE.to_owned(),
-            permissions: ProjectPermissions::all(),
-            organization_permissions: Some(OrganizationPermissions::all()),
-            accepted: true,
-            payouts_split: Decimal::ONE_HUNDRED,
-            ordering: 0,
-        }],
-    };
-    let team_id = team.insert(&mut transaction).await?;
-
-    // Create organization
-    let organization = Organization {
-        id: organization_id,
-        title: new_organization.title.clone(),
-        description: new_organization.description.clone(),
-        team_id,
-        icon_url: None,
-        color: None,
-    };
-    organization.clone().insert(&mut transaction).await?;
-    transaction.commit().await?;
-
-    // Only member is the owner, the logged in one
-    let member_data = TeamMember::get_from_team_full(team_id, &**pool, &redis)
-        .await?
-        .into_iter()
-        .next();
-    let members_data = if let Some(member_data) = member_data {
-        vec![crate::models::teams::TeamMember::from_model(
-            member_data,
-            current_user.clone(),
-            false,
-        )]
-    } else {
-        return Err(CreateError::InvalidInput(
-            "Failed to get created team.".to_owned(), // should never happen
-        ));
-    };
-
-    let organization = models::organizations::Organization::from(organization, members_data);
-
-    Ok(HttpResponse::Ok().json(organization))
+    .await
 }
 
 #[get("{id}")]
@@ -144,57 +69,7 @@ pub async fn organization_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let id = info.into_inner().0;
-    let current_user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ORGANIZATION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-    let user_id = current_user.as_ref().map(|x| x.id.into());
-
-    let organization_data = Organization::get(&id, &**pool, &redis).await?;
-    if let Some(data) = organization_data {
-        let members_data = TeamMember::get_from_team_full(data.team_id, &**pool, &redis).await?;
-
-        let users = crate::database::models::User::get_many_ids(
-            &members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
-            &**pool,
-            &redis,
-        )
-        .await?;
-        let logged_in = current_user
-            .as_ref()
-            .and_then(|user| {
-                members_data
-                    .iter()
-                    .find(|x| x.user_id == user.id.into() && x.accepted)
-            })
-            .is_some();
-        let team_members: Vec<_> = members_data
-            .into_iter()
-            .filter(|x| {
-                logged_in
-                    || x.accepted
-                    || user_id
-                        .map(|y: crate::database::models::UserId| y == x.user_id)
-                        .unwrap_or(false)
-            })
-            .flat_map(|data| {
-                users.iter().find(|x| x.id == data.user_id).map(|user| {
-                    crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
-                })
-            })
-            .collect();
-
-        let organization = models::organizations::Organization::from(data, team_members);
-        return Ok(HttpResponse::Ok().json(organization));
-    }
-    Ok(HttpResponse::NotFound().body(""))
+    v3::organizations::organization_get(req, info, pool.clone(), redis.clone(), session_queue).await
 }
 
 #[derive(Deserialize)]
@@ -209,72 +84,14 @@ pub async fn organizations_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
-    let organizations_data = Organization::get_many(&ids, &**pool, &redis).await?;
-    let team_ids = organizations_data
-        .iter()
-        .map(|x| x.team_id)
-        .collect::<Vec<_>>();
-
-    let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
-    let users = database::models::User::get_many_ids(
-        &teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    let current_user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::ORGANIZATION_READ]),
+    v3::organizations::organizations_get(
+        req,
+        web::Query(v3::organizations::OrganizationIds { ids: ids.ids }),
+        pool,
+        redis,
+        session_queue,
     )
     .await
-    .map(|x| x.1)
-    .ok();
-    let user_id = current_user.as_ref().map(|x| x.id.into());
-
-    let mut organizations = vec![];
-
-    let mut team_groups = HashMap::new();
-    for item in teams_data {
-        team_groups.entry(item.team_id).or_insert(vec![]).push(item);
-    }
-
-    for data in organizations_data {
-        let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]);
-        let logged_in = current_user
-            .as_ref()
-            .and_then(|user| {
-                members_data
-                    .iter()
-                    .find(|x| x.user_id == user.id.into() && x.accepted)
-            })
-            .is_some();
-
-        let team_members: Vec<_> = members_data
-            .into_iter()
-            .filter(|x| {
-                logged_in
-                    || x.accepted
-                    || user_id
-                        .map(|y: crate::database::models::UserId| y == x.user_id)
-                        .unwrap_or(false)
-            })
-            .flat_map(|data| {
-                users.iter().find(|x| x.id == data.user_id).map(|user| {
-                    crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
-                })
-            })
-            .collect();
-
-        let organization = models::organizations::Organization::from(data, team_members);
-        organizations.push(organization);
-    }
-
-    Ok(HttpResponse::Ok().json(organizations))
 }
 
 #[derive(Serialize, Deserialize, Validate)]
@ -298,132 +115,19 @@ pub async fn organizations_edit(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
let new_organization = new_organization.into_inner();
|
||||||
&req,
|
v3::organizations::organizations_edit(
|
||||||
&**pool,
|
req,
|
||||||
&redis,
|
info,
|
||||||
&session_queue,
|
web::Json(v3::organizations::OrganizationEdit {
|
||||||
Some(&[Scopes::ORGANIZATION_WRITE]),
|
description: new_organization.description,
|
||||||
|
title: new_organization.title,
|
||||||
|
}),
|
||||||
|
pool.clone(),
|
||||||
|
redis.clone(),
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
new_organization
|
|
||||||
.validate()
|
|
||||||
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
|
|
||||||
|
|
||||||
let string = info.into_inner().0;
|
|
||||||
let result = database::models::Organization::get(&string, &**pool, &redis).await?;
|
|
||||||
if let Some(organization_item) = result {
|
|
||||||
let id = organization_item.id;
|
|
||||||
|
|
||||||
let team_member = database::models::TeamMember::get_from_user_id(
|
|
||||||
organization_item.team_id,
|
|
||||||
user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let permissions =
|
|
||||||
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member);
|
|
||||||
|
|
||||||
if let Some(perms) = permissions {
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
if let Some(description) = &new_organization.description {
|
|
||||||
if !perms.contains(OrganizationPermissions::EDIT_DETAILS) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have the permissions to edit the description of this organization!"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE organizations
|
|
||||||
SET description = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
description,
|
|
||||||
id as database::models::ids::OrganizationId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(title) = &new_organization.title {
|
|
||||||
if !perms.contains(OrganizationPermissions::EDIT_DETAILS) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have the permissions to edit the title of this organization!"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let title_organization_id_option: Option<u64> = parse_base62(title).ok();
|
|
||||||
if let Some(title_organization_id) = title_organization_id_option {
|
|
||||||
let results = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1)
|
|
||||||
",
|
|
||||||
title_organization_id as i64
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if results.exists.unwrap_or(true) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Title collides with other organization's id!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make sure the new title is different from the old one
|
|
||||||
// The title column is always set, so the old and new values can be compared directly
|
|
||||||
if !title.eq(&organization_item.title.clone()) {
|
|
||||||
let results = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT EXISTS(SELECT 1 FROM organizations WHERE title = LOWER($1))
|
|
||||||
",
|
|
||||||
title
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if results.exists.unwrap_or(true) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Title collides with other organization's id!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE organizations
|
|
||||||
SET title = LOWER($1)
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
Some(title),
|
|
||||||
id as database::models::ids::OrganizationId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
database::models::Organization::clear_cache(
|
|
||||||
organization_item.id,
|
|
||||||
Some(organization_item.title),
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to edit this organization!".to_string(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
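The v2 edit handler above rejects a new title on two grounds: it must not parse as the base62 id of an existing organization (so /organization/{id} lookups stay unambiguous) and it must not duplicate another organization's title. A hedged sketch of the first check against an in-memory id set; the alphabet and overflow handling here are illustrative, the real parser lives in models::ids::base62_impl and the real check is a SELECT EXISTS against the organizations table:

use std::collections::HashSet;

const BASE62: &str = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

/// Parse a base62 string into a numeric id; None if any character is invalid
/// or the value overflows u64.
fn parse_base62(s: &str) -> Option<u64> {
    s.chars().try_fold(0u64, |acc, c| {
        let digit = BASE62.find(c)? as u64;
        acc.checked_mul(62)?.checked_add(digit)
    })
}

/// A title that happens to be the valid id of an existing organization is rejected.
fn title_collides_with_id(title: &str, existing_ids: &HashSet<u64>) -> bool {
    parse_base62(title)
        .map(|id| existing_ids.contains(&id))
        .unwrap_or(false)
}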
|
|
||||||
#[delete("{id}")]
|
#[delete("{id}")]
|
||||||
@ -434,60 +138,8 @@ pub async fn organization_delete(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::organizations::organization_delete(req, info, pool.clone(), redis.clone(), session_queue)
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::ORGANIZATION_DELETE]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
let string = info.into_inner().0;
|
|
||||||
|
|
||||||
let organization = database::models::Organization::get(&string, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if !user.role.is_admin() {
|
|
||||||
let team_member = database::models::TeamMember::get_from_user_id_organization(
|
|
||||||
organization.id,
|
|
||||||
user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await
|
.await
|
||||||
.map_err(ApiError::Database)?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let permissions =
|
|
||||||
OrganizationPermissions::get_permissions_by_role(&user.role, &Some(team_member))
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if !permissions.contains(OrganizationPermissions::DELETE_ORGANIZATION) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to delete this organization!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
let result =
|
|
||||||
database::models::Organization::remove(organization.id, &mut transaction, &redis).await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
database::models::Organization::clear_cache(organization.id, Some(organization.title), &redis)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if result.is_some() {
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
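Deletion above is gated on the DELETE_ORGANIZATION permission unless the caller is an admin; OrganizationPermissions behaves like a bitflags set derived from the caller's team membership. A small sketch of that gate with the bitflags crate; the flag names mirror the handler, but the bit values here are made up for illustration:

use bitflags::bitflags;

bitflags! {
    struct OrgPerms: u64 {
        const EDIT_DETAILS        = 1 << 0;
        const DELETE_ORGANIZATION = 1 << 1;
    }
}

/// Admins bypass the per-team permission check entirely; everyone else
/// needs DELETE_ORGANIZATION in their resolved permission set.
fn can_delete(is_admin: bool, perms: &OrgPerms) -> bool {
    is_admin || perms.contains(OrgPerms::DELETE_ORGANIZATION)
}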
|
|
||||||
#[get("{id}/projects")]
|
#[get("{id}/projects")]
|
||||||
@ -498,40 +150,23 @@ pub async fn organization_projects_get(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let info = info.into_inner().0;
|
let response = v3::organizations::organization_projects_get(
|
||||||
let current_user = get_user_from_headers(
|
req,
|
||||||
&req,
|
info,
|
||||||
&**pool,
|
pool.clone(),
|
||||||
&redis,
|
redis.clone(),
|
||||||
&session_queue,
|
session_queue,
|
||||||
Some(&[Scopes::ORGANIZATION_READ, Scopes::PROJECT_READ]),
|
|
||||||
)
|
)
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
let possible_organization_id: Option<u64> = parse_base62(&info).ok();
|
|
||||||
use futures::TryStreamExt;
|
|
||||||
|
|
||||||
let project_ids = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT m.id FROM organizations o
|
|
||||||
INNER JOIN mods m ON m.organization_id = o.id
|
|
||||||
WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL)
|
|
||||||
",
|
|
||||||
possible_organization_id.map(|x| x as i64),
|
|
||||||
info
|
|
||||||
)
|
|
||||||
.fetch_many(&**pool)
|
|
||||||
.try_filter_map(|e| async { Ok(e.right().map(|m| crate::database::models::ProjectId(m.id))) })
|
|
||||||
.try_collect::<Vec<crate::database::models::ProjectId>>()
|
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let projects_data =
|
// Convert v3 projects to v2
|
||||||
crate::database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?;
|
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
|
||||||
|
Ok(project) => {
|
||||||
let projects = filter_authorized_projects(projects_data, &current_user, &pool).await?;
|
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
|
||||||
Ok(HttpResponse::Ok().json(projects))
|
Ok(HttpResponse::Ok().json(legacy_projects))
|
||||||
|
}
|
||||||
|
Err(response) => Ok(response),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
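The rewritten handler above shows the general v2-over-v3 response pattern used throughout this file: call the v3 route, and when it answered 200 OK, reparse the body into the v3 model and convert it to the legacy shape (here via LegacyProject::from_many); any non-OK response is forwarded untouched. A simplified stand-in for that extract-or-passthrough step using serde_json values instead of the actual actix-web body handling in v2_reroute::extract_ok_json:

use serde::de::DeserializeOwned;
use serde_json::Value;

/// When the upstream v3 call succeeded, parse its JSON body into T so it can
/// be converted to the v2 shape; otherwise return the raw body unchanged so
/// the caller can forward the error response as-is.
fn extract_ok_json<T: DeserializeOwned>(status_ok: bool, body: Value) -> Result<T, Value> {
    if !status_ok {
        return Err(body);
    }
    serde_json::from_value(body.clone()).map_err(|_| body)
}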
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@ -547,98 +182,18 @@ pub async fn organization_projects_add(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let info = info.into_inner().0;
|
let project_info = project_info.into_inner();
|
||||||
let current_user = get_user_from_headers(
|
v3::organizations::organization_projects_add(
|
||||||
&req,
|
req,
|
||||||
&**pool,
|
info,
|
||||||
&redis,
|
web::Json(v3::organizations::OrganizationProjectAdd {
|
||||||
&session_queue,
|
project_id: project_info.project_id,
|
||||||
Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]),
|
}),
|
||||||
|
pool.clone(),
|
||||||
|
redis.clone(),
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
let organization = database::models::Organization::get(&info, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let project_item = database::models::Project::get(&project_info.project_id, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified project does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
if project_item.inner.organization_id.is_some() {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The specified project is already owned by an organization!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let project_team_member = database::models::TeamMember::get_from_user_id_project(
|
|
||||||
project_item.inner.id,
|
|
||||||
current_user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| ApiError::InvalidInput("You are not a member of this project!".to_string()))?;
|
|
||||||
|
|
||||||
let organization_team_member = database::models::TeamMember::get_from_user_id_organization(
|
|
||||||
organization.id,
|
|
||||||
current_user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("You are not a member of this organization!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Require ownership of a project to add it to an organization
|
|
||||||
if !current_user.role.is_admin()
|
|
||||||
&& !project_team_member
|
|
||||||
.role
|
|
||||||
.eq(crate::models::teams::OWNER_ROLE)
|
|
||||||
{
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You need to be an owner of a project to add it to an organization!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let permissions = OrganizationPermissions::get_permissions_by_role(
|
|
||||||
&current_user.role,
|
|
||||||
&Some(organization_team_member),
|
|
||||||
)
|
|
||||||
.unwrap_or_default();
|
|
||||||
if permissions.contains(OrganizationPermissions::ADD_PROJECT) {
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE mods
|
|
||||||
SET organization_id = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
organization.id as database::models::OrganizationId,
|
|
||||||
project_item.inner.id as database::models::ids::ProjectId
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?;
|
|
||||||
database::models::Project::clear_cache(
|
|
||||||
project_item.inner.id,
|
|
||||||
project_item.inner.slug,
|
|
||||||
None,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to add projects to this organization!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
Ok(HttpResponse::Ok().finish())
|
|
||||||
}
|
}
|
||||||
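Beyond ADD_PROJECT permission on the organization team, the handler above requires the caller either to be an admin or to hold the project's owner role before the project can be moved under an organization. A tiny sketch of that project-side gate with a plain string standing in for the role model (OWNER_ROLE is assumed to be the "Owner" constant from models::teams):

const OWNER_ROLE: &str = "Owner";

/// Admins may always transfer a project; everyone else must own it.
fn may_transfer_project(is_admin: bool, project_role: &str) -> Result<(), String> {
    if is_admin || project_role == OWNER_ROLE {
        Ok(())
    } else {
        Err("You need to be an owner of a project to add it to an organization!".to_string())
    }
}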
|
|
||||||
#[delete("{organization_id}/projects/{project_id}")]
|
#[delete("{organization_id}/projects/{project_id}")]
|
||||||
@ -649,83 +204,14 @@ pub async fn organization_projects_remove(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let (organization_id, project_id) = info.into_inner();
|
v3::organizations::organization_projects_remove(
|
||||||
let current_user = get_user_from_headers(
|
req,
|
||||||
&req,
|
info,
|
||||||
&**pool,
|
pool.clone(),
|
||||||
&redis,
|
redis.clone(),
|
||||||
&session_queue,
|
session_queue,
|
||||||
Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]),
|
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
let organization = database::models::Organization::get(&organization_id, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let project_item = database::models::Project::get(&project_id, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified project does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if !project_item
|
|
||||||
.inner
|
|
||||||
.organization_id
|
|
||||||
.eq(&Some(organization.id))
|
|
||||||
{
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The specified project is not owned by this organization!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let organization_team_member = database::models::TeamMember::get_from_user_id_organization(
|
|
||||||
organization.id,
|
|
||||||
current_user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("You are not a member of this organization!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let permissions = OrganizationPermissions::get_permissions_by_role(
|
|
||||||
&current_user.role,
|
|
||||||
&Some(organization_team_member),
|
|
||||||
)
|
|
||||||
.unwrap_or_default();
|
|
||||||
if permissions.contains(OrganizationPermissions::REMOVE_PROJECT) {
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE mods
|
|
||||||
SET organization_id = NULL
|
|
||||||
WHERE (id = $1)
|
|
||||||
",
|
|
||||||
project_item.inner.id as database::models::ids::ProjectId
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?;
|
|
||||||
database::models::Project::clear_cache(
|
|
||||||
project_item.inner.id,
|
|
||||||
project_item.inner.slug,
|
|
||||||
None,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to add projects to this organization!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
Ok(HttpResponse::Ok().finish())
|
|
||||||
}
|
}
|
||||||
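Detaching a project above is a single UPDATE that nulls mods.organization_id inside a transaction, followed by clearing the team-member and project caches. A hedged sketch of the same statement with the runtime-checked sqlx::query API instead of the compile-time query! macro (table and column names follow the statement shown above):

use sqlx::PgPool;

/// Null out a project's organization link inside a transaction.
async fn detach_project(pool: &PgPool, project_id: i64) -> Result<(), sqlx::Error> {
    let mut tx = pool.begin().await?;
    sqlx::query("UPDATE mods SET organization_id = NULL WHERE id = $1")
        .bind(project_id)
        .execute(&mut *tx)
        .await?;
    tx.commit().await
}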
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
@ -742,102 +228,20 @@ pub async fn organization_icon_edit(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
mut payload: web::Payload,
|
payload: web::Payload,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) {
|
v3::organizations::organization_icon_edit(
|
||||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
web::Query(v3::organizations::Extension { ext: ext.ext }),
|
||||||
let user = get_user_from_headers(
|
req,
|
||||||
&req,
|
info,
|
||||||
&**pool,
|
pool.clone(),
|
||||||
&redis,
|
redis.clone(),
|
||||||
&session_queue,
|
file_host,
|
||||||
Some(&[Scopes::ORGANIZATION_WRITE]),
|
payload,
|
||||||
)
|
session_queue,
|
||||||
.await?
|
)
|
||||||
.1;
|
.await
|
||||||
let string = info.into_inner().0;
|
|
||||||
|
|
||||||
let organization_item = database::models::Organization::get(&string, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if !user.role.is_mod() {
|
|
||||||
let team_member = database::models::TeamMember::get_from_user_id(
|
|
||||||
organization_item.team_id,
|
|
||||||
user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map_err(ApiError::Database)?;
|
|
||||||
|
|
||||||
let permissions =
|
|
||||||
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit this organization's icon.".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(icon) = organization_item.icon_url {
|
|
||||||
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
|
||||||
|
|
||||||
if let Some(icon_path) = name {
|
|
||||||
file_host.delete_file_version("", icon_path).await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let bytes =
|
|
||||||
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
|
|
||||||
|
|
||||||
let color = crate::util::img::get_color_from_img(&bytes)?;
|
|
||||||
|
|
||||||
let hash = sha1::Sha1::from(&bytes).hexdigest();
|
|
||||||
let organization_id: OrganizationId = organization_item.id.into();
|
|
||||||
let upload_data = file_host
|
|
||||||
.upload_file(
|
|
||||||
content_type,
|
|
||||||
&format!("data/{}/{}.{}", organization_id, hash, ext.ext),
|
|
||||||
bytes.freeze(),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE organizations
|
|
||||||
SET icon_url = $1, color = $2
|
|
||||||
WHERE (id = $3)
|
|
||||||
",
|
|
||||||
format!("{}/{}", cdn_url, upload_data.file_name),
|
|
||||||
color.map(|x| x as i32),
|
|
||||||
organization_item.id as database::models::ids::OrganizationId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
database::models::Organization::clear_cache(
|
|
||||||
organization_item.id,
|
|
||||||
Some(organization_item.title),
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Err(ApiError::InvalidInput(format!(
|
|
||||||
"Invalid format for project icon: {}",
|
|
||||||
ext.ext
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
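The icon upload above is gated on a recognised image extension and a 256 KiB payload cap (the 262144 passed to read_from_payload) before anything is written to the CDN. A small sketch of that front gate; the extension list is an illustrative subset, the real mapping lives in util::ext::get_image_content_type:

/// Map a file extension to an image content type; None rejects the upload.
fn image_content_type(ext: &str) -> Option<&'static str> {
    match ext {
        "png" => Some("image/png"),
        "jpg" | "jpeg" => Some("image/jpeg"),
        "gif" => Some("image/gif"),
        "webp" => Some("image/webp"),
        _ => None,
    }
}

const MAX_ICON_BYTES: usize = 262_144; // 256 KiB, matching read_from_payload above

fn validate_icon(ext: &str, payload_len: usize) -> Result<&'static str, String> {
    let content_type = image_content_type(ext)
        .ok_or_else(|| format!("Invalid format for organization icon: {ext}"))?;
    if payload_len > MAX_ICON_BYTES {
        return Err("Icons must be smaller than 256KiB".to_string());
    }
    Ok(content_type)
}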
|
|
||||||
#[delete("{id}/icon")]
|
#[delete("{id}/icon")]
|
||||||
@ -849,73 +253,13 @@ pub async fn delete_organization_icon(
|
|||||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::organizations::delete_organization_icon(
|
||||||
&req,
|
req,
|
||||||
&**pool,
|
info,
|
||||||
&redis,
|
pool.clone(),
|
||||||
&session_queue,
|
redis.clone(),
|
||||||
Some(&[Scopes::ORGANIZATION_WRITE]),
|
file_host,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
let string = info.into_inner().0;
|
|
||||||
|
|
||||||
let organization_item = database::models::Organization::get(&string, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if !user.role.is_mod() {
|
|
||||||
let team_member = database::models::TeamMember::get_from_user_id(
|
|
||||||
organization_item.team_id,
|
|
||||||
user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map_err(ApiError::Database)?;
|
|
||||||
|
|
||||||
let permissions =
|
|
||||||
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit this organization's icon.".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
|
||||||
if let Some(icon) = organization_item.icon_url {
|
|
||||||
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
|
||||||
|
|
||||||
if let Some(icon_path) = name {
|
|
||||||
file_host.delete_file_version("", icon_path).await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE organizations
|
|
||||||
SET icon_url = NULL, color = NULL
|
|
||||||
WHERE (id = $1)
|
|
||||||
",
|
|
||||||
organization_item.id as database::models::ids::OrganizationId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
database::models::Organization::clear_cache(
|
|
||||||
organization_item.id,
|
|
||||||
Some(organization_item.title),
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
}
|
}
|
||||||
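Both icon handlers delete the previous CDN object by stripping the "{CDN_URL}/" prefix from the stored icon_url and handing the remainder to delete_file_version. A one-function sketch of that path derivation (the example URL is hypothetical):

/// Recover the CDN object path from a stored icon URL so the old file can be
/// deleted, mirroring icon.split(&format!("{cdn_url}/")).nth(1) above.
fn cdn_object_path<'a>(icon_url: &'a str, cdn_url: &str) -> Option<&'a str> {
    icon_url.strip_prefix(cdn_url)?.strip_prefix('/')
}

// cdn_object_path("https://cdn.example.com/data/abc/icon.png", "https://cdn.example.com")
//     == Some("data/abc/icon.png")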
|
|||||||
File diff suppressed because it is too large
@ -1,20 +1,9 @@
|
|||||||
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
|
|
||||||
use crate::database;
|
|
||||||
use crate::database::models::image_item;
|
|
||||||
use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder};
|
|
||||||
use crate::database::redis::RedisPool;
|
use crate::database::redis::RedisPool;
|
||||||
use crate::models::ids::ImageId;
|
use crate::models::ids::ImageId;
|
||||||
use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId};
|
use crate::models::reports::ItemType;
|
||||||
use crate::models::images::{Image, ImageContext};
|
|
||||||
use crate::models::pats::Scopes;
|
|
||||||
use crate::models::reports::{ItemType, Report};
|
|
||||||
use crate::models::threads::{MessageBody, ThreadType};
|
|
||||||
use crate::queue::session::AuthQueue;
|
use crate::queue::session::AuthQueue;
|
||||||
use crate::routes::ApiError;
|
use crate::routes::{v3, ApiError};
|
||||||
use crate::util::img;
|
|
||||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||||
use chrono::Utc;
|
|
||||||
use futures::StreamExt;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
@ -44,177 +33,11 @@ pub struct CreateReport {
|
|||||||
pub async fn report_create(
|
pub async fn report_create(
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
mut body: web::Payload,
|
body: web::Payload,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let mut transaction = pool.begin().await?;
|
v3::reports::report_create(req, pool, body, redis, session_queue).await
|
||||||
|
|
||||||
let current_user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::REPORT_CREATE]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
|
|
||||||
let mut bytes = web::BytesMut::new();
|
|
||||||
while let Some(item) = body.next().await {
|
|
||||||
bytes.extend_from_slice(&item.map_err(|_| {
|
|
||||||
ApiError::InvalidInput("Error while parsing request payload!".to_string())
|
|
||||||
})?);
|
|
||||||
}
|
|
||||||
let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?;
|
|
||||||
|
|
||||||
let id = crate::database::models::generate_report_id(&mut transaction).await?;
|
|
||||||
let report_type = crate::database::models::categories::ReportType::get_id(
|
|
||||||
&new_report.report_type,
|
|
||||||
&mut *transaction,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput(format!("Invalid report type: {}", new_report.report_type))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let mut report = crate::database::models::report_item::Report {
|
|
||||||
id,
|
|
||||||
report_type_id: report_type,
|
|
||||||
project_id: None,
|
|
||||||
version_id: None,
|
|
||||||
user_id: None,
|
|
||||||
body: new_report.body.clone(),
|
|
||||||
reporter: current_user.id.into(),
|
|
||||||
created: Utc::now(),
|
|
||||||
closed: false,
|
|
||||||
};
|
|
||||||
|
|
||||||
match new_report.item_type {
|
|
||||||
ItemType::Project => {
|
|
||||||
let project_id = ProjectId(parse_base62(new_report.item_id.as_str())?);
|
|
||||||
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"SELECT EXISTS(SELECT 1 FROM mods WHERE id = $1)",
|
|
||||||
project_id.0 as i64
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if !result.exists.unwrap_or(false) {
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Project could not be found: {}",
|
|
||||||
new_report.item_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
report.project_id = Some(project_id.into())
|
|
||||||
}
|
|
||||||
ItemType::Version => {
|
|
||||||
let version_id = VersionId(parse_base62(new_report.item_id.as_str())?);
|
|
||||||
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)",
|
|
||||||
version_id.0 as i64
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if !result.exists.unwrap_or(false) {
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Version could not be found: {}",
|
|
||||||
new_report.item_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
report.version_id = Some(version_id.into())
|
|
||||||
}
|
|
||||||
ItemType::User => {
|
|
||||||
let user_id = UserId(parse_base62(new_report.item_id.as_str())?);
|
|
||||||
|
|
||||||
let result = sqlx::query!(
|
|
||||||
"SELECT EXISTS(SELECT 1 FROM users WHERE id = $1)",
|
|
||||||
user_id.0 as i64
|
|
||||||
)
|
|
||||||
.fetch_one(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if !result.exists.unwrap_or(false) {
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"User could not be found: {}",
|
|
||||||
new_report.item_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
report.user_id = Some(user_id.into())
|
|
||||||
}
|
|
||||||
ItemType::Unknown => {
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Invalid report item type: {}",
|
|
||||||
new_report.item_type.as_str()
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
report.insert(&mut transaction).await?;
|
|
||||||
|
|
||||||
for image_id in new_report.uploaded_images {
|
|
||||||
if let Some(db_image) =
|
|
||||||
image_item::Image::get(image_id.into(), &mut *transaction, &redis).await?
|
|
||||||
{
|
|
||||||
let image: Image = db_image.into();
|
|
||||||
if !matches!(image.context, ImageContext::Report { .. })
|
|
||||||
|| image.context.inner_id().is_some()
|
|
||||||
{
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Image {} is not unused and in the 'report' context",
|
|
||||||
image_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE uploaded_images
|
|
||||||
SET report_id = $1
|
|
||||||
WHERE id = $2
|
|
||||||
",
|
|
||||||
id.0 as i64,
|
|
||||||
image_id.0 as i64
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
image_item::Image::clear_cache(image.id.into(), &redis).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Image {} could not be found",
|
|
||||||
image_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let thread_id = ThreadBuilder {
|
|
||||||
type_: ThreadType::Report,
|
|
||||||
members: vec![],
|
|
||||||
project_id: None,
|
|
||||||
report_id: Some(report.id),
|
|
||||||
}
|
|
||||||
.insert(&mut transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(Report {
|
|
||||||
id: id.into(),
|
|
||||||
report_type: new_report.report_type.clone(),
|
|
||||||
item_id: new_report.item_id.clone(),
|
|
||||||
item_type: new_report.item_type.clone(),
|
|
||||||
reporter: current_user.id,
|
|
||||||
body: new_report.body.clone(),
|
|
||||||
created: Utc::now(),
|
|
||||||
closed: false,
|
|
||||||
thread_id: thread_id.into(),
|
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
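A report targets exactly one of a project, a version, or a user: the old handler parses the base62 item id, verifies the row exists with a SELECT EXISTS, and stores the id in the matching nullable column of the report. A compact sketch of that dispatch, with the existence check abstracted behind a closure:

enum ItemType { Project, Version, User, Unknown }

#[derive(Default)]
struct ReportTarget {
    project_id: Option<u64>,
    version_id: Option<u64>,
    user_id: Option<u64>,
}

/// Route the reported item id into the right column of the report row.
/// `exists` stands in for the SELECT EXISTS checks against mods/versions/users.
fn assign_target(
    item_type: ItemType,
    item_id: u64,
    exists: impl Fn(&ItemType, u64) -> bool,
) -> Result<ReportTarget, String> {
    if matches!(item_type, ItemType::Unknown) {
        return Err("Invalid report item type".to_string());
    }
    if !exists(&item_type, item_id) {
        return Err(format!("Reported item could not be found: {item_id}"));
    }
    let mut target = ReportTarget::default();
    match item_type {
        ItemType::Project => target.project_id = Some(item_id),
        ItemType::Version => target.version_id = Some(item_id),
        ItemType::User => target.user_id = Some(item_id),
        ItemType::Unknown => unreachable!(),
    }
    Ok(target)
}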
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@ -240,65 +63,17 @@ pub async fn reports(
|
|||||||
count: web::Query<ReportsRequestOptions>,
|
count: web::Query<ReportsRequestOptions>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::reports::reports(
|
||||||
&req,
|
req,
|
||||||
&**pool,
|
pool,
|
||||||
&redis,
|
redis,
|
||||||
&session_queue,
|
web::Query(v3::reports::ReportsRequestOptions {
|
||||||
Some(&[Scopes::REPORT_READ]),
|
count: count.count,
|
||||||
|
all: count.all,
|
||||||
|
}),
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
use futures::stream::TryStreamExt;
|
|
||||||
|
|
||||||
let report_ids = if user.role.is_mod() && count.all {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT id FROM reports
|
|
||||||
WHERE closed = FALSE
|
|
||||||
ORDER BY created ASC
|
|
||||||
LIMIT $1;
|
|
||||||
",
|
|
||||||
count.count as i64
|
|
||||||
)
|
|
||||||
.fetch_many(&**pool)
|
|
||||||
.try_filter_map(|e| async {
|
|
||||||
Ok(e.right()
|
|
||||||
.map(|m| crate::database::models::ids::ReportId(m.id)))
|
|
||||||
})
|
|
||||||
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
|
|
||||||
.await?
|
|
||||||
} else {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT id FROM reports
|
|
||||||
WHERE closed = FALSE AND reporter = $1
|
|
||||||
ORDER BY created ASC
|
|
||||||
LIMIT $2;
|
|
||||||
",
|
|
||||||
user.id.0 as i64,
|
|
||||||
count.count as i64
|
|
||||||
)
|
|
||||||
.fetch_many(&**pool)
|
|
||||||
.try_filter_map(|e| async {
|
|
||||||
Ok(e.right()
|
|
||||||
.map(|m| crate::database::models::ids::ReportId(m.id)))
|
|
||||||
})
|
|
||||||
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
|
|
||||||
.await?
|
|
||||||
};
|
|
||||||
|
|
||||||
let query_reports =
|
|
||||||
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
|
|
||||||
|
|
||||||
let mut reports: Vec<Report> = Vec::new();
|
|
||||||
|
|
||||||
for x in query_reports {
|
|
||||||
reports.push(x.into());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(reports))
|
|
||||||
}
|
}
|
||||||
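The listing above runs one of two queries: moderators who ask for all see every open report, oldest first and capped by count, while everyone else only sees open reports they filed themselves. A sketch of selecting the statement (the bind parameters differ between the two branches, as in the query! calls above):

/// Choose the report listing query for the caller.
fn reports_query(is_mod: bool, all: bool) -> &'static str {
    if is_mod && all {
        "SELECT id FROM reports WHERE closed = FALSE ORDER BY created ASC LIMIT $1"
    } else {
        "SELECT id FROM reports WHERE closed = FALSE AND reporter = $1 ORDER BY created ASC LIMIT $2"
    }
}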
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@ -314,32 +89,14 @@ pub async fn reports_get(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let report_ids: Vec<crate::database::models::ids::ReportId> =
|
v3::reports::reports_get(
|
||||||
serde_json::from_str::<Vec<crate::models::ids::ReportId>>(&ids.ids)?
|
req,
|
||||||
.into_iter()
|
web::Query(v3::reports::ReportIds { ids: ids.ids }),
|
||||||
.map(|x| x.into())
|
pool,
|
||||||
.collect();
|
redis,
|
||||||
|
session_queue,
|
||||||
let reports_data =
|
|
||||||
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
|
|
||||||
|
|
||||||
let user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::REPORT_READ]),
|
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
let all_reports = reports_data
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| user.role.is_mod() || x.reporter == user.id.into())
|
|
||||||
.map(|x| x.into())
|
|
||||||
.collect::<Vec<Report>>();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(all_reports))
|
|
||||||
}
|
}
|
||||||
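Bulk and single report reads share one visibility rule, visible in the filter above: moderators see every report, other users only see reports they filed. A one-line sketch of that predicate:

#[derive(Clone, Copy, PartialEq, Eq)]
struct UserId(u64);

/// Visibility rule shared by reports_get and report_get.
fn can_view_report(reporter: UserId, viewer: UserId, viewer_is_mod: bool) -> bool {
    viewer_is_mod || reporter == viewer
}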
|
|
||||||
#[get("report/{id}")]
|
#[get("report/{id}")]
|
||||||
@ -350,29 +107,7 @@ pub async fn report_get(
|
|||||||
info: web::Path<(crate::models::reports::ReportId,)>,
|
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::reports::report_get(req, pool, redis, info, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::REPORT_READ]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
let id = info.into_inner().0.into();
|
|
||||||
|
|
||||||
let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(report) = report {
|
|
||||||
if !user.role.is_mod() && report.reporter != user.id.into() {
|
|
||||||
return Ok(HttpResponse::NotFound().body(""));
|
|
||||||
}
|
|
||||||
|
|
||||||
let report: Report = report.into();
|
|
||||||
Ok(HttpResponse::Ok().json(report))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, Validate)]
|
#[derive(Deserialize, Validate)]
|
||||||
@ -391,101 +126,19 @@ pub async fn report_edit(
|
|||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
edit_report: web::Json<EditReport>,
|
edit_report: web::Json<EditReport>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
let edit_report = edit_report.into_inner();
|
||||||
&req,
|
v3::reports::report_edit(
|
||||||
&**pool,
|
req,
|
||||||
&redis,
|
pool,
|
||||||
&session_queue,
|
redis,
|
||||||
Some(&[Scopes::REPORT_WRITE]),
|
info,
|
||||||
|
session_queue,
|
||||||
|
web::Json(v3::reports::EditReport {
|
||||||
|
body: edit_report.body,
|
||||||
|
closed: edit_report.closed,
|
||||||
|
}),
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
let id = info.into_inner().0.into();
|
|
||||||
|
|
||||||
let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(report) = report {
|
|
||||||
if !user.role.is_mod() && report.reporter != user.id.into() {
|
|
||||||
return Ok(HttpResponse::NotFound().body(""));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
if let Some(edit_body) = &edit_report.body {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE reports
|
|
||||||
SET body = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
edit_body,
|
|
||||||
id as crate::database::models::ids::ReportId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(edit_closed) = edit_report.closed {
|
|
||||||
if !user.role.is_mod() {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"You cannot reopen a report!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
ThreadMessageBuilder {
|
|
||||||
author_id: Some(user.id.into()),
|
|
||||||
body: if !edit_closed && report.closed {
|
|
||||||
MessageBody::ThreadReopen
|
|
||||||
} else {
|
|
||||||
MessageBody::ThreadClosure
|
|
||||||
},
|
|
||||||
thread_id: report.thread_id,
|
|
||||||
}
|
|
||||||
.insert(&mut transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE reports
|
|
||||||
SET closed = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
edit_closed,
|
|
||||||
id as crate::database::models::ids::ReportId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE threads
|
|
||||||
SET show_in_mod_inbox = $1
|
|
||||||
WHERE id = $2
|
|
||||||
",
|
|
||||||
!(edit_closed || report.closed),
|
|
||||||
report.thread_id.0,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// delete any images no longer in the body
|
|
||||||
let checkable_strings: Vec<&str> = vec![&edit_report.body]
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|x: &Option<String>| x.as_ref().map(|y| y.as_str()))
|
|
||||||
.collect();
|
|
||||||
let image_context = ImageContext::Report {
|
|
||||||
report_id: Some(id.into()),
|
|
||||||
};
|
|
||||||
img::delete_unused_images(image_context, checkable_strings, &mut transaction, &redis)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
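When a moderator toggles the closed flag, the old handler records a ThreadReopen message if an already-closed report is being reopened and a ThreadClosure message otherwise, and keeps the thread in the mod inbox only while the report is open on both sides of the edit. A tiny sketch of those two derived values, matching the branches above:

enum MessageBody { ThreadClosure, ThreadReopen }

/// Derive the audit message and the show_in_mod_inbox flag from the edit.
fn closure_effects(edit_closed: bool, was_closed: bool) -> (MessageBody, bool) {
    let body = if !edit_closed && was_closed {
        MessageBody::ThreadReopen
    } else {
        MessageBody::ThreadClosure
    };
    let show_in_mod_inbox = !(edit_closed || was_closed);
    (body, show_in_mod_inbox)
}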
|
|
||||||
#[delete("report/{id}")]
|
#[delete("report/{id}")]
|
||||||
@ -496,35 +149,5 @@ pub async fn report_delete(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
check_is_moderator_from_headers(
|
v3::reports::report_delete(req, pool, info, redis, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::REPORT_DELETE]),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
let id = info.into_inner().0;
|
|
||||||
let context = ImageContext::Report {
|
|
||||||
report_id: Some(id),
|
|
||||||
};
|
|
||||||
let uploaded_images =
|
|
||||||
database::models::Image::get_many_contexted(context, &mut transaction).await?;
|
|
||||||
for image in uploaded_images {
|
|
||||||
image_item::Image::remove(image.id, &mut transaction, &redis).await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let result =
|
|
||||||
crate::database::models::report_item::Report::remove_full(id.into(), &mut transaction)
|
|
||||||
.await?;
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
if result.is_some() {
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,6 +1,5 @@
|
|||||||
use crate::routes::ApiError;
|
use crate::routes::{v3, ApiError};
|
||||||
use actix_web::{get, web, HttpResponse};
|
use actix_web::{get, web, HttpResponse};
|
||||||
use serde_json::json;
|
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
|
|
||||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||||
@ -9,78 +8,5 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
|||||||
|
|
||||||
#[get("statistics")]
|
#[get("statistics")]
|
||||||
pub async fn get_stats(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
|
pub async fn get_stats(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
|
||||||
let projects = sqlx::query!(
|
v3::statistics::get_stats(pool).await
|
||||||
"
|
|
||||||
SELECT COUNT(id)
|
|
||||||
FROM mods
|
|
||||||
WHERE status = ANY($1)
|
|
||||||
",
|
|
||||||
&*crate::models::projects::ProjectStatus::iterator()
|
|
||||||
.filter(|x| x.is_searchable())
|
|
||||||
.map(|x| x.to_string())
|
|
||||||
.collect::<Vec<String>>(),
|
|
||||||
)
|
|
||||||
.fetch_one(&**pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let versions = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT COUNT(v.id)
|
|
||||||
FROM versions v
|
|
||||||
INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
|
|
||||||
WHERE v.status = ANY($2)
|
|
||||||
",
|
|
||||||
&*crate::models::projects::ProjectStatus::iterator()
|
|
||||||
.filter(|x| x.is_searchable())
|
|
||||||
.map(|x| x.to_string())
|
|
||||||
.collect::<Vec<String>>(),
|
|
||||||
&*crate::models::projects::VersionStatus::iterator()
|
|
||||||
.filter(|x| x.is_listed())
|
|
||||||
.map(|x| x.to_string())
|
|
||||||
.collect::<Vec<String>>(),
|
|
||||||
)
|
|
||||||
.fetch_one(&**pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let authors = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT COUNT(DISTINCT u.id)
|
|
||||||
FROM users u
|
|
||||||
INNER JOIN team_members tm on u.id = tm.user_id AND tm.accepted = TRUE
|
|
||||||
INNER JOIN mods m on tm.team_id = m.team_id AND m.status = ANY($1)
|
|
||||||
",
|
|
||||||
&*crate::models::projects::ProjectStatus::iterator()
|
|
||||||
.filter(|x| x.is_searchable())
|
|
||||||
.map(|x| x.to_string())
|
|
||||||
.collect::<Vec<String>>(),
|
|
||||||
)
|
|
||||||
.fetch_one(&**pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let files = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT COUNT(f.id) FROM files f
|
|
||||||
INNER JOIN versions v on f.version_id = v.id AND v.status = ANY($2)
|
|
||||||
INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
|
|
||||||
",
|
|
||||||
&*crate::models::projects::ProjectStatus::iterator()
|
|
||||||
.filter(|x| x.is_searchable())
|
|
||||||
.map(|x| x.to_string())
|
|
||||||
.collect::<Vec<String>>(),
|
|
||||||
&*crate::models::projects::VersionStatus::iterator()
|
|
||||||
.filter(|x| x.is_listed())
|
|
||||||
.map(|x| x.to_string())
|
|
||||||
.collect::<Vec<String>>(),
|
|
||||||
)
|
|
||||||
.fetch_one(&**pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let json = json!({
|
|
||||||
"projects": projects.count,
|
|
||||||
"versions": versions.count,
|
|
||||||
"authors": authors.count,
|
|
||||||
"files": files.count,
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(json))
|
|
||||||
}
|
}
|
||||||
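The old /statistics handler runs four COUNT queries (searchable projects, listed versions, distinct authors, files) and returns them as a single JSON object; the v3 route it now delegates to keeps the same response shape. A sketch of the final assembly with serde_json::json!, assuming the four counts have already been fetched:

use serde_json::json;

/// Assemble the /statistics payload from the four pre-fetched counts.
fn stats_payload(projects: i64, versions: i64, authors: i64, files: i64) -> serde_json::Value {
    json!({
        "projects": projects,
        "versions": versions,
        "authors": authors,
        "files": files,
    })
}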
|
|||||||
@ -1,10 +1,12 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use super::ApiError;
|
use super::ApiError;
|
||||||
use crate::database::models;
|
use crate::database::models::loader_fields::LoaderFieldEnumValue;
|
||||||
use crate::database::models::categories::{DonationPlatform, ProjectType, ReportType, SideType};
|
|
||||||
use crate::database::redis::RedisPool;
|
use crate::database::redis::RedisPool;
|
||||||
|
use crate::routes::v3::tags::{LoaderData as LoaderDataV3, LoaderFieldsEnumQuery};
|
||||||
|
use crate::routes::{v2_reroute, v3};
|
||||||
use actix_web::{get, web, HttpResponse};
|
use actix_web::{get, web, HttpResponse};
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use models::categories::{Category, GameVersion, Loader};
|
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
|
|
||||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||||
@ -24,10 +26,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
|||||||
|
|
||||||
#[derive(serde::Serialize, serde::Deserialize)]
|
#[derive(serde::Serialize, serde::Deserialize)]
|
||||||
pub struct CategoryData {
|
pub struct CategoryData {
|
||||||
icon: String,
|
pub icon: String,
|
||||||
name: String,
|
pub name: String,
|
||||||
project_type: String,
|
pub project_type: String,
|
||||||
header: String,
|
pub header: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("category")]
|
#[get("category")]
|
||||||
@ -35,25 +37,14 @@ pub async fn category_list(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let results = Category::list(&**pool, &redis)
|
v3::tags::category_list(pool, redis).await
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|x| CategoryData {
|
|
||||||
icon: x.icon,
|
|
||||||
name: x.category,
|
|
||||||
project_type: x.project_type,
|
|
||||||
header: x.header,
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(results))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(serde::Serialize, serde::Deserialize)]
|
#[derive(serde::Serialize, serde::Deserialize)]
|
||||||
pub struct LoaderData {
|
pub struct LoaderData {
|
||||||
icon: String,
|
pub icon: String,
|
||||||
name: String,
|
pub name: String,
|
||||||
supported_project_types: Vec<String>,
|
pub supported_project_types: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("loader")]
|
#[get("loader")]
|
||||||
@ -61,22 +52,26 @@ pub async fn loader_list(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let mut results = Loader::list(&**pool, &redis)
|
let response = v3::tags::loader_list(pool, redis).await?;
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|x| LoaderData {
|
|
||||||
icon: x.icon,
|
|
||||||
name: x.loader,
|
|
||||||
supported_project_types: x.supported_project_types,
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
results.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
|
// Convert to V2 format
|
||||||
|
match v2_reroute::extract_ok_json::<Vec<LoaderDataV3>>(response).await {
|
||||||
Ok(HttpResponse::Ok().json(results))
|
Ok(loaders) => {
|
||||||
|
let loaders = loaders
|
||||||
|
.into_iter()
|
||||||
|
.map(|l| LoaderData {
|
||||||
|
icon: l.icon,
|
||||||
|
name: l.name,
|
||||||
|
supported_project_types: l.supported_project_types,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
Ok(HttpResponse::Ok().json(loaders))
|
||||||
|
}
|
||||||
|
Err(response) => Ok(response),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(serde::Serialize)]
|
#[derive(serde::Serialize, serde::Deserialize)]
|
||||||
pub struct GameVersionQueryData {
|
pub struct GameVersionQueryData {
|
||||||
pub version: String,
|
pub version: String,
|
||||||
pub version_type: String,
|
pub version_type: String,
|
||||||
@ -97,21 +92,50 @@ pub async fn game_version_list(
|
|||||||
query: web::Query<GameVersionQuery>,
|
query: web::Query<GameVersionQuery>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let results: Vec<GameVersionQueryData> = if query.type_.is_some() || query.major.is_some() {
|
let mut filters = HashMap::new();
|
||||||
GameVersion::list_filter(query.type_.as_deref(), query.major, &**pool, &redis).await?
|
if let Some(type_) = &query.type_ {
|
||||||
} else {
|
filters.insert("type".to_string(), serde_json::json!(type_));
|
||||||
GameVersion::list(&**pool, &redis).await?
|
|
||||||
}
|
}
|
||||||
.into_iter()
|
if let Some(major) = query.major {
|
||||||
.map(|x| GameVersionQueryData {
|
filters.insert("major".to_string(), serde_json::json!(major));
|
||||||
version: x.version,
|
}
|
||||||
version_type: x.type_,
|
let response = v3::tags::loader_fields_list(
|
||||||
date: x.created,
|
pool,
|
||||||
major: x.major,
|
web::Query(LoaderFieldsEnumQuery {
|
||||||
})
|
loader_field: "game_versions".to_string(),
|
||||||
.collect();
|
filters: Some(filters),
|
||||||
|
}),
|
||||||
|
redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(results))
|
// Convert to V2 format
|
||||||
|
Ok(
|
||||||
|
match v2_reroute::extract_ok_json::<Vec<LoaderFieldEnumValue>>(response).await {
|
||||||
|
Ok(fields) => {
|
||||||
|
let fields = fields
|
||||||
|
.into_iter()
|
||||||
|
.map(|f| GameVersionQueryData {
|
||||||
|
version: f.value,
|
||||||
|
version_type: f
|
||||||
|
.metadata
|
||||||
|
.get("type")
|
||||||
|
.and_then(|m| m.as_str())
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string(),
|
||||||
|
date: f.created,
|
||||||
|
major: f
|
||||||
|
.metadata
|
||||||
|
.get("major")
|
||||||
|
.and_then(|m| m.as_bool())
|
||||||
|
.unwrap_or_default(),
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
HttpResponse::Ok().json(fields)
|
||||||
|
}
|
||||||
|
Err(response) => response,
|
||||||
|
},
|
||||||
|
)
|
||||||
}
|
}
|
||||||
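In v3, game versions are loader field enum values whose metadata object carries the "type" and "major" keys; the v2 tag route above reads those two keys back out to rebuild the legacy GameVersionQueryData shape. A sketch of that metadata extraction with serde_json::Value and a simplified copy of the target struct:

use chrono::{DateTime, Utc};
use serde_json::Value;

struct GameVersionQueryData {
    version: String,
    version_type: String,
    date: DateTime<Utc>,
    major: bool,
}

/// Rebuild the legacy v2 game-version shape from a v3 loader field enum value.
fn to_legacy(value: String, created: DateTime<Utc>, metadata: &Value) -> GameVersionQueryData {
    GameVersionQueryData {
        version: value,
        version_type: metadata
            .get("type")
            .and_then(Value::as_str)
            .unwrap_or_default()
            .to_string(),
        date: created,
        major: metadata.get("major").and_then(Value::as_bool).unwrap_or_default(),
    }
}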
|
|
||||||
#[derive(serde::Serialize)]
|
#[derive(serde::Serialize)]
|
||||||
@ -122,17 +146,7 @@ pub struct License {
|
|||||||
|
|
||||||
#[get("license")]
|
#[get("license")]
|
||||||
pub async fn license_list() -> HttpResponse {
|
pub async fn license_list() -> HttpResponse {
|
||||||
let licenses = spdx::identifiers::LICENSES;
|
v3::tags::license_list().await
|
||||||
let mut results: Vec<License> = Vec::with_capacity(licenses.len());
|
|
||||||
|
|
||||||
for (short, name, _) in licenses {
|
|
||||||
results.push(License {
|
|
||||||
short: short.to_string(),
|
|
||||||
name: name.to_string(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
HttpResponse::Ok().json(results)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(serde::Serialize)]
|
#[derive(serde::Serialize)]
|
||||||
@ -143,25 +157,7 @@ pub struct LicenseText {
|
|||||||
|
|
||||||
#[get("license/{id}")]
|
#[get("license/{id}")]
|
||||||
pub async fn license_text(params: web::Path<(String,)>) -> Result<HttpResponse, ApiError> {
|
pub async fn license_text(params: web::Path<(String,)>) -> Result<HttpResponse, ApiError> {
|
||||||
let license_id = params.into_inner().0;
|
v3::tags::license_text(params).await
|
||||||
|
|
||||||
if license_id == *crate::models::projects::DEFAULT_LICENSE_ID {
|
|
||||||
return Ok(HttpResponse::Ok().json(LicenseText {
|
|
||||||
title: "All Rights Reserved".to_string(),
|
|
||||||
body: "All rights reserved unless explicitly stated.".to_string(),
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(license) = spdx::license_id(&license_id) {
|
|
||||||
return Ok(HttpResponse::Ok().json(LicenseText {
|
|
||||||
title: license.full_name.to_string(),
|
|
||||||
body: license.text().to_string(),
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(ApiError::InvalidInput(
|
|
||||||
"Invalid SPDX identifier specified".to_string(),
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(serde::Serialize)]
|
#[derive(serde::Serialize)]
|
||||||
@ -175,15 +171,7 @@ pub async fn donation_platform_list(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let results: Vec<DonationPlatformQueryData> = DonationPlatform::list(&**pool, &redis)
|
v3::tags::donation_platform_list(pool, redis).await
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(|x| DonationPlatformQueryData {
|
|
||||||
short: x.short,
|
|
||||||
name: x.name,
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
Ok(HttpResponse::Ok().json(results))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("report_type")]
|
#[get("report_type")]
|
||||||
@ -191,8 +179,7 @@ pub async fn report_type_list(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let results = ReportType::list(&**pool, &redis).await?;
|
v3::tags::report_type_list(pool, redis).await
|
||||||
Ok(HttpResponse::Ok().json(results))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("project_type")]
|
#[get("project_type")]
|
||||||
@ -200,8 +187,7 @@ pub async fn project_type_list(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let results = ProjectType::list(&**pool, &redis).await?;
|
v3::tags::project_type_list(pool, redis).await
|
||||||
Ok(HttpResponse::Ok().json(results))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("side_type")]
|
#[get("side_type")]
|
||||||
@ -209,6 +195,24 @@ pub async fn side_type_list(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let results = SideType::list(&**pool, &redis).await?;
|
let response = v3::tags::loader_fields_list(
|
||||||
Ok(HttpResponse::Ok().json(results))
|
pool,
|
||||||
|
web::Query(LoaderFieldsEnumQuery {
|
||||||
|
loader_field: "client_side".to_string(), // same as server_side
|
||||||
|
filters: None,
|
||||||
|
}),
|
||||||
|
redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Convert to V2 format
|
||||||
|
Ok(
|
||||||
|
match v2_reroute::extract_ok_json::<Vec<LoaderFieldEnumValue>>(response).await {
|
||||||
|
Ok(fields) => {
|
||||||
|
let fields = fields.into_iter().map(|f| f.value).collect::<Vec<_>>();
|
||||||
|
HttpResponse::Ok().json(fields)
|
||||||
|
}
|
||||||
|
Err(response) => response,
|
||||||
|
},
|
||||||
|
)
|
||||||
}
|
}
|
||||||
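The v2 side_type tag only needs the bare enum values of the client_side loader field (the server_side field shares the same value set), so the metadata is dropped entirely. A minimal sketch of that flattening step with a simplified value type:

struct LoaderFieldEnumValue {
    value: String,
}

/// Collapse the v3 enum values to the plain strings the v2 endpoint returns.
fn side_types(fields: Vec<LoaderFieldEnumValue>) -> Vec<String> {
    fields.into_iter().map(|f| f.value).collect()
}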
|
|||||||
@@ -1,15 +1,8 @@
-use crate::auth::{get_user_from_headers, is_authorized};
-use crate::database::models::notification_item::NotificationBuilder;
-use crate::database::models::team_item::TeamAssociationId;
-use crate::database::models::{Organization, Team, TeamMember, User};
 use crate::database::redis::RedisPool;
-use crate::database::Project;
-use crate::models::notifications::NotificationBody;
-use crate::models::pats::Scopes;
 use crate::models::teams::{OrganizationPermissions, ProjectPermissions, TeamId};
 use crate::models::users::UserId;
 use crate::queue::session::AuthQueue;
-use crate::routes::ApiError;
+use crate::routes::{v3, ApiError};
 use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
 use rust_decimal::Decimal;
 use serde::{Deserialize, Serialize};
@ -41,75 +34,7 @@ pub async fn team_members_get_project(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let string = info.into_inner().0;
|
v3::teams::team_members_get_project(req, info, pool, redis, session_queue).await
|
||||||
let project_data = crate::database::models::Project::get(&string, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(project) = project_data {
|
|
||||||
let current_user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PROJECT_READ]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
if !is_authorized(&project.inner, ¤t_user, &pool).await? {
|
|
||||||
return Ok(HttpResponse::NotFound().body(""));
|
|
||||||
}
|
|
||||||
let mut members_data =
|
|
||||||
TeamMember::get_from_team_full(project.inner.team_id, &**pool, &redis).await?;
|
|
||||||
let mut member_user_ids = members_data.iter().map(|x| x.user_id).collect::<Vec<_>>();
|
|
||||||
|
|
||||||
// Adds the organization's team members to the list of members, if the project is associated with an organization
|
|
||||||
if let Some(oid) = project.inner.organization_id {
|
|
||||||
let organization_data = Organization::get_id(oid, &**pool, &redis).await?;
|
|
||||||
if let Some(organization_data) = organization_data {
|
|
||||||
let org_team =
|
|
||||||
TeamMember::get_from_team_full(organization_data.team_id, &**pool, &redis)
|
|
||||||
.await?;
|
|
||||||
for member in org_team {
|
|
||||||
if !member_user_ids.contains(&member.user_id) {
|
|
||||||
member_user_ids.push(member.user_id);
|
|
||||||
members_data.push(member);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let users =
|
|
||||||
crate::database::models::User::get_many_ids(&member_user_ids, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
let user_id = current_user.as_ref().map(|x| x.id.into());
|
|
||||||
|
|
||||||
let logged_in = current_user
|
|
||||||
.and_then(|user| {
|
|
||||||
members_data
|
|
||||||
.iter()
|
|
||||||
.find(|x| x.user_id == user.id.into() && x.accepted)
|
|
||||||
})
|
|
||||||
.is_some();
|
|
||||||
let team_members: Vec<_> = members_data
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| {
|
|
||||||
logged_in
|
|
||||||
|| x.accepted
|
|
||||||
|| user_id
|
|
||||||
.map(|y: crate::database::models::UserId| y == x.user_id)
|
|
||||||
.unwrap_or(false)
|
|
||||||
})
|
|
||||||
.flat_map(|data| {
|
|
||||||
users.iter().find(|x| x.id == data.user_id).map(|user| {
|
|
||||||
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
Ok(HttpResponse::Ok().json(team_members))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("{id}/members")]
|
#[get("{id}/members")]
|
||||||
@ -120,61 +45,7 @@ pub async fn team_members_get_organization(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let string = info.into_inner().0;
|
v3::teams::team_members_get_organization(req, info, pool, redis, session_queue).await
|
||||||
let organization_data =
|
|
||||||
crate::database::models::Organization::get(&string, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(organization) = organization_data {
|
|
||||||
let current_user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::ORGANIZATION_READ]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
let members_data =
|
|
||||||
TeamMember::get_from_team_full(organization.team_id, &**pool, &redis).await?;
|
|
||||||
let users = crate::database::models::User::get_many_ids(
|
|
||||||
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let user_id = current_user.as_ref().map(|x| x.id.into());
|
|
||||||
|
|
||||||
let logged_in = current_user
|
|
||||||
.and_then(|user| {
|
|
||||||
members_data
|
|
||||||
.iter()
|
|
||||||
.find(|x| x.user_id == user.id.into() && x.accepted)
|
|
||||||
})
|
|
||||||
.is_some();
|
|
||||||
|
|
||||||
let team_members: Vec<_> = members_data
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| {
|
|
||||||
logged_in
|
|
||||||
|| x.accepted
|
|
||||||
|| user_id
|
|
||||||
.map(|y: crate::database::models::UserId| y == x.user_id)
|
|
||||||
.unwrap_or(false)
|
|
||||||
})
|
|
||||||
.flat_map(|data| {
|
|
||||||
users.iter().find(|x| x.id == data.user_id).map(|user| {
|
|
||||||
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(team_members))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns all members of a team, but not necessarily those of a project-team's organization (unlike team_members_get_project)
|
// Returns all members of a team, but not necessarily those of a project-team's organization (unlike team_members_get_project)
|
||||||
@ -186,53 +57,7 @@ pub async fn team_members_get(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let id = info.into_inner().0;
|
v3::teams::team_members_get(req, info, pool, redis, session_queue).await
|
||||||
let members_data = TeamMember::get_from_team_full(id.into(), &**pool, &redis).await?;
|
|
||||||
let users = crate::database::models::User::get_many_ids(
|
|
||||||
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let current_user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PROJECT_READ]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
let user_id = current_user.as_ref().map(|x| x.id.into());
|
|
||||||
|
|
||||||
let logged_in = current_user
|
|
||||||
.and_then(|user| {
|
|
||||||
members_data
|
|
||||||
.iter()
|
|
||||||
.find(|x| x.user_id == user.id.into() && x.accepted)
|
|
||||||
})
|
|
||||||
.is_some();
|
|
||||||
|
|
||||||
let team_members: Vec<_> = members_data
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| {
|
|
||||||
logged_in
|
|
||||||
|| x.accepted
|
|
||||||
|| user_id
|
|
||||||
.map(|y: crate::database::models::UserId| y == x.user_id)
|
|
||||||
.unwrap_or(false)
|
|
||||||
})
|
|
||||||
.flat_map(|data| {
|
|
||||||
users
|
|
||||||
.iter()
|
|
||||||
.find(|x| x.id == data.user_id)
|
|
||||||
.map(|user| crate::models::teams::TeamMember::from(data, user.clone(), !logged_in))
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(team_members))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
@ -248,61 +73,14 @@ pub async fn teams_get(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
use itertools::Itertools;
|
v3::teams::teams_get(
|
||||||
|
req,
|
||||||
let team_ids = serde_json::from_str::<Vec<TeamId>>(&ids.ids)?
|
web::Query(v3::teams::TeamIds { ids: ids.ids }),
|
||||||
.into_iter()
|
pool,
|
||||||
.map(|x| x.into())
|
redis,
|
||||||
.collect::<Vec<crate::database::models::ids::TeamId>>();
|
session_queue,
|
||||||
|
|
||||||
let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
|
|
||||||
let users = crate::database::models::User::get_many_ids(
|
|
||||||
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let current_user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PROJECT_READ]),
|
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
let teams_groups = teams_data.into_iter().group_by(|data| data.team_id.0);
|
|
||||||
|
|
||||||
let mut teams: Vec<Vec<crate::models::teams::TeamMember>> = vec![];
|
|
||||||
|
|
||||||
for (_, member_data) in &teams_groups {
|
|
||||||
let members = member_data.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let logged_in = current_user
|
|
||||||
.as_ref()
|
|
||||||
.and_then(|user| {
|
|
||||||
members
|
|
||||||
.iter()
|
|
||||||
.find(|x| x.user_id == user.id.into() && x.accepted)
|
|
||||||
})
|
|
||||||
.is_some();
|
|
||||||
|
|
||||||
let team_members = members
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| logged_in || x.accepted)
|
|
||||||
.flat_map(|data| {
|
|
||||||
users.iter().find(|x| x.id == data.user_id).map(|user| {
|
|
||||||
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
teams.push(team_members.collect());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(teams))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("{id}/join")]
|
#[post("{id}/join")]
|
||||||
@ -313,53 +91,7 @@ pub async fn join_team(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let team_id = info.into_inner().0.into();
|
v3::teams::join_team(req, info, pool, redis, session_queue).await
|
||||||
let current_user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PROJECT_WRITE]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
|
|
||||||
let member =
|
|
||||||
TeamMember::get_from_user_id_pending(team_id, current_user.id.into(), &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(member) = member {
|
|
||||||
if member.accepted {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"You are already a member of this team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
// Edit Team Member to set Accepted to True
|
|
||||||
TeamMember::edit_team_member(
|
|
||||||
team_id,
|
|
||||||
current_user.id.into(),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
Some(true),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
&mut transaction,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
User::clear_project_cache(&[current_user.id.into()], &redis).await?;
|
|
||||||
TeamMember::clear_cache(team_id, &redis).await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"There is no pending request from this team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_role() -> String {
|
fn default_role() -> String {
|
||||||
@ -394,165 +126,22 @@ pub async fn add_team_member(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let team_id = info.into_inner().0.into();
|
v3::teams::add_team_member(
|
||||||
|
req,
|
||||||
let mut transaction = pool.begin().await?;
|
info,
|
||||||
|
pool,
|
||||||
let current_user = get_user_from_headers(
|
web::Json(v3::teams::NewTeamMember {
|
||||||
&req,
|
user_id: new_member.user_id,
|
||||||
&**pool,
|
role: new_member.role.clone(),
|
||||||
&redis,
|
permissions: new_member.permissions,
|
||||||
&session_queue,
|
organization_permissions: new_member.organization_permissions,
|
||||||
Some(&[Scopes::PROJECT_WRITE]),
|
payouts_split: new_member.payouts_split,
|
||||||
|
ordering: new_member.ordering,
|
||||||
|
}),
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
let team_association = Team::get_association(team_id, &**pool)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?;
|
|
||||||
let member = TeamMember::get_from_user_id(team_id, current_user.id.into(), &**pool).await?;
|
|
||||||
match team_association {
|
|
||||||
// If team is associated with a project, check if they have permissions to invite users to that project
|
|
||||||
TeamAssociationId::Project(pid) => {
|
|
||||||
let organization =
|
|
||||||
Organization::get_associated_organization_project_id(pid, &**pool).await?;
|
|
||||||
let organization_team_member = if let Some(organization) = &organization {
|
|
||||||
TeamMember::get_from_user_id(organization.team_id, current_user.id.into(), &**pool)
|
|
||||||
.await?
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
let permissions = ProjectPermissions::get_permissions_by_role(
|
|
||||||
¤t_user.role,
|
|
||||||
&member,
|
|
||||||
&organization_team_member,
|
|
||||||
)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if !permissions.contains(ProjectPermissions::MANAGE_INVITES) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to invite users to this team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if !permissions.contains(new_member.permissions) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The new member has permissions that you don't have".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if new_member.organization_permissions.is_some() {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The organization permissions of a project team member cannot be set"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// If team is associated with an organization, check if they have permissions to invite users to that organization
|
|
||||||
TeamAssociationId::Organization(_) => {
|
|
||||||
let organization_permissions =
|
|
||||||
OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member)
|
|
||||||
.unwrap_or_default();
|
|
||||||
if !organization_permissions.contains(OrganizationPermissions::MANAGE_INVITES) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to invite users to this organization".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if !organization_permissions
|
|
||||||
.contains(new_member.organization_permissions.unwrap_or_default())
|
|
||||||
{
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The new member has organization permissions that you don't have".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if !organization_permissions
|
|
||||||
.contains(OrganizationPermissions::EDIT_MEMBER_DEFAULT_PERMISSIONS)
|
|
||||||
&& !new_member.permissions.is_empty()
|
|
||||||
{
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to give this user default project permissions. Ensure 'permissions' is set if it is not, and empty (0)."
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if new_member.role == crate::models::teams::OWNER_ROLE {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The `Owner` role is restricted to one person".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if new_member.payouts_split < Decimal::ZERO || new_member.payouts_split > Decimal::from(5000) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Payouts split must be between 0 and 5000!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let request =
|
|
||||||
TeamMember::get_from_user_id_pending(team_id, new_member.user_id.into(), &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(req) = request {
|
|
||||||
if req.accepted {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The user is already a member of that team".to_string(),
|
|
||||||
));
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"There is already a pending member request for this user".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
crate::database::models::User::get_id(new_member.user_id.into(), &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| ApiError::InvalidInput("An invalid User ID specified".to_string()))?;
|
|
||||||
|
|
||||||
let new_id = crate::database::models::ids::generate_team_member_id(&mut transaction).await?;
|
|
||||||
TeamMember {
|
|
||||||
id: new_id,
|
|
||||||
team_id,
|
|
||||||
user_id: new_member.user_id.into(),
|
|
||||||
role: new_member.role.clone(),
|
|
||||||
permissions: new_member.permissions,
|
|
||||||
organization_permissions: new_member.organization_permissions,
|
|
||||||
accepted: false,
|
|
||||||
payouts_split: new_member.payouts_split,
|
|
||||||
ordering: new_member.ordering,
|
|
||||||
}
|
|
||||||
.insert(&mut transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
match team_association {
|
|
||||||
TeamAssociationId::Project(pid) => {
|
|
||||||
NotificationBuilder {
|
|
||||||
body: NotificationBody::TeamInvite {
|
|
||||||
project_id: pid.into(),
|
|
||||||
team_id: team_id.into(),
|
|
||||||
invited_by: current_user.id,
|
|
||||||
role: new_member.role.clone(),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
.insert(new_member.user_id.into(), &mut transaction, &redis)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
TeamAssociationId::Organization(oid) => {
|
|
||||||
NotificationBuilder {
|
|
||||||
body: NotificationBody::OrganizationInvite {
|
|
||||||
organization_id: oid.into(),
|
|
||||||
team_id: team_id.into(),
|
|
||||||
invited_by: current_user.id,
|
|
||||||
role: new_member.role.clone(),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
.insert(new_member.user_id.into(), &mut transaction, &redis)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
TeamMember::clear_cache(team_id, &redis).await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Clone)]
|
#[derive(Serialize, Deserialize, Clone)]
|
||||||
@ -573,143 +162,21 @@ pub async fn edit_team_member(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let ids = info.into_inner();
|
v3::teams::edit_team_member(
|
||||||
let id = ids.0.into();
|
req,
|
||||||
let user_id = ids.1.into();
|
info,
|
||||||
|
pool,
|
||||||
let current_user = get_user_from_headers(
|
web::Json(v3::teams::EditTeamMember {
|
||||||
&req,
|
permissions: edit_member.permissions,
|
||||||
&**pool,
|
organization_permissions: edit_member.organization_permissions,
|
||||||
&redis,
|
role: edit_member.role.clone(),
|
||||||
&session_queue,
|
payouts_split: edit_member.payouts_split,
|
||||||
Some(&[Scopes::PROJECT_WRITE]),
|
ordering: edit_member.ordering,
|
||||||
|
}),
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
let team_association = Team::get_association(id, &**pool)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?;
|
|
||||||
let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool).await?;
|
|
||||||
let edit_member_db = TeamMember::get_from_user_id_pending(id, user_id, &**pool)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit members of this team".to_string(),
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
if &*edit_member_db.role == crate::models::teams::OWNER_ROLE
|
|
||||||
&& (edit_member.role.is_some() || edit_member.permissions.is_some())
|
|
||||||
{
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The owner's permission and role of a team cannot be edited".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
match team_association {
|
|
||||||
TeamAssociationId::Project(project_id) => {
|
|
||||||
let organization =
|
|
||||||
Organization::get_associated_organization_project_id(project_id, &**pool).await?;
|
|
||||||
let organization_team_member = if let Some(organization) = &organization {
|
|
||||||
TeamMember::get_from_user_id(organization.team_id, current_user.id.into(), &**pool)
|
|
||||||
.await?
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
let permissions = ProjectPermissions::get_permissions_by_role(
|
|
||||||
¤t_user.role,
|
|
||||||
&member.clone(),
|
|
||||||
&organization_team_member,
|
|
||||||
)
|
|
||||||
.unwrap_or_default();
|
|
||||||
if !permissions.contains(ProjectPermissions::EDIT_MEMBER) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit members of this team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(new_permissions) = edit_member.permissions {
|
|
||||||
if !permissions.contains(new_permissions) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The new permissions have permissions that you don't have".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if edit_member.organization_permissions.is_some() {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The organization permissions of a project team member cannot be edited"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
TeamAssociationId::Organization(_) => {
|
|
||||||
let organization_permissions =
|
|
||||||
OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if !organization_permissions.contains(OrganizationPermissions::EDIT_MEMBER) {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit members of this team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(new_permissions) = edit_member.organization_permissions {
|
|
||||||
if !organization_permissions.contains(new_permissions) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The new organization permissions have permissions that you don't have"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if edit_member.permissions.is_some()
|
|
||||||
&& !organization_permissions
|
|
||||||
.contains(OrganizationPermissions::EDIT_MEMBER_DEFAULT_PERMISSIONS)
|
|
||||||
{
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to give this user default project permissions."
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(payouts_split) = edit_member.payouts_split {
|
|
||||||
if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Payouts split must be between 0 and 5000!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if edit_member.role.as_deref() == Some(crate::models::teams::OWNER_ROLE) {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"The `Owner` role is restricted to one person".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
TeamMember::edit_team_member(
|
|
||||||
id,
|
|
||||||
user_id,
|
|
||||||
edit_member.permissions,
|
|
||||||
edit_member.organization_permissions,
|
|
||||||
edit_member.role.clone(),
|
|
||||||
None,
|
|
||||||
edit_member.payouts_split,
|
|
||||||
edit_member.ordering,
|
|
||||||
&mut transaction,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
TeamMember::clear_cache(id, &redis).await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@ -726,94 +193,17 @@ pub async fn transfer_ownership(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let id = info.into_inner().0;
|
v3::teams::transfer_ownership(
|
||||||
|
req,
|
||||||
let current_user = get_user_from_headers(
|
info,
|
||||||
&req,
|
pool,
|
||||||
&**pool,
|
web::Json(v3::teams::TransferOwnership {
|
||||||
&redis,
|
user_id: new_owner.user_id,
|
||||||
&session_queue,
|
}),
|
||||||
Some(&[Scopes::PROJECT_WRITE]),
|
redis,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
// Forbid transferring ownership of a project team that is owned by an organization
|
|
||||||
// These are owned by the organization owner, and must be removed from the organization first
|
|
||||||
let pid = Team::get_association(id.into(), &**pool).await?;
|
|
||||||
if let Some(TeamAssociationId::Project(pid)) = pid {
|
|
||||||
let result = Project::get_id(pid, &**pool, &redis).await?;
|
|
||||||
if let Some(project_item) = result {
|
|
||||||
if project_item.inner.organization_id.is_some() {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"You cannot transfer ownership of a project team that is owend by an organization"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !current_user.role.is_admin() {
|
|
||||||
let member = TeamMember::get_from_user_id(id.into(), current_user.id.into(), &**pool)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit members of this team".to_string(),
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if member.role != crate::models::teams::OWNER_ROLE {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit the ownership of this team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let new_member = TeamMember::get_from_user_id(id.into(), new_owner.user_id.into(), &**pool)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ApiError::InvalidInput("The new owner specified does not exist".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if !new_member.accepted {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"You can only transfer ownership to members who are currently in your team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
TeamMember::edit_team_member(
|
|
||||||
id.into(),
|
|
||||||
current_user.id.into(),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
Some(crate::models::teams::DEFAULT_ROLE.to_string()),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
&mut transaction,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
TeamMember::edit_team_member(
|
|
||||||
id.into(),
|
|
||||||
new_owner.user_id.into(),
|
|
||||||
Some(ProjectPermissions::all()),
|
|
||||||
Some(OrganizationPermissions::all()),
|
|
||||||
Some(crate::models::teams::OWNER_ROLE.to_string()),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
&mut transaction,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
TeamMember::clear_cache(id.into(), &redis).await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[delete("{id}/members/{user_id}")]
|
#[delete("{id}/members/{user_id}")]
|
||||||
@ -824,126 +214,5 @@ pub async fn remove_team_member(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let ids = info.into_inner();
|
v3::teams::remove_team_member(req, info, pool, redis, session_queue).await
|
||||||
let id = ids.0.into();
|
|
||||||
let user_id = ids.1.into();
|
|
||||||
|
|
||||||
let current_user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PROJECT_WRITE]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
|
|
||||||
let team_association = Team::get_association(id, &**pool)
|
|
||||||
.await?
|
|
||||||
.ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?;
|
|
||||||
let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool).await?;
|
|
||||||
|
|
||||||
let delete_member = TeamMember::get_from_user_id_pending(id, user_id, &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(delete_member) = delete_member {
|
|
||||||
if delete_member.role == crate::models::teams::OWNER_ROLE {
|
|
||||||
// The owner cannot be removed from a team
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"The owner can't be removed from a team".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
// Organization attached to a project this team is attached to
|
|
||||||
match team_association {
|
|
||||||
TeamAssociationId::Project(pid) => {
|
|
||||||
let organization =
|
|
||||||
Organization::get_associated_organization_project_id(pid, &**pool).await?;
|
|
||||||
let organization_team_member = if let Some(organization) = &organization {
|
|
||||||
TeamMember::get_from_user_id(
|
|
||||||
organization.team_id,
|
|
||||||
current_user.id.into(),
|
|
||||||
&**pool,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
let permissions = ProjectPermissions::get_permissions_by_role(
|
|
||||||
¤t_user.role,
|
|
||||||
&member,
|
|
||||||
&organization_team_member,
|
|
||||||
)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if delete_member.accepted {
|
|
||||||
// Members other than the owner can either leave the team, or be
|
|
||||||
// removed by a member with the REMOVE_MEMBER permission.
|
|
||||||
if Some(delete_member.user_id) == member.as_ref().map(|m| m.user_id)
|
|
||||||
|| permissions.contains(ProjectPermissions::REMOVE_MEMBER)
|
|
||||||
// true as if the permission exists, but the member does not, they are part of an org
|
|
||||||
{
|
|
||||||
TeamMember::delete(id, user_id, &mut transaction).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to remove a member from this team"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
} else if Some(delete_member.user_id) == member.as_ref().map(|m| m.user_id)
|
|
||||||
|| permissions.contains(ProjectPermissions::MANAGE_INVITES)
|
|
||||||
// true as if the permission exists, but the member does not, they are part of an org
|
|
||||||
{
|
|
||||||
// This is a pending invite rather than a member, so the
|
|
||||||
// user being invited or team members with the MANAGE_INVITES
|
|
||||||
// permission can remove it.
|
|
||||||
TeamMember::delete(id, user_id, &mut transaction).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to cancel a team invite".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
TeamAssociationId::Organization(_) => {
|
|
||||||
let organization_permissions =
|
|
||||||
OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member)
|
|
||||||
.unwrap_or_default();
|
|
||||||
// Organization teams requires a TeamMember, so we can 'unwrap'
|
|
||||||
if delete_member.accepted {
|
|
||||||
// Members other than the owner can either leave the team, or be
|
|
||||||
// removed by a member with the REMOVE_MEMBER permission.
|
|
||||||
if Some(delete_member.user_id) == member.map(|m| m.user_id)
|
|
||||||
|| organization_permissions.contains(OrganizationPermissions::REMOVE_MEMBER)
|
|
||||||
{
|
|
||||||
TeamMember::delete(id, user_id, &mut transaction).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to remove a member from this organization"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
} else if Some(delete_member.user_id) == member.map(|m| m.user_id)
|
|
||||||
|| organization_permissions.contains(OrganizationPermissions::MANAGE_INVITES)
|
|
||||||
{
|
|
||||||
// This is a pending invite rather than a member, so the
|
|
||||||
// user being invited or team members with the MANAGE_INVITES
|
|
||||||
// permission can remove it.
|
|
||||||
TeamMember::delete(id, user_id, &mut transaction).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to cancel an organization invite".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
TeamMember::clear_cache(id, &redis).await?;
|
|
||||||
User::clear_project_cache(&[delete_member.user_id], &redis).await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
@@ -1,23 +1,12 @@
 use std::sync::Arc;

-use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
-use crate::database;
-use crate::database::models::image_item;
-use crate::database::models::notification_item::NotificationBuilder;
-use crate::database::models::thread_item::ThreadMessageBuilder;
 use crate::database::redis::RedisPool;
 use crate::file_hosting::FileHost;
 use crate::models::ids::ThreadMessageId;
-use crate::models::images::{Image, ImageContext};
-use crate::models::notifications::NotificationBody;
-use crate::models::pats::Scopes;
-use crate::models::projects::ProjectStatus;
-use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType};
-use crate::models::users::User;
+use crate::models::threads::{MessageBody, ThreadId};
 use crate::queue::session::AuthQueue;
-use crate::routes::ApiError;
+use crate::routes::{v3, ApiError};
 use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
-use futures::TryStreamExt;
 use serde::Deserialize;
 use sqlx::PgPool;

@ -33,194 +22,6 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
|||||||
cfg.service(threads_get);
|
cfg.service(threads_get);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn is_authorized_thread(
|
|
||||||
thread: &database::models::Thread,
|
|
||||||
user: &User,
|
|
||||||
pool: &PgPool,
|
|
||||||
) -> Result<bool, ApiError> {
|
|
||||||
if user.role.is_mod() {
|
|
||||||
return Ok(true);
|
|
||||||
}
|
|
||||||
|
|
||||||
let user_id: database::models::UserId = user.id.into();
|
|
||||||
Ok(match thread.type_ {
|
|
||||||
ThreadType::Report => {
|
|
||||||
if let Some(report_id) = thread.report_id {
|
|
||||||
let report_exists = sqlx::query!(
|
|
||||||
"SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1 AND reporter = $2)",
|
|
||||||
report_id as database::models::ids::ReportId,
|
|
||||||
user_id as database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.fetch_one(pool)
|
|
||||||
.await?
|
|
||||||
.exists;
|
|
||||||
|
|
||||||
report_exists.unwrap_or(false)
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ThreadType::Project => {
|
|
||||||
if let Some(project_id) = thread.project_id {
|
|
||||||
let project_exists = sqlx::query!(
|
|
||||||
"SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 WHERE m.id = $1)",
|
|
||||||
project_id as database::models::ids::ProjectId,
|
|
||||||
user_id as database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.fetch_one(pool)
|
|
||||||
.await?
|
|
||||||
.exists;
|
|
||||||
|
|
||||||
project_exists.unwrap_or(false)
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ThreadType::DirectMessage => thread.members.contains(&user_id),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn filter_authorized_threads(
|
|
||||||
threads: Vec<database::models::Thread>,
|
|
||||||
user: &User,
|
|
||||||
pool: &web::Data<PgPool>,
|
|
||||||
redis: &RedisPool,
|
|
||||||
) -> Result<Vec<Thread>, ApiError> {
|
|
||||||
let user_id: database::models::UserId = user.id.into();
|
|
||||||
|
|
||||||
let mut return_threads = Vec::new();
|
|
||||||
let mut check_threads = Vec::new();
|
|
||||||
|
|
||||||
for thread in threads {
|
|
||||||
if user.role.is_mod()
|
|
||||||
|| (thread.type_ == ThreadType::DirectMessage && thread.members.contains(&user_id))
|
|
||||||
{
|
|
||||||
return_threads.push(thread);
|
|
||||||
} else {
|
|
||||||
check_threads.push(thread);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !check_threads.is_empty() {
|
|
||||||
let project_thread_ids = check_threads
|
|
||||||
.iter()
|
|
||||||
.filter(|x| x.type_ == ThreadType::Project)
|
|
||||||
.flat_map(|x| x.project_id.map(|x| x.0))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
if !project_thread_ids.is_empty() {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT m.id FROM mods m
|
|
||||||
INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2
|
|
||||||
WHERE m.id = ANY($1)
|
|
||||||
",
|
|
||||||
&*project_thread_ids,
|
|
||||||
user_id as database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.fetch_many(&***pool)
|
|
||||||
.try_for_each(|e| {
|
|
||||||
if let Some(row) = e.right() {
|
|
||||||
check_threads.retain(|x| {
|
|
||||||
let bool = x.project_id.map(|x| x.0) == Some(row.id);
|
|
||||||
|
|
||||||
if bool {
|
|
||||||
return_threads.push(x.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
!bool
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
futures::future::ready(Ok(()))
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let report_thread_ids = check_threads
|
|
||||||
.iter()
|
|
||||||
.filter(|x| x.type_ == ThreadType::Report)
|
|
||||||
.flat_map(|x| x.report_id.map(|x| x.0))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
if !report_thread_ids.is_empty() {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT id FROM reports
|
|
||||||
WHERE id = ANY($1) AND reporter = $2
|
|
||||||
",
|
|
||||||
&*report_thread_ids,
|
|
||||||
user_id as database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.fetch_many(&***pool)
|
|
||||||
.try_for_each(|e| {
|
|
||||||
if let Some(row) = e.right() {
|
|
||||||
check_threads.retain(|x| {
|
|
||||||
let bool = x.report_id.map(|x| x.0) == Some(row.id);
|
|
||||||
|
|
||||||
if bool {
|
|
||||||
return_threads.push(x.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
!bool
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
futures::future::ready(Ok(()))
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut user_ids = return_threads
|
|
||||||
.iter()
|
|
||||||
.flat_map(|x| x.members.clone())
|
|
||||||
.collect::<Vec<database::models::UserId>>();
|
|
||||||
user_ids.append(
|
|
||||||
&mut return_threads
|
|
||||||
.iter()
|
|
||||||
.flat_map(|x| {
|
|
||||||
x.messages
|
|
||||||
.iter()
|
|
||||||
.filter_map(|x| x.author_id)
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
})
|
|
||||||
.collect::<Vec<database::models::UserId>>(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let users: Vec<User> = database::models::User::get_many_ids(&user_ids, &***pool, redis)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(From::from)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let mut final_threads = Vec::new();
|
|
||||||
|
|
||||||
for thread in return_threads {
|
|
||||||
let mut authors = thread.members.clone();
|
|
||||||
|
|
||||||
authors.append(
|
|
||||||
&mut thread
|
|
||||||
.messages
|
|
||||||
.iter()
|
|
||||||
.filter_map(|x| x.author_id)
|
|
||||||
.collect::<Vec<_>>(),
|
|
||||||
);
|
|
||||||
|
|
||||||
final_threads.push(Thread::from(
|
|
||||||
thread,
|
|
||||||
users
|
|
||||||
.iter()
|
|
||||||
.filter(|x| authors.contains(&x.id.into()))
|
|
||||||
.cloned()
|
|
||||||
.collect(),
|
|
||||||
user,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(final_threads)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[get("{id}")]
|
#[get("{id}")]
|
||||||
pub async fn thread_get(
|
pub async fn thread_get(
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
@@ -229,42 +30,7 @@ pub async fn thread_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let string = info.into_inner().0.into();
-
-    let thread_data = database::models::Thread::get(string, &**pool).await?;
-
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::THREAD_READ]),
-    )
-    .await?
-    .1;
-
-    if let Some(mut data) = thread_data {
-        if is_authorized_thread(&data, &user, &pool).await? {
-            let authors = &mut data.members;
-
-            authors.append(
-                &mut data
-                    .messages
-                    .iter()
-                    .filter_map(|x| x.author_id)
-                    .collect::<Vec<_>>(),
-            );
-
-            let users: Vec<User> = database::models::User::get_many_ids(authors, &**pool, &redis)
-                .await?
-                .into_iter()
-                .map(From::from)
-                .collect();
-
-            return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
-        }
-    }
-    Ok(HttpResponse::NotFound().body(""))
+    v3::threads::thread_get(req, info, pool, redis, session_queue).await
 }

 #[derive(Deserialize)]
@@ -280,27 +46,14 @@ pub async fn threads_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::THREAD_READ]),
+    v3::threads::threads_get(
+        req,
+        web::Query(v3::threads::ThreadIds { ids: ids.ids }),
+        pool,
+        redis,
+        session_queue,
     )
-    .await?
-    .1;
-
-    let thread_ids: Vec<database::models::ids::ThreadId> =
-        serde_json::from_str::<Vec<ThreadId>>(&ids.ids)?
-            .into_iter()
-            .map(|x| x.into())
-            .collect();
-
-    let threads_data = database::models::Thread::get_many(&thread_ids, &**pool).await?;
-
-    let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?;
-
-    Ok(HttpResponse::Ok().json(threads))
+    .await
 }

 #[derive(Deserialize)]
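threads_get above (and teams_get and users_get elsewhere in this commit) shows the second recurring shape: the v2 handler keeps its own query extractor and rebuilds the equivalent v3 extractor by value before delegating, so the v3 handler never has to know it was reached through a v2 route. A minimal sketch of that re-wrapping, using simplified placeholder types rather than the crate's real `ThreadIds`/`TeamIds` structs:

use actix_web::{web, HttpResponse};
use serde::Deserialize;

#[derive(Deserialize)]
struct V2Ids {
    ids: String, // serialized id list, passed through untouched
}

mod v3_like {
    use actix_web::{web, HttpResponse};
    use serde::Deserialize;

    #[derive(Deserialize)]
    pub struct Ids {
        pub ids: String,
    }

    pub async fn list(ids: web::Query<Ids>) -> HttpResponse {
        HttpResponse::Ok().json(ids.ids.clone())
    }
}

async fn list_v2(ids: web::Query<V2Ids>) -> HttpResponse {
    // Re-wrap the already-extracted v2 query into the v3 extractor type by value.
    v3_like::list(web::Query(v3_like::Ids {
        ids: ids.into_inner().ids,
    }))
    .await
}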
@ -317,193 +70,18 @@ pub async fn thread_send_message(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
let new_message = new_message.into_inner();
|
||||||
&req,
|
v3::threads::thread_send_message(
|
||||||
&**pool,
|
req,
|
||||||
&redis,
|
info,
|
||||||
&session_queue,
|
pool,
|
||||||
Some(&[Scopes::THREAD_WRITE]),
|
web::Json(v3::threads::NewThreadMessage {
|
||||||
|
body: new_message.body,
|
||||||
|
}),
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
|
|
||||||
let string: database::models::ThreadId = info.into_inner().0.into();
|
|
||||||
|
|
||||||
if let MessageBody::Text {
|
|
||||||
body,
|
|
||||||
replying_to,
|
|
||||||
private,
|
|
||||||
..
|
|
||||||
} = &new_message.body
|
|
||||||
{
|
|
||||||
if body.len() > 65536 {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Input body is too long!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if *private && !user.role.is_mod() {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"You are not allowed to send private messages!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(replying_to) = replying_to {
|
|
||||||
let thread_message =
|
|
||||||
database::models::ThreadMessage::get((*replying_to).into(), &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(thread_message) = thread_message {
|
|
||||||
if thread_message.thread_id != string {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Message replied to is from another thread!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Message replied to does not exist!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"You may only send text messages through this route!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = database::models::Thread::get(string, &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(thread) = result {
|
|
||||||
if !is_authorized_thread(&thread, &user, &pool).await? {
|
|
||||||
return Ok(HttpResponse::NotFound().body(""));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
let id = ThreadMessageBuilder {
|
|
||||||
author_id: Some(user.id.into()),
|
|
||||||
body: new_message.body.clone(),
|
|
||||||
thread_id: thread.id,
|
|
||||||
}
|
|
||||||
.insert(&mut transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let mod_notif = if let Some(project_id) = thread.project_id {
|
|
||||||
let project = database::models::Project::get_id(project_id, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(project) = project {
|
|
||||||
if project.inner.status != ProjectStatus::Processing && user.role.is_mod() {
|
|
||||||
let members = database::models::TeamMember::get_from_team_full(
|
|
||||||
project.inner.team_id,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
NotificationBuilder {
|
|
||||||
body: NotificationBody::ModeratorMessage {
|
|
||||||
thread_id: thread.id.into(),
|
|
||||||
message_id: id.into(),
|
|
||||||
project_id: Some(project.inner.id.into()),
|
|
||||||
report_id: None,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
.insert_many(
|
|
||||||
members.into_iter().map(|x| x.user_id).collect(),
|
|
||||||
&mut transaction,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
!user.role.is_mod()
|
|
||||||
} else if let Some(report_id) = thread.report_id {
|
|
||||||
let report = database::models::report_item::Report::get(report_id, &**pool).await?;
|
|
||||||
|
|
||||||
if let Some(report) = report {
|
|
||||||
if report.closed && !user.role.is_mod() {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"You may not reply to a closed report".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if user.id != report.reporter.into() {
|
|
||||||
NotificationBuilder {
|
|
||||||
body: NotificationBody::ModeratorMessage {
|
|
||||||
thread_id: thread.id.into(),
|
|
||||||
message_id: id.into(),
|
|
||||||
project_id: None,
|
|
||||||
report_id: Some(report.id.into()),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
.insert(report.reporter, &mut transaction, &redis)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
!user.role.is_mod()
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
};
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE threads
|
|
||||||
SET show_in_mod_inbox = $1
|
|
||||||
WHERE id = $2
|
|
||||||
",
|
|
||||||
mod_notif,
|
|
||||||
thread.id.0,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if let MessageBody::Text {
|
|
||||||
associated_images, ..
|
|
||||||
} = &new_message.body
|
|
||||||
{
|
|
||||||
for image_id in associated_images {
|
|
||||||
if let Some(db_image) =
|
|
||||||
image_item::Image::get((*image_id).into(), &mut *transaction, &redis).await?
|
|
||||||
{
|
|
||||||
let image: Image = db_image.into();
|
|
||||||
if !matches!(image.context, ImageContext::ThreadMessage { .. })
|
|
||||||
|| image.context.inner_id().is_some()
|
|
||||||
{
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Image {} is not unused and in the 'thread_message' context",
|
|
||||||
image_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE uploaded_images
|
|
||||||
SET thread_message_id = $1
|
|
||||||
WHERE id = $2
|
|
||||||
",
|
|
||||||
thread.id.0,
|
|
||||||
image_id.0 as i64
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
image_item::Image::clear_cache(image.id.into(), &redis).await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Image {} does not exist",
|
|
||||||
image_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("inbox")]
|
#[get("inbox")]
|
||||||
@@ -513,30 +91,7 @@ pub async fn moderation_inbox(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = check_is_moderator_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::THREAD_READ]),
-    )
-    .await?;
-
-    let ids = sqlx::query!(
-        "
-        SELECT id
-        FROM threads
-        WHERE show_in_mod_inbox = TRUE
-        "
-    )
-    .fetch_many(&**pool)
-    .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ThreadId(m.id))) })
-    .try_collect::<Vec<database::models::ThreadId>>()
-    .await?;
-
-    let threads_data = database::models::Thread::get_many(&ids, &**pool).await?;
-    let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?;
-    Ok(HttpResponse::Ok().json(threads))
+    v3::threads::moderation_inbox(req, pool, redis, session_queue).await
 }

 #[post("{id}/read")]
@@ -547,32 +102,7 @@ pub async fn thread_read(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    check_is_moderator_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::THREAD_READ]),
-    )
-    .await?;
-
-    let id = info.into_inner().0;
-    let mut transaction = pool.begin().await?;
-
-    sqlx::query!(
-        "
-        UPDATE threads
-        SET show_in_mod_inbox = FALSE
-        WHERE id = $1
-        ",
-        id.0 as i64,
-    )
-    .execute(&mut *transaction)
-    .await?;
-
-    transaction.commit().await?;
-
-    Ok(HttpResponse::NoContent().body(""))
+    v3::threads::thread_read(req, info, pool, redis, session_queue).await
 }

 #[delete("{id}")]
@@ -584,45 +114,5 @@ pub async fn message_delete(
     session_queue: web::Data<AuthQueue>,
     file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::THREAD_WRITE]),
-    )
-    .await?
-    .1;
-
-    let result = database::models::ThreadMessage::get(info.into_inner().0.into(), &**pool).await?;
-
-    if let Some(thread) = result {
-        if !user.role.is_mod() && thread.author_id != Some(user.id.into()) {
-            return Err(ApiError::CustomAuthentication(
-                "You cannot delete this message!".to_string(),
-            ));
-        }
-
-        let mut transaction = pool.begin().await?;
-
-        let context = ImageContext::ThreadMessage {
-            thread_message_id: Some(thread.id.into()),
-        };
-        let images = database::Image::get_many_contexted(context, &mut transaction).await?;
-        let cdn_url = dotenvy::var("CDN_URL")?;
-        for image in images {
-            let name = image.url.split(&format!("{cdn_url}/")).nth(1);
-            if let Some(icon_path) = name {
-                file_host.delete_file_version("", icon_path).await?;
-            }
-            database::Image::remove(image.id, &mut transaction, &redis).await?;
-        }
-
-        database::models::ThreadMessage::remove_full(thread.id, &mut transaction).await?;
-        transaction.commit().await?;
-
-        Ok(HttpResponse::NoContent().body(""))
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::threads::message_delete(req, info, pool, redis, session_queue, file_host).await
 }
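thread_read, message_delete and moderation_inbox above reduce to the simplest reroute shape: when the v2 and v3 handlers accept identical extractors, the v2 body collapses to a single delegating call. A sketch of that pass-through wrapper, with placeholder types standing in for the real path and error types:

use actix_web::{web, HttpRequest, HttpResponse};
use sqlx::PgPool;

mod v3_like {
    use super::*;

    pub async fn thread_read(
        _req: HttpRequest,
        _info: web::Path<(String,)>,
        _pool: web::Data<PgPool>,
    ) -> Result<HttpResponse, actix_web::Error> {
        Ok(HttpResponse::NoContent().finish())
    }
}

// The v2 route keeps its path and signature; only the body changes.
pub async fn thread_read_v2(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
) -> Result<HttpResponse, actix_web::Error> {
    v3_like::thread_read(req, info, pool).await
}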
@@ -1,27 +1,17 @@
-use crate::auth::get_user_from_headers;
-use crate::database::models::User;
 use crate::database::redis::RedisPool;
 use crate::file_hosting::FileHost;
-use crate::models::collections::{Collection, CollectionStatus};
-use crate::models::notifications::Notification;
-use crate::models::pats::Scopes;
 use crate::models::projects::Project;
-use crate::models::users::{
-    Badges, Payout, PayoutStatus, RecipientStatus, Role, UserId, UserPayoutData,
-};
+use crate::models::users::{Badges, Role};
+use crate::models::v2::projects::LegacyProject;
 use crate::queue::payouts::PayoutsQueue;
 use crate::queue::session::AuthQueue;
-use crate::routes::ApiError;
-use crate::util::routes::read_from_payload;
-use crate::util::validate::validation_errors_to_string;
+use crate::routes::{v2_reroute, v3, ApiError};
 use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
 use lazy_static::lazy_static;
 use regex::Regex;
 use rust_decimal::Decimal;
 use serde::{Deserialize, Serialize};
-use serde_json::json;
 use sqlx::PgPool;
-use std::collections::HashMap;
 use std::sync::Arc;
 use tokio::sync::Mutex;
 use validator::Validate;
@ -54,24 +44,7 @@ pub async fn user_auth_get(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let (scopes, mut user) = get_user_from_headers(
|
v3::users::user_auth_get(req, pool, redis, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::USER_READ]),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if !scopes.contains(Scopes::USER_READ_EMAIL) {
|
|
||||||
user.email = None;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !scopes.contains(Scopes::PAYOUTS_READ) {
|
|
||||||
user.payout_data = None;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(user))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
@ -85,13 +58,7 @@ pub async fn users_get(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user_ids = serde_json::from_str::<Vec<String>>(&ids.ids)?;
|
v3::users::users_get(web::Query(v3::users::UserIds { ids: ids.ids }), pool, redis).await
|
||||||
|
|
||||||
let users_data = User::get_many(&user_ids, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
let users: Vec<crate::models::users::User> = users_data.into_iter().map(From::from).collect();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(users))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("{id}")]
|
#[get("{id}")]
|
||||||
@ -100,14 +67,7 @@ pub async fn user_get(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user_data = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
v3::users::user_get(info, pool, redis).await
|
||||||
|
|
||||||
if let Some(data) = user_data {
|
|
||||||
let response: crate::models::users::User = data.into();
|
|
||||||
Ok(HttpResponse::Ok().json(response))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("{user_id}/projects")]
|
#[get("{user_id}/projects")]
|
||||||
@ -118,39 +78,16 @@ pub async fn projects_list(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
let response =
|
||||||
&req,
|
v3::users::projects_list(req, info, pool.clone(), redis.clone(), session_queue).await?;
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PROJECT_READ]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
// Convert to V2 projects
|
||||||
|
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
Ok(project) => {
|
||||||
let user_id: UserId = id.into();
|
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
|
||||||
|
Ok(HttpResponse::Ok().json(legacy_projects))
|
||||||
let can_view_private = user
|
}
|
||||||
.map(|y| y.role.is_mod() || y.id == user_id)
|
Err(response) => Ok(response),
|
||||||
.unwrap_or(false);
|
|
||||||
|
|
||||||
let project_data = User::get_projects(id, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
let response: Vec<_> =
|
|
||||||
crate::database::Project::get_many_ids(&project_data, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| can_view_private || x.inner.status.is_searchable())
|
|
||||||
.map(Project::from)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(response))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -162,40 +99,7 @@ pub async fn collections_list(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::users::collections_list(req, info, pool, redis, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::COLLECTION_READ]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
|
||||||
let user_id: UserId = id.into();
|
|
||||||
|
|
||||||
let can_view_private = user
|
|
||||||
.map(|y| y.role.is_mod() || y.id == user_id)
|
|
||||||
.unwrap_or(false);
|
|
||||||
|
|
||||||
let project_data = User::get_collections(id, &**pool).await?;
|
|
||||||
|
|
||||||
let response: Vec<_> =
|
|
||||||
crate::database::models::Collection::get_many(&project_data, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| can_view_private || matches!(x.status, CollectionStatus::Listed))
|
|
||||||
.map(Collection::from)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(response))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("{user_id}/organizations")]
|
#[get("{user_id}/organizations")]
|
||||||
@ -206,79 +110,7 @@ pub async fn orgs_list(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::users::orgs_list(req, info, pool, redis, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PROJECT_READ]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
|
||||||
let org_data = User::get_organizations(id, &**pool).await?;
|
|
||||||
|
|
||||||
let organizations_data =
|
|
||||||
crate::database::models::organization_item::Organization::get_many_ids(
|
|
||||||
&org_data, &**pool, &redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let team_ids = organizations_data
|
|
||||||
.iter()
|
|
||||||
.map(|x| x.team_id)
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let teams_data = crate::database::models::TeamMember::get_from_team_full_many(
|
|
||||||
&team_ids, &**pool, &redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
let users = User::get_many_ids(
|
|
||||||
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let mut organizations = vec![];
|
|
||||||
let mut team_groups = HashMap::new();
|
|
||||||
for item in teams_data {
|
|
||||||
team_groups.entry(item.team_id).or_insert(vec![]).push(item);
|
|
||||||
}
|
|
||||||
|
|
||||||
for data in organizations_data {
|
|
||||||
let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]);
|
|
||||||
let logged_in = user
|
|
||||||
.as_ref()
|
|
||||||
.and_then(|user| {
|
|
||||||
members_data
|
|
||||||
.iter()
|
|
||||||
.find(|x| x.user_id == user.id.into() && x.accepted)
|
|
||||||
})
|
|
||||||
.is_some();
|
|
||||||
|
|
||||||
let team_members: Vec<_> = members_data
|
|
||||||
.into_iter()
|
|
||||||
.filter(|x| logged_in || x.accepted || id == x.user_id)
|
|
||||||
.flat_map(|data| {
|
|
||||||
users.iter().find(|x| x.id == data.user_id).map(|user| {
|
|
||||||
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let organization = crate::models::organizations::Organization::from(data, team_members);
|
|
||||||
organizations.push(organization);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(organizations))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
@ -316,137 +148,22 @@ pub async fn user_edit(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let (_scopes, user) = get_user_from_headers(
|
let new_user = new_user.into_inner();
|
||||||
&req,
|
v3::users::user_edit(
|
||||||
&**pool,
|
req,
|
||||||
&redis,
|
info,
|
||||||
&session_queue,
|
web::Json(v3::users::EditUser {
|
||||||
Some(&[Scopes::USER_WRITE]),
|
username: new_user.username,
|
||||||
|
name: new_user.name,
|
||||||
|
bio: new_user.bio,
|
||||||
|
role: new_user.role,
|
||||||
|
badges: new_user.badges,
|
||||||
|
}),
|
||||||
|
pool,
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?;
|
.await
|
||||||
|
|
||||||
new_user
|
|
||||||
.validate()
|
|
||||||
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
|
|
||||||
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(actual_user) = id_option {
|
|
||||||
let id = actual_user.id;
|
|
||||||
let user_id: UserId = id.into();
|
|
||||||
|
|
||||||
if user.id == user_id || user.role.is_mod() {
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
if let Some(username) = &new_user.username {
|
|
||||||
let existing_user_id_option = User::get(username, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if existing_user_id_option
|
|
||||||
.map(|x| UserId::from(x.id))
|
|
||||||
.map(|id| id == user.id)
|
|
||||||
.unwrap_or(true)
|
|
||||||
{
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE users
|
|
||||||
SET username = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
username,
|
|
||||||
id as crate::database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(format!(
|
|
||||||
"Username {username} is taken!"
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(name) = &new_user.name {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE users
|
|
||||||
SET name = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
name.as_deref(),
|
|
||||||
id as crate::database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(bio) = &new_user.bio {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE users
|
|
||||||
SET bio = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
bio.as_deref(),
|
|
||||||
id as crate::database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(role) = &new_user.role {
|
|
||||||
if !user.role.is_admin() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have the permissions to edit the role of this user!"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let role = role.to_string();
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE users
|
|
||||||
SET role = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
role,
|
|
||||||
id as crate::database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(badges) = &new_user.badges {
|
|
||||||
if !user.role.is_admin() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have the permissions to edit the badges of this user!"
|
|
||||||
.to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE users
|
|
||||||
SET badges = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
badges.bits() as i64,
|
|
||||||
id as crate::database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
User::clear_caches(&[(id, Some(actual_user.username))], &redis).await?;
|
|
||||||
transaction.commit().await?;
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to edit this user!".to_string(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
@ -463,75 +180,20 @@ pub async fn user_icon_edit(
|
|||||||
pool: web::Data<PgPool>,
|
pool: web::Data<PgPool>,
|
||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
mut payload: web::Payload,
|
payload: web::Payload,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) {
|
v3::users::user_icon_edit(
|
||||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
web::Query(v3::users::Extension { ext: ext.ext }),
|
||||||
let user = get_user_from_headers(
|
req,
|
||||||
&req,
|
info,
|
||||||
&**pool,
|
pool,
|
||||||
&redis,
|
redis,
|
||||||
&session_queue,
|
file_host,
|
||||||
Some(&[Scopes::USER_WRITE]),
|
payload,
|
||||||
)
|
session_queue,
|
||||||
.await?
|
)
|
||||||
.1;
|
.await
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(actual_user) = id_option {
|
|
||||||
if user.id != actual_user.id.into() && !user.role.is_mod() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You don't have permission to edit this user's icon.".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let icon_url = actual_user.avatar_url;
|
|
||||||
let user_id: UserId = actual_user.id.into();
|
|
||||||
|
|
||||||
if let Some(icon) = icon_url {
|
|
||||||
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
|
||||||
|
|
||||||
if let Some(icon_path) = name {
|
|
||||||
file_host.delete_file_version("", icon_path).await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let bytes =
|
|
||||||
read_from_payload(&mut payload, 2097152, "Icons must be smaller than 2MiB").await?;
|
|
||||||
|
|
||||||
let hash = sha1::Sha1::from(&bytes).hexdigest();
|
|
||||||
let upload_data = file_host
|
|
||||||
.upload_file(
|
|
||||||
content_type,
|
|
||||||
&format!("user/{}/{}.{}", user_id, hash, ext.ext),
|
|
||||||
bytes.freeze(),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE users
|
|
||||||
SET avatar_url = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
format!("{}/{}", cdn_url, upload_data.file_name),
|
|
||||||
actual_user.id as crate::database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.execute(&**pool)
|
|
||||||
.await?;
|
|
||||||
User::clear_caches(&[(actual_user.id, None)], &redis).await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Err(ApiError::InvalidInput(format!(
|
|
||||||
"Invalid format for user icon: {}",
|
|
||||||
ext.ext
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@ -553,44 +215,18 @@ pub async fn user_delete(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
let removal_type = removal_type.into_inner();
|
||||||
&req,
|
v3::users::user_delete(
|
||||||
&**pool,
|
req,
|
||||||
&redis,
|
info,
|
||||||
&session_queue,
|
pool,
|
||||||
Some(&[Scopes::USER_DELETE]),
|
web::Query(v3::users::RemovalType {
|
||||||
|
removal_type: removal_type.removal_type,
|
||||||
|
}),
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
|
||||||
if !user.role.is_admin() && user.id != id.into() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to delete this user!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
let result = User::remove(
|
|
||||||
id,
|
|
||||||
removal_type.removal_type == "full",
|
|
||||||
&mut transaction,
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
if result.is_some() {
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("{id}/follows")]
|
#[get("{id}/follows")]
|
||||||
@ -601,52 +237,7 @@ pub async fn user_follows(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::users::user_follows(req, info, pool, redis, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::USER_READ]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
|
||||||
if !user.role.is_admin() && user.id != id.into() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to see the projects this user follows!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
use futures::TryStreamExt;
|
|
||||||
|
|
||||||
let project_ids = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT mf.mod_id FROM mod_follows mf
|
|
||||||
WHERE mf.follower_id = $1
|
|
||||||
",
|
|
||||||
id as crate::database::models::ids::UserId,
|
|
||||||
)
|
|
||||||
.fetch_many(&**pool)
|
|
||||||
.try_filter_map(|e| async {
|
|
||||||
Ok(e.right()
|
|
||||||
.map(|m| crate::database::models::ProjectId(m.mod_id)))
|
|
||||||
})
|
|
||||||
.try_collect::<Vec<crate::database::models::ProjectId>>()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let projects: Vec<_> =
|
|
||||||
crate::database::Project::get_many_ids(&project_ids, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(Project::from)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(projects))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("{id}/notifications")]
|
#[get("{id}/notifications")]
|
||||||
@ -657,39 +248,7 @@ pub async fn user_notifications(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::users::user_notifications(req, info, pool, redis, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::NOTIFICATION_READ]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
|
||||||
if !user.role.is_admin() && user.id != id.into() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to see the notifications of this user!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut notifications: Vec<Notification> =
|
|
||||||
crate::database::models::notification_item::Notification::get_many_user(
|
|
||||||
id, &**pool, &redis,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.map(Into::into)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
notifications.sort_by(|a, b| b.created.cmp(&a.created));
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(notifications))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("{id}/payouts")]
|
#[get("{id}/payouts")]
|
||||||
@ -700,74 +259,7 @@ pub async fn user_payouts(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::users::user_payouts(req, info, pool, redis, session_queue).await
|
||||||
&req,
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
&session_queue,
|
|
||||||
Some(&[Scopes::PAYOUTS_READ]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
|
||||||
if !user.role.is_admin() && user.id != id.into() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to see the payouts of this user!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let (all_time, last_month, payouts) = futures::future::try_join3(
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT SUM(pv.amount) amount
|
|
||||||
FROM payouts_values pv
|
|
||||||
WHERE pv.user_id = $1
|
|
||||||
",
|
|
||||||
id as crate::database::models::UserId
|
|
||||||
)
|
|
||||||
.fetch_one(&**pool),
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT SUM(pv.amount) amount
|
|
||||||
FROM payouts_values pv
|
|
||||||
WHERE pv.user_id = $1 AND created > NOW() - '1 month'::interval
|
|
||||||
",
|
|
||||||
id as crate::database::models::UserId
|
|
||||||
)
|
|
||||||
.fetch_one(&**pool),
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT hp.created, hp.amount, hp.status
|
|
||||||
FROM historical_payouts hp
|
|
||||||
WHERE hp.user_id = $1
|
|
||||||
ORDER BY hp.created DESC
|
|
||||||
",
|
|
||||||
id as crate::database::models::UserId
|
|
||||||
)
|
|
||||||
.fetch_many(&**pool)
|
|
||||||
.try_filter_map(|e| async {
|
|
||||||
Ok(e.right().map(|row| Payout {
|
|
||||||
created: row.created,
|
|
||||||
amount: row.amount,
|
|
||||||
status: PayoutStatus::from_string(&row.status),
|
|
||||||
}))
|
|
||||||
})
|
|
||||||
.try_collect::<Vec<Payout>>(),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
use futures::TryStreamExt;
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(json!({
|
|
||||||
"all_time": all_time.amount,
|
|
||||||
"last_month": last_month.amount,
|
|
||||||
"payouts": payouts,
|
|
||||||
})))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@ -785,44 +277,18 @@ pub async fn user_payouts_fees(
|
|||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
payouts_queue: web::Data<Mutex<PayoutsQueue>>,
|
payouts_queue: web::Data<Mutex<PayoutsQueue>>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let user = get_user_from_headers(
|
v3::users::user_payouts_fees(
|
||||||
&req,
|
req,
|
||||||
&**pool,
|
info,
|
||||||
&redis,
|
web::Query(v3::users::FeeEstimateAmount {
|
||||||
&session_queue,
|
amount: amount.amount,
|
||||||
Some(&[Scopes::PAYOUTS_READ]),
|
}),
|
||||||
|
pool,
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
|
payouts_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
let actual_user = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(actual_user) = actual_user {
|
|
||||||
if !user.role.is_admin() && user.id != actual_user.id.into() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to request payouts of this user!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(UserPayoutData {
|
|
||||||
trolley_id: Some(trolley_id),
|
|
||||||
..
|
|
||||||
}) = user.payout_data
|
|
||||||
{
|
|
||||||
let payouts = payouts_queue
|
|
||||||
.lock()
|
|
||||||
.await
|
|
||||||
.get_estimated_fees(&trolley_id, amount.amount)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(payouts))
|
|
||||||
} else {
|
|
||||||
Err(ApiError::InvalidInput(
|
|
||||||
"You must set up your trolley account first!".to_string(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@ -840,87 +306,16 @@ pub async fn user_payouts_request(
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let mut payouts_queue = payouts_queue.lock().await;
|
v3::users::user_payouts_request(
|
||||||
|
req,
|
||||||
let user = get_user_from_headers(
|
info,
|
||||||
&req,
|
pool,
|
||||||
&**pool,
|
web::Json(v3::users::PayoutData {
|
||||||
&redis,
|
amount: data.amount,
|
||||||
&session_queue,
|
}),
|
||||||
Some(&[Scopes::PAYOUTS_WRITE]),
|
payouts_queue,
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
)
|
)
|
||||||
.await?
|
.await
|
||||||
.1;
|
|
||||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
|
||||||
|
|
||||||
if let Some(id) = id_option.map(|x| x.id) {
|
|
||||||
if !user.role.is_admin() && user.id != id.into() {
|
|
||||||
return Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to request payouts of this user!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(UserPayoutData {
|
|
||||||
trolley_id: Some(trolley_id),
|
|
||||||
trolley_status: Some(trolley_status),
|
|
||||||
balance,
|
|
||||||
..
|
|
||||||
}) = user.payout_data
|
|
||||||
{
|
|
||||||
if trolley_status == RecipientStatus::Active {
|
|
||||||
return if data.amount < balance {
|
|
||||||
let mut transaction = pool.begin().await?;
|
|
||||||
|
|
||||||
let (batch_id, payment_id) =
|
|
||||||
payouts_queue.send_payout(&trolley_id, data.amount).await?;
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
INSERT INTO historical_payouts (user_id, amount, status, batch_id, payment_id)
|
|
||||||
VALUES ($1, $2, $3, $4, $5)
|
|
||||||
",
|
|
||||||
id as crate::database::models::ids::UserId,
|
|
||||||
data.amount,
|
|
||||||
"processing",
|
|
||||||
batch_id,
|
|
||||||
payment_id,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE users
|
|
||||||
SET balance = balance - $1
|
|
||||||
WHERE id = $2
|
|
||||||
",
|
|
||||||
data.amount,
|
|
||||||
id as crate::database::models::ids::UserId
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
User::clear_caches(&[(id, None)], &redis).await?;
|
|
||||||
|
|
||||||
transaction.commit().await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Err(ApiError::InvalidInput(
|
|
||||||
"You do not have enough funds to make this payout!".to_string(),
|
|
||||||
))
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
return Err(ApiError::InvalidInput(
|
|
||||||
"Please complete payout information via the trolley dashboard!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(ApiError::InvalidInput(
|
|
||||||
"You are not enrolled in the payouts program yet!".to_string(),
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,37 +1,24 @@
|
|||||||
use super::project_creation::{CreateError, UploadedFile};
|
|
||||||
use crate::auth::get_user_from_headers;
|
|
||||||
use crate::database::models::notification_item::NotificationBuilder;
|
|
||||||
use crate::database::models::version_item::{
|
|
||||||
DependencyBuilder, VersionBuilder, VersionFileBuilder,
|
|
||||||
};
|
|
||||||
use crate::database::models::{self, image_item, Organization};
|
|
||||||
use crate::database::redis::RedisPool;
|
use crate::database::redis::RedisPool;
|
||||||
use crate::file_hosting::FileHost;
|
use crate::file_hosting::FileHost;
|
||||||
use crate::models::images::{Image, ImageContext, ImageId};
|
use crate::models::ids::ImageId;
|
||||||
use crate::models::notifications::NotificationBody;
|
|
||||||
use crate::models::pack::PackFileHash;
|
|
||||||
use crate::models::pats::Scopes;
|
|
||||||
use crate::models::projects::{
|
use crate::models::projects::{
|
||||||
Dependency, DependencyType, FileType, GameVersion, Loader, ProjectId, Version, VersionFile,
|
Dependency, FileType, Loader, ProjectId, Version, VersionId, VersionStatus, VersionType,
|
||||||
VersionId, VersionStatus, VersionType,
|
|
||||||
};
|
};
|
||||||
use crate::models::teams::ProjectPermissions;
|
use crate::models::v2::projects::LegacyVersion;
|
||||||
use crate::queue::session::AuthQueue;
|
use crate::queue::session::AuthQueue;
|
||||||
use crate::util::routes::read_from_field;
|
use crate::routes::v3::project_creation::CreateError;
|
||||||
use crate::util::validate::validation_errors_to_string;
|
use crate::routes::{v2_reroute, v3};
|
||||||
use crate::validate::{validate_file, ValidationResult};
|
use actix_multipart::Multipart;
|
||||||
use actix_multipart::{Field, Multipart};
|
|
||||||
use actix_web::web::Data;
|
use actix_web::web::Data;
|
||||||
use actix_web::{post, web, HttpRequest, HttpResponse};
|
use actix_web::{post, web, HttpRequest, HttpResponse};
|
||||||
use chrono::Utc;
|
|
||||||
use futures::stream::StreamExt;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::json;
|
||||||
use sqlx::postgres::PgPool;
|
use sqlx::postgres::PgPool;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
|
|
||||||
fn default_requested_status() -> VersionStatus {
|
pub fn default_requested_status() -> VersionStatus {
|
||||||
VersionStatus::Listed
|
VersionStatus::Listed
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -61,7 +48,7 @@ pub struct InitialVersionData {
|
|||||||
)]
|
)]
|
||||||
pub dependencies: Vec<Dependency>,
|
pub dependencies: Vec<Dependency>,
|
||||||
#[validate(length(min = 1))]
|
#[validate(length(min = 1))]
|
||||||
pub game_versions: Vec<GameVersion>,
|
pub game_versions: Vec<String>,
|
||||||
#[serde(alias = "version_type")]
|
#[serde(alias = "version_type")]
|
||||||
pub release_channel: VersionType,
|
pub release_channel: VersionType,
|
||||||
#[validate(length(min = 1))]
|
#[validate(length(min = 1))]
|
||||||
@ -91,420 +78,72 @@ struct InitialFileData {
|
|||||||
#[post("version")]
|
#[post("version")]
|
||||||
pub async fn version_create(
|
pub async fn version_create(
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
mut payload: Multipart,
|
payload: Multipart,
|
||||||
client: Data<PgPool>,
|
client: Data<PgPool>,
|
||||||
redis: Data<RedisPool>,
|
redis: Data<RedisPool>,
|
||||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
session_queue: Data<AuthQueue>,
|
session_queue: Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, CreateError> {
|
) -> Result<HttpResponse, CreateError> {
|
||||||
let mut transaction = client.begin().await?;
|
let payload = v2_reroute::alter_actix_multipart(
|
||||||
let mut uploaded_files = Vec::new();
|
payload,
|
||||||
|
req.headers().clone(),
|
||||||
|
|legacy_create: InitialVersionData| {
|
||||||
|
// Convert input data to V3 format
|
||||||
|
let mut fields = HashMap::new();
|
||||||
|
fields.insert(
|
||||||
|
"game_versions".to_string(),
|
||||||
|
json!(legacy_create.game_versions),
|
||||||
|
);
|
||||||
|
|
||||||
let result = version_create_inner(
|
// TODO: Some kind of handling here to ensure project type is fine.
|
||||||
req,
|
// We expect the version uploaded to be of loader type modpack, but there might not be a way to check here for that.
|
||||||
&mut payload,
|
// After all, theoretically, they could be creating a genuine 'fabric' mod, and modpack no longer carries information on whether its a mod or modpack,
|
||||||
&mut transaction,
|
// as those are out to the versions.
|
||||||
&redis,
|
|
||||||
&***file_host,
|
|
||||||
&mut uploaded_files,
|
|
||||||
&client,
|
|
||||||
&session_queue,
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
if result.is_err() {
|
// Ideally this would, if the project 'should' be a modpack:
|
||||||
let undo_result =
|
// - change the loaders to mrpack only
|
||||||
super::project_creation::undo_uploads(&***file_host, &uploaded_files).await;
|
// - add loader fields to the project for the corresponding loaders
|
||||||
let rollback_result = transaction.rollback().await;
|
|
||||||
|
|
||||||
undo_result?;
|
Ok(v3::version_creation::InitialVersionData {
|
||||||
if let Err(e) = rollback_result {
|
project_id: legacy_create.project_id,
|
||||||
return Err(e.into());
|
file_parts: legacy_create.file_parts,
|
||||||
}
|
version_number: legacy_create.version_number,
|
||||||
} else {
|
version_title: legacy_create.version_title,
|
||||||
transaction.commit().await?;
|
version_body: legacy_create.version_body,
|
||||||
}
|
dependencies: legacy_create.dependencies,
|
||||||
|
release_channel: legacy_create.release_channel,
|
||||||
result
|
loaders: legacy_create.loaders,
|
||||||
}
|
featured: legacy_create.featured,
|
||||||
|
primary_file: legacy_create.primary_file,
|
||||||
#[allow(clippy::too_many_arguments)]
|
status: legacy_create.status,
|
||||||
async fn version_create_inner(
|
file_types: legacy_create.file_types,
|
||||||
req: HttpRequest,
|
uploaded_images: legacy_create.uploaded_images,
|
||||||
payload: &mut Multipart,
|
ordering: legacy_create.ordering,
|
||||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
fields,
|
||||||
redis: &RedisPool,
|
|
||||||
file_host: &dyn FileHost,
|
|
||||||
uploaded_files: &mut Vec<UploadedFile>,
|
|
||||||
pool: &PgPool,
|
|
||||||
session_queue: &AuthQueue,
|
|
||||||
) -> Result<HttpResponse, CreateError> {
|
|
||||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
|
||||||
|
|
||||||
let mut initial_version_data = None;
|
|
||||||
let mut version_builder = None;
|
|
||||||
|
|
||||||
let all_game_versions =
|
|
||||||
models::categories::GameVersion::list(&mut **transaction, redis).await?;
|
|
||||||
let all_loaders = models::categories::Loader::list(&mut **transaction, redis).await?;
|
|
||||||
|
|
||||||
let user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
pool,
|
|
||||||
redis,
|
|
||||||
session_queue,
|
|
||||||
Some(&[Scopes::VERSION_CREATE]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
|
|
||||||
let mut error = None;
|
|
||||||
while let Some(item) = payload.next().await {
|
|
||||||
let mut field: Field = item?;
|
|
||||||
|
|
||||||
if error.is_some() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = async {
|
|
||||||
let content_disposition = field.content_disposition().clone();
|
|
||||||
let name = content_disposition.get_name().ok_or_else(|| {
|
|
||||||
CreateError::MissingValueError("Missing content name".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if name == "data" {
|
|
||||||
let mut data = Vec::new();
|
|
||||||
while let Some(chunk) = field.next().await {
|
|
||||||
data.extend_from_slice(&chunk?);
|
|
||||||
}
|
|
||||||
|
|
||||||
let version_create_data: InitialVersionData = serde_json::from_slice(&data)?;
|
|
||||||
initial_version_data = Some(version_create_data);
|
|
||||||
let version_create_data = initial_version_data.as_ref().unwrap();
|
|
||||||
if version_create_data.project_id.is_none() {
|
|
||||||
return Err(CreateError::MissingValueError(
|
|
||||||
"Missing project id".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
version_create_data.validate().map_err(|err| {
|
|
||||||
CreateError::ValidationError(validation_errors_to_string(err, None))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if !version_create_data.status.can_be_requested() {
|
|
||||||
return Err(CreateError::InvalidInput(
|
|
||||||
"Status specified cannot be requested".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let project_id: models::ProjectId = version_create_data.project_id.unwrap().into();
|
|
||||||
|
|
||||||
// Ensure that the project this version is being added to exists
|
|
||||||
let results = sqlx::query!(
|
|
||||||
"SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)",
|
|
||||||
project_id as models::ProjectId
|
|
||||||
)
|
|
||||||
.fetch_one(&mut **transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if !results.exists.unwrap_or(false) {
|
|
||||||
return Err(CreateError::InvalidInput(
|
|
||||||
"An invalid project id was supplied".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check that the user creating this version is a team member
|
|
||||||
// of the project the version is being added to.
|
|
||||||
let team_member = models::TeamMember::get_from_user_id_project(
|
|
||||||
project_id,
|
|
||||||
user.id.into(),
|
|
||||||
&mut **transaction,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Get organization attached, if exists, and the member project permissions
|
|
||||||
let organization = models::Organization::get_associated_organization_project_id(
|
|
||||||
project_id,
|
|
||||||
&mut **transaction,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let organization_team_member = if let Some(organization) = &organization {
|
|
||||||
models::TeamMember::get_from_user_id(
|
|
||||||
organization.team_id,
|
|
||||||
user.id.into(),
|
|
||||||
&mut **transaction,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let permissions = ProjectPermissions::get_permissions_by_role(
|
|
||||||
&user.role,
|
|
||||||
&team_member,
|
|
||||||
&organization_team_member,
|
|
||||||
)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if !permissions.contains(ProjectPermissions::UPLOAD_VERSION) {
|
|
||||||
return Err(CreateError::CustomAuthenticationError(
|
|
||||||
"You don't have permission to upload this version!".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let version_id: VersionId = models::generate_version_id(transaction).await?.into();
|
|
||||||
|
|
||||||
let project_type = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT name FROM project_types pt
|
|
||||||
INNER JOIN mods ON mods.project_type = pt.id
|
|
||||||
WHERE mods.id = $1
|
|
||||||
",
|
|
||||||
project_id as models::ProjectId,
|
|
||||||
)
|
|
||||||
.fetch_one(&mut **transaction)
|
|
||||||
.await?
|
|
||||||
.name;
|
|
||||||
|
|
||||||
let game_versions = version_create_data
|
|
||||||
.game_versions
|
|
||||||
.iter()
|
|
||||||
.map(|x| {
|
|
||||||
all_game_versions
|
|
||||||
.iter()
|
|
||||||
.find(|y| y.version == x.0)
|
|
||||||
.ok_or_else(|| CreateError::InvalidGameVersion(x.0.clone()))
|
|
||||||
.map(|y| y.id)
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<models::GameVersionId>, CreateError>>()?;
|
|
||||||
|
|
||||||
let loaders = version_create_data
|
|
||||||
.loaders
|
|
||||||
.iter()
|
|
||||||
.map(|x| {
|
|
||||||
all_loaders
|
|
||||||
.iter()
|
|
||||||
.find(|y| {
|
|
||||||
y.loader == x.0 && y.supported_project_types.contains(&project_type)
|
|
||||||
})
|
|
||||||
.ok_or_else(|| CreateError::InvalidLoader(x.0.clone()))
|
|
||||||
.map(|y| y.id)
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<models::LoaderId>, CreateError>>()?;
|
|
||||||
|
|
||||||
let dependencies = version_create_data
|
|
||||||
.dependencies
|
|
||||||
.iter()
|
|
||||||
.map(|d| models::version_item::DependencyBuilder {
|
|
||||||
version_id: d.version_id.map(|x| x.into()),
|
|
||||||
project_id: d.project_id.map(|x| x.into()),
|
|
||||||
dependency_type: d.dependency_type.to_string(),
|
|
||||||
file_name: None,
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
version_builder = Some(VersionBuilder {
|
|
||||||
version_id: version_id.into(),
|
|
||||||
project_id,
|
|
||||||
author_id: user.id.into(),
|
|
||||||
name: version_create_data.version_title.clone(),
|
|
||||||
version_number: version_create_data.version_number.clone(),
|
|
||||||
changelog: version_create_data.version_body.clone().unwrap_or_default(),
|
|
||||||
files: Vec::new(),
|
|
||||||
dependencies,
|
|
||||||
game_versions,
|
|
||||||
loaders,
|
|
||||||
version_type: version_create_data.release_channel.to_string(),
|
|
||||||
featured: version_create_data.featured,
|
|
||||||
status: version_create_data.status,
|
|
||||||
requested_status: None,
|
|
||||||
ordering: version_create_data.ordering,
|
|
||||||
});
|
|
||||||
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
let version = version_builder.as_mut().ok_or_else(|| {
|
|
||||||
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let project_type = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT name FROM project_types pt
|
|
||||||
INNER JOIN mods ON mods.project_type = pt.id
|
|
||||||
WHERE mods.id = $1
|
|
||||||
",
|
|
||||||
version.project_id as models::ProjectId,
|
|
||||||
)
|
|
||||||
.fetch_one(&mut **transaction)
|
|
||||||
.await?
|
|
||||||
.name;
|
|
||||||
|
|
||||||
let version_data = initial_version_data
|
|
||||||
.clone()
|
|
||||||
.ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?;
|
|
||||||
|
|
||||||
upload_file(
|
|
||||||
&mut field,
|
|
||||||
file_host,
|
|
||||||
version_data.file_parts.len(),
|
|
||||||
uploaded_files,
|
|
||||||
&mut version.files,
|
|
||||||
&mut version.dependencies,
|
|
||||||
&cdn_url,
|
|
||||||
&content_disposition,
|
|
||||||
version.project_id.into(),
|
|
||||||
version.version_id.into(),
|
|
||||||
&project_type,
|
|
||||||
version_data.loaders,
|
|
||||||
version_data.game_versions,
|
|
||||||
all_game_versions.clone(),
|
|
||||||
version_data.primary_file.is_some(),
|
|
||||||
version_data.primary_file.as_deref() == Some(name),
|
|
||||||
version_data.file_types.get(name).copied().flatten(),
|
|
||||||
transaction,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
.await;
|
|
||||||
|
|
||||||
if result.is_err() {
|
|
||||||
error = result.err();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(error) = error {
|
|
||||||
return Err(error);
|
|
||||||
}
|
|
||||||
|
|
||||||
let version_data = initial_version_data
|
|
||||||
.ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?;
|
|
||||||
let builder = version_builder
|
|
||||||
.ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?;
|
|
||||||
|
|
||||||
if builder.files.is_empty() {
|
|
||||||
return Err(CreateError::InvalidInput(
|
|
||||||
"Versions must have at least one file uploaded to them".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
use futures::stream::TryStreamExt;
|
|
||||||
|
|
||||||
let users = sqlx::query!(
|
|
||||||
"
|
|
||||||
SELECT follower_id FROM mod_follows
|
|
||||||
WHERE mod_id = $1
|
|
||||||
",
|
|
||||||
builder.project_id as crate::database::models::ids::ProjectId
|
|
||||||
)
|
|
||||||
.fetch_many(&mut **transaction)
|
|
||||||
.try_filter_map(|e| async { Ok(e.right().map(|m| models::ids::UserId(m.follower_id))) })
|
|
||||||
.try_collect::<Vec<models::ids::UserId>>()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let project_id: ProjectId = builder.project_id.into();
|
|
||||||
let version_id: VersionId = builder.version_id.into();
|
|
||||||
|
|
||||||
NotificationBuilder {
|
|
||||||
body: NotificationBody::ProjectUpdate {
|
|
||||||
project_id,
|
|
||||||
version_id,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
.insert_many(users, transaction, redis)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let response = Version {
|
|
||||||
id: builder.version_id.into(),
|
|
||||||
project_id: builder.project_id.into(),
|
|
||||||
author_id: user.id,
|
|
||||||
featured: builder.featured,
|
|
||||||
name: builder.name.clone(),
|
|
||||||
version_number: builder.version_number.clone(),
|
|
||||||
changelog: builder.changelog.clone(),
|
|
||||||
changelog_url: None,
|
|
||||||
date_published: Utc::now(),
|
|
||||||
downloads: 0,
|
|
||||||
version_type: version_data.release_channel,
|
|
||||||
status: builder.status,
|
|
||||||
requested_status: builder.requested_status,
|
|
||||||
ordering: builder.ordering,
|
|
||||||
files: builder
|
|
||||||
.files
|
|
||||||
.iter()
|
|
||||||
.map(|file| VersionFile {
|
|
||||||
hashes: file
|
|
||||||
.hashes
|
|
||||||
.iter()
|
|
||||||
.map(|hash| {
|
|
||||||
(
|
|
||||||
hash.algorithm.clone(),
|
|
||||||
// This is a hack since the hashes are currently stored as ASCII
|
|
||||||
// in the database, but represented here as a Vec<u8>. At some
|
|
||||||
// point we need to change the hash to be the real bytes in the
|
|
||||||
// database and add more processing here.
|
|
||||||
String::from_utf8(hash.hash.clone()).unwrap(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
url: file.url.clone(),
|
|
||||||
filename: file.filename.clone(),
|
|
||||||
primary: file.primary,
|
|
||||||
size: file.size,
|
|
||||||
file_type: file.file_type,
|
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>(),
|
},
|
||||||
dependencies: version_data.dependencies,
|
)
|
||||||
game_versions: version_data.game_versions,
|
.await?;
|
||||||
loaders: version_data.loaders,
|
|
||||||
};
|
|
||||||
|
|
||||||
let project_id = builder.project_id;
|
// Call V3 project creation
|
||||||
builder.insert(transaction).await?;
|
let response = v3::version_creation::version_create(
|
||||||
|
req,
|
||||||
|
payload,
|
||||||
|
client.clone(),
|
||||||
|
redis.clone(),
|
||||||
|
file_host,
|
||||||
|
session_queue,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
for image_id in version_data.uploaded_images {
|
// Convert response to V2 format
|
||||||
if let Some(db_image) =
|
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||||
image_item::Image::get(image_id.into(), &mut **transaction, redis).await?
|
Ok(version) => {
|
||||||
{
|
let v2_version = LegacyVersion::from(version);
|
||||||
let image: Image = db_image.into();
|
Ok(HttpResponse::Ok().json(v2_version))
|
||||||
if !matches!(image.context, ImageContext::Report { .. })
|
|
||||||
|| image.context.inner_id().is_some()
|
|
||||||
{
|
|
||||||
return Err(CreateError::InvalidInput(format!(
|
|
||||||
"Image {} is not unused and in the 'version' context",
|
|
||||||
image_id
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE uploaded_images
|
|
||||||
SET version_id = $1
|
|
||||||
WHERE id = $2
|
|
||||||
",
|
|
||||||
version_id.0 as i64,
|
|
||||||
image_id.0 as i64
|
|
||||||
)
|
|
||||||
.execute(&mut **transaction)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
image_item::Image::clear_cache(image.id.into(), redis).await?;
|
|
||||||
} else {
|
|
||||||
return Err(CreateError::InvalidInput(format!(
|
|
||||||
"Image {} does not exist",
|
|
||||||
image_id
|
|
||||||
)));
|
|
||||||
}
|
}
|
||||||
|
Err(response) => Ok(response),
|
||||||
}
|
}
|
||||||
|
|
||||||
models::Project::update_game_versions(project_id, transaction).await?;
|
|
||||||
models::Project::update_loaders(project_id, transaction).await?;
|
|
||||||
models::Project::clear_cache(project_id, None, Some(true), redis).await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(response))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// under /api/v1/version/{version_id}
|
// under /api/v1/version/{version_id}
|
||||||
@ -512,452 +151,21 @@ async fn version_create_inner(
|
|||||||
pub async fn upload_file_to_version(
|
pub async fn upload_file_to_version(
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
url_data: web::Path<(VersionId,)>,
|
url_data: web::Path<(VersionId,)>,
|
||||||
mut payload: Multipart,
|
payload: Multipart,
|
||||||
client: Data<PgPool>,
|
client: Data<PgPool>,
|
||||||
redis: Data<RedisPool>,
|
redis: Data<RedisPool>,
|
||||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, CreateError> {
|
) -> Result<HttpResponse, CreateError> {
|
||||||
let mut transaction = client.begin().await?;
|
let response = v3::version_creation::upload_file_to_version(
|
||||||
let mut uploaded_files = Vec::new();
|
|
||||||
|
|
||||||
let version_id = models::VersionId::from(url_data.into_inner().0);
|
|
||||||
|
|
||||||
let result = upload_file_to_version_inner(
|
|
||||||
req,
|
req,
|
||||||
&mut payload,
|
url_data,
|
||||||
client,
|
payload,
|
||||||
&mut transaction,
|
client.clone(),
|
||||||
redis,
|
redis.clone(),
|
||||||
&***file_host,
|
file_host,
|
||||||
&mut uploaded_files,
|
|
||||||
version_id,
|
|
||||||
&session_queue,
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
if result.is_err() {
|
|
||||||
let undo_result =
|
|
||||||
super::project_creation::undo_uploads(&***file_host, &uploaded_files).await;
|
|
||||||
let rollback_result = transaction.rollback().await;
|
|
||||||
|
|
||||||
undo_result?;
|
|
||||||
if let Err(e) = rollback_result {
|
|
||||||
return Err(e.into());
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
transaction.commit().await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
async fn upload_file_to_version_inner(
|
|
||||||
req: HttpRequest,
|
|
||||||
payload: &mut Multipart,
|
|
||||||
client: Data<PgPool>,
|
|
||||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
|
||||||
redis: Data<RedisPool>,
|
|
||||||
file_host: &dyn FileHost,
|
|
||||||
uploaded_files: &mut Vec<UploadedFile>,
|
|
||||||
version_id: models::VersionId,
|
|
||||||
session_queue: &AuthQueue,
|
|
||||||
) -> Result<HttpResponse, CreateError> {
|
|
||||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
|
||||||
|
|
||||||
let mut initial_file_data: Option<InitialFileData> = None;
|
|
||||||
let mut file_builders: Vec<VersionFileBuilder> = Vec::new();
|
|
||||||
|
|
||||||
let user = get_user_from_headers(
|
|
||||||
&req,
|
|
||||||
&**client,
|
|
||||||
&redis,
|
|
||||||
session_queue,
|
session_queue,
|
||||||
Some(&[Scopes::VERSION_WRITE]),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.1;
|
|
||||||
|
|
||||||
let result = models::Version::get(version_id, &**client, &redis).await?;
|
|
||||||
|
|
||||||
let version = match result {
|
|
||||||
Some(v) => v,
|
|
||||||
None => {
|
|
||||||
return Err(CreateError::InvalidInput(
|
|
||||||
"An invalid version id was supplied".to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if !user.role.is_admin() {
|
|
||||||
let team_member = models::TeamMember::get_from_user_id_project(
|
|
||||||
version.inner.project_id,
|
|
||||||
user.id.into(),
|
|
||||||
&mut **transaction,
|
|
||||||
)
|
|
        .await?;

        let organization = Organization::get_associated_organization_project_id(
            version.inner.project_id,
            &**client,
        )
        .await?;

        let organization_team_member = if let Some(organization) = &organization {
            models::TeamMember::get_from_user_id(
                organization.team_id,
                user.id.into(),
                &mut **transaction,
            )
            .await?
        } else {
            None
        };

        let permissions = ProjectPermissions::get_permissions_by_role(
            &user.role,
            &team_member,
            &organization_team_member,
        )
        .unwrap_or_default();

        if !permissions.contains(ProjectPermissions::UPLOAD_VERSION) {
            return Err(CreateError::CustomAuthenticationError(
                "You don't have permission to upload files to this version!".to_string(),
            ));
        }
    }

    let project_id = ProjectId(version.inner.project_id.0 as u64);

    let project_type = sqlx::query!(
        "
        SELECT name FROM project_types pt
        INNER JOIN mods ON mods.project_type = pt.id
        WHERE mods.id = $1
        ",
        version.inner.project_id as models::ProjectId,
    )
    .fetch_one(&mut **transaction)
    .await?
    .name;

    let all_game_versions =
        models::categories::GameVersion::list(&mut **transaction, &redis).await?;

    let mut error = None;
    while let Some(item) = payload.next().await {
        let mut field: Field = item?;

        if error.is_some() {
            continue;
        }

        let result = async {
            let content_disposition = field.content_disposition().clone();
            let name = content_disposition.get_name().ok_or_else(|| {
                CreateError::MissingValueError("Missing content name".to_string())
            })?;

            if name == "data" {
                let mut data = Vec::new();
                while let Some(chunk) = field.next().await {
                    data.extend_from_slice(&chunk?);
                }
                let file_data: InitialFileData = serde_json::from_slice(&data)?;

                initial_file_data = Some(file_data);
                return Ok(());
            }

            let file_data = initial_file_data.as_ref().ok_or_else(|| {
                CreateError::InvalidInput(String::from("`data` field must come before file fields"))
            })?;

            let mut dependencies = version
                .dependencies
                .iter()
                .map(|x| DependencyBuilder {
                    project_id: x.project_id,
                    version_id: x.version_id,
                    file_name: x.file_name.clone(),
                    dependency_type: x.dependency_type.clone(),
                })
                .collect();

            upload_file(
                &mut field,
                file_host,
                0,
                uploaded_files,
                &mut file_builders,
                &mut dependencies,
                &cdn_url,
                &content_disposition,
                project_id,
                version_id.into(),
                &project_type,
                version.loaders.clone().into_iter().map(Loader).collect(),
                version
                    .game_versions
                    .clone()
                    .into_iter()
                    .map(GameVersion)
                    .collect(),
                all_game_versions.clone(),
                true,
                false,
                file_data.file_types.get(name).copied().flatten(),
                transaction,
            )
            .await?;

            Ok(())
        }
        .await;

        if result.is_err() {
            error = result.err();
        }
    }

    if let Some(error) = error {
        return Err(error);
    }

    if file_builders.is_empty() {
        return Err(CreateError::InvalidInput(
            "At least one file must be specified".to_string(),
        ));
    } else {
        VersionFileBuilder::insert_many(file_builders, version_id, transaction).await?;
    }

    // Clear version cache
    models::Version::clear_cache(&version, &redis).await?;

    Ok(HttpResponse::NoContent().body(""))
}

// This function is used for adding a file to a version, uploading the initial
// files for a version, and for uploading the initial version files for a project
#[allow(clippy::too_many_arguments)]
pub async fn upload_file(
    field: &mut Field,
    file_host: &dyn FileHost,
    total_files_len: usize,
    uploaded_files: &mut Vec<UploadedFile>,
    version_files: &mut Vec<VersionFileBuilder>,
    dependencies: &mut Vec<DependencyBuilder>,
    cdn_url: &str,
    content_disposition: &actix_web::http::header::ContentDisposition,
    project_id: ProjectId,
    version_id: VersionId,
    project_type: &str,
    loaders: Vec<Loader>,
    game_versions: Vec<GameVersion>,
    all_game_versions: Vec<models::categories::GameVersion>,
    ignore_primary: bool,
    force_primary: bool,
    file_type: Option<FileType>,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), CreateError> {
    let (file_name, file_extension) = get_name_ext(content_disposition)?;

    if file_name.contains('/') {
        return Err(CreateError::InvalidInput(
            "File names must not contain slashes!".to_string(),
        ));
    }

    let content_type = crate::util::ext::project_file_type(file_extension)
        .ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?;

    let data = read_from_field(
        field, 500 * (1 << 20),
        "Project file exceeds the maximum of 500MiB. Contact a moderator or admin to request permission to upload larger files."
    ).await?;

    let hash = sha1::Sha1::from(&data).hexdigest();
    let exists = sqlx::query!(
        "
        SELECT EXISTS(SELECT 1 FROM hashes h
        INNER JOIN files f ON f.id = h.file_id
        INNER JOIN versions v ON v.id = f.version_id
        WHERE h.algorithm = $2 AND h.hash = $1 AND v.mod_id != $3)
        ",
        hash.as_bytes(),
        "sha1",
        project_id.0 as i64
    )
    .fetch_one(&mut **transaction)
    .await?
    .exists
    .unwrap_or(false);

    if exists {
        return Err(CreateError::InvalidInput(
            "Duplicate files are not allowed to be uploaded to Modrinth!".to_string(),
        ));
    }

    let validation_result = validate_file(
        data.clone().into(),
        file_extension.to_string(),
        project_type.to_string(),
        loaders.clone(),
        game_versions.clone(),
        all_game_versions.clone(),
        file_type,
    )
    .await?;

    if let ValidationResult::PassWithPackDataAndFiles {
        ref format,
        ref files,
    } = validation_result
    {
        if dependencies.is_empty() {
            let hashes: Vec<Vec<u8>> = format
                .files
                .iter()
                .filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
                .map(|x| x.as_bytes().to_vec())
                .collect();

            let res = sqlx::query!(
                "
                SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
                INNER JOIN files f on h.file_id = f.id
                INNER JOIN versions v on f.version_id = v.id
                WHERE h.algorithm = 'sha1' AND h.hash = ANY($1)
                ",
                &*hashes
            )
            .fetch_all(&mut **transaction)
            .await?;

            for file in &format.files {
                if let Some(dep) = res.iter().find(|x| {
                    Some(&*x.hash) == file.hashes.get(&PackFileHash::Sha1).map(|x| x.as_bytes())
                }) {
                    dependencies.push(DependencyBuilder {
                        project_id: Some(models::ProjectId(dep.project_id)),
                        version_id: Some(models::VersionId(dep.version_id)),
                        file_name: None,
                        dependency_type: DependencyType::Embedded.to_string(),
                    });
                } else if let Some(first_download) = file.downloads.first() {
                    dependencies.push(DependencyBuilder {
                        project_id: None,
                        version_id: None,
                        file_name: Some(
                            first_download
                                .rsplit('/')
                                .next()
                                .unwrap_or(first_download)
                                .to_string(),
                        ),
                        dependency_type: DependencyType::Embedded.to_string(),
                    });
                }
            }

            for file in files {
                if !file.is_empty() {
                    dependencies.push(DependencyBuilder {
                        project_id: None,
                        version_id: None,
                        file_name: Some(file.to_string()),
                        dependency_type: DependencyType::Embedded.to_string(),
                    });
                }
            }
        }
    }

    let data = data.freeze();

    let primary = (validation_result.is_passed()
        && version_files.iter().all(|x| !x.primary)
        && !ignore_primary)
        || force_primary
        || total_files_len == 1;

    let file_path_encode = format!(
        "data/{}/versions/{}/{}",
        project_id,
        version_id,
        urlencoding::encode(file_name)
    );
    let file_path = format!("data/{}/versions/{}/{}", project_id, version_id, &file_name);

    let upload_data = file_host
        .upload_file(content_type, &file_path, data)
        .await?;

    uploaded_files.push(UploadedFile {
        file_id: upload_data.file_id,
        file_name: file_path,
    });

    let sha1_bytes = upload_data.content_sha1.into_bytes();
    let sha512_bytes = upload_data.content_sha512.into_bytes();

    if version_files.iter().any(|x| {
        x.hashes
            .iter()
            .any(|y| y.hash == sha1_bytes || y.hash == sha512_bytes)
    }) {
        return Err(CreateError::InvalidInput(
            "Duplicate files are not allowed to be uploaded to Modrinth!".to_string(),
        ));
    }

    if let ValidationResult::Warning(msg) = validation_result {
        if primary {
            return Err(CreateError::InvalidInput(msg.to_string()));
        }
    }

    version_files.push(VersionFileBuilder {
        filename: file_name.to_string(),
        url: format!("{cdn_url}/{file_path_encode}"),
        hashes: vec![
            models::version_item::HashBuilder {
                algorithm: "sha1".to_string(),
                // This is an invalid cast - the database expects the hash's
                // bytes, but this is the string version.
                hash: sha1_bytes,
            },
            models::version_item::HashBuilder {
                algorithm: "sha512".to_string(),
                // This is an invalid cast - the database expects the hash's
                // bytes, but this is the string version.
                hash: sha512_bytes,
            },
        ],
        primary,
        size: upload_data.content_length,
        file_type,
    });

    Ok(())
}

pub fn get_name_ext(
    content_disposition: &actix_web::http::header::ContentDisposition,
) -> Result<(&str, &str), CreateError> {
    let file_name = content_disposition
        .get_filename()
        .ok_or_else(|| CreateError::MissingValueError("Missing content file name".to_string()))?;
    let file_extension = if let Some(last_period) = file_name.rfind('.') {
        file_name.get((last_period + 1)..).unwrap_or("")
    } else {
        return Err(CreateError::MissingValueError(
            "Missing content file extension".to_string(),
        ));
    };
    Ok((file_name, file_extension))
}
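// Illustrative sketch (not part of this commit): the extension-splitting rule that
// get_name_ext applies to an uploaded file name, shown as a standalone function so it
// can be run in isolation. The name `split_name_ext` and the `main` harness below are
// assumptions for illustration only, not the shipped API.
fn split_name_ext(file_name: &str) -> Option<(&str, &str)> {
    // Everything after the final '.' is treated as the extension; a name with no '.'
    // is rejected (the real code returns CreateError::MissingValueError in that case).
    let last_period = file_name.rfind('.')?;
    Some((file_name, file_name.get(last_period + 1..).unwrap_or("")))
}

fn main() {
    assert_eq!(split_name_ext("my-mod.jar"), Some(("my-mod.jar", "jar")));
    assert_eq!(split_name_ext("README"), None);
    println!("extension splitting behaves as sketched");
}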
@ -1,17 +1,11 @@
 use super::ApiError;
-use crate::auth::{
-    filter_authorized_projects, filter_authorized_versions, get_user_from_headers,
-    is_authorized_version,
-};
 use crate::database::redis::RedisPool;
-use crate::models::ids::VersionId;
-use crate::models::pats::Scopes;
-use crate::models::projects::VersionType;
-use crate::models::teams::ProjectPermissions;
+use crate::models::projects::{Project, Version, VersionType};
+use crate::models::v2::projects::{LegacyProject, LegacyVersion};
 use crate::queue::session::AuthQueue;
-use crate::{database, models};
+use crate::routes::v3::version_file::{default_algorithm, HashQuery};
+use crate::routes::{v2_reroute, v3};
 use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
-use itertools::Itertools;
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
 use std::collections::HashMap;
@ -34,17 +28,6 @@ pub fn config(cfg: &mut web::ServiceConfig) {
     );
 }
 
-#[derive(Serialize, Deserialize)]
-pub struct HashQuery {
-    #[serde(default = "default_algorithm")]
-    pub algorithm: String,
-    pub version_id: Option<VersionId>,
-}
-
-fn default_algorithm() -> String {
-    "sha1".into()
-}
-
 // under /api/v1/version_file/{hash}
 #[get("{version_id}")]
 pub async fn get_version_from_hash(
@ -55,46 +38,20 @@ pub async fn get_version_from_hash(
     hash_query: web::Query<HashQuery>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-    let hash = info.into_inner().0.to_lowercase();
-    let file = database::models::Version::get_file_from_hash(
-        hash_query.algorithm.clone(),
-        hash,
-        hash_query.version_id.map(|x| x.into()),
-        &**pool,
-        &redis,
-    )
-    .await?;
-    if let Some(file) = file {
-        let version = database::models::Version::get(file.version_id, &**pool, &redis).await?;
-        if let Some(version) = version {
-            if !is_authorized_version(&version.inner, &user_option, &pool).await? {
-                return Ok(HttpResponse::NotFound().body(""));
-            }
-
-            Ok(HttpResponse::Ok().json(models::projects::Version::from(version)))
-        } else {
-            Ok(HttpResponse::NotFound().body(""))
-        }
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    let response =
+        v3::version_file::get_version_from_hash(req, info, pool, redis, hash_query, session_queue)
+            .await;
+
+    // Convert response to V2 format
+    match v2_reroute::extract_ok_json::<Version>(response?).await {
+        Ok(version) => {
+            let v2_version = LegacyVersion::from(version);
+            Ok(HttpResponse::Ok().json(v2_version))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
-#[derive(Serialize, Deserialize)]
-pub struct DownloadRedirect {
-    pub url: String,
-}
-
 // under /api/v1/version_file/{hash}/download
 #[get("{version_id}/download")]
 pub async fn download_version(
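// Illustrative sketch (not part of this commit): the v2 -> v3 reroute pattern used by
// the handlers in this file, reduced to plain types so it can run standalone.
// `extract_ok`, `V3Version`, and `LegacyVersion` below are stand-ins for
// v2_reroute::extract_ok_json and the real models; only the control flow mirrors
// the shipped code.
struct V3Version { name: String }
struct LegacyVersion { name: String }

impl From<V3Version> for LegacyVersion {
    fn from(v: V3Version) -> Self {
        LegacyVersion { name: v.name }
    }
}

// Either a decoded OK body, or the original response passed through untouched.
fn extract_ok(resp: Result<V3Version, String>) -> Result<V3Version, String> {
    resp
}

fn handle(resp: Result<V3Version, String>) -> Result<String, String> {
    match extract_ok(resp) {
        // 200: convert the v3 body into its legacy (v2) shape before returning it.
        Ok(v3) => Ok(LegacyVersion::from(v3).name),
        // Anything else (404, error JSON, ...) is forwarded as-is.
        Err(other) => Ok(other),
    }
}

fn main() {
    assert_eq!(handle(Ok(V3Version { name: "1.0.0".into() })), Ok("1.0.0".into()));
    assert_eq!(handle(Err("not found".into())), Ok("not found".into()));
}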
@ -105,44 +62,7 @@ pub async fn download_version(
     hash_query: web::Query<HashQuery>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    let hash = info.into_inner().0.to_lowercase();
-    let file = database::models::Version::get_file_from_hash(
-        hash_query.algorithm.clone(),
-        hash,
-        hash_query.version_id.map(|x| x.into()),
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    if let Some(file) = file {
-        let version = database::models::Version::get(file.version_id, &**pool, &redis).await?;
-
-        if let Some(version) = version {
-            if !is_authorized_version(&version.inner, &user_option, &pool).await? {
-                return Ok(HttpResponse::NotFound().body(""));
-            }
-
-            Ok(HttpResponse::TemporaryRedirect()
-                .append_header(("Location", &*file.url))
-                .json(DownloadRedirect { url: file.url }))
-        } else {
-            Ok(HttpResponse::NotFound().body(""))
-        }
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::version_file::download_version(req, info, pool, redis, hash_query, session_queue).await
 }
 
 // under /api/v1/version_file/{hash}
@ -155,113 +75,10 @@ pub async fn delete_file(
     hash_query: web::Query<HashQuery>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_WRITE]),
-    )
-    .await?
-    .1;
-
-    let hash = info.into_inner().0.to_lowercase();
-
-    let file = database::models::Version::get_file_from_hash(
-        hash_query.algorithm.clone(),
-        hash,
-        hash_query.version_id.map(|x| x.into()),
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    if let Some(row) = file {
-        if !user.role.is_admin() {
-            let team_member = database::models::TeamMember::get_from_user_id_version(
-                row.version_id,
-                user.id.into(),
-                &**pool,
-            )
-            .await
-            .map_err(ApiError::Database)?;
-
-            let organization =
-                database::models::Organization::get_associated_organization_project_id(
-                    row.project_id,
-                    &**pool,
-                )
-                .await
-                .map_err(ApiError::Database)?;
-
-            let organization_team_member = if let Some(organization) = &organization {
-                database::models::TeamMember::get_from_user_id_organization(
-                    organization.id,
-                    user.id.into(),
-                    &**pool,
-                )
-                .await
-                .map_err(ApiError::Database)?
-            } else {
-                None
-            };
-
-            let permissions = ProjectPermissions::get_permissions_by_role(
-                &user.role,
-                &team_member,
-                &organization_team_member,
-            )
-            .unwrap_or_default();
-
-            if !permissions.contains(ProjectPermissions::DELETE_VERSION) {
-                return Err(ApiError::CustomAuthentication(
-                    "You don't have permission to delete this file!".to_string(),
-                ));
-            }
-        }
-
-        let version = database::models::Version::get(row.version_id, &**pool, &redis).await?;
-        if let Some(version) = version {
-            if version.files.len() < 2 {
-                return Err(ApiError::InvalidInput(
-                    "Versions must have at least one file uploaded to them".to_string(),
-                ));
-            }
-
-            database::models::Version::clear_cache(&version, &redis).await?;
-        }
-
-        let mut transaction = pool.begin().await?;
-
-        sqlx::query!(
-            "
-            DELETE FROM hashes
-            WHERE file_id = $1
-            ",
-            row.id.0
-        )
-        .execute(&mut *transaction)
-        .await?;
-
-        sqlx::query!(
-            "
-            DELETE FROM files
-            WHERE files.id = $1
-            ",
-            row.id.0,
-        )
-        .execute(&mut *transaction)
-        .await?;
-
-        transaction.commit().await?;
-
-        Ok(HttpResponse::NoContent().body(""))
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::version_file::delete_file(req, info, pool, redis, hash_query, session_queue).await
 }
 
-#[derive(Deserialize)]
+#[derive(Serialize, Deserialize)]
 pub struct UpdateData {
     pub loaders: Option<Vec<String>>,
     pub game_versions: Option<Vec<String>>,
@ -278,65 +95,40 @@ pub async fn get_update_from_hash(
     update_data: web::Json<UpdateData>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-    let hash = info.into_inner().0.to_lowercase();
-
-    if let Some(file) = database::models::Version::get_file_from_hash(
-        hash_query.algorithm.clone(),
-        hash,
-        hash_query.version_id.map(|x| x.into()),
-        &**pool,
-        &redis,
-    )
-    .await?
-    {
-        if let Some(project) =
-            database::models::Project::get_id(file.project_id, &**pool, &redis).await?
-        {
-            let mut versions =
-                database::models::Version::get_many(&project.versions, &**pool, &redis)
-                    .await?
-                    .into_iter()
-                    .filter(|x| {
-                        let mut bool = true;
-
-                        if let Some(version_types) = &update_data.version_types {
-                            bool &= version_types
-                                .iter()
-                                .any(|y| y.as_str() == x.inner.version_type);
-                        }
-                        if let Some(loaders) = &update_data.loaders {
-                            bool &= x.loaders.iter().any(|y| loaders.contains(y));
-                        }
-                        if let Some(game_versions) = &update_data.game_versions {
-                            bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
-                        }
-
-                        bool
-                    })
-                    .sorted()
-                    .collect::<Vec<_>>();
-
-            if let Some(first) = versions.pop() {
-                if !is_authorized_version(&first.inner, &user_option, &pool).await? {
-                    return Ok(HttpResponse::NotFound().body(""));
-                }
-
-                return Ok(HttpResponse::Ok().json(models::projects::Version::from(first)));
-            }
-        }
-    }
-
-    Ok(HttpResponse::NotFound().body(""))
+    let update_data = update_data.into_inner();
+    let mut loader_fields = HashMap::new();
+    let mut game_versions = vec![];
+    for gv in update_data.game_versions.into_iter().flatten() {
+        game_versions.push(serde_json::json!(gv.clone()));
+    }
+    if !game_versions.is_empty() {
+        loader_fields.insert("game_versions".to_string(), game_versions);
+    }
+    let update_data = v3::version_file::UpdateData {
+        loaders: update_data.loaders.clone(),
+        version_types: update_data.version_types.clone(),
+        loader_fields: Some(loader_fields),
+    };
+
+    let response = v3::version_file::get_update_from_hash(
+        req,
+        info,
+        pool,
+        redis,
+        hash_query,
+        web::Json(update_data),
+        session_queue,
+    )
+    .await?;
+
+    // Convert response to V2 format
+    match v2_reroute::extract_ok_json::<Version>(response).await {
+        Ok(version) => {
+            let v2_version = LegacyVersion::from(version);
+            Ok(HttpResponse::Ok().json(v2_version))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
 // Requests above with multiple versions below
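// Illustrative sketch (not part of this commit): how the v2 `game_versions` filter is
// folded into the v3 `loader_fields` map, as done in get_update_from_hash above. The
// helper name `to_loader_fields` and the sample values are assumptions; only the map
// layout mirrors the code.
use std::collections::HashMap;

fn to_loader_fields(game_versions: Option<Vec<String>>) -> HashMap<String, Vec<serde_json::Value>> {
    let mut loader_fields = HashMap::new();
    let mut values = vec![];
    for gv in game_versions.into_iter().flatten() {
        values.push(serde_json::json!(gv));
    }
    // Only insert the key when the caller actually filtered on game versions.
    if !values.is_empty() {
        loader_fields.insert("game_versions".to_string(), values);
    }
    loader_fields
}

fn main() {
    let fields = to_loader_fields(Some(vec!["1.20.1".into(), "1.20.2".into()]));
    assert_eq!(fields["game_versions"].len(), 2);
    assert!(to_loader_fields(None).is_empty());
}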
@ -356,44 +148,34 @@ pub async fn get_versions_from_hashes(
     file_data: web::Json<FileHashes>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    let files = database::models::Version::get_files_from_hash(
-        file_data.algorithm.clone(),
-        &file_data.hashes,
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    let version_ids = files.iter().map(|x| x.version_id).collect::<Vec<_>>();
-    let versions_data = filter_authorized_versions(
-        database::models::Version::get_many(&version_ids, &**pool, &redis).await?,
-        &user_option,
-        &pool,
-    )
-    .await?;
-
-    let mut response = HashMap::new();
-
-    for version in versions_data {
-        for file in files.iter().filter(|x| x.version_id == version.id.into()) {
-            if let Some(hash) = file.hashes.get(&file_data.algorithm) {
-                response.insert(hash.clone(), version.clone());
-            }
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(response))
+    let file_data = file_data.into_inner();
+    let file_data = v3::version_file::FileHashes {
+        algorithm: file_data.algorithm,
+        hashes: file_data.hashes,
+    };
+    let response = v3::version_file::get_versions_from_hashes(
+        req,
+        pool,
+        redis,
+        web::Json(file_data),
+        session_queue,
+    )
+    .await?;
+
+    // Convert to V2
+    match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
+        Ok(versions) => {
+            let v2_versions = versions
+                .into_iter()
+                .map(|(hash, version)| {
+                    let v2_version = LegacyVersion::from(version);
+                    (hash, v2_version)
+                })
+                .collect::<HashMap<_, _>>();
+            Ok(HttpResponse::Ok().json(v2_versions))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
 #[post("project")]
@ -404,45 +186,46 @@ pub async fn get_projects_from_hashes(
     file_data: web::Json<FileHashes>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    let files = database::models::Version::get_files_from_hash(
-        file_data.algorithm.clone(),
-        &file_data.hashes,
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    let project_ids = files.iter().map(|x| x.project_id).collect::<Vec<_>>();
-
-    let projects_data = filter_authorized_projects(
-        database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?,
-        &user_option,
-        &pool,
-    )
-    .await?;
-
-    let mut response = HashMap::new();
-
-    for project in projects_data {
-        for file in files.iter().filter(|x| x.project_id == project.id.into()) {
-            if let Some(hash) = file.hashes.get(&file_data.algorithm) {
-                response.insert(hash.clone(), project.clone());
-            }
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(response))
+    let file_data = file_data.into_inner();
+    let file_data = v3::version_file::FileHashes {
+        algorithm: file_data.algorithm,
+        hashes: file_data.hashes,
+    };
+    let response = v3::version_file::get_projects_from_hashes(
+        req,
+        pool.clone(),
+        redis.clone(),
+        web::Json(file_data),
+        session_queue,
+    )
+    .await?;
+
+    // Convert to V2
+    match v2_reroute::extract_ok_json::<HashMap<String, Project>>(response).await {
+        Ok(projects_hashes) => {
+            let hash_to_project_id = projects_hashes
+                .iter()
+                .map(|(hash, project)| {
+                    let project_id = project.id;
+                    (hash.clone(), project_id)
+                })
+                .collect::<HashMap<_, _>>();
+            let legacy_projects =
+                LegacyProject::from_many(projects_hashes.into_values().collect(), &**pool, &redis)
+                    .await?;
+            let legacy_projects_hashes = hash_to_project_id
+                .into_iter()
+                .filter_map(|(hash, project_id)| {
+                    let legacy_project =
+                        legacy_projects.iter().find(|x| x.id == project_id)?.clone();
+                    Some((hash, legacy_project))
+                })
+                .collect::<HashMap<_, _>>();
+
+            Ok(HttpResponse::Ok().json(legacy_projects_hashes))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
 #[derive(Deserialize)]
@ -463,85 +246,44 @@ pub async fn update_files(
     update_data: web::Json<ManyUpdateData>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    let files = database::models::Version::get_files_from_hash(
-        update_data.algorithm.clone(),
-        &update_data.hashes,
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    let projects = database::models::Project::get_many_ids(
-        &files.iter().map(|x| x.project_id).collect::<Vec<_>>(),
-        &**pool,
-        &redis,
-    )
-    .await?;
-    let all_versions = database::models::Version::get_many(
-        &projects
-            .iter()
-            .flat_map(|x| x.versions.clone())
-            .collect::<Vec<_>>(),
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    let mut response = HashMap::new();
-
-    for project in projects {
-        for file in files.iter().filter(|x| x.project_id == project.inner.id) {
-            let version = all_versions
-                .iter()
-                .filter(|x| x.inner.project_id == file.project_id)
-                .filter(|x| {
-                    let mut bool = true;
-
-                    if let Some(version_types) = &update_data.version_types {
-                        bool &= version_types
-                            .iter()
-                            .any(|y| y.as_str() == x.inner.version_type);
-                    }
-                    if let Some(loaders) = &update_data.loaders {
-                        bool &= x.loaders.iter().any(|y| loaders.contains(y));
-                    }
-                    if let Some(game_versions) = &update_data.game_versions {
-                        bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
-                    }
-
-                    bool
-                })
-                .sorted()
-                .next();
-
-            if let Some(version) = version {
-                if is_authorized_version(&version.inner, &user_option, &pool).await? {
-                    if let Some(hash) = file.hashes.get(&update_data.algorithm) {
-                        response.insert(
-                            hash.clone(),
-                            models::projects::Version::from(version.clone()),
-                        );
-                    }
-                }
-            }
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(response))
+    let update_data = update_data.into_inner();
+    let mut loader_fields = HashMap::new();
+    let mut game_versions = vec![];
+    for gv in update_data.game_versions.into_iter().flatten() {
+        game_versions.push(serde_json::json!(gv.clone()));
+    }
+    if !game_versions.is_empty() {
+        loader_fields.insert("game_versions".to_string(), game_versions);
+    }
+    let update_data = v3::version_file::ManyUpdateData {
+        loaders: update_data.loaders.clone(),
+        version_types: update_data.version_types.clone(),
+        loader_fields: Some(loader_fields),
+        algorithm: update_data.algorithm,
+        hashes: update_data.hashes,
+    };
+
+    let response =
+        v3::version_file::update_files(req, pool, redis, web::Json(update_data), session_queue)
+            .await?;
+
+    // Convert response to V2 format
+    match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
+        Ok(returned_versions) => {
+            let v3_versions = returned_versions
+                .into_iter()
+                .map(|(hash, version)| {
+                    let v2_version = LegacyVersion::from(version);
+                    (hash, v2_version)
+                })
+                .collect::<HashMap<_, _>>();
+            Ok(HttpResponse::Ok().json(v3_versions))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
-#[derive(Deserialize)]
+#[derive(Serialize, Deserialize)]
 pub struct FileUpdateData {
     pub hash: String,
     pub loaders: Option<Vec<String>>,
@ -564,86 +306,52 @@ pub async fn update_individual_files(
     update_data: web::Json<ManyFileUpdateData>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    let files = database::models::Version::get_files_from_hash(
-        update_data.algorithm.clone(),
-        &update_data
-            .hashes
-            .iter()
-            .map(|x| x.hash.clone())
-            .collect::<Vec<_>>(),
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    let projects = database::models::Project::get_many_ids(
-        &files.iter().map(|x| x.project_id).collect::<Vec<_>>(),
-        &**pool,
-        &redis,
-    )
-    .await?;
-    let all_versions = database::models::Version::get_many(
-        &projects
-            .iter()
-            .flat_map(|x| x.versions.clone())
-            .collect::<Vec<_>>(),
-        &**pool,
-        &redis,
-    )
-    .await?;
-
-    let mut response = HashMap::new();
-
-    for project in projects {
-        for file in files.iter().filter(|x| x.project_id == project.inner.id) {
-            if let Some(hash) = file.hashes.get(&update_data.algorithm) {
-                if let Some(query_file) = update_data.hashes.iter().find(|x| &x.hash == hash) {
-                    let version = all_versions
-                        .iter()
-                        .filter(|x| x.inner.project_id == file.project_id)
-                        .filter(|x| {
-                            let mut bool = true;
-
-                            if let Some(version_types) = &query_file.version_types {
-                                bool &= version_types
-                                    .iter()
-                                    .any(|y| y.as_str() == x.inner.version_type);
-                            }
-                            if let Some(loaders) = &query_file.loaders {
-                                bool &= x.loaders.iter().any(|y| loaders.contains(y));
-                            }
-                            if let Some(game_versions) = &query_file.game_versions {
-                                bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
-                            }
-
-                            bool
-                        })
-                        .sorted()
-                        .next();
-
-                    if let Some(version) = version {
-                        if is_authorized_version(&version.inner, &user_option, &pool).await? {
-                            response.insert(
-                                hash.clone(),
-                                models::projects::Version::from(version.clone()),
-                            );
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    Ok(HttpResponse::Ok().json(response))
+    let update_data = update_data.into_inner();
+    let update_data = v3::version_file::ManyFileUpdateData {
+        algorithm: update_data.algorithm,
+        hashes: update_data
+            .hashes
+            .into_iter()
+            .map(|x| {
+                let mut loader_fields = HashMap::new();
+                let mut game_versions = vec![];
+                for gv in x.game_versions.into_iter().flatten() {
+                    game_versions.push(serde_json::json!(gv.clone()));
+                }
+                if !game_versions.is_empty() {
+                    loader_fields.insert("game_versions".to_string(), game_versions);
+                }
+                v3::version_file::FileUpdateData {
+                    hash: x.hash.clone(),
+                    loaders: x.loaders.clone(),
+                    loader_fields: Some(loader_fields),
+                    version_types: x.version_types,
+                }
+            })
+            .collect(),
+    };
+
+    let response = v3::version_file::update_individual_files(
+        req,
+        pool,
+        redis,
+        web::Json(update_data),
+        session_queue,
+    )
+    .await?;
+
+    // Convert response to V2 format
+    match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
+        Ok(returned_versions) => {
+            let v3_versions = returned_versions
+                .into_iter()
+                .map(|(hash, version)| {
+                    let v2_version = LegacyVersion::from(version);
+                    (hash, v2_version)
+                })
+                .collect::<HashMap<_, _>>();
+            Ok(HttpResponse::Ok().json(v3_versions))
+        }
+        Err(response) => Ok(response),
+    }
 }
@ -1,20 +1,13 @@
+use std::collections::HashMap;
+
 use super::ApiError;
-use crate::auth::{
-    filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version,
-};
-use crate::database;
-use crate::database::models::version_item::{DependencyBuilder, LoaderVersion, VersionVersion};
-use crate::database::models::{image_item, Organization};
 use crate::database::redis::RedisPool;
 use crate::models;
-use crate::models::ids::base62_impl::parse_base62;
-use crate::models::images::ImageContext;
-use crate::models::pats::Scopes;
-use crate::models::projects::{Dependency, FileType, VersionStatus, VersionType};
-use crate::models::teams::ProjectPermissions;
+use crate::models::ids::VersionId;
+use crate::models::projects::{Dependency, FileType, Version, VersionStatus, VersionType};
+use crate::models::v2::projects::LegacyVersion;
 use crate::queue::session::AuthQueue;
-use crate::util::img;
-use crate::util::validate::validation_errors_to_string;
+use crate::routes::{v2_reroute, v3};
 use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
|
|||||||
redis: web::Data<RedisPool>,
|
redis: web::Data<RedisPool>,
|
||||||
session_queue: web::Data<AuthQueue>,
|
session_queue: web::Data<AuthQueue>,
|
||||||
) -> Result<HttpResponse, ApiError> {
|
) -> Result<HttpResponse, ApiError> {
|
||||||
let string = info.into_inner().0;
|
let loader_fields = if let Some(game_versions) = filters.game_versions {
|
||||||
|
// TODO: extract this logic which is similar to the other v2->v3 version_file functions
|
||||||
let result = database::models::Project::get(&string, &**pool, &redis).await?;
|
let mut loader_fields = HashMap::new();
|
||||||
|
serde_json::from_str::<Vec<String>>(&game_versions)
|
||||||
let user_option = get_user_from_headers(
|
.ok()
|
||||||
&req,
|
.and_then(|versions| {
|
||||||
&**pool,
|
let mut game_versions: Vec<serde_json::Value> = vec![];
|
||||||
&redis,
|
for gv in versions {
|
||||||
&session_queue,
|
game_versions.push(serde_json::json!(gv.clone()));
|
||||||
Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map(|x| x.1)
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
if let Some(project) = result {
|
|
||||||
if !is_authorized(&project.inner, &user_option, &pool).await? {
|
|
||||||
return Ok(HttpResponse::NotFound().body(""));
|
|
||||||
}
|
|
||||||
|
|
||||||
let version_filters = filters
|
|
||||||
.game_versions
|
|
||||||
.as_ref()
|
|
||||||
.map(|x| serde_json::from_str::<Vec<String>>(x).unwrap_or_default());
|
|
||||||
let loader_filters = filters
|
|
||||||
.loaders
|
|
||||||
.as_ref()
|
|
||||||
.map(|x| serde_json::from_str::<Vec<String>>(x).unwrap_or_default());
|
|
||||||
let mut versions = database::models::Version::get_many(&project.versions, &**pool, &redis)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.skip(filters.offset.unwrap_or(0))
|
|
||||||
.take(filters.limit.unwrap_or(usize::MAX))
|
|
||||||
.filter(|x| {
|
|
||||||
let mut bool = true;
|
|
||||||
|
|
||||||
if let Some(version_type) = filters.version_type {
|
|
||||||
bool &= &*x.inner.version_type == version_type.as_str();
|
|
||||||
}
|
}
|
||||||
if let Some(loaders) = &loader_filters {
|
loader_fields.insert("game_versions".to_string(), game_versions);
|
||||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
serde_json::to_string(&loader_fields).ok()
|
||||||
}
|
|
||||||
if let Some(game_versions) = &version_filters {
|
|
||||||
bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>();
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
let mut response = versions
|
let filters = v3::versions::VersionListFilters {
|
||||||
.iter()
|
loader_fields,
|
||||||
.filter(|version| {
|
loaders: filters.loaders,
|
||||||
filters
|
featured: filters.featured,
|
||||||
.featured
|
version_type: filters.version_type,
|
||||||
.map(|featured| featured == version.inner.featured)
|
limit: filters.limit,
|
||||||
.unwrap_or(true)
|
offset: filters.offset,
|
||||||
})
|
};
|
||||||
.cloned()
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
versions.sort();
|
let response =
|
||||||
|
v3::versions::version_list(req, info, web::Query(filters), pool, redis, session_queue)
|
||||||
// Attempt to populate versions with "auto featured" versions
|
|
||||||
if response.is_empty() && !versions.is_empty() && filters.featured.unwrap_or(false) {
|
|
||||||
let (loaders, game_versions) = futures::future::try_join(
|
|
||||||
database::models::categories::Loader::list(&**pool, &redis),
|
|
||||||
database::models::categories::GameVersion::list_filter(
|
|
||||||
None,
|
|
||||||
Some(true),
|
|
||||||
&**pool,
|
|
||||||
&redis,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let mut joined_filters = Vec::new();
|
// Convert response to V2 format
|
||||||
for game_version in &game_versions {
|
match v2_reroute::extract_ok_json::<Vec<Version>>(response).await {
|
||||||
for loader in &loaders {
|
Ok(versions) => {
|
||||||
joined_filters.push((game_version, loader))
|
let v2_versions = versions
|
||||||
}
|
.into_iter()
|
||||||
}
|
.map(LegacyVersion::from)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
joined_filters.into_iter().for_each(|filter| {
|
Ok(HttpResponse::Ok().json(v2_versions))
|
||||||
versions
|
|
||||||
.iter()
|
|
||||||
.find(|version| {
|
|
||||||
version.game_versions.contains(&filter.0.version)
|
|
||||||
&& version.loaders.contains(&filter.1.loader)
|
|
||||||
})
|
|
||||||
.map(|version| response.push(version.clone()))
|
|
||||||
.unwrap_or(());
|
|
||||||
});
|
|
||||||
|
|
||||||
if response.is_empty() {
|
|
||||||
versions
|
|
||||||
.into_iter()
|
|
||||||
.for_each(|version| response.push(version));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
Err(response) => Ok(response),
|
||||||
response.sort();
|
|
||||||
response.dedup_by(|a, b| a.inner.id == b.inner.id);
|
|
||||||
|
|
||||||
let response = filter_authorized_versions(response, &user_option, &pool).await?;
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(response))
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -176,41 +100,16 @@ pub async fn version_project_get(
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
     let id = info.into_inner();
-
-    let result = database::models::Project::get(&id.0, &**pool, &redis).await?;
-
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    if let Some(project) = result {
-        if !is_authorized(&project.inner, &user_option, &pool).await? {
-            return Ok(HttpResponse::NotFound().body(""));
-        }
-
-        let versions =
-            database::models::Version::get_many(&project.versions, &**pool, &redis).await?;
-
-        let id_opt = parse_base62(&id.1).ok();
-        let version = versions
-            .into_iter()
-            .find(|x| Some(x.inner.id.0 as u64) == id_opt || x.inner.version_number == id.1);
-
-        if let Some(version) = version {
-            if is_authorized_version(&version.inner, &user_option, &pool).await? {
-                return Ok(HttpResponse::Ok().json(models::projects::Version::from(version)));
-            }
-        }
-    }
-
-    Ok(HttpResponse::NotFound().body(""))
+    let response =
+        v3::versions::version_project_get_helper(req, id, pool, redis, session_queue).await?;
+    // Convert response to V2 format
+    match v2_reroute::extract_ok_json::<Version>(response).await {
+        Ok(version) => {
+            let v2_version = LegacyVersion::from(version);
+            Ok(HttpResponse::Ok().json(v2_version))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
 #[derive(Serialize, Deserialize)]
@ -226,26 +125,21 @@ pub async fn versions_get(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let version_ids = serde_json::from_str::<Vec<models::ids::VersionId>>(&ids.ids)?
-        .into_iter()
-        .map(|x| x.into())
-        .collect::<Vec<database::models::VersionId>>();
-    let versions_data = database::models::Version::get_many(&version_ids, &**pool, &redis).await?;
-
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    let versions = filter_authorized_versions(versions_data, &user_option, &pool).await?;
-
-    Ok(HttpResponse::Ok().json(versions))
+    let ids = v3::versions::VersionIds { ids: ids.ids };
+    let response =
+        v3::versions::versions_get(req, web::Query(ids), pool, redis, session_queue).await?;
+
+    // Convert response to V2 format
+    match v2_reroute::extract_ok_json::<Vec<Version>>(response).await {
+        Ok(versions) => {
+            let v2_versions = versions
+                .into_iter()
+                .map(LegacyVersion::from)
+                .collect::<Vec<_>>();
+            Ok(HttpResponse::Ok().json(v2_versions))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
 #[get("{version_id}")]
@ -257,26 +151,15 @@ pub async fn version_get(
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
     let id = info.into_inner().0;
-    let version_data = database::models::Version::get(id.into(), &**pool, &redis).await?;
-
-    let user_option = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_READ]),
-    )
-    .await
-    .map(|x| x.1)
-    .ok();
-
-    if let Some(data) = version_data {
-        if is_authorized_version(&data.inner, &user_option, &pool).await? {
-            return Ok(HttpResponse::Ok().json(models::projects::Version::from(data)));
-        }
-    }
-
-    Ok(HttpResponse::NotFound().body(""))
+    let response = v3::versions::version_get_helper(req, id, pool, redis, session_queue).await?;
+    // Convert response to V2 format
+    match v2_reroute::extract_ok_json::<Version>(response).await {
+        Ok(version) => {
+            let v2_version = LegacyVersion::from(version);
+            Ok(HttpResponse::Ok().json(v2_version))
+        }
+        Err(response) => Ok(response),
+    }
 }
 
 #[derive(Serialize, Deserialize, Validate)]
@ -299,7 +182,7 @@ pub struct EditVersion {
         custom(function = "crate::util::validate::validate_deps")
     )]
     pub dependencies: Option<Vec<Dependency>>,
-    pub game_versions: Option<Vec<models::projects::GameVersion>>,
+    pub game_versions: Option<Vec<String>>,
     pub loaders: Option<Vec<models::projects::Loader>>,
     pub featured: Option<bool>,
     pub primary_file: Option<(String, String)>,
@ -319,415 +202,56 @@ pub struct EditVersionFileType {
 #[patch("{id}")]
 pub async fn version_edit(
     req: HttpRequest,
-    info: web::Path<(models::ids::VersionId,)>,
+    info: web::Path<(VersionId,)>,
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
     new_version: web::Json<EditVersion>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
+    let new_version = new_version.into_inner();
+
+    let mut fields = HashMap::new();
+    if new_version.game_versions.is_some() {
+        fields.insert(
+            "game_versions".to_string(),
+            serde_json::json!(new_version.game_versions),
+        );
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_WRITE]),
-    )
-    .await?
-    .1;
-
-    new_version
-        .validate()
-        .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
-
-    let version_id = info.into_inner().0;
-    let id = version_id.into();
-
-    let result = database::models::Version::get(id, &**pool, &redis).await?;
-
-    if let Some(version_item) = result {
-        let project_item =
-            database::models::Project::get_id(version_item.inner.project_id, &**pool, &redis)
-                .await?;
-
-        let team_member = database::models::TeamMember::get_from_user_id_project(
-            version_item.inner.project_id,
-            user.id.into(),
-            &**pool,
-        )
-        .await?;
-
-        let organization = Organization::get_associated_organization_project_id(
-            version_item.inner.project_id,
-            &**pool,
-        )
-        .await?;
-
-        let organization_team_member = if let Some(organization) = &organization {
-            database::models::TeamMember::get_from_user_id(
-                organization.team_id,
-                user.id.into(),
-                &**pool,
-            )
-            .await?
-        } else {
-            None
-        };
-
-        let permissions = ProjectPermissions::get_permissions_by_role(
-            &user.role,
-            &team_member,
-            &organization_team_member,
-        );
-
-        if let Some(perms) = permissions {
-            if !perms.contains(ProjectPermissions::UPLOAD_VERSION) {
-                return Err(ApiError::CustomAuthentication(
-                    "You do not have the permissions to edit this version!".to_string(),
-                ));
-            }
-
-            let mut transaction = pool.begin().await?;
-
-            if let Some(name) = &new_version.name {
-                sqlx::query!(
-                    "
-                    UPDATE versions
-                    SET name = $1
-                    WHERE (id = $2)
-                    ",
-                    name.trim(),
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(number) = &new_version.version_number {
-                sqlx::query!(
-                    "
-                    UPDATE versions
-                    SET version_number = $1
-                    WHERE (id = $2)
-                    ",
-                    number,
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(version_type) = &new_version.version_type {
-                sqlx::query!(
-                    "
-                    UPDATE versions
-                    SET version_type = $1
-                    WHERE (id = $2)
-                    ",
-                    version_type.as_str(),
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(dependencies) = &new_version.dependencies {
-                if let Some(project) = project_item {
-                    if project.project_type != "modpack" {
-                        sqlx::query!(
-                            "
-                            DELETE FROM dependencies WHERE dependent_id = $1
-                            ",
-                            id as database::models::ids::VersionId,
-                        )
-                        .execute(&mut *transaction)
-                        .await?;
-
-                        let builders = dependencies
-                            .iter()
-                            .map(|x| database::models::version_item::DependencyBuilder {
-                                project_id: x.project_id.map(|x| x.into()),
-                                version_id: x.version_id.map(|x| x.into()),
-                                file_name: x.file_name.clone(),
-                                dependency_type: x.dependency_type.to_string(),
-                            })
-                            .collect::<Vec<database::models::version_item::DependencyBuilder>>();
-
-                        DependencyBuilder::insert_many(
-                            builders,
-                            version_item.inner.id,
-                            &mut transaction,
-                        )
-                        .await?;
-                    }
-                }
-            }
-
-            if let Some(game_versions) = &new_version.game_versions {
-                sqlx::query!(
-                    "
-                    DELETE FROM game_versions_versions WHERE joining_version_id = $1
-                    ",
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-
-                let mut version_versions = Vec::new();
-                for game_version in game_versions {
-                    let game_version_id = database::models::categories::GameVersion::get_id(
-                        &game_version.0,
-                        &mut *transaction,
-                    )
-                    .await?
-                    .ok_or_else(|| {
-                        ApiError::InvalidInput(
-                            "No database entry for game version provided.".to_string(),
-                        )
-                    })?;
-
-                    version_versions.push(VersionVersion::new(game_version_id, id));
-                }
-                VersionVersion::insert_many(version_versions, &mut transaction).await?;
-
-                database::models::Project::update_game_versions(
-                    version_item.inner.project_id,
-                    &mut transaction,
-                )
-                .await?;
-            }
-
-            if let Some(loaders) = &new_version.loaders {
-                sqlx::query!(
-                    "
-                    DELETE FROM loaders_versions WHERE version_id = $1
-                    ",
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-
-                let mut loader_versions = Vec::new();
-                for loader in loaders {
-                    let loader_id =
-                        database::models::categories::Loader::get_id(&loader.0, &mut *transaction)
-                            .await?
-                            .ok_or_else(|| {
-                                ApiError::InvalidInput(
-                                    "No database entry for loader provided.".to_string(),
-                                )
-                            })?;
-                    loader_versions.push(LoaderVersion::new(loader_id, id));
-                }
-                LoaderVersion::insert_many(loader_versions, &mut transaction).await?;
-
-                database::models::Project::update_loaders(
-                    version_item.inner.project_id,
-                    &mut transaction,
-                )
-                .await?;
-            }
-
-            if let Some(featured) = &new_version.featured {
-                sqlx::query!(
-                    "
-                    UPDATE versions
-                    SET featured = $1
-                    WHERE (id = $2)
-                    ",
-                    featured,
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(primary_file) = &new_version.primary_file {
-                let result = sqlx::query!(
-                    "
-                    SELECT f.id id FROM hashes h
-                    INNER JOIN files f ON h.file_id = f.id
-                    WHERE h.algorithm = $2 AND h.hash = $1
-                    ",
-                    primary_file.1.as_bytes(),
-                    primary_file.0
-                )
-                .fetch_optional(&**pool)
-                .await?
-                .ok_or_else(|| {
-                    ApiError::InvalidInput(format!(
-                        "Specified file with hash {} does not exist.",
-                        primary_file.1.clone()
-                    ))
-                })?;
-
-                sqlx::query!(
-                    "
-                    UPDATE files
-                    SET is_primary = FALSE
-                    WHERE (version_id = $1)
-                    ",
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-
-                sqlx::query!(
-                    "
-                    UPDATE files
-                    SET is_primary = TRUE
-                    WHERE (id = $1)
-                    ",
-                    result.id,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(body) = &new_version.changelog {
-                sqlx::query!(
-                    "
-                    UPDATE versions
-                    SET changelog = $1
-                    WHERE (id = $2)
-                    ",
-                    body,
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(downloads) = &new_version.downloads {
-                if !user.role.is_mod() {
-                    return Err(ApiError::CustomAuthentication(
-                        "You don't have permission to set the downloads of this mod".to_string(),
-                    ));
-                }
-
-                sqlx::query!(
-                    "
-                    UPDATE versions
-                    SET downloads = $1
-                    WHERE (id = $2)
-                    ",
-                    *downloads as i32,
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-
-                let diff = *downloads - (version_item.inner.downloads as u32);
-
-                sqlx::query!(
-                    "
-                    UPDATE mods
-                    SET downloads = downloads + $1
-                    WHERE (id = $2)
-                    ",
-                    diff as i32,
-                    version_item.inner.project_id as database::models::ids::ProjectId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(status) = &new_version.status {
-                if !status.can_be_requested() {
-                    return Err(ApiError::InvalidInput(
-                        "The requested status cannot be set!".to_string(),
-                    ));
-                }
-
-                sqlx::query!(
-                    "
-                    UPDATE versions
-                    SET status = $1
-                    WHERE (id = $2)
-                    ",
-                    status.as_str(),
-                    id as database::models::ids::VersionId,
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-
-            if let Some(file_types) = &new_version.file_types {
-                for file_type in file_types {
-                    let result = sqlx::query!(
-                        "
-                        SELECT f.id id FROM hashes h
-                        INNER JOIN files f ON h.file_id = f.id
-                        WHERE h.algorithm = $2 AND h.hash = $1
-                        ",
-                        file_type.hash.as_bytes(),
-                        file_type.algorithm
-                    )
-                    .fetch_optional(&**pool)
-                    .await?
-                    .ok_or_else(|| {
-                        ApiError::InvalidInput(format!(
-                            "Specified file with hash {} does not exist.",
-                            file_type.algorithm.clone()
-                        ))
-                    })?;
-
-                    sqlx::query!(
-                        "
-                        UPDATE files
-                        SET file_type = $2
-                        WHERE (id = $1)
-                        ",
-                        result.id,
-                        file_type.file_type.as_ref().map(|x| x.as_str()),
-                    )
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(ordering) = &new_version.ordering {
|
|
||||||
sqlx::query!(
|
|
||||||
"
|
|
||||||
UPDATE versions
|
|
||||||
SET ordering = $1
|
|
||||||
WHERE (id = $2)
|
|
||||||
",
|
|
||||||
ordering.to_owned() as Option<i32>,
|
|
||||||
id as database::models::ids::VersionId,
|
|
||||||
)
|
|
||||||
.execute(&mut *transaction)
|
|
||||||
.await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// delete any images no longer in the changelog
|
|
||||||
let checkable_strings: Vec<&str> = vec![&new_version.changelog]
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|x| x.as_ref().map(|y| y.as_str()))
|
|
||||||
.collect();
|
|
||||||
let context = ImageContext::Version {
|
|
||||||
version_id: Some(version_item.inner.id.into()),
|
|
||||||
};
|
|
||||||
|
|
||||||
img::delete_unused_images(context, checkable_strings, &mut transaction, &redis).await?;
|
|
||||||
|
|
||||||
database::models::Version::clear_cache(&version_item, &redis).await?;
|
|
||||||
database::models::Project::clear_cache(
|
|
||||||
version_item.inner.project_id,
|
|
||||||
None,
|
|
||||||
Some(true),
|
|
||||||
&redis,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
transaction.commit().await?;
|
|
||||||
Ok(HttpResponse::NoContent().body(""))
|
|
||||||
} else {
|
|
||||||
Err(ApiError::CustomAuthentication(
|
|
||||||
"You do not have permission to edit this version!".to_string(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(HttpResponse::NotFound().body(""))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let new_version = v3::versions::EditVersion {
|
||||||
|
name: new_version.name,
|
||||||
|
version_number: new_version.version_number,
|
||||||
|
changelog: new_version.changelog,
|
||||||
|
version_type: new_version.version_type,
|
||||||
|
dependencies: new_version.dependencies,
|
||||||
|
loaders: new_version.loaders,
|
||||||
|
featured: new_version.featured,
|
||||||
|
primary_file: new_version.primary_file,
|
||||||
|
downloads: new_version.downloads,
|
||||||
|
status: new_version.status,
|
||||||
|
file_types: new_version.file_types.map(|v| {
|
||||||
|
v.into_iter()
|
||||||
|
.map(|evft| v3::versions::EditVersionFileType {
|
||||||
|
algorithm: evft.algorithm,
|
||||||
|
hash: evft.hash,
|
||||||
|
file_type: evft.file_type,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
}),
|
||||||
|
ordering: new_version.ordering,
|
||||||
|
fields,
|
||||||
|
};
|
||||||
|
|
||||||
|
let response = v3::versions::version_edit(
|
||||||
|
req,
|
||||||
|
info,
|
||||||
|
pool,
|
||||||
|
redis,
|
||||||
|
web::Json(serde_json::to_value(new_version)?),
|
||||||
|
session_queue,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(response)
|
||||||
}
|
}
|
||||||
|
|
||||||
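As an editorial illustration (not part of the commit): the conversion above copies each v2 field into the v3 `EditVersion` and forwards it as a JSON value. The sketch below shows the kind of v2 PATCH body that passes through this path; the concrete values are invented for the example, only the field names come from the struct literal above.

// Minimal standalone sketch; values are hypothetical, field names mirror the
// v2 -> v3 mapping in the reroute above.
use serde_json::json;

fn main() {
    // A v2 PATCH body such as this...
    let v2_body = json!({
        "name": "Example 1.2.0",
        "version_number": "1.2.0",
        "featured": true,
        "loaders": ["fabric"]
    });
    // ...is deserialized into the v2 EditVersion, copied field-by-field into
    // v3::versions::EditVersion (plus the extra `fields` map), serialized with
    // serde_json::to_value, and forwarded to v3::versions::version_edit.
    println!("{v2_body}");
}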
#[derive(Deserialize)]
@@ -745,92 +269,18 @@ pub async fn version_schedule(
     scheduling_data: web::Json<SchedulingData>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_WRITE]),
-    )
-    .await?
-    .1;
-
-    if scheduling_data.time < Utc::now() {
-        return Err(ApiError::InvalidInput(
-            "You cannot schedule a version to be released in the past!".to_string(),
-        ));
-    }
-
-    if !scheduling_data.requested_status.can_be_requested() {
-        return Err(ApiError::InvalidInput(
-            "Specified requested status cannot be requested!".to_string(),
-        ));
-    }
-
-    let string = info.into_inner().0;
-    let result = database::models::Version::get(string.into(), &**pool, &redis).await?;
-
-    if let Some(version_item) = result {
-        let team_member = database::models::TeamMember::get_from_user_id_project(
-            version_item.inner.project_id,
-            user.id.into(),
-            &**pool,
-        )
-        .await?;
-
-        let organization_item =
-            database::models::Organization::get_associated_organization_project_id(
-                version_item.inner.project_id,
-                &**pool,
-            )
-            .await
-            .map_err(ApiError::Database)?;
-
-        let organization_team_member = if let Some(organization) = &organization_item {
-            database::models::TeamMember::get_from_user_id(
-                organization.team_id,
-                user.id.into(),
-                &**pool,
-            )
-            .await?
-        } else {
-            None
-        };
-
-        let permissions = ProjectPermissions::get_permissions_by_role(
-            &user.role,
-            &team_member,
-            &organization_team_member,
-        )
-        .unwrap_or_default();
-
-        if !user.role.is_mod() && !permissions.contains(ProjectPermissions::EDIT_DETAILS) {
-            return Err(ApiError::CustomAuthentication(
-                "You do not have permission to edit this version's scheduling data!".to_string(),
-            ));
-        }
-
-        let mut transaction = pool.begin().await?;
-        sqlx::query!(
-            "
-            UPDATE versions
-            SET status = $1, date_published = $2
-            WHERE (id = $3)
-            ",
-            VersionStatus::Scheduled.as_str(),
-            scheduling_data.time,
-            version_item.inner.id as database::models::ids::VersionId,
-        )
-        .execute(&mut *transaction)
-        .await?;
-
-        database::models::Version::clear_cache(&version_item, &redis).await?;
-        transaction.commit().await?;
-
-        Ok(HttpResponse::NoContent().body(""))
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::versions::version_schedule(
+        req,
+        info,
+        pool,
+        redis,
+        web::Json(v3::versions::SchedulingData {
+            time: scheduling_data.time,
+            requested_status: scheduling_data.requested_status,
+        }),
+        session_queue,
+    )
+    .await
 }
#[delete("{version_id}")]
@@ -841,81 +291,5 @@ pub async fn version_delete(
     redis: web::Data<RedisPool>,
     session_queue: web::Data<AuthQueue>,
 ) -> Result<HttpResponse, ApiError> {
-    let user = get_user_from_headers(
-        &req,
-        &**pool,
-        &redis,
-        &session_queue,
-        Some(&[Scopes::VERSION_DELETE]),
-    )
-    .await?
-    .1;
-    let id = info.into_inner().0;
-
-    let version = database::models::Version::get(id.into(), &**pool, &redis)
-        .await?
-        .ok_or_else(|| {
-            ApiError::InvalidInput("The specified version does not exist!".to_string())
-        })?;
-
-    if !user.role.is_admin() {
-        let team_member = database::models::TeamMember::get_from_user_id_project(
-            version.inner.project_id,
-            user.id.into(),
-            &**pool,
-        )
-        .await
-        .map_err(ApiError::Database)?;
-
-        let organization =
-            Organization::get_associated_organization_project_id(version.inner.project_id, &**pool)
-                .await?;
-
-        let organization_team_member = if let Some(organization) = &organization {
-            database::models::TeamMember::get_from_user_id(
-                organization.team_id,
-                user.id.into(),
-                &**pool,
-            )
-            .await?
-        } else {
-            None
-        };
-        let permissions = ProjectPermissions::get_permissions_by_role(
-            &user.role,
-            &team_member,
-            &organization_team_member,
-        )
-        .unwrap_or_default();
-
-        if !permissions.contains(ProjectPermissions::DELETE_VERSION) {
-            return Err(ApiError::CustomAuthentication(
-                "You do not have permission to delete versions in this team".to_string(),
-            ));
-        }
-    }
-
-    let mut transaction = pool.begin().await?;
-    let context = ImageContext::Version {
-        version_id: Some(version.inner.id.into()),
-    };
-    let uploaded_images =
-        database::models::Image::get_many_contexted(context, &mut transaction).await?;
-    for image in uploaded_images {
-        image_item::Image::remove(image.id, &mut transaction, &redis).await?;
-    }
-
-    let result =
-        database::models::Version::remove_full(version.inner.id, &redis, &mut transaction).await?;
-
-    database::models::Project::clear_cache(version.inner.project_id, None, Some(true), &redis)
-        .await?;
-
-    transaction.commit().await?;
-
-    if result.is_some() {
-        Ok(HttpResponse::NoContent().body(""))
-    } else {
-        Ok(HttpResponse::NotFound().body(""))
-    }
+    v3::versions::version_delete(req, info, pool, redis, session_queue).await
 }

112 src/routes/v2_reroute.rs Normal file
@@ -0,0 +1,112 @@
use super::v3::project_creation::CreateError;
use crate::util::actix::{generate_multipart, MultipartSegment, MultipartSegmentData};
use actix_multipart::Multipart;
use actix_web::http::header::{HeaderMap, TryIntoHeaderPair};
use actix_web::HttpResponse;
use futures::{stream, StreamExt};
use serde_json::json;

pub async fn extract_ok_json<T>(response: HttpResponse) -> Result<T, HttpResponse>
where
    T: serde::de::DeserializeOwned,
{
    if response.status() == actix_web::http::StatusCode::OK {
        let failure_http_response = || {
            HttpResponse::InternalServerError().json(json!({
                "error": "reroute_error",
                "description": "Could not parse response from V2 redirection of route."
            }))
        };
        // Takes json out of HttpResponse, mutates it, then regenerates the HttpResponse
        let body = response.into_body();
        let bytes = actix_web::body::to_bytes(body)
            .await
            .map_err(|_| failure_http_response())?;
        let json_value: T = serde_json::from_slice(&bytes).map_err(|_| failure_http_response())?;
        Ok(json_value)
    } else {
        Err(response)
    }
}

pub async fn alter_actix_multipart<T, U>(
    mut multipart: Multipart,
    mut headers: HeaderMap,
    mut closure: impl FnMut(T) -> Result<U, CreateError>,
) -> Result<Multipart, CreateError>
where
    T: serde::de::DeserializeOwned,
    U: serde::Serialize,
{
    let mut segments: Vec<MultipartSegment> = Vec::new();

    if let Some(field) = multipart.next().await {
        let mut field = field?;
        let content_disposition = field.content_disposition().clone();
        let field_name = content_disposition.get_name().unwrap_or("");
        let field_filename = content_disposition.get_filename();
        let field_content_type = field.content_type();
        let field_content_type = field_content_type.map(|ct| ct.to_string());

        let mut buffer = Vec::new();
        while let Some(chunk) = field.next().await {
            let data = chunk?;
            buffer.extend_from_slice(&data);
        }

        {
            let json_value: T = serde_json::from_slice(&buffer)?;
            let json_value: U = closure(json_value)?;
            buffer = serde_json::to_vec(&json_value)?;
        }

        segments.push(MultipartSegment {
            name: field_name.to_string(),
            filename: field_filename.map(|s| s.to_string()),
            content_type: field_content_type,
            data: MultipartSegmentData::Binary(buffer),
        })
    }

    while let Some(field) = multipart.next().await {
        let mut field = field?;
        let content_disposition = field.content_disposition().clone();
        let field_name = content_disposition.get_name().unwrap_or("");
        let field_filename = content_disposition.get_filename();
        let field_content_type = field.content_type();
        let field_content_type = field_content_type.map(|ct| ct.to_string());

        let mut buffer = Vec::new();
        while let Some(chunk) = field.next().await {
            let data = chunk?;
            buffer.extend_from_slice(&data);
        }

        segments.push(MultipartSegment {
            name: field_name.to_string(),
            filename: field_filename.map(|s| s.to_string()),
            content_type: field_content_type,
            data: MultipartSegmentData::Binary(buffer),
        })
    }

    let (boundary, payload) = generate_multipart(segments);

    match (
        "Content-Type",
        format!("multipart/form-data; boundary={}", boundary).as_str(),
    )
        .try_into_pair()
    {
        Ok((key, value)) => {
            headers.insert(key, value);
        }
        Err(err) => {
            CreateError::InvalidInput(format!("Error inserting test header: {:?}.", err));
        }
    };

    let new_multipart = Multipart::new(&headers, stream::once(async { Ok(payload) }));

    Ok(new_multipart)
}
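To make the intent of the two helpers above concrete, here is a minimal usage sketch (editorial, not part of the commit). It assumes it sits in the same module as `alter_actix_multipart`, `extract_ok_json`, and `CreateError`; the names `reroute_example` and `v3_create_handler` are purely illustrative stand-ins for a v2 route and the v3 handler it delegates to.

// Hypothetical reroute sketch: rewrite the first (JSON) multipart segment,
// forward the payload to a v3 handler, then unwrap the OK JSON body so the
// v2 route could reshape it before responding.
async fn v3_create_handler(
    _payload: actix_multipart::Multipart,
) -> Result<actix_web::HttpResponse, CreateError> {
    // Stand-in for the real v3 creation route, which is out of scope here.
    Ok(actix_web::HttpResponse::Ok().json(serde_json::json!({ "status": "ok" })))
}

async fn reroute_example(
    payload: actix_multipart::Multipart,
    headers: actix_web::http::header::HeaderMap,
) -> Result<actix_web::HttpResponse, CreateError> {
    // The closure receives the deserialized first segment and returns the
    // rewritten value; remaining (file) segments are passed through untouched.
    let payload = alter_actix_multipart(payload, headers, |legacy: serde_json::Value| {
        // ...map v2 fields to their v3 equivalents here...
        Ok(legacy)
    })
    .await?;

    let response = v3_create_handler(payload).await?;
    // On a 200 the JSON body is extracted for reshaping; any other status is
    // returned to the client unchanged.
    match extract_ok_json::<serde_json::Value>(response).await {
        Ok(json) => Ok(actix_web::HttpResponse::Ok().json(json)),
        Err(response) => Ok(response),
    }
}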
611 src/routes/v3/analytics_get.rs Normal file
@@ -0,0 +1,611 @@
use super::ApiError;
use crate::database::redis::RedisPool;
use crate::{
    auth::{filter_authorized_projects, filter_authorized_versions, get_user_from_headers},
    database::models::{project_item, user_item, version_item},
    models::{
        ids::{
            base62_impl::{parse_base62, to_base62},
            ProjectId, VersionId,
        },
        pats::Scopes,
    },
    queue::session::AuthQueue,
};
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::{DateTime, Duration, Utc};
use serde::{Deserialize, Serialize};
use sqlx::postgres::types::PgInterval;
use sqlx::PgPool;
use std::collections::HashMap;
use std::convert::TryInto;

pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("analytics")
            .route("playtime", web::get().to(playtimes_get))
            .route("views", web::get().to(views_get))
            .route("downloads", web::get().to(downloads_get))
            .route("revenue", web::get().to(revenue_get))
            .route(
                "countries/downloads",
                web::get().to(countries_downloads_get),
            )
            .route("countries/views", web::get().to(countries_views_get)),
    );
}

/// The json data to be passed to fetch analytic data
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// start_date and end_date are optional, and default to two weeks ago and the maximum date, respectively.
/// resolution_minutes is optional. This refers to the window by which we are looking (every day, every minute, etc) and defaults to 1440 (1 day)
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GetData {
    // only one of project_ids or version_ids should be used
    // if neither are provided, all projects the user has access to will be used
    pub project_ids: Option<String>,
    pub version_ids: Option<String>,

    pub start_date: Option<DateTime<Utc>>, // defaults to 2 weeks ago
    pub end_date: Option<DateTime<Utc>>,   // defaults to now

    pub resolution_minutes: Option<u32>, // defaults to 1 day. Ignored in routes that do not aggregate over a resolution (eg: /countries)
}

/// Get playtime data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to playtime data
/// eg:
/// {
///     "4N1tEhnO": {
///         "20230824": 23
///     }
/// }
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
#[derive(Serialize, Deserialize, Clone)]
pub struct FetchedPlaytime {
    pub time: u64,
    pub total_seconds: u64,
    pub loader_seconds: HashMap<String, u64>,
    pub game_version_seconds: HashMap<String, u64>,
    pub parent_seconds: HashMap<VersionId, u64>,
}
pub async fn playtimes_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;

    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    let version_ids = data
        .version_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;

    if project_ids.is_some() && version_ids.is_some() {
        return Err(ApiError::InvalidInput(
            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
        ));
    }

    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or(Utc::now());
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
    let (project_ids, version_ids) =
        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;

    // Get the playtimes
    let playtimes = crate::clickhouse::fetch_playtimes(
        project_ids,
        version_ids,
        start_date,
        end_date,
        resolution_minutes,
        clickhouse.into_inner(),
    )
    .await?;

    let mut hm = HashMap::new();
    for playtime in playtimes {
        let id_string = to_base62(playtime.id);
        if !hm.contains_key(&id_string) {
            hm.insert(id_string.clone(), HashMap::new());
        }
        if let Some(hm) = hm.get_mut(&id_string) {
            hm.insert(playtime.time, playtime.total_seconds);
        }
    }

    Ok(HttpResponse::Ok().json(hm))
}

/// Get view data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to views
/// eg:
/// {
///     "4N1tEhnO": {
///         "20230824": 1090
///     }
/// }
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
pub async fn views_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;

    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    let version_ids = data
        .version_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;

    if project_ids.is_some() && version_ids.is_some() {
        return Err(ApiError::InvalidInput(
            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
        ));
    }

    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or(Utc::now());
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
    let (project_ids, version_ids) =
        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;

    // Get the views
    let views = crate::clickhouse::fetch_views(
        project_ids,
        version_ids,
        start_date,
        end_date,
        resolution_minutes,
        clickhouse.into_inner(),
    )
    .await?;

    let mut hm = HashMap::new();
    for views in views {
        let id_string = to_base62(views.id);
        if !hm.contains_key(&id_string) {
            hm.insert(id_string.clone(), HashMap::new());
        }
        if let Some(hm) = hm.get_mut(&id_string) {
            hm.insert(views.time, views.total_views);
        }
    }

    Ok(HttpResponse::Ok().json(hm))
}

/// Get download data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to downloads
/// eg:
/// {
///     "4N1tEhnO": {
///         "20230824": 32
///     }
/// }
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
pub async fn downloads_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;

    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    let version_ids = data
        .version_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;

    if project_ids.is_some() && version_ids.is_some() {
        return Err(ApiError::InvalidInput(
            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
        ));
    }

    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or(Utc::now());
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
    let (project_ids, version_ids) =
        filter_allowed_ids(project_ids, version_ids, user_option, &pool, &redis).await?;

    // Get the downloads
    let downloads = crate::clickhouse::fetch_downloads(
        project_ids,
        version_ids,
        start_date,
        end_date,
        resolution_minutes,
        clickhouse.into_inner(),
    )
    .await?;

    let mut hm = HashMap::new();
    for downloads in downloads {
        let id_string = to_base62(downloads.id);
        if !hm.contains_key(&id_string) {
            hm.insert(id_string.clone(), HashMap::new());
        }
        if let Some(hm) = hm.get_mut(&id_string) {
            hm.insert(downloads.time, downloads.total_downloads);
        }
    }

    Ok(HttpResponse::Ok().json(hm))
}

/// Get payout data for a set of projects
/// Data is returned as a hashmap of project ids to a hashmap of days to amount earned per day
/// eg:
/// {
///     "4N1tEhnO": {
///         "20230824": 0.001
///     }
/// }
/// ONLY project IDs can be used. Unauthorized projects will be filtered out.
pub async fn revenue_get(
    req: HttpRequest,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAYOUTS_READ]),
    )
    .await
    .map(|x| x.1)?;

    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;

    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or(Utc::now());
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);

    // Round up/down to nearest duration as we are using pgadmin, does not have rounding in the fetch command
    // Round start_date down to nearest resolution
    let diff = start_date.timestamp() % (resolution_minutes as i64 * 60);
    let start_date = start_date - Duration::seconds(diff);

    // Round end_date up to nearest resolution
    let diff = end_date.timestamp() % (resolution_minutes as i64 * 60);
    let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff);

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
    let (project_ids, _) = filter_allowed_ids(project_ids, None, user, &pool, &redis).await?;

    let duration: PgInterval = Duration::minutes(resolution_minutes as i64)
        .try_into()
        .map_err(|_| ApiError::InvalidInput("Invalid resolution_minutes".to_string()))?;
    // Get the revenue data
    let payouts_values = sqlx::query!(
        "
        SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start
        FROM payouts_values
        WHERE mod_id = ANY($1) AND created BETWEEN $2 AND $3
        GROUP by mod_id, interval_start ORDER BY interval_start
        ",
        &project_ids.unwrap_or_default().into_iter().map(|x| x.0 as i64).collect::<Vec<_>>(),
        start_date,
        end_date,
        duration,
    )
    .fetch_all(&**pool)
    .await?;

    let mut hm = HashMap::new();
    for value in payouts_values {
        if let Some(mod_id) = value.mod_id {
            if let Some(amount) = value.amount_sum {
                if let Some(interval_start) = value.interval_start {
                    let id_string = to_base62(mod_id as u64);
                    if !hm.contains_key(&id_string) {
                        hm.insert(id_string.clone(), HashMap::new());
                    }
                    if let Some(hm) = hm.get_mut(&id_string) {
                        hm.insert(interval_start.timestamp(), amount);
                    }
                }
            }
        }
    }

    Ok(HttpResponse::Ok().json(hm))
}

/// Get country data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of country to downloads.
/// Unknown countries are labeled "".
/// This is usable to see significant performing countries per project
/// eg:
/// {
///     "4N1tEhnO": {
///         "CAN": 22
///     }
/// }
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
pub async fn countries_downloads_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;

    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    let version_ids = data
        .version_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;

    if project_ids.is_some() && version_ids.is_some() {
        return Err(ApiError::InvalidInput(
            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
        ));
    }

    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or(Utc::now());

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
    let (project_ids, version_ids) =
        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;

    // Get the countries
    let countries = crate::clickhouse::fetch_countries(
        project_ids,
        version_ids,
        start_date,
        end_date,
        clickhouse.into_inner(),
    )
    .await?;

    let mut hm = HashMap::new();
    for views in countries {
        let id_string = to_base62(views.id);
        if !hm.contains_key(&id_string) {
            hm.insert(id_string.clone(), HashMap::new());
        }
        if let Some(hm) = hm.get_mut(&id_string) {
            hm.insert(views.country, views.total_downloads);
        }
    }

    Ok(HttpResponse::Ok().json(hm))
}

/// Get country data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of country to views.
/// Unknown countries are labeled "".
/// This is usable to see significant performing countries per project
/// eg:
/// {
///     "4N1tEhnO": {
///         "CAN": 56165
///     }
/// }
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
pub async fn countries_views_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;

    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    let version_ids = data
        .version_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;

    if project_ids.is_some() && version_ids.is_some() {
        return Err(ApiError::InvalidInput(
            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
        ));
    }

    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or(Utc::now());

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
    let (project_ids, version_ids) =
        filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;

    // Get the countries
    let countries = crate::clickhouse::fetch_countries(
        project_ids,
        version_ids,
        start_date,
        end_date,
        clickhouse.into_inner(),
    )
    .await?;

    let mut hm = HashMap::new();
    for views in countries {
        let id_string = to_base62(views.id);
        if !hm.contains_key(&id_string) {
            hm.insert(id_string.clone(), HashMap::new());
        }
        if let Some(hm) = hm.get_mut(&id_string) {
            hm.insert(views.country, views.total_views);
        }
    }

    Ok(HttpResponse::Ok().json(hm))
}

async fn filter_allowed_ids(
    mut project_ids: Option<Vec<String>>,
    version_ids: Option<Vec<String>>,
    user: crate::models::users::User,
    pool: &web::Data<PgPool>,
    redis: &RedisPool,
) -> Result<(Option<Vec<ProjectId>>, Option<Vec<VersionId>>), ApiError> {
    if project_ids.is_some() && version_ids.is_some() {
        return Err(ApiError::InvalidInput(
            "Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
        ));
    }

    // If no project_ids or version_ids are provided, we default to all projects the user has access to
    if project_ids.is_none() && version_ids.is_none() {
        project_ids = Some(
            user_item::User::get_projects(user.id.into(), &***pool, redis)
                .await?
                .into_iter()
                .map(|x| ProjectId::from(x).to_string())
                .collect(),
        );
    }

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions

    let project_ids = if let Some(project_ids) = project_ids {
        // Submitted project_ids are filtered by the user's permissions
        let ids = project_ids
            .iter()
            .map(|id| Ok(ProjectId(parse_base62(id)?).into()))
            .collect::<Result<Vec<_>, ApiError>>()?;
        let projects = project_item::Project::get_many_ids(&ids, &***pool, redis).await?;
        let ids: Vec<ProjectId> = filter_authorized_projects(projects, &Some(user.clone()), pool)
            .await?
            .into_iter()
            .map(|x| x.id)
            .collect::<Vec<_>>();
        Some(ids)
    } else {
        None
    };
    let version_ids = if let Some(version_ids) = version_ids {
        // Submitted version_ids are filtered by the user's permissions
        let ids = version_ids
            .iter()
            .map(|id| Ok(VersionId(parse_base62(id)?).into()))
            .collect::<Result<Vec<_>, ApiError>>()?;
        let versions = version_item::Version::get_many(&ids, &***pool, redis).await?;
        let ids: Vec<VersionId> = filter_authorized_versions(versions, &Some(user), pool)
            .await?
            .into_iter()
            .map(|x| x.id)
            .collect::<Vec<_>>();
        Some(ids)
    } else {
        None
    };

    // Only one of project_ids or version_ids will be Some
    Ok((project_ids, version_ids))
}
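One detail worth spelling out from `revenue_get` above is the date rounding: the start of the range is rounded down and the end rounded up to the nearest `resolution_minutes` window, so the DATE_BIN buckets in the SQL query line up with the requested range. The standalone sketch below (editorial, not part of the commit) reproduces that arithmetic with plain i64 second timestamps instead of chrono types.

// Minimal sketch of the window rounding used in revenue_get.
fn round_to_resolution(start_ts: i64, end_ts: i64, resolution_minutes: i64) -> (i64, i64) {
    let window = resolution_minutes * 60;
    // Round the start down to the previous window boundary.
    let start = start_ts - start_ts.rem_euclid(window);
    // Round the end up to the next window boundary (a full window is added
    // when the end already sits on a boundary, matching the code above).
    let end = end_ts + (window - end_ts.rem_euclid(window));
    (start, end)
}

fn main() {
    // A 1:00-2:30 range with a 60-minute resolution expands to 1:00-3:00.
    let (s, e) = round_to_resolution(3_600, 9_000, 60);
    assert_eq!((s, e), (3_600, 10_800));
    println!("{s}..{e}");
}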
538
src/routes/v3/collections.rs
Normal file
538
src/routes/v3/collections.rs
Normal file
@ -0,0 +1,538 @@
|
|||||||
|
use crate::auth::checks::{filter_authorized_collections, is_authorized_collection};
|
||||||
|
use crate::auth::get_user_from_headers;
|
||||||
|
use crate::database::models::{collection_item, generate_collection_id, project_item};
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
use crate::file_hosting::FileHost;
|
||||||
|
use crate::models::collections::{Collection, CollectionStatus};
|
||||||
|
use crate::models::ids::base62_impl::parse_base62;
|
||||||
|
use crate::models::ids::{CollectionId, ProjectId};
|
||||||
|
use crate::models::pats::Scopes;
|
||||||
|
use crate::queue::session::AuthQueue;
|
||||||
|
use crate::routes::v3::project_creation::CreateError;
|
||||||
|
use crate::routes::ApiError;
|
||||||
|
use crate::util::routes::read_from_payload;
|
||||||
|
use crate::util::validate::validation_errors_to_string;
|
||||||
|
use crate::{database, models};
|
||||||
|
use actix_web::web::Data;
|
||||||
|
use actix_web::{web, HttpRequest, HttpResponse};
|
||||||
|
use chrono::Utc;
|
||||||
|
use itertools::Itertools;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::PgPool;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||||
|
cfg.route("collections", web::get().to(collections_get));
|
||||||
|
cfg.route("collection", web::post().to(collection_create));
|
||||||
|
|
||||||
|
cfg.service(
|
||||||
|
web::scope("collection")
|
||||||
|
.route("{id}", web::get().to(collection_get))
|
||||||
|
.route("{id}", web::delete().to(collection_delete))
|
||||||
|
.route("{id}", web::patch().to(collection_edit))
|
||||||
|
.route("{id}/icon", web::patch().to(collection_icon_edit))
|
||||||
|
.route("{id}/icon", web::delete().to(delete_collection_icon)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Validate, Clone)]
|
||||||
|
pub struct CollectionCreateData {
|
||||||
|
#[validate(
|
||||||
|
length(min = 3, max = 64),
|
||||||
|
custom(function = "crate::util::validate::validate_name")
|
||||||
|
)]
|
||||||
|
/// The title or name of the project.
|
||||||
|
pub title: String,
|
||||||
|
#[validate(length(min = 3, max = 255))]
|
||||||
|
/// A short description of the collection.
|
||||||
|
pub description: String,
|
||||||
|
#[validate(length(max = 32))]
|
||||||
|
#[serde(default = "Vec::new")]
|
||||||
|
/// A list of initial projects to use with the created collection
|
||||||
|
pub projects: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn collection_create(
|
||||||
|
req: HttpRequest,
|
||||||
|
collection_create_data: web::Json<CollectionCreateData>,
|
||||||
|
client: Data<PgPool>,
|
||||||
|
redis: Data<RedisPool>,
|
||||||
|
session_queue: Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, CreateError> {
|
||||||
|
let collection_create_data = collection_create_data.into_inner();
|
||||||
|
|
||||||
|
// The currently logged in user
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**client,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::COLLECTION_CREATE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
collection_create_data
|
||||||
|
.validate()
|
||||||
|
.map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
|
||||||
|
|
||||||
|
let mut transaction = client.begin().await?;
|
||||||
|
|
||||||
|
let collection_id: CollectionId = generate_collection_id(&mut transaction).await?.into();
|
||||||
|
|
||||||
|
let initial_project_ids = project_item::Project::get_many(
|
||||||
|
&collection_create_data.projects,
|
||||||
|
&mut *transaction,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.map(|x| x.inner.id.into())
|
||||||
|
.collect::<Vec<ProjectId>>();
|
||||||
|
|
||||||
|
let collection_builder_actual = collection_item::CollectionBuilder {
|
||||||
|
collection_id: collection_id.into(),
|
||||||
|
user_id: current_user.id.into(),
|
||||||
|
title: collection_create_data.title,
|
||||||
|
description: collection_create_data.description,
|
||||||
|
status: CollectionStatus::Listed,
|
||||||
|
projects: initial_project_ids
|
||||||
|
.iter()
|
||||||
|
.copied()
|
||||||
|
.map(|x| x.into())
|
||||||
|
.collect(),
|
||||||
|
};
|
||||||
|
let collection_builder = collection_builder_actual.clone();
|
||||||
|
|
||||||
|
let now = Utc::now();
|
||||||
|
collection_builder_actual.insert(&mut transaction).await?;
|
||||||
|
|
||||||
|
let response = crate::models::collections::Collection {
|
||||||
|
id: collection_id,
|
||||||
|
user: collection_builder.user_id.into(),
|
||||||
|
title: collection_builder.title.clone(),
|
||||||
|
description: collection_builder.description.clone(),
|
||||||
|
created: now,
|
||||||
|
updated: now,
|
||||||
|
icon_url: None,
|
||||||
|
color: None,
|
||||||
|
status: collection_builder.status,
|
||||||
|
projects: initial_project_ids,
|
||||||
|
};
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(response))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize)]
|
||||||
|
pub struct CollectionIds {
|
||||||
|
pub ids: String,
|
||||||
|
}
|
||||||
|
pub async fn collections_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
web::Query(ids): web::Query<CollectionIds>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
|
||||||
|
let ids = ids
|
||||||
|
.into_iter()
|
||||||
|
.map(|x| parse_base62(x).map(|x| database::models::CollectionId(x as i64)))
|
||||||
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
let collections_data = database::models::Collection::get_many(&ids, &**pool, &redis).await?;
|
||||||
|
|
||||||
|
let user_option = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::COLLECTION_READ]),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map(|x| x.1)
|
||||||
|
.ok();
|
||||||
|
|
||||||
|
let collections = filter_authorized_collections(collections_data, &user_option, &pool).await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(collections))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn collection_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
|
||||||
|
let id = database::models::CollectionId(parse_base62(&string)? as i64);
|
||||||
|
let collection_data = database::models::Collection::get(id, &**pool, &redis).await?;
|
||||||
|
let user_option = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::COLLECTION_READ]),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map(|x| x.1)
|
||||||
|
.ok();
|
||||||
|
|
||||||
|
if let Some(data) = collection_data {
|
||||||
|
if is_authorized_collection(&data, &user_option).await? {
|
||||||
|
return Ok(HttpResponse::Ok().json(Collection::from(data)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Validate)]
|
||||||
|
pub struct EditCollection {
|
||||||
|
#[validate(
|
||||||
|
length(min = 3, max = 64),
|
||||||
|
custom(function = "crate::util::validate::validate_name")
|
||||||
|
)]
|
||||||
|
pub title: Option<String>,
|
||||||
|
#[validate(length(min = 3, max = 256))]
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub status: Option<CollectionStatus>,
|
||||||
|
#[validate(length(max = 64))]
|
||||||
|
pub new_projects: Option<Vec<String>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn collection_edit(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
new_collection: web::Json<EditCollection>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::COLLECTION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
new_collection
|
||||||
|
.validate()
|
||||||
|
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
|
||||||
|
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
let id = database::models::CollectionId(parse_base62(&string)? as i64);
|
||||||
|
let result = database::models::Collection::get(id, &**pool, &redis).await?;
|
||||||
|
|
||||||
|
if let Some(collection_item) = result {
|
||||||
|
if !can_modify_collection(&collection_item, &user) {
|
||||||
|
return Ok(HttpResponse::Unauthorized().body(""));
|
||||||
|
}
|
||||||
|
|
||||||
|
let id = collection_item.id;
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
if let Some(title) = &new_collection.title {
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE collections
|
||||||
|
SET title = $1
|
||||||
|
WHERE (id = $2)
|
||||||
|
",
|
||||||
|
title.trim(),
|
||||||
|
id as database::models::ids::CollectionId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(description) = &new_collection.description {
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE collections
|
||||||
|
SET description = $1
|
||||||
|
WHERE (id = $2)
|
||||||
|
",
|
||||||
|
description,
|
||||||
|
id as database::models::ids::CollectionId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(status) = &new_collection.status {
|
||||||
|
if !(user.role.is_mod()
|
||||||
|
|| collection_item.status.is_approved() && status.can_be_requested())
|
||||||
|
{
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You don't have permission to set this status!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE collections
|
||||||
|
SET status = $1
|
||||||
|
WHERE (id = $2)
|
||||||
|
",
|
||||||
|
status.to_string(),
|
||||||
|
id as database::models::ids::CollectionId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(new_project_ids) = &new_collection.new_projects {
|
||||||
|
// Delete all existing projects
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
DELETE FROM collections_mods
|
||||||
|
WHERE collection_id = $1
|
||||||
|
",
|
||||||
|
collection_item.id as database::models::ids::CollectionId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let collection_item_ids = new_project_ids
|
||||||
|
.iter()
|
||||||
|
.map(|_| collection_item.id.0)
|
||||||
|
.collect_vec();
|
||||||
|
let mut validated_project_ids = Vec::new();
|
||||||
|
for project_id in new_project_ids {
|
||||||
|
let project = database::models::Project::get(project_id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput(format!(
|
||||||
|
"The specified project {project_id} does not exist!"
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
validated_project_ids.push(project.inner.id.0);
|
||||||
|
}
|
||||||
|
// Insert- don't throw an error if it already exists
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
INSERT INTO collections_mods (collection_id, mod_id)
|
||||||
|
SELECT * FROM UNNEST ($1::int8[], $2::int8[])
|
||||||
|
ON CONFLICT DO NOTHING
|
||||||
|
",
|
||||||
|
&collection_item_ids[..],
|
||||||
|
&validated_project_ids[..],
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize)]
|
||||||
|
pub struct Extension {
|
||||||
|
pub ext: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
pub async fn collection_icon_edit(
|
||||||
|
web::Query(ext): web::Query<Extension>,
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
|
mut payload: web::Payload,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) {
|
||||||
|
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::COLLECTION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
let id = database::models::CollectionId(parse_base62(&string)? as i64);
|
||||||
|
let collection_item = database::models::Collection::get(id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified collection does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if !can_modify_collection(&collection_item, &user) {
|
||||||
|
return Ok(HttpResponse::Unauthorized().body(""));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(icon) = collection_item.icon_url {
|
||||||
|
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
||||||
|
|
||||||
|
if let Some(icon_path) = name {
|
||||||
|
file_host.delete_file_version("", icon_path).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let bytes =
|
||||||
|
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
|
||||||
|
|
||||||
|
let color = crate::util::img::get_color_from_img(&bytes)?;
|
||||||
|
|
||||||
|
let hash = sha1::Sha1::from(&bytes).hexdigest();
|
||||||
|
let collection_id: CollectionId = collection_item.id.into();
|
||||||
|
let upload_data = file_host
|
||||||
|
.upload_file(
|
||||||
|
content_type,
|
||||||
|
&format!("data/{}/{}.{}", collection_id, hash, ext.ext),
|
||||||
|
bytes.freeze(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE collections
|
||||||
|
SET icon_url = $1, color = $2
|
||||||
|
WHERE (id = $3)
|
||||||
|
",
|
||||||
|
format!("{}/{}", cdn_url, upload_data.file_name),
|
||||||
|
color.map(|x| x as i32),
|
||||||
|
collection_item.id as database::models::ids::CollectionId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Err(ApiError::InvalidInput(format!(
|
||||||
|
"Invalid format for collection icon: {}",
|
||||||
|
ext.ext
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
}
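// Illustrative sketch of the icon-path handling above (URL values are made up): the stored
// icon_url is split on "{CDN_URL}/" so that only the object key is handed to the file host.
fn cdn_object_key<'a>(icon_url: &'a str, cdn_url: &str) -> Option<&'a str> {
    // "https://cdn.example.com/data/abcd/ef01.png" with cdn_url "https://cdn.example.com"
    // yields Some("data/abcd/ef01.png"); a URL from a different host yields None.
    icon_url.split(&format!("{cdn_url}/")).nth(1)
}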
|
||||||
|
|
||||||
|
pub async fn delete_collection_icon(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::COLLECTION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
let id = database::models::CollectionId(parse_base62(&string)? as i64);
|
||||||
|
let collection_item = database::models::Collection::get(id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified collection does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
if !can_modify_collection(&collection_item, &user) {
|
||||||
|
return Ok(HttpResponse::Unauthorized().body(""));
|
||||||
|
}
|
||||||
|
|
||||||
|
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||||
|
if let Some(icon) = collection_item.icon_url {
|
||||||
|
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
||||||
|
|
||||||
|
if let Some(icon_path) = name {
|
||||||
|
file_host.delete_file_version("", icon_path).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE collections
|
||||||
|
SET icon_url = NULL, color = NULL
|
||||||
|
WHERE (id = $1)
|
||||||
|
",
|
||||||
|
collection_item.id as database::models::ids::CollectionId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn collection_delete(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::COLLECTION_DELETE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
let id = database::models::CollectionId(parse_base62(&string)? as i64);
|
||||||
|
let collection = database::models::Collection::get(id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified collection does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
if !can_modify_collection(&collection, &user) {
|
||||||
|
return Ok(HttpResponse::Unauthorized().body(""));
|
||||||
|
}
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
let result =
|
||||||
|
database::models::Collection::remove(collection.id, &mut transaction, &redis).await?;
|
||||||
|
database::models::Collection::clear_cache(collection.id, &redis).await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
if result.is_some() {
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn can_modify_collection(
|
||||||
|
collection: &database::models::Collection,
|
||||||
|
user: &models::users::User,
|
||||||
|
) -> bool {
|
||||||
|
collection.user_id == user.id.into() || user.role.is_mod()
|
||||||
|
}
|
||||||
234
src/routes/v3/images.rs
Normal file
@ -0,0 +1,234 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use crate::auth::{get_user_from_headers, is_authorized, is_authorized_version};
|
||||||
|
use crate::database;
|
||||||
|
use crate::database::models::{project_item, report_item, thread_item, version_item};
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
use crate::file_hosting::FileHost;
|
||||||
|
use crate::models::ids::{ThreadMessageId, VersionId};
|
||||||
|
use crate::models::images::{Image, ImageContext};
|
||||||
|
use crate::models::reports::ReportId;
|
||||||
|
use crate::queue::session::AuthQueue;
|
||||||
|
use crate::routes::ApiError;
|
||||||
|
use crate::util::routes::read_from_payload;
|
||||||
|
use actix_web::{web, HttpRequest, HttpResponse};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::PgPool;
|
||||||
|
|
||||||
|
use super::threads::is_authorized_thread;
|
||||||
|
|
||||||
|
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||||
|
cfg.route("image", web::post().to(images_add));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize)]
|
||||||
|
pub struct ImageUpload {
|
||||||
|
pub ext: String,
|
||||||
|
|
||||||
|
// Context must be an allowed context
|
||||||
|
// currently: project, version, thread_message, report
|
||||||
|
pub context: String,
|
||||||
|
|
||||||
|
// Optional context id to associate with
|
||||||
|
pub project_id: Option<String>, // allow slug or id
|
||||||
|
pub version_id: Option<VersionId>,
|
||||||
|
pub thread_message_id: Option<ThreadMessageId>,
|
||||||
|
pub report_id: Option<ReportId>,
|
||||||
|
}
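// Usage sketch (ids and host are placeholders): the upload is a raw-body POST with the
// metadata carried in the query string, e.g.
//   POST /v3/image?ext=png&context=thread_message&thread_message_id=AbCdEf
// (URL-encoded in practice). The optional id supplied should match the chosen context;
// images_add below resolves that pairing and checks the caller may attach images to it.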
|
||||||
|
|
||||||
|
pub async fn images_add(
|
||||||
|
req: HttpRequest,
|
||||||
|
web::Query(data): web::Query<ImageUpload>,
|
||||||
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
|
mut payload: web::Payload,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
if let Some(content_type) = crate::util::ext::get_image_content_type(&data.ext) {
|
||||||
|
let mut context = ImageContext::from_str(&data.context, None);
|
||||||
|
|
||||||
|
let scopes = vec![context.relevant_scope()];
|
||||||
|
|
||||||
|
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||||
|
let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, Some(&scopes))
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
// Attempt to associate the supplied id with the context
|
||||||
|
// If the context cannot be found, or the user is not authorized to upload images for the context, return an error
|
||||||
|
match &mut context {
|
||||||
|
ImageContext::Project { project_id } => {
|
||||||
|
if let Some(id) = data.project_id {
|
||||||
|
let project = project_item::Project::get(&id, &**pool, &redis).await?;
|
||||||
|
if let Some(project) = project {
|
||||||
|
if is_authorized(&project.inner, &Some(user.clone()), &pool).await? {
|
||||||
|
*project_id = Some(project.inner.id.into());
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You are not authorized to upload images for this project"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::InvalidInput(
|
||||||
|
"The project could not be found.".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ImageContext::Version { version_id } => {
|
||||||
|
if let Some(id) = data.version_id {
|
||||||
|
let version = version_item::Version::get(id.into(), &**pool, &redis).await?;
|
||||||
|
if let Some(version) = version {
|
||||||
|
if is_authorized_version(&version.inner, &Some(user.clone()), &pool).await?
|
||||||
|
{
|
||||||
|
*version_id = Some(version.inner.id.into());
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You are not authorized to upload images for this version"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::InvalidInput(
|
||||||
|
"The version could not be found.".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ImageContext::ThreadMessage { thread_message_id } => {
|
||||||
|
if let Some(id) = data.thread_message_id {
|
||||||
|
let thread_message = thread_item::ThreadMessage::get(id.into(), &**pool)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput(
|
||||||
|
"The thread message could not found.".to_string(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
let thread = thread_item::Thread::get(thread_message.thread_id, &**pool)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput(
|
||||||
|
"The thread associated with the thread message could not be found"
|
||||||
|
.to_string(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
if is_authorized_thread(&thread, &user, &pool).await? {
|
||||||
|
*thread_message_id = Some(thread_message.id.into());
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You are not authorized to upload images for this thread message"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ImageContext::Report { report_id } => {
|
||||||
|
if let Some(id) = data.report_id {
|
||||||
|
let report = report_item::Report::get(id.into(), &**pool)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The report could not be found.".to_string())
|
||||||
|
})?;
|
||||||
|
let thread = thread_item::Thread::get(report.thread_id, &**pool)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput(
|
||||||
|
"The thread associated with the report could not be found."
|
||||||
|
.to_string(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
if is_authorized_thread(&thread, &user, &pool).await? {
|
||||||
|
*report_id = Some(report.id.into());
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You are not authorized to upload images for this report".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ImageContext::Unknown => {
|
||||||
|
return Err(ApiError::InvalidInput(
|
||||||
|
"Context must be one of: project, version, thread_message, report".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upload the image to the file host
|
||||||
|
let bytes =
|
||||||
|
read_from_payload(&mut payload, 1_048_576, "Images must be smaller than 1MiB").await?;
|
||||||
|
|
||||||
|
let hash = sha1::Sha1::from(&bytes).hexdigest();
|
||||||
|
let upload_data = file_host
|
||||||
|
.upload_file(
|
||||||
|
content_type,
|
||||||
|
&format!("data/cached_images/{}.{}", hash, data.ext),
|
||||||
|
bytes.freeze(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
let db_image: database::models::Image = database::models::Image {
|
||||||
|
id: database::models::generate_image_id(&mut transaction).await?,
|
||||||
|
url: format!("{}/{}", cdn_url, upload_data.file_name),
|
||||||
|
size: upload_data.content_length as u64,
|
||||||
|
created: chrono::Utc::now(),
|
||||||
|
owner_id: database::models::UserId::from(user.id),
|
||||||
|
context: context.context_as_str().to_string(),
|
||||||
|
project_id: if let ImageContext::Project {
|
||||||
|
project_id: Some(id),
|
||||||
|
} = context
|
||||||
|
{
|
||||||
|
Some(database::models::ProjectId::from(id))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
version_id: if let ImageContext::Version {
|
||||||
|
version_id: Some(id),
|
||||||
|
} = context
|
||||||
|
{
|
||||||
|
Some(database::models::VersionId::from(id))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
thread_message_id: if let ImageContext::ThreadMessage {
|
||||||
|
thread_message_id: Some(id),
|
||||||
|
} = context
|
||||||
|
{
|
||||||
|
Some(database::models::ThreadMessageId::from(id))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
report_id: if let ImageContext::Report {
|
||||||
|
report_id: Some(id),
|
||||||
|
} = context
|
||||||
|
{
|
||||||
|
Some(database::models::ReportId::from(id))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Insert
|
||||||
|
db_image.insert(&mut transaction).await?;
|
||||||
|
|
||||||
|
let image = Image {
|
||||||
|
id: db_image.id.into(),
|
||||||
|
url: db_image.url,
|
||||||
|
size: db_image.size,
|
||||||
|
created: db_image.created,
|
||||||
|
owner_id: db_image.owner_id.into(),
|
||||||
|
context,
|
||||||
|
};
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(image))
|
||||||
|
} else {
|
||||||
|
Err(ApiError::InvalidInput(
|
||||||
|
"The specified file is not an image!".to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
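// Illustrative sketch (assumed sha1 0.6-style API, matching the call above): uploads are
// content-addressed, so posting identical bytes always maps to the same cached_images object.
fn cached_image_key(bytes: &[u8], ext: &str) -> String {
    let hash = sha1::Sha1::from(bytes).hexdigest();
    format!("data/cached_images/{}.{}", hash, ext)
}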
|
||||||
@ -3,13 +3,42 @@ use crate::{auth::oauth, util::cors::default_cors};
use actix_web::{web, HttpResponse};
use serde_json::json;

pub mod analytics_get;
pub mod collections;
pub mod images;
pub mod moderation;
pub mod notifications;
pub mod organizations;
pub mod project_creation;
pub mod projects;
pub mod reports;
pub mod statistics;
pub mod tags;
pub mod teams;
pub mod threads;
pub mod users;
pub mod version_creation;
pub mod version_file;
pub mod versions;

pub mod oauth_clients;

pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("v3")
            .wrap(default_cors())
            .route("", web::get().to(hello_world))
            .configure(analytics_get::config)
            .configure(collections::config)
            .configure(images::config)
            .configure(organizations::config)
            .configure(project_creation::config)
            .configure(projects::config)
            .configure(reports::config)
            .configure(tags::config)
            .configure(teams::config)
            .configure(threads::config)
            .configure(version_file::config)
            .configure(versions::config)
            .configure(oauth::config)
            .configure(oauth_clients::config),
    );
65
src/routes/v3/moderation.rs
Normal file
@ -0,0 +1,65 @@
use super::ApiError;
use crate::database;
use crate::database::redis::RedisPool;
use crate::models::projects::ProjectStatus;
use crate::queue::session::AuthQueue;
use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes};
use actix_web::{web, HttpRequest, HttpResponse};
use serde::Deserialize;
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("moderation/projects", web::get().to(get_projects));
}

#[derive(Deserialize)]
pub struct ResultCount {
    #[serde(default = "default_count")]
    pub count: i16,
}

fn default_count() -> i16 {
    100
}

pub async fn get_projects(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    count: web::Query<ResultCount>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await?;

    use futures::stream::TryStreamExt;

    let project_ids = sqlx::query!(
        "
        SELECT id FROM mods
        WHERE status = $1
        ORDER BY queued ASC
        LIMIT $2;
        ",
        ProjectStatus::Processing.as_str(),
        count.count as i64
    )
    .fetch_many(&**pool)
    .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) })
    .try_collect::<Vec<database::models::ProjectId>>()
    .await?;

    let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &**pool, &redis)
        .await?
        .into_iter()
        .map(crate::models::projects::Project::from)
        .collect();

    Ok(HttpResponse::Ok().json(projects))
}
289
src/routes/v3/notifications.rs
Normal file
@ -0,0 +1,289 @@
|
|||||||
|
use crate::auth::get_user_from_headers;
|
||||||
|
use crate::database;
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
use crate::models::ids::NotificationId;
|
||||||
|
use crate::models::notifications::Notification;
|
||||||
|
use crate::models::pats::Scopes;
|
||||||
|
use crate::queue::session::AuthQueue;
|
||||||
|
use crate::routes::ApiError;
|
||||||
|
use actix_web::{web, HttpRequest, HttpResponse};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::PgPool;
|
||||||
|
|
||||||
|
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||||
|
cfg.route("notifications", web::get().to(notifications_get));
|
||||||
|
cfg.route("notifications", web::patch().to(notifications_read));
|
||||||
|
cfg.route("notifications", web::delete().to(notifications_delete));
|
||||||
|
|
||||||
|
cfg.service(
|
||||||
|
web::scope("notification")
|
||||||
|
.route("{id}", web::get().to(notification_get))
|
||||||
|
.route("{id}", web::patch().to(notification_read))
|
||||||
|
.route("{id}", web::delete().to(notification_delete)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize)]
|
||||||
|
pub struct NotificationIds {
|
||||||
|
pub ids: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn notifications_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
web::Query(ids): web::Query<NotificationIds>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::NOTIFICATION_READ]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
use database::models::notification_item::Notification as DBNotification;
|
||||||
|
use database::models::NotificationId as DBNotificationId;
|
||||||
|
|
||||||
|
let notification_ids: Vec<DBNotificationId> =
|
||||||
|
serde_json::from_str::<Vec<NotificationId>>(ids.ids.as_str())?
|
||||||
|
.into_iter()
|
||||||
|
.map(DBNotificationId::from)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let notifications_data: Vec<DBNotification> =
|
||||||
|
database::models::notification_item::Notification::get_many(¬ification_ids, &**pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let notifications: Vec<Notification> = notifications_data
|
||||||
|
.into_iter()
|
||||||
|
.filter(|n| n.user_id == user.id.into() || user.role.is_admin())
|
||||||
|
.map(Notification::from)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(notifications))
|
||||||
|
}
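// Usage sketch (ids are placeholders): the `ids` query parameter is itself a JSON array of
// notification id strings, so a request looks like
//   GET /v3/notifications?ids=["AbCdEf","GhIjKl"]
// (URL-encoded in practice); serde_json::from_str parses it before the ids are mapped to
// their database counterparts above.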
|
||||||
|
|
||||||
|
pub async fn notification_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(NotificationId,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::NOTIFICATION_READ]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let id = info.into_inner().0;
|
||||||
|
|
||||||
|
let notification_data =
|
||||||
|
database::models::notification_item::Notification::get(id.into(), &**pool).await?;
|
||||||
|
|
||||||
|
if let Some(data) = notification_data {
|
||||||
|
if user.id == data.user_id.into() || user.role.is_admin() {
|
||||||
|
Ok(HttpResponse::Ok().json(Notification::from(data)))
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn notification_read(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(NotificationId,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let id = info.into_inner().0;
|
||||||
|
|
||||||
|
let notification_data =
|
||||||
|
database::models::notification_item::Notification::get(id.into(), &**pool).await?;
|
||||||
|
|
||||||
|
if let Some(data) = notification_data {
|
||||||
|
if data.user_id == user.id.into() || user.role.is_admin() {
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
database::models::notification_item::Notification::read(
|
||||||
|
id.into(),
|
||||||
|
&mut transaction,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Err(ApiError::CustomAuthentication(
|
||||||
|
"You are not authorized to read this notification!".to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn notification_delete(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(NotificationId,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let id = info.into_inner().0;
|
||||||
|
|
||||||
|
let notification_data =
|
||||||
|
database::models::notification_item::Notification::get(id.into(), &**pool).await?;
|
||||||
|
|
||||||
|
if let Some(data) = notification_data {
|
||||||
|
if data.user_id == user.id.into() || user.role.is_admin() {
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
database::models::notification_item::Notification::remove(
|
||||||
|
id.into(),
|
||||||
|
&mut transaction,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Err(ApiError::CustomAuthentication(
|
||||||
|
"You are not authorized to delete this notification!".to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn notifications_read(
|
||||||
|
req: HttpRequest,
|
||||||
|
web::Query(ids): web::Query<NotificationIds>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
|
||||||
|
.into_iter()
|
||||||
|
.map(|x| x.into())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
let notifications_data =
|
||||||
|
database::models::notification_item::Notification::get_many(¬ification_ids, &**pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();
|
||||||
|
|
||||||
|
for notification in notifications_data {
|
||||||
|
if notification.user_id == user.id.into() || user.role.is_admin() {
|
||||||
|
notifications.push(notification.id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
database::models::notification_item::Notification::read_many(
|
||||||
|
¬ifications,
|
||||||
|
&mut transaction,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn notifications_delete(
|
||||||
|
req: HttpRequest,
|
||||||
|
web::Query(ids): web::Query<NotificationIds>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
|
||||||
|
.into_iter()
|
||||||
|
.map(|x| x.into())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
let notifications_data =
|
||||||
|
database::models::notification_item::Notification::get_many(¬ification_ids, &**pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();
|
||||||
|
|
||||||
|
for notification in notifications_data {
|
||||||
|
if notification.user_id == user.id.into() || user.role.is_admin() {
|
||||||
|
notifications.push(notification.id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
database::models::notification_item::Notification::remove_many(
|
||||||
|
¬ifications,
|
||||||
|
&mut transaction,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
}
|
||||||
@ -35,7 +35,7 @@ use crate::{
        pats::Scopes,
    },
    queue::session::AuthQueue,
    routes::v2::project_creation::CreateError,
    routes::v3::project_creation::CreateError,
    util::validate::validation_errors_to_string,
};

918
src/routes/v3/organizations.rs
Normal file
@ -0,0 +1,918 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use super::ApiError;
|
||||||
|
use crate::auth::{filter_authorized_projects, get_user_from_headers};
|
||||||
|
use crate::database::models::team_item::TeamMember;
|
||||||
|
use crate::database::models::{generate_organization_id, team_item, Organization};
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
use crate::file_hosting::FileHost;
|
||||||
|
use crate::models::ids::base62_impl::parse_base62;
|
||||||
|
use crate::models::organizations::OrganizationId;
|
||||||
|
use crate::models::pats::Scopes;
|
||||||
|
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
|
||||||
|
use crate::queue::session::AuthQueue;
|
||||||
|
use crate::routes::v3::project_creation::CreateError;
|
||||||
|
use crate::util::routes::read_from_payload;
|
||||||
|
use crate::util::validate::validation_errors_to_string;
|
||||||
|
use crate::{database, models};
|
||||||
|
use actix_web::{web, HttpRequest, HttpResponse};
|
||||||
|
use rust_decimal::Decimal;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::PgPool;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||||
|
cfg.service(
|
||||||
|
web::scope("organization")
|
||||||
|
.route("{id}/projects", web::get().to(organization_projects_get))
|
||||||
|
.route("{id}", web::get().to(organization_get))
|
||||||
|
.route("{id}", web::patch().to(organizations_edit))
|
||||||
|
.route("{id}", web::delete().to(organization_delete))
|
||||||
|
.route("{id}/projects", web::post().to(organization_projects_add))
|
||||||
|
.route(
|
||||||
|
"{id}/projects",
|
||||||
|
web::delete().to(organization_projects_remove),
|
||||||
|
)
|
||||||
|
.route("{id}/icon", web::patch().to(organization_icon_edit))
|
||||||
|
.route("{id}/icon", web::delete().to(delete_organization_icon))
|
||||||
|
.route(
|
||||||
|
"{id}/members",
|
||||||
|
web::get().to(super::teams::team_members_get_organization),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn organization_projects_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let info = info.into_inner().0;
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_READ, Scopes::PROJECT_READ]),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map(|x| x.1)
|
||||||
|
.ok();
|
||||||
|
|
||||||
|
let possible_organization_id: Option<u64> = parse_base62(&info).ok();
|
||||||
|
use futures::TryStreamExt;
|
||||||
|
|
||||||
|
let project_ids = sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT m.id FROM organizations o
|
||||||
|
INNER JOIN mods m ON m.organization_id = o.id
|
||||||
|
WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL)
|
||||||
|
",
|
||||||
|
possible_organization_id.map(|x| x as i64),
|
||||||
|
info
|
||||||
|
)
|
||||||
|
.fetch_many(&**pool)
|
||||||
|
.try_filter_map(|e| async { Ok(e.right().map(|m| crate::database::models::ProjectId(m.id))) })
|
||||||
|
.try_collect::<Vec<crate::database::models::ProjectId>>()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let projects_data =
|
||||||
|
crate::database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?;
|
||||||
|
|
||||||
|
let projects = filter_authorized_projects(projects_data, ¤t_user, &pool).await?;
|
||||||
|
Ok(HttpResponse::Ok().json(projects))
|
||||||
|
}
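// Note on the streaming idiom above: fetch_many yields Either<PgQueryResult, PgRow> items,
// so e.right() keeps only the data rows and try_filter_map drops the rest while still
// propagating errors; try_collect then gathers the mapped ids. A minimal sketch with an
// assumed query and plain i64 ids (helper name is hypothetical):
async fn queued_mod_ids(pool: &sqlx::PgPool) -> Result<Vec<i64>, sqlx::Error> {
    use futures::stream::TryStreamExt;
    use sqlx::Row;
    sqlx::query("SELECT id FROM mods")
        .fetch_many(pool)
        .try_filter_map(|e| async move { Ok(e.right().map(|row| row.get::<i64, _>("id"))) })
        .try_collect()
        .await
}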
|
||||||
|
|
||||||
|
#[derive(Deserialize, Validate)]
|
||||||
|
pub struct NewOrganization {
|
||||||
|
#[validate(
|
||||||
|
length(min = 3, max = 64),
|
||||||
|
regex = "crate::util::validate::RE_URL_SAFE"
|
||||||
|
)]
|
||||||
|
// Title of the organization, also used as slug
|
||||||
|
pub title: String,
|
||||||
|
#[validate(length(min = 3, max = 256))]
|
||||||
|
pub description: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn organization_create(
|
||||||
|
req: HttpRequest,
|
||||||
|
new_organization: web::Json<NewOrganization>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, CreateError> {
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_CREATE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
new_organization
|
||||||
|
.validate()
|
||||||
|
.map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
// Try title
|
||||||
|
let title_organization_id_option: Option<OrganizationId> =
|
||||||
|
serde_json::from_str(&format!("\"{}\"", new_organization.title)).ok();
|
||||||
|
let mut organization_strings = vec![];
|
||||||
|
if let Some(title_organization_id) = title_organization_id_option {
|
||||||
|
organization_strings.push(title_organization_id.to_string());
|
||||||
|
}
|
||||||
|
organization_strings.push(new_organization.title.clone());
|
||||||
|
let results = Organization::get_many(&organization_strings, &mut *transaction, &redis).await?;
|
||||||
|
if !results.is_empty() {
|
||||||
|
return Err(CreateError::SlugCollision);
|
||||||
|
}
|
||||||
|
|
||||||
|
let organization_id = generate_organization_id(&mut transaction).await?;
|
||||||
|
|
||||||
|
// Create organization managerial team
|
||||||
|
let team = team_item::TeamBuilder {
|
||||||
|
members: vec![team_item::TeamMemberBuilder {
|
||||||
|
user_id: current_user.id.into(),
|
||||||
|
role: crate::models::teams::OWNER_ROLE.to_owned(),
|
||||||
|
permissions: ProjectPermissions::all(),
|
||||||
|
organization_permissions: Some(OrganizationPermissions::all()),
|
||||||
|
accepted: true,
|
||||||
|
payouts_split: Decimal::ONE_HUNDRED,
|
||||||
|
ordering: 0,
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
let team_id = team.insert(&mut transaction).await?;
|
||||||
|
|
||||||
|
// Create organization
|
||||||
|
let organization = Organization {
|
||||||
|
id: organization_id,
|
||||||
|
title: new_organization.title.clone(),
|
||||||
|
description: new_organization.description.clone(),
|
||||||
|
team_id,
|
||||||
|
icon_url: None,
|
||||||
|
color: None,
|
||||||
|
};
|
||||||
|
organization.clone().insert(&mut transaction).await?;
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
// The only member is the owner, i.e. the logged-in user
|
||||||
|
let member_data = TeamMember::get_from_team_full(team_id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.next();
|
||||||
|
let members_data = if let Some(member_data) = member_data {
|
||||||
|
vec![crate::models::teams::TeamMember::from_model(
|
||||||
|
member_data,
|
||||||
|
current_user.clone(),
|
||||||
|
false,
|
||||||
|
)]
|
||||||
|
} else {
|
||||||
|
return Err(CreateError::InvalidInput(
|
||||||
|
"Failed to get created team.".to_owned(), // should never happen
|
||||||
|
));
|
||||||
|
};
|
||||||
|
|
||||||
|
let organization = models::organizations::Organization::from(organization, members_data);
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(organization))
|
||||||
|
}
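// Why the extra id check above (hypothetical values for illustration): organization routes
// accept either an id or a title, so a title that also parses as a valid base62 id could
// shadow an existing organization. Checking both the literal title and its id interpretation
// keeps lookups unambiguous, e.g. a requested title "4N5tZ2" is rejected if an organization
// whose id renders as 4N5tZ2 already exists.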
|
||||||
|
|
||||||
|
pub async fn organization_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let id = info.into_inner().0;
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_READ]),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map(|x| x.1)
|
||||||
|
.ok();
|
||||||
|
let user_id = current_user.as_ref().map(|x| x.id.into());
|
||||||
|
|
||||||
|
let organization_data = Organization::get(&id, &**pool, &redis).await?;
|
||||||
|
if let Some(data) = organization_data {
|
||||||
|
let members_data = TeamMember::get_from_team_full(data.team_id, &**pool, &redis).await?;
|
||||||
|
|
||||||
|
let users = crate::database::models::User::get_many_ids(
|
||||||
|
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
let logged_in = current_user
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|user| {
|
||||||
|
members_data
|
||||||
|
.iter()
|
||||||
|
.find(|x| x.user_id == user.id.into() && x.accepted)
|
||||||
|
})
|
||||||
|
.is_some();
|
||||||
|
let team_members: Vec<_> = members_data
|
||||||
|
.into_iter()
|
||||||
|
.filter(|x| {
|
||||||
|
logged_in
|
||||||
|
|| x.accepted
|
||||||
|
|| user_id
|
||||||
|
.map(|y: crate::database::models::UserId| y == x.user_id)
|
||||||
|
.unwrap_or(false)
|
||||||
|
})
|
||||||
|
.flat_map(|data| {
|
||||||
|
users.iter().find(|x| x.id == data.user_id).map(|user| {
|
||||||
|
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let organization = models::organizations::Organization::from(data, team_members);
|
||||||
|
return Ok(HttpResponse::Ok().json(organization));
|
||||||
|
}
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct OrganizationIds {
|
||||||
|
pub ids: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn organizations_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
web::Query(ids): web::Query<OrganizationIds>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
|
||||||
|
let organizations_data = Organization::get_many(&ids, &**pool, &redis).await?;
|
||||||
|
let team_ids = organizations_data
|
||||||
|
.iter()
|
||||||
|
.map(|x| x.team_id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
|
||||||
|
let users = crate::database::models::User::get_many_ids(
|
||||||
|
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_READ]),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map(|x| x.1)
|
||||||
|
.ok();
|
||||||
|
let user_id = current_user.as_ref().map(|x| x.id.into());
|
||||||
|
|
||||||
|
let mut organizations = vec![];
|
||||||
|
|
||||||
|
let mut team_groups = HashMap::new();
|
||||||
|
for item in teams_data {
|
||||||
|
team_groups.entry(item.team_id).or_insert(vec![]).push(item);
|
||||||
|
}
|
||||||
|
|
||||||
|
for data in organizations_data {
|
||||||
|
let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]);
|
||||||
|
let logged_in = current_user
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|user| {
|
||||||
|
members_data
|
||||||
|
.iter()
|
||||||
|
.find(|x| x.user_id == user.id.into() && x.accepted)
|
||||||
|
})
|
||||||
|
.is_some();
|
||||||
|
|
||||||
|
let team_members: Vec<_> = members_data
|
||||||
|
.into_iter()
|
||||||
|
.filter(|x| {
|
||||||
|
logged_in
|
||||||
|
|| x.accepted
|
||||||
|
|| user_id
|
||||||
|
.map(|y: crate::database::models::UserId| y == x.user_id)
|
||||||
|
.unwrap_or(false)
|
||||||
|
})
|
||||||
|
.flat_map(|data| {
|
||||||
|
users.iter().find(|x| x.id == data.user_id).map(|user| {
|
||||||
|
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let organization = models::organizations::Organization::from(data, team_members);
|
||||||
|
organizations.push(organization);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(organizations))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Validate)]
|
||||||
|
pub struct OrganizationEdit {
|
||||||
|
#[validate(length(min = 3, max = 256))]
|
||||||
|
pub description: Option<String>,
|
||||||
|
#[validate(
|
||||||
|
length(min = 3, max = 64),
|
||||||
|
regex = "crate::util::validate::RE_URL_SAFE"
|
||||||
|
)]
|
||||||
|
// Title of the organization, also used as slug
|
||||||
|
pub title: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn organizations_edit(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
new_organization: web::Json<OrganizationEdit>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
new_organization
|
||||||
|
.validate()
|
||||||
|
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
|
||||||
|
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
let result = database::models::Organization::get(&string, &**pool, &redis).await?;
|
||||||
|
if let Some(organization_item) = result {
|
||||||
|
let id = organization_item.id;
|
||||||
|
|
||||||
|
let team_member = database::models::TeamMember::get_from_user_id(
|
||||||
|
organization_item.team_id,
|
||||||
|
user.id.into(),
|
||||||
|
&**pool,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let permissions =
|
||||||
|
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member);
|
||||||
|
|
||||||
|
if let Some(perms) = permissions {
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
if let Some(description) = &new_organization.description {
|
||||||
|
if !perms.contains(OrganizationPermissions::EDIT_DETAILS) {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You do not have the permissions to edit the description of this organization!"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE organizations
|
||||||
|
SET description = $1
|
||||||
|
WHERE (id = $2)
|
||||||
|
",
|
||||||
|
description,
|
||||||
|
id as database::models::ids::OrganizationId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(title) = &new_organization.title {
|
||||||
|
if !perms.contains(OrganizationPermissions::EDIT_DETAILS) {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You do not have the permissions to edit the title of this organization!"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let title_organization_id_option: Option<u64> = parse_base62(title).ok();
|
||||||
|
if let Some(title_organization_id) = title_organization_id_option {
|
||||||
|
let results = sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1)
|
||||||
|
",
|
||||||
|
title_organization_id as i64
|
||||||
|
)
|
||||||
|
.fetch_one(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if results.exists.unwrap_or(true) {
|
||||||
|
return Err(ApiError::InvalidInput(
|
||||||
|
"Title collides with other organization's id!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only check for a collision when the new title differs from the stored one
|
||||||
|
if !title.eq(&organization_item.title.clone()) {
|
||||||
|
let results = sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT EXISTS(SELECT 1 FROM organizations WHERE title = LOWER($1))
|
||||||
|
",
|
||||||
|
title
|
||||||
|
)
|
||||||
|
.fetch_one(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if results.exists.unwrap_or(true) {
|
||||||
|
return Err(ApiError::InvalidInput(
|
||||||
|
"Title collides with other organization's id!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE organizations
|
||||||
|
SET title = LOWER($1)
|
||||||
|
WHERE (id = $2)
|
||||||
|
",
|
||||||
|
Some(title),
|
||||||
|
id as database::models::ids::OrganizationId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
database::models::Organization::clear_cache(
|
||||||
|
organization_item.id,
|
||||||
|
Some(organization_item.title),
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Err(ApiError::CustomAuthentication(
|
||||||
|
"You do not have permission to edit this organization!".to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
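// Note on the title update above: titles are stored lowercased (SET title = LOWER($1)), and
// the uniqueness probe also compares against LOWER($1), so title lookups stay
// case-insensitive. A minimal sketch of the same normalization on the Rust side (assumed
// plain strings, helper name is hypothetical):
fn normalize_org_title(title: &str) -> String {
    // mirrors the database-side LOWER() applied in the UPDATE above
    title.to_lowercase()
}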
|
||||||
|
|
||||||
|
pub async fn organization_delete(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_DELETE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
|
||||||
|
let organization = database::models::Organization::get(&string, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if !user.role.is_admin() {
|
||||||
|
let team_member = database::models::TeamMember::get_from_user_id_organization(
|
||||||
|
organization.id,
|
||||||
|
user.id.into(),
|
||||||
|
&**pool,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map_err(ApiError::Database)?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let permissions =
|
||||||
|
OrganizationPermissions::get_permissions_by_role(&user.role, &Some(team_member))
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
if !permissions.contains(OrganizationPermissions::DELETE_ORGANIZATION) {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You don't have permission to delete this organization!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
let result =
|
||||||
|
database::models::Organization::remove(organization.id, &mut transaction, &redis).await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
database::models::Organization::clear_cache(organization.id, Some(organization.title), &redis)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if result.is_some() {
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct OrganizationProjectAdd {
|
||||||
|
pub project_id: String, // Also allow title/slug
|
||||||
|
}
|
||||||
|
pub async fn organization_projects_add(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
project_info: web::Json<OrganizationProjectAdd>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let info = info.into_inner().0;
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let organization = database::models::Organization::get(&info, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let project_item = database::models::Project::get(&project_info.project_id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified project does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
if project_item.inner.organization_id.is_some() {
|
||||||
|
return Err(ApiError::InvalidInput(
|
||||||
|
"The specified project is already owned by an organization!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let project_team_member = database::models::TeamMember::get_from_user_id_project(
|
||||||
|
project_item.inner.id,
|
||||||
|
current_user.id.into(),
|
||||||
|
&**pool,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::InvalidInput("You are not a member of this project!".to_string()))?;
|
||||||
|
|
||||||
|
let organization_team_member = database::models::TeamMember::get_from_user_id_organization(
|
||||||
|
organization.id,
|
||||||
|
current_user.id.into(),
|
||||||
|
&**pool,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("You are not a member of this organization!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Require ownership of a project to add it to an organization
|
||||||
|
if !current_user.role.is_admin()
|
||||||
|
&& !project_team_member
|
||||||
|
.role
|
||||||
|
.eq(crate::models::teams::OWNER_ROLE)
|
||||||
|
{
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You need to be an owner of a project to add it to an organization!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let permissions = OrganizationPermissions::get_permissions_by_role(
|
||||||
|
¤t_user.role,
|
||||||
|
&Some(organization_team_member),
|
||||||
|
)
|
||||||
|
.unwrap_or_default();
|
||||||
|
if permissions.contains(OrganizationPermissions::ADD_PROJECT) {
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE mods
|
||||||
|
SET organization_id = $1
|
||||||
|
WHERE (id = $2)
|
||||||
|
",
|
||||||
|
organization.id as database::models::OrganizationId,
|
||||||
|
project_item.inner.id as database::models::ids::ProjectId
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?;
|
||||||
|
database::models::Project::clear_cache(
|
||||||
|
project_item.inner.id,
|
||||||
|
project_item.inner.slug,
|
||||||
|
None,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You do not have permission to add projects to this organization!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(HttpResponse::Ok().finish())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn organization_projects_remove(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String, String)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let (organization_id, project_id) = info.into_inner();
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let organization = database::models::Organization::get(&organization_id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let project_item = database::models::Project::get(&project_id, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified project does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if !project_item
|
||||||
|
.inner
|
||||||
|
.organization_id
|
||||||
|
.eq(&Some(organization.id))
|
||||||
|
{
|
||||||
|
return Err(ApiError::InvalidInput(
|
||||||
|
"The specified project is not owned by this organization!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let organization_team_member = database::models::TeamMember::get_from_user_id_organization(
|
||||||
|
organization.id,
|
||||||
|
current_user.id.into(),
|
||||||
|
&**pool,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("You are not a member of this organization!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let permissions = OrganizationPermissions::get_permissions_by_role(
|
||||||
|
¤t_user.role,
|
||||||
|
&Some(organization_team_member),
|
||||||
|
)
|
||||||
|
.unwrap_or_default();
|
||||||
|
if permissions.contains(OrganizationPermissions::REMOVE_PROJECT) {
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE mods
|
||||||
|
SET organization_id = NULL
|
||||||
|
WHERE (id = $1)
|
||||||
|
",
|
||||||
|
project_item.inner.id as database::models::ids::ProjectId
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?;
|
||||||
|
database::models::Project::clear_cache(
|
||||||
|
project_item.inner.id,
|
||||||
|
project_item.inner.slug,
|
||||||
|
None,
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You do not have permission to add projects to this organization!".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(HttpResponse::Ok().finish())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize)]
|
||||||
|
pub struct Extension {
|
||||||
|
pub ext: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
pub async fn organization_icon_edit(
|
||||||
|
web::Query(ext): web::Query<Extension>,
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
|
mut payload: web::Payload,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) {
|
||||||
|
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
|
||||||
|
let organization_item = database::models::Organization::get(&string, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if !user.role.is_mod() {
|
||||||
|
let team_member = database::models::TeamMember::get_from_user_id(
|
||||||
|
organization_item.team_id,
|
||||||
|
user.id.into(),
|
||||||
|
&**pool,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map_err(ApiError::Database)?;
|
||||||
|
|
||||||
|
let permissions =
|
||||||
|
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member)
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You don't have permission to edit this organization's icon.".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(icon) = organization_item.icon_url {
|
||||||
|
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
||||||
|
|
||||||
|
if let Some(icon_path) = name {
|
||||||
|
file_host.delete_file_version("", icon_path).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let bytes =
|
||||||
|
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
|
||||||
|
|
||||||
|
let color = crate::util::img::get_color_from_img(&bytes)?;
|
||||||
|
|
||||||
|
let hash = sha1::Sha1::from(&bytes).hexdigest();
|
||||||
|
let organization_id: OrganizationId = organization_item.id.into();
|
||||||
|
let upload_data = file_host
|
||||||
|
.upload_file(
|
||||||
|
content_type,
|
||||||
|
&format!("data/{}/{}.{}", organization_id, hash, ext.ext),
|
||||||
|
bytes.freeze(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE organizations
|
||||||
|
SET icon_url = $1, color = $2
|
||||||
|
WHERE (id = $3)
|
||||||
|
",
|
||||||
|
format!("{}/{}", cdn_url, upload_data.file_name),
|
||||||
|
color.map(|x| x as i32),
|
||||||
|
organization_item.id as database::models::ids::OrganizationId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
database::models::Organization::clear_cache(
|
||||||
|
organization_item.id,
|
||||||
|
Some(organization_item.title),
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
} else {
|
||||||
|
Err(ApiError::InvalidInput(format!(
|
||||||
|
"Invalid format for project icon: {}",
|
||||||
|
ext.ext
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete_organization_icon(
|
||||||
|
req: HttpRequest,
|
||||||
|
info: web::Path<(String,)>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::ORGANIZATION_WRITE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
let string = info.into_inner().0;
|
||||||
|
|
||||||
|
let organization_item = database::models::Organization::get(&string, &**pool, &redis)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput("The specified organization does not exist!".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if !user.role.is_mod() {
|
||||||
|
let team_member = database::models::TeamMember::get_from_user_id(
|
||||||
|
organization_item.team_id,
|
||||||
|
user.id.into(),
|
||||||
|
&**pool,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map_err(ApiError::Database)?;
|
||||||
|
|
||||||
|
let permissions =
|
||||||
|
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member)
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) {
|
||||||
|
return Err(ApiError::CustomAuthentication(
|
||||||
|
"You don't have permission to edit this organization's icon.".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||||
|
if let Some(icon) = organization_item.icon_url {
|
||||||
|
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
||||||
|
|
||||||
|
if let Some(icon_path) = name {
|
||||||
|
file_host.delete_file_version("", icon_path).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE organizations
|
||||||
|
SET icon_url = NULL, color = NULL
|
||||||
|
WHERE (id = $1)
|
||||||
|
",
|
||||||
|
organization_item.id as database::models::ids::OrganizationId,
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
database::models::Organization::clear_cache(
|
||||||
|
organization_item.id,
|
||||||
|
Some(organization_item.title),
|
||||||
|
&redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::NoContent().body(""))
|
||||||
|
}
|
||||||
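// A minimal client-side sketch (not part of this commit) of exercising the
// organization icon handler above. Assumptions: the handler is mounted as
// PATCH /v3/organization/{id}/icon, and `BASE_URL`, `PAT`, and `icon.png` are
// placeholders; reqwest is used for the request. The image format goes in the
// `ext` query parameter, the raw bytes (max 256 KiB) go in the body, and the
// PAT needs the ORGANIZATION_WRITE scope.
async fn upload_org_icon(org_id: &str) -> Result<(), Box<dyn std::error::Error>> {
    let bytes = std::fs::read("icon.png")?;
    let resp = reqwest::Client::new()
        .patch(format!("https://BASE_URL/v3/organization/{org_id}/icon"))
        .query(&[("ext", "png")])
        .header("Authorization", "PAT")
        .body(bytes)
        .send()
        .await?;
    // The handler returns 204 No Content on success.
    println!("status: {}", resp.status());
    Ok(())
}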
975
src/routes/v3/project_creation.rs
Normal file
@ -0,0 +1,975 @@
|
|||||||
|
use super::version_creation::InitialVersionData;
|
||||||
|
use crate::auth::{get_user_from_headers, AuthenticationError};
|
||||||
|
use crate::database::models::loader_fields::{
|
||||||
|
Loader, LoaderField, LoaderFieldEnumValue, VersionField,
|
||||||
|
};
|
||||||
|
use crate::database::models::thread_item::ThreadBuilder;
|
||||||
|
use crate::database::models::{self, image_item, User};
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
use crate::file_hosting::{FileHost, FileHostingError};
|
||||||
|
use crate::models::error::ApiError;
|
||||||
|
use crate::models::ids::{ImageId, OrganizationId};
|
||||||
|
use crate::models::images::{Image, ImageContext};
|
||||||
|
use crate::models::pats::Scopes;
|
||||||
|
use crate::models::projects::{
|
||||||
|
DonationLink, License, MonetizationStatus, ProjectId, ProjectStatus, VersionId, VersionStatus,
|
||||||
|
};
|
||||||
|
use crate::models::teams::ProjectPermissions;
|
||||||
|
use crate::models::threads::ThreadType;
|
||||||
|
use crate::models::users::UserId;
|
||||||
|
use crate::queue::session::AuthQueue;
|
||||||
|
use crate::search::indexing::IndexingError;
|
||||||
|
use crate::util::routes::read_from_field;
|
||||||
|
use crate::util::validate::validation_errors_to_string;
|
||||||
|
use actix_multipart::{Field, Multipart};
|
||||||
|
use actix_web::http::StatusCode;
|
||||||
|
use actix_web::web::{self, Data};
|
||||||
|
use actix_web::{HttpRequest, HttpResponse};
|
||||||
|
use chrono::Utc;
|
||||||
|
use futures::stream::StreamExt;
|
||||||
|
use image::ImageError;
|
||||||
|
use itertools::Itertools;
|
||||||
|
use rust_decimal::Decimal;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::postgres::PgPool;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use thiserror::Error;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
|
||||||
|
cfg.route("create", web::post().to(project_create));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum CreateError {
|
||||||
|
#[error("Environment Error")]
|
||||||
|
EnvError(#[from] dotenvy::Error),
|
||||||
|
#[error("An unknown database error occurred")]
|
||||||
|
SqlxDatabaseError(#[from] sqlx::Error),
|
||||||
|
#[error("Database Error: {0}")]
|
||||||
|
DatabaseError(#[from] models::DatabaseError),
|
||||||
|
#[error("Indexing Error: {0}")]
|
||||||
|
IndexingError(#[from] IndexingError),
|
||||||
|
#[error("Error while parsing multipart payload: {0}")]
|
||||||
|
MultipartError(#[from] actix_multipart::MultipartError),
|
||||||
|
#[error("Error while parsing JSON: {0}")]
|
||||||
|
SerDeError(#[from] serde_json::Error),
|
||||||
|
#[error("Error while validating input: {0}")]
|
||||||
|
ValidationError(String),
|
||||||
|
#[error("Error while uploading file: {0}")]
|
||||||
|
FileHostingError(#[from] FileHostingError),
|
||||||
|
#[error("Error while validating uploaded file: {0}")]
|
||||||
|
FileValidationError(#[from] crate::validate::ValidationError),
|
||||||
|
#[error("{}", .0)]
|
||||||
|
MissingValueError(String),
|
||||||
|
#[error("Invalid format for image: {0}")]
|
||||||
|
InvalidIconFormat(String),
|
||||||
|
#[error("Error with multipart data: {0}")]
|
||||||
|
InvalidInput(String),
|
||||||
|
#[error("Invalid game version: {0}")]
|
||||||
|
InvalidGameVersion(String),
|
||||||
|
#[error("Invalid loader: {0}")]
|
||||||
|
InvalidLoader(String),
|
||||||
|
#[error("Invalid category: {0}")]
|
||||||
|
InvalidCategory(String),
|
||||||
|
#[error("Invalid file type for version file: {0}")]
|
||||||
|
InvalidFileType(String),
|
||||||
|
#[error("Slug collides with other project's id!")]
|
||||||
|
SlugCollision,
|
||||||
|
#[error("Authentication Error: {0}")]
|
||||||
|
Unauthorized(#[from] AuthenticationError),
|
||||||
|
#[error("Authentication Error: {0}")]
|
||||||
|
CustomAuthenticationError(String),
|
||||||
|
#[error("Image Parsing Error: {0}")]
|
||||||
|
ImageError(#[from] ImageError),
|
||||||
|
#[error("Reroute Error: {0}")]
|
||||||
|
RerouteError(#[from] reqwest::Error),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl actix_web::ResponseError for CreateError {
|
||||||
|
fn status_code(&self) -> StatusCode {
|
||||||
|
match self {
|
||||||
|
CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
CreateError::SqlxDatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
CreateError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
CreateError::IndexingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
CreateError::FileHostingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
CreateError::SerDeError(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::MultipartError(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::MissingValueError(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::InvalidIconFormat(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::InvalidInput(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::InvalidGameVersion(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::InvalidLoader(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::InvalidFileType(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED,
|
||||||
|
CreateError::CustomAuthenticationError(..) => StatusCode::UNAUTHORIZED,
|
||||||
|
CreateError::SlugCollision => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::ValidationError(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::FileValidationError(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::ImageError(..) => StatusCode::BAD_REQUEST,
|
||||||
|
CreateError::RerouteError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn error_response(&self) -> HttpResponse {
|
||||||
|
HttpResponse::build(self.status_code()).json(ApiError {
|
||||||
|
error: match self {
|
||||||
|
CreateError::EnvError(..) => "environment_error",
|
||||||
|
CreateError::SqlxDatabaseError(..) => "database_error",
|
||||||
|
CreateError::DatabaseError(..) => "database_error",
|
||||||
|
CreateError::IndexingError(..) => "indexing_error",
|
||||||
|
CreateError::FileHostingError(..) => "file_hosting_error",
|
||||||
|
CreateError::SerDeError(..) => "invalid_input",
|
||||||
|
CreateError::MultipartError(..) => "invalid_input",
|
||||||
|
CreateError::MissingValueError(..) => "invalid_input",
|
||||||
|
CreateError::InvalidIconFormat(..) => "invalid_input",
|
||||||
|
CreateError::InvalidInput(..) => "invalid_input",
|
||||||
|
CreateError::InvalidGameVersion(..) => "invalid_input",
|
||||||
|
CreateError::InvalidLoader(..) => "invalid_input",
|
||||||
|
CreateError::InvalidCategory(..) => "invalid_input",
|
||||||
|
CreateError::InvalidFileType(..) => "invalid_input",
|
||||||
|
CreateError::Unauthorized(..) => "unauthorized",
|
||||||
|
CreateError::CustomAuthenticationError(..) => "unauthorized",
|
||||||
|
CreateError::SlugCollision => "invalid_input",
|
||||||
|
CreateError::ValidationError(..) => "invalid_input",
|
||||||
|
CreateError::FileValidationError(..) => "invalid_input",
|
||||||
|
CreateError::ImageError(..) => "invalid_image",
|
||||||
|
CreateError::RerouteError(..) => "reroute_error",
|
||||||
|
},
|
||||||
|
description: &self.to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn default_project_type() -> String {
    "mod".to_string()
}

fn default_requested_status() -> ProjectStatus {
    ProjectStatus::Approved
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Validate, Clone)]
|
||||||
|
pub struct ProjectCreateData {
|
||||||
|
#[validate(
|
||||||
|
length(min = 3, max = 64),
|
||||||
|
custom(function = "crate::util::validate::validate_name")
|
||||||
|
)]
|
||||||
|
#[serde(alias = "mod_name")]
|
||||||
|
/// The title or name of the project.
|
||||||
|
pub title: String,
|
||||||
|
#[validate(
|
||||||
|
length(min = 3, max = 64),
|
||||||
|
regex = "crate::util::validate::RE_URL_SAFE"
|
||||||
|
)]
|
||||||
|
#[serde(alias = "mod_slug")]
|
||||||
|
/// The slug of a project, used for vanity URLs
|
||||||
|
pub slug: String,
|
||||||
|
#[validate(length(min = 3, max = 255))]
|
||||||
|
#[serde(alias = "mod_description")]
|
||||||
|
/// A short description of the project.
|
||||||
|
pub description: String,
|
||||||
|
#[validate(length(max = 65536))]
|
||||||
|
#[serde(alias = "mod_body")]
|
||||||
|
/// A long description of the project, in markdown.
|
||||||
|
pub body: String,
|
||||||
|
|
||||||
|
#[validate(length(max = 32))]
|
||||||
|
#[validate]
|
||||||
|
/// A list of initial versions to upload with the created project
|
||||||
|
pub initial_versions: Vec<InitialVersionData>,
|
||||||
|
#[validate(length(max = 3))]
|
||||||
|
/// A list of the categories that the project is in.
|
||||||
|
pub categories: Vec<String>,
|
||||||
|
#[validate(length(max = 256))]
|
||||||
|
#[serde(default = "Vec::new")]
|
||||||
|
/// A list of additional categories that the project is in.
|
||||||
|
pub additional_categories: Vec<String>,
|
||||||
|
|
||||||
|
#[validate(
|
||||||
|
custom(function = "crate::util::validate::validate_url"),
|
||||||
|
length(max = 2048)
|
||||||
|
)]
|
||||||
|
/// An optional link to where to submit bugs or issues with the project.
|
||||||
|
pub issues_url: Option<String>,
|
||||||
|
#[validate(
|
||||||
|
custom(function = "crate::util::validate::validate_url"),
|
||||||
|
length(max = 2048)
|
||||||
|
)]
|
||||||
|
/// An optional link to the source code for the project.
|
||||||
|
pub source_url: Option<String>,
|
||||||
|
#[validate(
|
||||||
|
custom(function = "crate::util::validate::validate_url"),
|
||||||
|
length(max = 2048)
|
||||||
|
)]
|
||||||
|
/// An optional link to the project's wiki page or other relevant information.
|
||||||
|
pub wiki_url: Option<String>,
|
||||||
|
#[validate(
|
||||||
|
custom(function = "crate::util::validate::validate_url"),
|
||||||
|
length(max = 2048)
|
||||||
|
)]
|
||||||
|
/// An optional link to the project's license page
|
||||||
|
pub license_url: Option<String>,
|
||||||
|
#[validate(
|
||||||
|
custom(function = "crate::util::validate::validate_url"),
|
||||||
|
length(max = 2048)
|
||||||
|
)]
|
||||||
|
/// An optional link to the project's discord.
|
||||||
|
pub discord_url: Option<String>,
|
||||||
|
/// An optional list of all donation links the project has
|
||||||
|
#[validate]
|
||||||
|
pub donation_urls: Option<Vec<DonationLink>>,
|
||||||
|
|
||||||
|
/// An optional boolean. If true, the project will be created as a draft.
|
||||||
|
pub is_draft: Option<bool>,
|
||||||
|
|
||||||
|
/// The license id that the project follows
|
||||||
|
pub license_id: String,
|
||||||
|
|
||||||
|
#[validate(length(max = 64))]
|
||||||
|
#[validate]
|
||||||
|
/// The multipart names of the gallery items to upload
|
||||||
|
pub gallery_items: Option<Vec<NewGalleryItem>>,
|
||||||
|
#[serde(default = "default_requested_status")]
|
||||||
|
/// The status of the mod to be set once it is approved
|
||||||
|
pub requested_status: ProjectStatus,
|
||||||
|
|
||||||
|
// Associations to uploaded images in body/description
|
||||||
|
#[validate(length(max = 10))]
|
||||||
|
#[serde(default)]
|
||||||
|
pub uploaded_images: Vec<ImageId>,
|
||||||
|
|
||||||
|
/// The id of the organization to create the project in
|
||||||
|
pub organization_id: Option<OrganizationId>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Validate, Clone)]
|
||||||
|
pub struct NewGalleryItem {
|
||||||
|
/// The name of the multipart item where the gallery media is located
|
||||||
|
pub item: String,
|
||||||
|
/// Whether the gallery item should show in search or not
|
||||||
|
pub featured: bool,
|
||||||
|
#[validate(length(min = 1, max = 2048))]
|
||||||
|
/// The title of the gallery item
|
||||||
|
pub title: Option<String>,
|
||||||
|
#[validate(length(min = 1, max = 2048))]
|
||||||
|
/// The description of the gallery item
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub ordering: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct UploadedFile {
    pub file_id: String,
    pub file_name: String,
}

pub async fn undo_uploads(
    file_host: &dyn FileHost,
    uploaded_files: &[UploadedFile],
) -> Result<(), CreateError> {
    for file in uploaded_files {
        file_host
            .delete_file_version(&file.file_id, &file.file_name)
            .await?;
    }
    Ok(())
}
|
||||||
|
|
||||||
|
pub async fn project_create(
|
||||||
|
req: HttpRequest,
|
||||||
|
mut payload: Multipart,
|
||||||
|
client: Data<PgPool>,
|
||||||
|
redis: Data<RedisPool>,
|
||||||
|
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||||
|
session_queue: Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, CreateError> {
|
||||||
|
let mut transaction = client.begin().await?;
|
||||||
|
let mut uploaded_files = Vec::new();
|
||||||
|
|
||||||
|
let result = project_create_inner(
|
||||||
|
req,
|
||||||
|
&mut payload,
|
||||||
|
&mut transaction,
|
||||||
|
&***file_host,
|
||||||
|
&mut uploaded_files,
|
||||||
|
&client,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
|
if result.is_err() {
|
||||||
|
let undo_result = undo_uploads(&***file_host, &uploaded_files).await;
|
||||||
|
let rollback_result = transaction.rollback().await;
|
||||||
|
|
||||||
|
undo_result?;
|
||||||
|
if let Err(e) = rollback_result {
|
||||||
|
return Err(e.into());
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
transaction.commit().await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
/*

Project Creation Steps:
Get logged in user
Must match the author in the version creation

1. Data
- Gets "data" field from multipart form; must be first
- Verification: string lengths
- Create versions
- Some shared logic with version creation
- Create list of VersionBuilders
- Create ProjectBuilder

2. Upload
- Icon: check file format & size
- Upload to backblaze & record URL
- Project files
- Check for matching version
- File size limits?
- Check file type
- Eventually, malware scan
- Upload to backblaze & create VersionFileBuilder

3. Creation
- Database stuff
- Add project data to indexing queue
*/
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
async fn project_create_inner(
|
||||||
|
req: HttpRequest,
|
||||||
|
payload: &mut Multipart,
|
||||||
|
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||||
|
file_host: &dyn FileHost,
|
||||||
|
uploaded_files: &mut Vec<UploadedFile>,
|
||||||
|
pool: &PgPool,
|
||||||
|
redis: &RedisPool,
|
||||||
|
session_queue: &AuthQueue,
|
||||||
|
) -> Result<HttpResponse, CreateError> {
|
||||||
|
// The base URL for files uploaded to backblaze
|
||||||
|
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||||
|
|
||||||
|
// The currently logged in user
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
pool,
|
||||||
|
redis,
|
||||||
|
session_queue,
|
||||||
|
Some(&[Scopes::PROJECT_CREATE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let project_id: ProjectId = models::generate_project_id(transaction).await?.into();
|
||||||
|
let all_loaders = models::loader_fields::Loader::list(&mut **transaction, redis).await?;
|
||||||
|
|
||||||
|
let project_create_data: ProjectCreateData;
|
||||||
|
let mut versions;
|
||||||
|
let mut versions_map = std::collections::HashMap::new();
|
||||||
|
let mut gallery_urls = Vec::new();
|
||||||
|
{
|
||||||
|
// The first multipart field must be named "data" and contain a
|
||||||
|
// JSON `ProjectCreateData` object.
|
||||||
|
|
||||||
|
let mut field = payload
|
||||||
|
.next()
|
||||||
|
.await
|
||||||
|
.map(|m| m.map_err(CreateError::MultipartError))
|
||||||
|
.unwrap_or_else(|| {
|
||||||
|
Err(CreateError::MissingValueError(String::from(
|
||||||
|
"No `data` field in multipart upload",
|
||||||
|
)))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let content_disposition = field.content_disposition();
|
||||||
|
let name = content_disposition
|
||||||
|
.get_name()
|
||||||
|
.ok_or_else(|| CreateError::MissingValueError(String::from("Missing content name")))?;
|
||||||
|
|
||||||
|
if name != "data" {
|
||||||
|
return Err(CreateError::InvalidInput(String::from(
|
||||||
|
"`data` field must come before file fields",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut data = Vec::new();
|
||||||
|
while let Some(chunk) = field.next().await {
|
||||||
|
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
|
||||||
|
}
|
||||||
|
let create_data: ProjectCreateData = serde_json::from_slice(&data)?;
|
||||||
|
|
||||||
|
create_data
|
||||||
|
.validate()
|
||||||
|
.map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
|
||||||
|
|
||||||
|
let slug_project_id_option: Option<ProjectId> =
|
||||||
|
serde_json::from_str(&format!("\"{}\"", create_data.slug)).ok();
|
||||||
|
|
||||||
|
if let Some(slug_project_id) = slug_project_id_option {
|
||||||
|
let slug_project_id: models::ids::ProjectId = slug_project_id.into();
|
||||||
|
let results = sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)
|
||||||
|
",
|
||||||
|
slug_project_id as models::ids::ProjectId
|
||||||
|
)
|
||||||
|
.fetch_one(&mut **transaction)
|
||||||
|
.await
|
||||||
|
.map_err(|e| CreateError::DatabaseError(e.into()))?;
|
||||||
|
|
||||||
|
if results.exists.unwrap_or(false) {
|
||||||
|
return Err(CreateError::SlugCollision);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
let results = sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1))
|
||||||
|
",
|
||||||
|
create_data.slug
|
||||||
|
)
|
||||||
|
.fetch_one(&mut **transaction)
|
||||||
|
.await
|
||||||
|
.map_err(|e| CreateError::DatabaseError(e.into()))?;
|
||||||
|
|
||||||
|
if results.exists.unwrap_or(false) {
|
||||||
|
return Err(CreateError::SlugCollision);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create VersionBuilders for the versions specified in `initial_versions`
|
||||||
|
versions = Vec::with_capacity(create_data.initial_versions.len());
|
||||||
|
for (i, data) in create_data.initial_versions.iter().enumerate() {
|
||||||
|
// Create a map of multipart field names to version indices
|
||||||
|
for name in &data.file_parts {
|
||||||
|
if versions_map.insert(name.to_owned(), i).is_some() {
|
||||||
|
// If the name is already used
|
||||||
|
return Err(CreateError::InvalidInput(String::from(
|
||||||
|
"Duplicate multipart field name",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
versions.push(
|
||||||
|
create_initial_version(
|
||||||
|
data,
|
||||||
|
project_id,
|
||||||
|
current_user.id,
|
||||||
|
&all_loaders,
|
||||||
|
transaction,
|
||||||
|
redis,
|
||||||
|
)
|
||||||
|
.await?,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
project_create_data = create_data;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut icon_data = None;
|
||||||
|
|
||||||
|
let mut error = None;
|
||||||
|
while let Some(item) = payload.next().await {
|
||||||
|
let mut field: Field = item?;
|
||||||
|
|
||||||
|
if error.is_some() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = async {
|
||||||
|
let content_disposition = field.content_disposition().clone();
|
||||||
|
|
||||||
|
let name = content_disposition.get_name().ok_or_else(|| {
|
||||||
|
CreateError::MissingValueError("Missing content name".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let (file_name, file_extension) =
|
||||||
|
super::version_creation::get_name_ext(&content_disposition)?;
|
||||||
|
|
||||||
|
if name == "icon" {
|
||||||
|
if icon_data.is_some() {
|
||||||
|
return Err(CreateError::InvalidInput(String::from(
|
||||||
|
"Projects can only have one icon",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
// Upload the icon to the cdn
|
||||||
|
icon_data = Some(
|
||||||
|
process_icon_upload(
|
||||||
|
uploaded_files,
|
||||||
|
project_id.0,
|
||||||
|
file_extension,
|
||||||
|
file_host,
|
||||||
|
field,
|
||||||
|
&cdn_url,
|
||||||
|
)
|
||||||
|
.await?,
|
||||||
|
);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
if let Some(gallery_items) = &project_create_data.gallery_items {
|
||||||
|
if gallery_items.iter().filter(|a| a.featured).count() > 1 {
|
||||||
|
return Err(CreateError::InvalidInput(String::from(
|
||||||
|
"Only one gallery image can be featured.",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
if let Some(item) = gallery_items.iter().find(|x| x.item == name) {
|
||||||
|
let data = read_from_field(
|
||||||
|
&mut field,
|
||||||
|
5 * (1 << 20),
|
||||||
|
"Gallery image exceeds the maximum of 5MiB.",
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
let hash = sha1::Sha1::from(&data).hexdigest();
|
||||||
|
let (_, file_extension) =
|
||||||
|
super::version_creation::get_name_ext(&content_disposition)?;
|
||||||
|
let content_type = crate::util::ext::get_image_content_type(file_extension)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
CreateError::InvalidIconFormat(file_extension.to_string())
|
||||||
|
})?;
|
||||||
|
let url = format!("data/{project_id}/images/{hash}.{file_extension}");
|
||||||
|
let upload_data = file_host
|
||||||
|
.upload_file(content_type, &url, data.freeze())
|
||||||
|
.await?;
|
||||||
|
uploaded_files.push(UploadedFile {
|
||||||
|
file_id: upload_data.file_id,
|
||||||
|
file_name: upload_data.file_name,
|
||||||
|
});
|
||||||
|
gallery_urls.push(crate::models::projects::GalleryItem {
|
||||||
|
url: format!("{cdn_url}/{url}"),
|
||||||
|
featured: item.featured,
|
||||||
|
title: item.title.clone(),
|
||||||
|
description: item.description.clone(),
|
||||||
|
created: Utc::now(),
|
||||||
|
ordering: item.ordering,
|
||||||
|
});
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let index = if let Some(i) = versions_map.get(name) {
|
||||||
|
*i
|
||||||
|
} else {
|
||||||
|
return Err(CreateError::InvalidInput(format!(
|
||||||
|
"File `{file_name}` (field {name}) isn't specified in the versions data"
|
||||||
|
)));
|
||||||
|
};
|
||||||
|
// `index` is always valid for these lists
|
||||||
|
let created_version = versions.get_mut(index).unwrap();
|
||||||
|
let version_data = project_create_data.initial_versions.get(index).unwrap();
|
||||||
|
// TODO: possibly redundant; is this calculation done elsewhere?
|
||||||
|
|
||||||
|
// Upload the new jar file
|
||||||
|
super::version_creation::upload_file(
|
||||||
|
&mut field,
|
||||||
|
file_host,
|
||||||
|
version_data.file_parts.len(),
|
||||||
|
uploaded_files,
|
||||||
|
&mut created_version.files,
|
||||||
|
&mut created_version.dependencies,
|
||||||
|
&cdn_url,
|
||||||
|
&content_disposition,
|
||||||
|
project_id,
|
||||||
|
created_version.version_id.into(),
|
||||||
|
&created_version.version_fields,
|
||||||
|
version_data.loaders.clone(),
|
||||||
|
version_data.primary_file.is_some(),
|
||||||
|
version_data.primary_file.as_deref() == Some(name),
|
||||||
|
None,
|
||||||
|
transaction,
|
||||||
|
redis,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
.await;
|
||||||
|
|
||||||
|
if result.is_err() {
|
||||||
|
error = result.err();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(error) = error {
|
||||||
|
return Err(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// Check to make sure that all specified files were uploaded
|
||||||
|
for (version_data, builder) in project_create_data
|
||||||
|
.initial_versions
|
||||||
|
.iter()
|
||||||
|
.zip(versions.iter())
|
||||||
|
{
|
||||||
|
if version_data.file_parts.len() != builder.files.len() {
|
||||||
|
return Err(CreateError::InvalidInput(String::from(
|
||||||
|
"Some files were specified in initial_versions but not uploaded",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert the list of category names to actual categories
|
||||||
|
let mut categories = Vec::with_capacity(project_create_data.categories.len());
|
||||||
|
for category in &project_create_data.categories {
|
||||||
|
let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
|
||||||
|
if ids.is_empty() {
|
||||||
|
return Err(CreateError::InvalidCategory(category.clone()));
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: We should filter out categories that don't match the project type of any of the versions
|
||||||
|
// ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
|
||||||
|
categories.extend(ids.values());
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut additional_categories =
|
||||||
|
Vec::with_capacity(project_create_data.additional_categories.len());
|
||||||
|
for category in &project_create_data.additional_categories {
|
||||||
|
let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
|
||||||
|
if ids.is_empty() {
|
||||||
|
return Err(CreateError::InvalidCategory(category.clone()));
|
||||||
|
}
|
||||||
|
// TODO: We should filter out categories that don't match the project type of any of the versions
|
||||||
|
// ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
|
||||||
|
additional_categories.extend(ids.values());
|
||||||
|
}
|
||||||
|
|
||||||
|
let team = models::team_item::TeamBuilder {
|
||||||
|
members: vec![models::team_item::TeamMemberBuilder {
|
||||||
|
user_id: current_user.id.into(),
|
||||||
|
role: crate::models::teams::OWNER_ROLE.to_owned(),
|
||||||
|
// Allow all permissions for project creator, even if attached to a project
|
||||||
|
permissions: ProjectPermissions::all(),
|
||||||
|
organization_permissions: None,
|
||||||
|
accepted: true,
|
||||||
|
payouts_split: Decimal::ONE_HUNDRED,
|
||||||
|
ordering: 0,
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
|
||||||
|
let team_id = team.insert(&mut *transaction).await?;
|
||||||
|
|
||||||
|
let status;
|
||||||
|
if project_create_data.is_draft.unwrap_or(false) {
|
||||||
|
status = ProjectStatus::Draft;
|
||||||
|
} else {
|
||||||
|
status = ProjectStatus::Processing;
|
||||||
|
if project_create_data.initial_versions.is_empty() {
|
||||||
|
return Err(CreateError::InvalidInput(String::from(
|
||||||
|
"Project submitted for review with no initial versions",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let license_id =
|
||||||
|
spdx::Expression::parse(&project_create_data.license_id).map_err(|err| {
|
||||||
|
CreateError::InvalidInput(format!("Invalid SPDX license identifier: {err}"))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut donation_urls = vec![];
|
||||||
|
|
||||||
|
if let Some(urls) = &project_create_data.donation_urls {
|
||||||
|
for url in urls {
|
||||||
|
let platform_id =
|
||||||
|
models::categories::DonationPlatform::get_id(&url.id, &mut **transaction)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
CreateError::InvalidInput(format!(
|
||||||
|
"Donation platform {} does not exist.",
|
||||||
|
url.id.clone()
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
donation_urls.push(models::project_item::DonationUrl {
|
||||||
|
platform_id,
|
||||||
|
platform_short: "".to_string(),
|
||||||
|
platform_name: "".to_string(),
|
||||||
|
url: url.url.clone(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let project_builder_actual = models::project_item::ProjectBuilder {
|
||||||
|
project_id: project_id.into(),
|
||||||
|
team_id,
|
||||||
|
organization_id: project_create_data.organization_id.map(|x| x.into()),
|
||||||
|
title: project_create_data.title,
|
||||||
|
description: project_create_data.description,
|
||||||
|
body: project_create_data.body,
|
||||||
|
icon_url: icon_data.clone().map(|x| x.0),
|
||||||
|
issues_url: project_create_data.issues_url,
|
||||||
|
source_url: project_create_data.source_url,
|
||||||
|
wiki_url: project_create_data.wiki_url,
|
||||||
|
|
||||||
|
license_url: project_create_data.license_url,
|
||||||
|
discord_url: project_create_data.discord_url,
|
||||||
|
categories,
|
||||||
|
additional_categories,
|
||||||
|
initial_versions: versions,
|
||||||
|
status,
|
||||||
|
requested_status: Some(project_create_data.requested_status),
|
||||||
|
license: license_id.to_string(),
|
||||||
|
slug: Some(project_create_data.slug),
|
||||||
|
donation_urls,
|
||||||
|
gallery_items: gallery_urls
|
||||||
|
.iter()
|
||||||
|
.map(|x| models::project_item::GalleryItem {
|
||||||
|
image_url: x.url.clone(),
|
||||||
|
featured: x.featured,
|
||||||
|
title: x.title.clone(),
|
||||||
|
description: x.description.clone(),
|
||||||
|
created: x.created,
|
||||||
|
ordering: x.ordering,
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
color: icon_data.and_then(|x| x.1),
|
||||||
|
monetization_status: MonetizationStatus::Monetized,
|
||||||
|
};
|
||||||
|
let project_builder = project_builder_actual.clone();
|
||||||
|
|
||||||
|
let now = Utc::now();
|
||||||
|
|
||||||
|
let id = project_builder_actual.insert(&mut *transaction).await?;
|
||||||
|
User::clear_project_cache(&[current_user.id.into()], redis).await?;
|
||||||
|
|
||||||
|
for image_id in project_create_data.uploaded_images {
|
||||||
|
if let Some(db_image) =
|
||||||
|
image_item::Image::get(image_id.into(), &mut **transaction, redis).await?
|
||||||
|
{
|
||||||
|
let image: Image = db_image.into();
|
||||||
|
if !matches!(image.context, ImageContext::Project { .. })
|
||||||
|
|| image.context.inner_id().is_some()
|
||||||
|
{
|
||||||
|
return Err(CreateError::InvalidInput(format!(
|
||||||
|
"Image {} is not unused and in the 'project' context",
|
||||||
|
image_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE uploaded_images
|
||||||
|
SET mod_id = $1
|
||||||
|
WHERE id = $2
|
||||||
|
",
|
||||||
|
id as models::ids::ProjectId,
|
||||||
|
image_id.0 as i64
|
||||||
|
)
|
||||||
|
.execute(&mut **transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
image_item::Image::clear_cache(image.id.into(), redis).await?;
|
||||||
|
} else {
|
||||||
|
return Err(CreateError::InvalidInput(format!(
|
||||||
|
"Image {} does not exist",
|
||||||
|
image_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let thread_id = ThreadBuilder {
|
||||||
|
type_: ThreadType::Project,
|
||||||
|
members: vec![],
|
||||||
|
project_id: Some(id),
|
||||||
|
report_id: None,
|
||||||
|
}
|
||||||
|
.insert(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let loaders = project_builder
|
||||||
|
.initial_versions
|
||||||
|
.iter()
|
||||||
|
.flat_map(|v| v.loaders.clone())
|
||||||
|
.unique()
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
let (project_types, games) = Loader::list(&mut **transaction, redis)
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.fold(
|
||||||
|
(Vec::new(), Vec::new()),
|
||||||
|
|(mut project_types, mut games), loader| {
|
||||||
|
if loaders.contains(&loader.id) {
|
||||||
|
project_types.extend(loader.supported_project_types);
|
||||||
|
games.extend(loader.supported_games.iter().map(|x| x.name().to_string()));
|
||||||
|
}
|
||||||
|
(project_types, games)
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = crate::models::projects::Project {
|
||||||
|
id: project_id,
|
||||||
|
slug: project_builder.slug.clone(),
|
||||||
|
project_types,
|
||||||
|
games,
|
||||||
|
team: team_id.into(),
|
||||||
|
organization: project_create_data.organization_id,
|
||||||
|
title: project_builder.title.clone(),
|
||||||
|
description: project_builder.description.clone(),
|
||||||
|
body: project_builder.body.clone(),
|
||||||
|
body_url: None,
|
||||||
|
published: now,
|
||||||
|
updated: now,
|
||||||
|
approved: None,
|
||||||
|
queued: None,
|
||||||
|
status,
|
||||||
|
requested_status: project_builder.requested_status,
|
||||||
|
moderator_message: None,
|
||||||
|
license: License {
|
||||||
|
id: project_create_data.license_id.clone(),
|
||||||
|
name: "".to_string(),
|
||||||
|
url: project_builder.license_url.clone(),
|
||||||
|
},
|
||||||
|
downloads: 0,
|
||||||
|
followers: 0,
|
||||||
|
categories: project_create_data.categories,
|
||||||
|
additional_categories: project_create_data.additional_categories,
|
||||||
|
loaders: vec![],
|
||||||
|
versions: project_builder
|
||||||
|
.initial_versions
|
||||||
|
.iter()
|
||||||
|
.map(|v| v.version_id.into())
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
icon_url: project_builder.icon_url.clone(),
|
||||||
|
issues_url: project_builder.issues_url.clone(),
|
||||||
|
source_url: project_builder.source_url.clone(),
|
||||||
|
wiki_url: project_builder.wiki_url.clone(),
|
||||||
|
discord_url: project_builder.discord_url.clone(),
|
||||||
|
donation_urls: project_create_data.donation_urls.clone(),
|
||||||
|
gallery: gallery_urls,
|
||||||
|
color: project_builder.color,
|
||||||
|
thread_id: thread_id.into(),
|
||||||
|
monetization_status: MonetizationStatus::Monetized,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(response))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn create_initial_version(
|
||||||
|
version_data: &InitialVersionData,
|
||||||
|
project_id: ProjectId,
|
||||||
|
author: UserId,
|
||||||
|
all_loaders: &[models::loader_fields::Loader],
|
||||||
|
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||||
|
redis: &RedisPool,
|
||||||
|
) -> Result<models::version_item::VersionBuilder, CreateError> {
|
||||||
|
if version_data.project_id.is_some() {
|
||||||
|
return Err(CreateError::InvalidInput(String::from(
|
||||||
|
"Found project id in initial version for new project",
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
version_data
|
||||||
|
.validate()
|
||||||
|
.map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;
|
||||||
|
|
||||||
|
// Randomly generate a new id to be used for the version
|
||||||
|
let version_id: VersionId = models::generate_version_id(transaction).await?.into();
|
||||||
|
|
||||||
|
let loaders = version_data
|
||||||
|
.loaders
|
||||||
|
.iter()
|
||||||
|
.map(|x| {
|
||||||
|
all_loaders
|
||||||
|
.iter()
|
||||||
|
.find(|y| y.loader == x.0)
|
||||||
|
.ok_or_else(|| CreateError::InvalidLoader(x.0.clone()))
|
||||||
|
.map(|y| y.id)
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<models::LoaderId>, CreateError>>()?;
|
||||||
|
|
||||||
|
let loader_fields = LoaderField::get_fields(&mut **transaction, redis).await?;
|
||||||
|
let mut version_fields = vec![];
|
||||||
|
let mut loader_field_enum_values =
|
||||||
|
LoaderFieldEnumValue::list_many_loader_fields(&loader_fields, &mut **transaction, redis)
|
||||||
|
.await?;
|
||||||
|
for (key, value) in version_data.fields.iter() {
|
||||||
|
let loader_field = loader_fields
|
||||||
|
.iter()
|
||||||
|
.find(|lf| &lf.field == key)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
CreateError::InvalidInput(format!("Loader field '{key}' does not exist!"))
|
||||||
|
})?;
|
||||||
|
let enum_variants = loader_field_enum_values
|
||||||
|
.remove(&loader_field.id)
|
||||||
|
.unwrap_or_default();
|
||||||
|
let vf: VersionField = VersionField::check_parse(
|
||||||
|
version_id.into(),
|
||||||
|
loader_field.clone(),
|
||||||
|
value.clone(),
|
||||||
|
enum_variants,
|
||||||
|
)
|
||||||
|
.map_err(CreateError::InvalidInput)?;
|
||||||
|
version_fields.push(vf);
|
||||||
|
}
|
||||||
|
|
||||||
|
let dependencies = version_data
|
||||||
|
.dependencies
|
||||||
|
.iter()
|
||||||
|
.map(|d| models::version_item::DependencyBuilder {
|
||||||
|
version_id: d.version_id.map(|x| x.into()),
|
||||||
|
project_id: d.project_id.map(|x| x.into()),
|
||||||
|
dependency_type: d.dependency_type.to_string(),
|
||||||
|
file_name: None,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let version = models::version_item::VersionBuilder {
|
||||||
|
version_id: version_id.into(),
|
||||||
|
project_id: project_id.into(),
|
||||||
|
author_id: author.into(),
|
||||||
|
name: version_data.version_title.clone(),
|
||||||
|
version_number: version_data.version_number.clone(),
|
||||||
|
changelog: version_data.version_body.clone().unwrap_or_default(),
|
||||||
|
files: Vec::new(),
|
||||||
|
dependencies,
|
||||||
|
loaders,
|
||||||
|
version_fields,
|
||||||
|
featured: version_data.featured,
|
||||||
|
status: VersionStatus::Listed,
|
||||||
|
version_type: version_data.release_channel.to_string(),
|
||||||
|
requested_status: None,
|
||||||
|
ordering: version_data.ordering,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(version)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn process_icon_upload(
    uploaded_files: &mut Vec<UploadedFile>,
    id: u64,
    file_extension: &str,
    file_host: &dyn FileHost,
    mut field: Field,
    cdn_url: &str,
) -> Result<(String, Option<u32>), CreateError> {
    if let Some(content_type) = crate::util::ext::get_image_content_type(file_extension) {
        let data = read_from_field(&mut field, 262144, "Icons must be smaller than 256KiB").await?;

        let color = crate::util::img::get_color_from_img(&data)?;

        let hash = sha1::Sha1::from(&data).hexdigest();
        let upload_data = file_host
            .upload_file(
                content_type,
                &format!("data/{id}/{hash}.{file_extension}"),
                data.freeze(),
            )
            .await?;

        uploaded_files.push(UploadedFile {
            file_id: upload_data.file_id,
            file_name: upload_data.file_name.clone(),
        });

        Ok((format!("{}/{}", cdn_url, upload_data.file_name), color))
    } else {
        Err(CreateError::InvalidIconFormat(file_extension.to_string()))
    }
}
|
||||||
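// A minimal client-side sketch (not part of this commit) of calling the new v3
// project creation route above. Assumptions: the route is mounted as
// POST /v3/project/create, `BASE_URL` and `PAT` are placeholders, and reqwest
// (with the `multipart` and `json` features) is used. The `data` field must be
// the first multipart part; with `is_draft: true` the `initial_versions` list
// may be empty, so no file parts are needed. The PAT needs the PROJECT_CREATE scope.
async fn create_draft_project() -> Result<(), Box<dyn std::error::Error>> {
    let data = serde_json::json!({
        "title": "My Test Project",            // 3..=64 characters
        "slug": "my-test-project",             // URL-safe, checked for collisions
        "description": "A short description.", // 3..=255 characters
        "body": "A longer markdown body.",
        "initial_versions": [],                // empty is only allowed for drafts
        "categories": [],
        "license_id": "MIT",                   // must parse as an SPDX expression
        "is_draft": true
    });

    let form = reqwest::multipart::Form::new().text("data", data.to_string());

    let resp = reqwest::Client::new()
        .post("https://BASE_URL/v3/project/create") // assumed mount point
        .header("Authorization", "PAT")
        .multipart(form)
        .send()
        .await?;
    // On success the handler responds with the created Project as JSON.
    println!("status: {}", resp.status());
    Ok(())
}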
2495
src/routes/v3/projects.rs
Normal file
File diff suppressed because it is too large
524
src/routes/v3/reports.rs
Normal file
@ -0,0 +1,524 @@
|
|||||||
|
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
|
||||||
|
use crate::database;
|
||||||
|
use crate::database::models::image_item;
|
||||||
|
use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder};
|
||||||
|
use crate::database::redis::RedisPool;
|
||||||
|
use crate::models::ids::ImageId;
|
||||||
|
use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId};
|
||||||
|
use crate::models::images::{Image, ImageContext};
|
||||||
|
use crate::models::pats::Scopes;
|
||||||
|
use crate::models::reports::{ItemType, Report};
|
||||||
|
use crate::models::threads::{MessageBody, ThreadType};
|
||||||
|
use crate::queue::session::AuthQueue;
|
||||||
|
use crate::routes::ApiError;
|
||||||
|
use crate::util::img;
|
||||||
|
use actix_web::{web, HttpRequest, HttpResponse};
|
||||||
|
use chrono::Utc;
|
||||||
|
use futures::StreamExt;
|
||||||
|
use serde::Deserialize;
|
||||||
|
use sqlx::PgPool;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("report", web::post().to(report_create));
    cfg.route("report", web::get().to(reports));
    cfg.route("reports", web::get().to(reports_get));
    cfg.route("report/{id}", web::get().to(report_get));
    cfg.route("report/{id}", web::patch().to(report_edit));
    cfg.route("report/{id}", web::delete().to(report_delete));
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Validate)]
|
||||||
|
pub struct CreateReport {
|
||||||
|
pub report_type: String,
|
||||||
|
pub item_id: String,
|
||||||
|
pub item_type: ItemType,
|
||||||
|
pub body: String,
|
||||||
|
// Associations to uploaded images
|
||||||
|
#[validate(length(max = 10))]
|
||||||
|
#[serde(default)]
|
||||||
|
pub uploaded_images: Vec<ImageId>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn report_create(
|
||||||
|
req: HttpRequest,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
mut body: web::Payload,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let mut transaction = pool.begin().await?;
|
||||||
|
|
||||||
|
let current_user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::REPORT_CREATE]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let mut bytes = web::BytesMut::new();
|
||||||
|
while let Some(item) = body.next().await {
|
||||||
|
bytes.extend_from_slice(&item.map_err(|_| {
|
||||||
|
ApiError::InvalidInput("Error while parsing request payload!".to_string())
|
||||||
|
})?);
|
||||||
|
}
|
||||||
|
let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?;
|
||||||
|
|
||||||
|
let id = crate::database::models::generate_report_id(&mut transaction).await?;
|
||||||
|
let report_type = crate::database::models::categories::ReportType::get_id(
|
||||||
|
&new_report.report_type,
|
||||||
|
&mut *transaction,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::InvalidInput(format!("Invalid report type: {}", new_report.report_type))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut report = crate::database::models::report_item::Report {
|
||||||
|
id,
|
||||||
|
report_type_id: report_type,
|
||||||
|
project_id: None,
|
||||||
|
version_id: None,
|
||||||
|
user_id: None,
|
||||||
|
body: new_report.body.clone(),
|
||||||
|
reporter: current_user.id.into(),
|
||||||
|
created: Utc::now(),
|
||||||
|
closed: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
match new_report.item_type {
|
||||||
|
ItemType::Project => {
|
||||||
|
let project_id = ProjectId(parse_base62(new_report.item_id.as_str())?);
|
||||||
|
|
||||||
|
let result = sqlx::query!(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM mods WHERE id = $1)",
|
||||||
|
project_id.0 as i64
|
||||||
|
)
|
||||||
|
.fetch_one(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !result.exists.unwrap_or(false) {
|
||||||
|
return Err(ApiError::InvalidInput(format!(
|
||||||
|
"Project could not be found: {}",
|
||||||
|
new_report.item_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
report.project_id = Some(project_id.into())
|
||||||
|
}
|
||||||
|
ItemType::Version => {
|
||||||
|
let version_id = VersionId(parse_base62(new_report.item_id.as_str())?);
|
||||||
|
|
||||||
|
let result = sqlx::query!(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)",
|
||||||
|
version_id.0 as i64
|
||||||
|
)
|
||||||
|
.fetch_one(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !result.exists.unwrap_or(false) {
|
||||||
|
return Err(ApiError::InvalidInput(format!(
|
||||||
|
"Version could not be found: {}",
|
||||||
|
new_report.item_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
report.version_id = Some(version_id.into())
|
||||||
|
}
|
||||||
|
ItemType::User => {
|
||||||
|
let user_id = UserId(parse_base62(new_report.item_id.as_str())?);
|
||||||
|
|
||||||
|
let result = sqlx::query!(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM users WHERE id = $1)",
|
||||||
|
user_id.0 as i64
|
||||||
|
)
|
||||||
|
.fetch_one(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !result.exists.unwrap_or(false) {
|
||||||
|
return Err(ApiError::InvalidInput(format!(
|
||||||
|
"User could not be found: {}",
|
||||||
|
new_report.item_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
report.user_id = Some(user_id.into())
|
||||||
|
}
|
||||||
|
ItemType::Unknown => {
|
||||||
|
return Err(ApiError::InvalidInput(format!(
|
||||||
|
"Invalid report item type: {}",
|
||||||
|
new_report.item_type.as_str()
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
report.insert(&mut transaction).await?;
|
||||||
|
|
||||||
|
for image_id in new_report.uploaded_images {
|
||||||
|
if let Some(db_image) =
|
||||||
|
image_item::Image::get(image_id.into(), &mut *transaction, &redis).await?
|
||||||
|
{
|
||||||
|
let image: Image = db_image.into();
|
||||||
|
if !matches!(image.context, ImageContext::Report { .. })
|
||||||
|
|| image.context.inner_id().is_some()
|
||||||
|
{
|
||||||
|
return Err(ApiError::InvalidInput(format!(
|
||||||
|
"Image {} is not unused and in the 'report' context",
|
||||||
|
image_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
UPDATE uploaded_images
|
||||||
|
SET report_id = $1
|
||||||
|
WHERE id = $2
|
||||||
|
",
|
||||||
|
id.0 as i64,
|
||||||
|
image_id.0 as i64
|
||||||
|
)
|
||||||
|
.execute(&mut *transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
image_item::Image::clear_cache(image.id.into(), &redis).await?;
|
||||||
|
} else {
|
||||||
|
return Err(ApiError::InvalidInput(format!(
|
||||||
|
"Image {} could not be found",
|
||||||
|
image_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let thread_id = ThreadBuilder {
|
||||||
|
type_: ThreadType::Report,
|
||||||
|
members: vec![],
|
||||||
|
project_id: None,
|
||||||
|
report_id: Some(report.id),
|
||||||
|
}
|
||||||
|
.insert(&mut transaction)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
transaction.commit().await?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(Report {
|
||||||
|
id: id.into(),
|
||||||
|
report_type: new_report.report_type.clone(),
|
||||||
|
item_id: new_report.item_id.clone(),
|
||||||
|
item_type: new_report.item_type.clone(),
|
||||||
|
reporter: current_user.id,
|
||||||
|
body: new_report.body.clone(),
|
||||||
|
created: Utc::now(),
|
||||||
|
closed: false,
|
||||||
|
thread_id: thread_id.into(),
|
||||||
|
}))
|
||||||
|
}
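// A minimal client-side sketch (not part of this commit) of calling report_create
// above. Assumptions: the route is mounted as POST /v3/report (matching the
// `config` function), `BASE_URL` and `PAT` are placeholders, and reqwest with the
// `json` feature is used. The body is plain JSON matching `CreateReport`; the PAT
// needs the REPORT_CREATE scope.
async fn file_report(project_id: &str) -> Result<(), Box<dyn std::error::Error>> {
    let body = serde_json::json!({
        "report_type": "spam",   // must exist in the report_types table
        "item_id": project_id,   // base62 id of the reported item
        "item_type": "project",  // assumed serde name; see ItemType
        "body": "Example report body.",
        "uploaded_images": []
    });
    let resp = reqwest::Client::new()
        .post("https://BASE_URL/v3/report") // assumed mount point
        .header("Authorization", "PAT")
        .json(&body)
        .send()
        .await?;
    // On success the handler echoes the created Report as JSON.
    println!("status: {}", resp.status());
    Ok(())
}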
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct ReportsRequestOptions {
|
||||||
|
#[serde(default = "default_count")]
|
||||||
|
pub count: i16,
|
||||||
|
#[serde(default = "default_all")]
|
||||||
|
pub all: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_count() -> i16 {
|
||||||
|
100
|
||||||
|
}
|
||||||
|
fn default_all() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn reports(
|
||||||
|
req: HttpRequest,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
count: web::Query<ReportsRequestOptions>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::REPORT_READ]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
use futures::stream::TryStreamExt;
|
||||||
|
|
||||||
|
let report_ids = if user.role.is_mod() && count.all {
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT id FROM reports
|
||||||
|
WHERE closed = FALSE
|
||||||
|
ORDER BY created ASC
|
||||||
|
LIMIT $1;
|
||||||
|
",
|
||||||
|
count.count as i64
|
||||||
|
)
|
||||||
|
.fetch_many(&**pool)
|
||||||
|
.try_filter_map(|e| async {
|
||||||
|
Ok(e.right()
|
||||||
|
.map(|m| crate::database::models::ids::ReportId(m.id)))
|
||||||
|
})
|
||||||
|
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
sqlx::query!(
|
||||||
|
"
|
||||||
|
SELECT id FROM reports
|
||||||
|
WHERE closed = FALSE AND reporter = $1
|
||||||
|
ORDER BY created ASC
|
||||||
|
LIMIT $2;
|
||||||
|
",
|
||||||
|
user.id.0 as i64,
|
||||||
|
count.count as i64
|
||||||
|
)
|
||||||
|
.fetch_many(&**pool)
|
||||||
|
.try_filter_map(|e| async {
|
||||||
|
Ok(e.right()
|
||||||
|
.map(|m| crate::database::models::ids::ReportId(m.id)))
|
||||||
|
})
|
||||||
|
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
|
||||||
|
.await?
|
||||||
|
};
|
||||||
|
|
||||||
|
let query_reports =
|
||||||
|
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
|
||||||
|
|
||||||
|
let mut reports: Vec<Report> = Vec::new();
|
||||||
|
|
||||||
|
for x in query_reports {
|
||||||
|
reports.push(x.into());
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(reports))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct ReportIds {
|
||||||
|
pub ids: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn reports_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
web::Query(ids): web::Query<ReportIds>,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let report_ids: Vec<crate::database::models::ids::ReportId> =
|
||||||
|
serde_json::from_str::<Vec<crate::models::ids::ReportId>>(&ids.ids)?
|
||||||
|
.into_iter()
|
||||||
|
.map(|x| x.into())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let reports_data =
|
||||||
|
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
|
||||||
|
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::REPORT_READ]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
let all_reports = reports_data
|
||||||
|
.into_iter()
|
||||||
|
.filter(|x| user.role.is_mod() || x.reporter == user.id.into())
|
||||||
|
.map(|x| x.into())
|
||||||
|
.collect::<Vec<Report>>();
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().json(all_reports))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn report_get(
|
||||||
|
req: HttpRequest,
|
||||||
|
pool: web::Data<PgPool>,
|
||||||
|
redis: web::Data<RedisPool>,
|
||||||
|
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||||
|
session_queue: web::Data<AuthQueue>,
|
||||||
|
) -> Result<HttpResponse, ApiError> {
|
||||||
|
let user = get_user_from_headers(
|
||||||
|
&req,
|
||||||
|
&**pool,
|
||||||
|
&redis,
|
||||||
|
&session_queue,
|
||||||
|
Some(&[Scopes::REPORT_READ]),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.1;
|
||||||
|
let id = info.into_inner().0.into();
|
||||||
|
|
||||||
|
let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
|
||||||
|
|
||||||
|
if let Some(report) = report {
|
||||||
|
if !user.role.is_mod() && report.reporter != user.id.into() {
|
||||||
|
return Ok(HttpResponse::NotFound().body(""));
|
||||||
|
}
|
||||||
|
|
||||||
|
let report: Report = report.into();
|
||||||
|
Ok(HttpResponse::Ok().json(report))
|
||||||
|
} else {
|
||||||
|
Ok(HttpResponse::NotFound().body(""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Validate)]
|
||||||
|
pub struct EditReport {
|
||||||
|
#[validate(length(max = 65536))]
|
||||||
|
pub body: Option<String>,
|
||||||
|
pub closed: Option<bool>,
|
||||||
|
}

pub async fn report_edit(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    session_queue: web::Data<AuthQueue>,
    edit_report: web::Json<EditReport>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_WRITE]),
    )
    .await?
    .1;
    let id = info.into_inner().0.into();

    let report = crate::database::models::report_item::Report::get(id, &**pool).await?;

    if let Some(report) = report {
        if !user.role.is_mod() && report.reporter != user.id.into() {
            return Ok(HttpResponse::NotFound().body(""));
        }

        let mut transaction = pool.begin().await?;

        if let Some(edit_body) = &edit_report.body {
            sqlx::query!(
                "
                UPDATE reports
                SET body = $1
                WHERE (id = $2)
                ",
                edit_body,
                id as crate::database::models::ids::ReportId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        if let Some(edit_closed) = edit_report.closed {
            if !user.role.is_mod() {
                return Err(ApiError::InvalidInput(
                    "You cannot reopen a report!".to_string(),
                ));
            }

            ThreadMessageBuilder {
                author_id: Some(user.id.into()),
                body: if !edit_closed && report.closed {
                    MessageBody::ThreadReopen
                } else {
                    MessageBody::ThreadClosure
                },
                thread_id: report.thread_id,
            }
            .insert(&mut transaction)
            .await?;

            sqlx::query!(
                "
                UPDATE reports
                SET closed = $1
                WHERE (id = $2)
                ",
                edit_closed,
                id as crate::database::models::ids::ReportId,
            )
            .execute(&mut *transaction)
            .await?;

            sqlx::query!(
                "
                UPDATE threads
                SET show_in_mod_inbox = $1
                WHERE id = $2
                ",
                !(edit_closed || report.closed),
                report.thread_id.0,
            )
            .execute(&mut *transaction)
            .await?;
        }

        // delete any images no longer in the body
        let checkable_strings: Vec<&str> = vec![&edit_report.body]
            .into_iter()
            .filter_map(|x: &Option<String>| x.as_ref().map(|y| y.as_str()))
            .collect();
        let image_context = ImageContext::Report {
            report_id: Some(id.into()),
        };
        img::delete_unused_images(image_context, checkable_strings, &mut transaction, &redis)
            .await?;

        transaction.commit().await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Ok(HttpResponse::NotFound().body(""))
    }
}
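
// Note on the show_in_mod_inbox update above: with `!(edit_closed || report.closed)`,
// the thread is surfaced in the moderator inbox only when the report is open both
// before and after this edit; closing a report (or editing one that was already
// closed) hides its thread from the inbox.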

pub async fn report_delete(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_DELETE]),
    )
    .await?;

    let mut transaction = pool.begin().await?;

    let id = info.into_inner().0;
    let context = ImageContext::Report {
        report_id: Some(id),
    };
    let uploaded_images =
        database::models::Image::get_many_contexted(context, &mut transaction).await?;
    for image in uploaded_images {
        image_item::Image::remove(image.id, &mut transaction, &redis).await?;
    }

    let result =
        crate::database::models::report_item::Report::remove_full(id.into(), &mut transaction)
            .await?;
    transaction.commit().await?;

    if result.is_some() {
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Ok(HttpResponse::NotFound().body(""))
    }
}

85
src/routes/v3/statistics.rs
Normal file
@ -0,0 +1,85 @@
use crate::routes::ApiError;
use actix_web::{web, HttpResponse};
use serde_json::json;
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("statistics", web::get().to(get_stats));
}

pub async fn get_stats(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
    let projects = sqlx::query!(
        "
        SELECT COUNT(id)
        FROM mods
        WHERE status = ANY($1)
        ",
        &*crate::models::projects::ProjectStatus::iterator()
            .filter(|x| x.is_searchable())
            .map(|x| x.to_string())
            .collect::<Vec<String>>(),
    )
    .fetch_one(&**pool)
    .await?;

    let versions = sqlx::query!(
        "
        SELECT COUNT(v.id)
        FROM versions v
        INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
        WHERE v.status = ANY($2)
        ",
        &*crate::models::projects::ProjectStatus::iterator()
            .filter(|x| x.is_searchable())
            .map(|x| x.to_string())
            .collect::<Vec<String>>(),
        &*crate::models::projects::VersionStatus::iterator()
            .filter(|x| x.is_listed())
            .map(|x| x.to_string())
            .collect::<Vec<String>>(),
    )
    .fetch_one(&**pool)
    .await?;

    let authors = sqlx::query!(
        "
        SELECT COUNT(DISTINCT u.id)
        FROM users u
        INNER JOIN team_members tm on u.id = tm.user_id AND tm.accepted = TRUE
        INNER JOIN mods m on tm.team_id = m.team_id AND m.status = ANY($1)
        ",
        &*crate::models::projects::ProjectStatus::iterator()
            .filter(|x| x.is_searchable())
            .map(|x| x.to_string())
            .collect::<Vec<String>>(),
    )
    .fetch_one(&**pool)
    .await?;

    let files = sqlx::query!(
        "
        SELECT COUNT(f.id) FROM files f
        INNER JOIN versions v on f.version_id = v.id AND v.status = ANY($2)
        INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
        ",
        &*crate::models::projects::ProjectStatus::iterator()
            .filter(|x| x.is_searchable())
            .map(|x| x.to_string())
            .collect::<Vec<String>>(),
        &*crate::models::projects::VersionStatus::iterator()
            .filter(|x| x.is_listed())
            .map(|x| x.to_string())
            .collect::<Vec<String>>(),
    )
    .fetch_one(&**pool)
    .await?;

    let json = json!({
        "projects": projects.count,
        "versions": versions.count,
        "authors": authors.count,
        "files": files.count,
    });

    Ok(HttpResponse::Ok().json(json))
}
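
// Illustrative response shape for the statistics route above; the values below
// are invented, and the real counts come from the queries in get_stats.
#[cfg(test)]
mod statistics_payload_shape {
    use serde_json::json;

    #[test]
    fn payload_exposes_four_counts() {
        let example = json!({
            "projects": 12_345,
            "versions": 67_890,
            "authors": 4_321,
            "files": 98_765,
        });
        for key in ["projects", "versions", "authors", "files"] {
            assert!(example.get(key).is_some());
        }
    }
}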

210
src/routes/v3/tags.rs
Normal file
@ -0,0 +1,210 @@
use std::collections::HashMap;

use super::ApiError;
use crate::database::models::categories::{Category, DonationPlatform, ProjectType, ReportType};
use crate::database::models::loader_fields::{
    Loader, LoaderField, LoaderFieldEnumValue, LoaderFieldType,
};
use crate::database::redis::RedisPool;
use actix_web::{web, HttpResponse};
use serde_json::Value;
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("tag")
            .route("category", web::get().to(category_list))
            .route("loader", web::get().to(loader_list)),
    )
    .route("loader_fields", web::get().to(loader_fields_list))
    .route("license", web::get().to(license_list))
    .route("license/{id}", web::get().to(license_text))
    .route("donation_platform", web::get().to(donation_platform_list))
    .route("report_type", web::get().to(report_type_list))
    .route("project_type", web::get().to(project_type_list));
}

#[derive(serde::Serialize, serde::Deserialize)]
pub struct CategoryData {
    pub icon: String,
    pub name: String,
    pub project_type: String,
    pub header: String,
}

pub async fn category_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let results = Category::list(&**pool, &redis)
        .await?
        .into_iter()
        .map(|x| CategoryData {
            icon: x.icon,
            name: x.category,
            project_type: x.project_type,
            header: x.header,
        })
        .collect::<Vec<_>>();

    Ok(HttpResponse::Ok().json(results))
}

#[derive(serde::Serialize, serde::Deserialize)]
pub struct LoaderData {
    pub icon: String,
    pub name: String,
    pub supported_project_types: Vec<String>,
    pub supported_games: Vec<String>,
}

pub async fn loader_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let mut results = Loader::list(&**pool, &redis)
        .await?
        .into_iter()
        .map(|x| LoaderData {
            icon: x.icon,
            name: x.loader,
            supported_project_types: x.supported_project_types,
            supported_games: x
                .supported_games
                .iter()
                .map(|x| x.name().to_string())
                .collect(),
        })
        .collect::<Vec<_>>();

    results.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));

    Ok(HttpResponse::Ok().json(results))
}

#[derive(serde::Deserialize, serde::Serialize)]
pub struct LoaderFieldsEnumQuery {
    pub loader_field: String,
    pub filters: Option<HashMap<String, Value>>, // For metadata
}

// Provides the variants for any enumerable loader field.
pub async fn loader_fields_list(
    pool: web::Data<PgPool>,
    query: web::Query<LoaderFieldsEnumQuery>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let query = query.into_inner();
    let loader_field = LoaderField::get_field(&query.loader_field, &**pool, &redis)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput(format!(
                "'{}' was not a valid loader field.",
                query.loader_field
            ))
        })?;

    let loader_field_enum_id = match loader_field.field_type {
        LoaderFieldType::Enum(enum_id) | LoaderFieldType::ArrayEnum(enum_id) => enum_id,
        _ => {
            return Err(ApiError::InvalidInput(format!(
                "'{}' is not an enumerable field, but an '{}' field.",
                query.loader_field,
                loader_field.field_type.to_str()
            )))
        }
    };

    let results: Vec<_> = if let Some(filters) = query.filters {
        LoaderFieldEnumValue::list_filter(loader_field_enum_id, filters, &**pool, &redis).await?
    } else {
        LoaderFieldEnumValue::list(loader_field_enum_id, &**pool, &redis).await?
    };

    Ok(HttpResponse::Ok().json(results))
}
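
// Usage sketch (not taken from this diff): a caller names an enumerable loader
// field and may attach JSON metadata filters. The "game_versions" field name
// below is an assumed example.
#[cfg(test)]
mod loader_fields_query_example {
    use super::LoaderFieldsEnumQuery;

    #[test]
    fn builds_an_unfiltered_enum_query() {
        let query = LoaderFieldsEnumQuery {
            loader_field: "game_versions".to_string(),
            filters: None,
        };
        assert!(query.filters.is_none());
    }
}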

#[derive(serde::Serialize)]
pub struct License {
    short: String,
    name: String,
}

pub async fn license_list() -> HttpResponse {
    let licenses = spdx::identifiers::LICENSES;
    let mut results: Vec<License> = Vec::with_capacity(licenses.len());

    for (short, name, _) in licenses {
        results.push(License {
            short: short.to_string(),
            name: name.to_string(),
        });
    }

    HttpResponse::Ok().json(results)
}

#[derive(serde::Serialize)]
pub struct LicenseText {
    title: String,
    body: String,
}

pub async fn license_text(params: web::Path<(String,)>) -> Result<HttpResponse, ApiError> {
    let license_id = params.into_inner().0;

    if license_id == *crate::models::projects::DEFAULT_LICENSE_ID {
        return Ok(HttpResponse::Ok().json(LicenseText {
            title: "All Rights Reserved".to_string(),
            body: "All rights reserved unless explicitly stated.".to_string(),
        }));
    }

    if let Some(license) = spdx::license_id(&license_id) {
        return Ok(HttpResponse::Ok().json(LicenseText {
            title: license.full_name.to_string(),
            body: license.text().to_string(),
        }));
    }

    Err(ApiError::InvalidInput(
        "Invalid SPDX identifier specified".to_string(),
    ))
}
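
// Illustration only: license_text resolves canonical SPDX identifiers through
// the spdx crate, so a request for "MIT" (an assumed example ID) returns the
// full license text rather than the "All Rights Reserved" fallback.
#[cfg(test)]
mod spdx_lookup_example {
    #[test]
    fn known_spdx_identifier_resolves() {
        assert!(spdx::license_id("MIT").is_some());
    }
}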

#[derive(serde::Serialize)]
pub struct DonationPlatformQueryData {
    short: String,
    name: String,
}

pub async fn donation_platform_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let results: Vec<DonationPlatformQueryData> = DonationPlatform::list(&**pool, &redis)
        .await?
        .into_iter()
        .map(|x| DonationPlatformQueryData {
            short: x.short,
            name: x.name,
        })
        .collect();
    Ok(HttpResponse::Ok().json(results))
}

pub async fn report_type_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let results = ReportType::list(&**pool, &redis).await?;
    Ok(HttpResponse::Ok().json(results))
}

pub async fn project_type_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let results = ProjectType::list(&**pool, &redis).await?;
    Ok(HttpResponse::Ok().json(results))
}