File types (#506)

* File types

* Run prepare + fmt

* Switch to struct

* Update docker version
This commit is contained in:
Geometrically 2022-12-23 16:36:53 -07:00 committed by GitHub
parent 5f175141e1
commit fe256d6a62
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 388 additions and 671 deletions

View File

@ -1,4 +1,4 @@
FROM rust:1.64.0 as build
FROM rust:1.65.0 as build
ENV PKG_CONFIG_ALLOW_CROSS=1
WORKDIR /usr/src/labrinth

View File

@ -0,0 +1,2 @@
-- Add migration script here
ALTER TABLE files ADD COLUMN file_type varchar(128) NULL;

View File

@ -315,6 +315,24 @@
},
"query": "\n SELECT SUM(pv.amount) amount\n FROM payouts_values pv\n WHERE pv.user_id = $1\n "
},
"0c2addb0d7a87fa558821ff8e943bbb751fb2bdc22d1a5368f61cc7827586840": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Varchar",
"Varchar",
"Bool",
"Int4",
"Varchar"
]
}
},
"query": "\n INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n "
},
"0dbd0fa9a25416716a047184944d243ed5cb55808c6f300d7335c887f02a7f6e": {
"describe": {
"columns": [
@ -760,6 +778,122 @@
},
"query": "\n INSERT INTO payouts_values (user_id, mod_id, amount, created)\n VALUES ($1, $2, $3, $4)\n "
},
"19bcfcd376172d2b293e86e9dd69ee778f7447ae708fd0c3c70239d2c8b6a419": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int8"
},
{
"name": "mod_id",
"ordinal": 1,
"type_info": "Int8"
},
{
"name": "author_id",
"ordinal": 2,
"type_info": "Int8"
},
{
"name": "version_name",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "version_number",
"ordinal": 4,
"type_info": "Varchar"
},
{
"name": "changelog",
"ordinal": 5,
"type_info": "Varchar"
},
{
"name": "date_published",
"ordinal": 6,
"type_info": "Timestamptz"
},
{
"name": "downloads",
"ordinal": 7,
"type_info": "Int4"
},
{
"name": "version_type",
"ordinal": 8,
"type_info": "Varchar"
},
{
"name": "featured",
"ordinal": 9,
"type_info": "Bool"
},
{
"name": "status",
"ordinal": 10,
"type_info": "Varchar"
},
{
"name": "requested_status",
"ordinal": 11,
"type_info": "Varchar"
},
{
"name": "game_versions",
"ordinal": 12,
"type_info": "Jsonb"
},
{
"name": "loaders",
"ordinal": 13,
"type_info": "VarcharArray"
},
{
"name": "files",
"ordinal": 14,
"type_info": "Jsonb"
},
{
"name": "hashes",
"ordinal": 15,
"type_info": "Jsonb"
},
{
"name": "dependencies",
"ordinal": 16,
"type_info": "Jsonb"
}
],
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
null,
null,
null,
null,
null
],
"parameters": {
"Left": [
"Int8Array"
]
}
},
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,\n JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.date_published ASC;\n "
},
"19dc22c4d6d14222f8e8bace74c2961761c53b7375460ade15af921754d5d7da": {
"describe": {
"columns": [],
@ -1149,6 +1283,122 @@
},
"query": "INSERT INTO banned_users (github_id) VALUES ($1);"
},
"28e5a9c09fac55677fea16d21482a626b7c5f8edbf2af182ed67e44a793ef2e0": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int8"
},
{
"name": "mod_id",
"ordinal": 1,
"type_info": "Int8"
},
{
"name": "author_id",
"ordinal": 2,
"type_info": "Int8"
},
{
"name": "version_name",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "version_number",
"ordinal": 4,
"type_info": "Varchar"
},
{
"name": "changelog",
"ordinal": 5,
"type_info": "Varchar"
},
{
"name": "date_published",
"ordinal": 6,
"type_info": "Timestamptz"
},
{
"name": "downloads",
"ordinal": 7,
"type_info": "Int4"
},
{
"name": "version_type",
"ordinal": 8,
"type_info": "Varchar"
},
{
"name": "featured",
"ordinal": 9,
"type_info": "Bool"
},
{
"name": "status",
"ordinal": 10,
"type_info": "Varchar"
},
{
"name": "requested_status",
"ordinal": 11,
"type_info": "Varchar"
},
{
"name": "game_versions",
"ordinal": 12,
"type_info": "Jsonb"
},
{
"name": "loaders",
"ordinal": 13,
"type_info": "VarcharArray"
},
{
"name": "files",
"ordinal": 14,
"type_info": "Jsonb"
},
{
"name": "hashes",
"ordinal": 15,
"type_info": "Jsonb"
},
{
"name": "dependencies",
"ordinal": 16,
"type_info": "Jsonb"
}
],
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
null,
null,
null,
null,
null
],
"parameters": {
"Left": [
"Int8"
]
}
},
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,\n JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = $1\n GROUP BY v.id;\n "
},
"29e657d26f0fb24a766f5b5eb6a94d01d1616884d8ca10e91536e974d5b585a6": {
"describe": {
"columns": [],
@ -2225,122 +2475,6 @@
},
"query": "\n UPDATE versions\n SET version_number = $1\n WHERE (id = $2)\n "
},
"5482c526f66da3d7e4618a546f900f312e6382684ad3cd8fc84cc7b961cfb91e": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int8"
},
{
"name": "mod_id",
"ordinal": 1,
"type_info": "Int8"
},
{
"name": "author_id",
"ordinal": 2,
"type_info": "Int8"
},
{
"name": "version_name",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "version_number",
"ordinal": 4,
"type_info": "Varchar"
},
{
"name": "changelog",
"ordinal": 5,
"type_info": "Varchar"
},
{
"name": "date_published",
"ordinal": 6,
"type_info": "Timestamptz"
},
{
"name": "downloads",
"ordinal": 7,
"type_info": "Int4"
},
{
"name": "version_type",
"ordinal": 8,
"type_info": "Varchar"
},
{
"name": "featured",
"ordinal": 9,
"type_info": "Bool"
},
{
"name": "status",
"ordinal": 10,
"type_info": "Varchar"
},
{
"name": "requested_status",
"ordinal": 11,
"type_info": "Varchar"
},
{
"name": "game_versions",
"ordinal": 12,
"type_info": "Jsonb"
},
{
"name": "loaders",
"ordinal": 13,
"type_info": "VarcharArray"
},
{
"name": "files",
"ordinal": 14,
"type_info": "Jsonb"
},
{
"name": "hashes",
"ordinal": 15,
"type_info": "Jsonb"
},
{
"name": "dependencies",
"ordinal": 16,
"type_info": "Jsonb"
}
],
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
null,
null,
null,
null,
null
],
"parameters": {
"Left": [
"Int8Array"
]
}
},
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,\n JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.date_published ASC;\n "
},
"57a38641fe5bdb273190e8d586f46284340b9ff11b6ae3177923631a37bb11eb": {
"describe": {
"columns": [],
@ -3050,6 +3184,19 @@
},
"query": "\n SELECT mf.mod_id FROM mod_follows mf\n WHERE mf.follower_id = $1\n "
},
"75dc7f592781a1414e5f489543b14cb94c5265ddb3abfb3dda965c8cf154b753": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8",
"Varchar"
]
}
},
"query": "\n UPDATE files\n SET file_type = $2\n WHERE (id = $1)\n "
},
"76db1c204139e18002e5751c3dcefff79791a1dd852b62d34fcf008151e8945a": {
"describe": {
"columns": [
@ -3257,27 +3404,6 @@
},
"query": "\n DELETE FROM loaders_versions WHERE version_id = $1\n "
},
"79d30dd9fe16ac93ece0b6272811e1b644bac8f61b446dceca46a16cb69953a1": {
"describe": {
"columns": [
{
"name": "version_id",
"ordinal": 0,
"type_info": "Int8"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Bytea",
"Text"
]
}
},
"query": "\n SELECT f.version_id version_id FROM hashes h\n INNER JOIN files f ON h.file_id = f.id\n WHERE h.algorithm = $2 AND h.hash = $1\n "
},
"7ab21e7613dd88e97cf602e76bff62170c13ceef8104a4ce4cb2d101f8ce4f48": {
"describe": {
"columns": [],
@ -3997,32 +4123,6 @@
},
"query": "\n UPDATE mods\n SET wiki_url = $1\n WHERE (id = $2)\n "
},
"9bbb52954fccebc8aa467618dcb2c224722c8d816ec6803bcd8711778bd56199": {
"describe": {
"columns": [
{
"name": "flame_anvil_project",
"ordinal": 0,
"type_info": "Int4"
},
{
"name": "flame_anvil_key",
"ordinal": 1,
"type_info": "Varchar"
}
],
"nullable": [
true,
true
],
"parameters": {
"Left": [
"Int8"
]
}
},
"query": "\n SELECT m.flame_anvil_project, u.flame_anvil_key\n FROM mods m\n INNER JOIN users u ON m.flame_anvil_user = u.id\n WHERE m.id = $1\n "
},
"9c8f3f9503b5bb52e05bbc8a8eee7f640ab7d6b04a59ec111ce8b23e886911de": {
"describe": {
"columns": [],
@ -4508,23 +4608,6 @@
},
"query": "\n DELETE FROM mods_categories\n WHERE joining_mod_id = $1\n "
},
"a82ece911fac855366bd25f1379778a803e61ab87da096f0ab9f6db3eaa521d4": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Varchar",
"Varchar",
"Bool",
"Int4"
]
}
},
"query": "\n INSERT INTO files (id, version_id, url, filename, is_primary, size)\n VALUES ($1, $2, $3, $4, $5, $6)\n "
},
"a90bb6904e1b790c0e29e060dac5ba4c2a6087e07c1197dc1f59f0aff31944c9": {
"describe": {
"columns": [],
@ -6125,121 +6208,26 @@
},
"query": "\n DELETE FROM team_members\n WHERE team_id = $1\n "
},
"d13d26a088c67d3b38a84d63241106ce78734bc34a51d8f01dcd4f9ea1d13717": {
"d1566672369ea22cb1f638f073f8e3fb467b354351ae71c67941323749ec9bcd": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int8"
},
{
"name": "mod_id",
"ordinal": 1,
"type_info": "Int8"
},
{
"name": "author_id",
"ordinal": 2,
"type_info": "Int8"
},
{
"name": "version_name",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "version_number",
"ordinal": 4,
"type_info": "Varchar"
},
{
"name": "changelog",
"ordinal": 5,
"type_info": "Varchar"
},
{
"name": "date_published",
"ordinal": 6,
"type_info": "Timestamptz"
},
{
"name": "downloads",
"ordinal": 7,
"type_info": "Int4"
},
{
"name": "version_type",
"ordinal": 8,
"type_info": "Varchar"
},
{
"name": "featured",
"ordinal": 9,
"type_info": "Bool"
},
{
"name": "status",
"ordinal": 10,
"type_info": "Varchar"
},
{
"name": "requested_status",
"ordinal": 11,
"type_info": "Varchar"
},
{
"name": "game_versions",
"ordinal": 12,
"type_info": "Jsonb"
},
{
"name": "loaders",
"ordinal": 13,
"type_info": "VarcharArray"
},
{
"name": "files",
"ordinal": 14,
"type_info": "Jsonb"
},
{
"name": "hashes",
"ordinal": 15,
"type_info": "Jsonb"
},
{
"name": "dependencies",
"ordinal": 16,
"type_info": "Jsonb"
}
],
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
null,
null,
null,
null,
null
false
],
"parameters": {
"Left": [
"Int8"
"Bytea",
"Text"
]
}
},
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,\n JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = $1\n GROUP BY v.id;\n "
"query": "\n SELECT f.id id FROM hashes h\n INNER JOIN files f ON h.file_id = f.id\n WHERE h.algorithm = $2 AND h.hash = $1\n "
},
"d1866ecc161c3fe3fbe094289510e99b17de563957e1f824c347c1e6ac40c40c": {
"describe": {

View File

@ -570,6 +570,7 @@ impl TeamMember {
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub async fn edit_team_member(
id: TeamId,
user_id: UserId,

View File

@ -1,6 +1,6 @@
use super::ids::*;
use super::DatabaseError;
use crate::models::projects::VersionStatus;
use crate::models::projects::{FileType, VersionStatus};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use std::cmp::Ordering;
@ -87,6 +87,7 @@ pub struct VersionFileBuilder {
pub hashes: Vec<HashBuilder>,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}
impl VersionFileBuilder {
@ -99,15 +100,16 @@ impl VersionFileBuilder {
sqlx::query!(
"
INSERT INTO files (id, version_id, url, filename, is_primary, size)
VALUES ($1, $2, $3, $4, $5, $6)
INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type)
VALUES ($1, $2, $3, $4, $5, $6, $7)
",
file_id as FileId,
version_id as VersionId,
self.url,
self.filename,
self.primary,
self.size as i32
self.size as i32,
self.file_type.map(|x| x.as_str()),
)
.execute(&mut *transaction)
.await?;
@ -601,7 +603,7 @@ impl Version {
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size)) filter (where f.id is not null) files,
JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,
JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
FROM versions v
@ -654,6 +656,7 @@ impl Version {
pub filename: String,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}
let hashes: Vec<Hash> =
@ -687,6 +690,7 @@ impl Version {
hashes: file_hashes,
primary: x.primary,
size: x.size,
file_type: x.file_type,
}
})
.collect::<Vec<_>>();
@ -751,7 +755,7 @@ impl Version {
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size)) filter (where f.id is not null) files,
JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,
JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
FROM versions v
@ -803,6 +807,7 @@ impl Version {
pub filename: String,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}
let hashes: Vec<Hash> = serde_json::from_value(
@ -836,6 +841,7 @@ impl Version {
hashes: file_hashes,
primary: x.primary,
size: x.size,
file_type: x.file_type,
}
}).collect::<Vec<_>>();
@ -908,4 +914,5 @@ pub struct QueryFile {
pub hashes: HashMap<String, String>,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}

View File

@ -1,6 +1,5 @@
use crate::file_hosting::S3Host;
use crate::queue::download::DownloadQueue;
use crate::queue::flameanvil::FlameAnvilQueue;
use crate::queue::payouts::PayoutsQueue;
use crate::ratelimit::errors::ARError;
use crate::ratelimit::memory::{MemoryStore, MemoryStoreActor};
@ -222,7 +221,6 @@ async fn main() -> std::io::Result<()> {
};
let payouts_queue = Arc::new(Mutex::new(PayoutsQueue::new()));
let flame_anvil_queue = Arc::new(Mutex::new(FlameAnvilQueue::new()));
let store = MemoryStore::new();
@ -278,7 +276,6 @@ async fn main() -> std::io::Result<()> {
.app_data(web::Data::new(search_config.clone()))
.app_data(web::Data::new(download_queue.clone()))
.app_data(web::Data::new(payouts_queue.clone()))
.app_data(web::Data::new(flame_anvil_queue.clone()))
.app_data(web::Data::new(ip_salt.clone()))
.wrap(sentry_actix::Sentry::new())
.configure(routes::v1_config)

View File

@ -460,6 +460,7 @@ impl From<QueryVersion> for Version {
hashes: f.hashes,
primary: f.primary,
size: f.size,
file_type: f.file_type,
})
.collect(),
dependencies: data
@ -587,6 +588,8 @@ pub struct VersionFile {
pub primary: bool,
/// The size in bytes of the file
pub size: u32,
/// The type of the file
pub file_type: Option<FileType>,
}
/// A dendency which describes what versions are required, break support, or are optional to the
@ -603,7 +606,7 @@ pub struct Dependency {
pub dependency_type: DependencyType,
}
#[derive(Serialize, Deserialize, Clone, Eq, PartialEq)]
#[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum VersionType {
Release,
@ -628,7 +631,7 @@ impl VersionType {
}
}
#[derive(Serialize, Deserialize, Clone)]
#[derive(Serialize, Deserialize, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum DependencyType {
Required,
@ -665,6 +668,31 @@ impl DependencyType {
}
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum FileType {
RequiredResourcePack,
OptionalResourcePack,
Unknown,
}
impl std::fmt::Display for FileType {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
fmt.write_str(self.as_str())
}
}
impl FileType {
// These are constant, so this can remove unnecessary allocations (`to_string`)
pub fn as_str(&self) -> &'static str {
match self {
FileType::RequiredResourcePack => "required-resource-pack",
FileType::OptionalResourcePack => "optional-resource-pack",
FileType::Unknown => "unknown",
}
}
}
/// A specific version of Minecraft
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(transparent)]

View File

@ -1,276 +0,0 @@
use crate::database::models::categories::GameVersion;
use crate::file_hosting::FileHostingError;
use crate::routes::project_creation::CreateError;
use chrono::{DateTime, Duration, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Deserialize, Debug, Clone)]
struct FlameGameVersionType {
id: i32,
slug: String,
}
#[derive(Deserialize, Debug, Clone)]
struct FlameGameVersion {
id: i32,
#[serde(rename = "gameVersionTypeID")]
game_version_type_id: i32,
name: String,
slug: String,
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
struct FlameUploadFile {
changelog: String,
// always "markdown"
changelog_type: String,
display_name: String,
game_versions: Vec<i32>,
release_type: String,
// TODO: relations?
}
pub struct FlameAnvilQueue {
mod_loaders: Vec<FlameGameVersion>,
minecraft_versions: Vec<FlameGameVersion>,
bukkit_versions: Vec<FlameGameVersion>,
last_updated: DateTime<Utc>,
}
pub struct UploadFile {
pub loaders: Vec<String>,
pub game_versions: Vec<String>,
pub display_name: String,
pub changelog: String,
pub version_type: String,
}
// Batches download transactions every thirty seconds
impl FlameAnvilQueue {
pub fn new() -> Self {
FlameAnvilQueue {
mod_loaders: vec![],
minecraft_versions: vec![],
bukkit_versions: vec![],
last_updated: Utc::now() - Duration::days(365),
}
}
fn convert_game_versions_to_flame(
&self,
original: Vec<String>,
flame_game_versions: &[FlameGameVersion],
game_versions: &[GameVersion],
) -> Vec<i32> {
let mut og_to_flame = HashMap::new();
let mut last_visited = if flame_game_versions
.last()
.map(|x| x.name.ends_with("-Snapshot"))
.unwrap_or_default()
{
None
} else {
flame_game_versions
.iter()
.rfind(|x| !x.name.ends_with("-Snapshot"))
.cloned()
};
for game_version in game_versions {
if let Some(flame_game_version) =
flame_game_versions.iter().find(|x| {
x.name
== if game_version.version.starts_with('b') {
game_version.version.replace('b', "Beta ")
} else {
game_version.version.clone()
}
})
{
last_visited = Some(flame_game_version.clone());
og_to_flame
.insert(&game_version.version, flame_game_version.id);
} else if let Some(last_visited) = &last_visited {
if game_version.major {
og_to_flame.insert(&game_version.version, last_visited.id);
} else {
let mut splits = last_visited.name.split('.');
let new_str = format!(
"{}.{}-Snapshot",
splits.next().unwrap_or_default(),
splits.next().unwrap_or_default()
);
if let Some(flame_game_version) =
flame_game_versions.iter().find(|x| x.name == new_str)
{
og_to_flame.insert(
&game_version.version,
flame_game_version.id,
);
} else {
og_to_flame
.insert(&game_version.version, last_visited.id);
}
}
} else if let Some(last) = flame_game_versions.last() {
og_to_flame.insert(&game_version.version, last.id);
}
}
let mut new = Vec::new();
for x in original {
if let Some(value) = og_to_flame.get(&&x) {
new.push(*value);
}
}
new
}
#[allow(clippy::too_many_arguments)]
pub async fn upload_file(
&mut self,
api_token: &str,
project_id: i32,
upload_file: UploadFile,
game_versions: &[GameVersion],
file: Vec<u8>,
file_name: String,
mime_type: String,
is_plugin: bool,
) -> Result<i32, CreateError> {
if self.last_updated < (Utc::now() - Duration::minutes(30)) {
self.index(api_token).await.map_err(|_| {
CreateError::InvalidInput(
"Indexing metadata from FlameAnvil failed!".to_string(),
)
})?;
}
let mut loaders_converted = upload_file
.loaders
.into_iter()
.filter_map(|x| self.mod_loaders.iter().find(|y| y.slug == x))
.map(|x| x.id)
.collect::<Vec<i32>>();
let mut game_versions_converted = self.convert_game_versions_to_flame(
upload_file.game_versions,
if is_plugin {
&self.bukkit_versions
} else {
&self.minecraft_versions
},
game_versions,
);
loaders_converted.append(&mut game_versions_converted);
let file = reqwest::multipart::Part::bytes(file)
.file_name(file_name)
.mime_str(&mime_type)
.map_err(|_| {
CreateError::InvalidInput(
"Error while converting inputted file to multipart payload"
.to_string(),
)
})?;
let form = reqwest::multipart::Form::new()
.text(
"metadata",
serde_json::to_string(&FlameUploadFile {
changelog: upload_file.changelog,
changelog_type: "markdown".to_string(),
display_name: upload_file.display_name,
game_versions: loaders_converted,
release_type: upload_file.version_type,
})
.unwrap(),
)
.part("file", file);
#[derive(Deserialize)]
struct FileResponse {
id: i32,
}
let client = reqwest::Client::new();
let id = client.post(&*format!("https://minecraft.curseforge.com/api/projects/{project_id}/upload-file?token={api_token}"))
.multipart(form)
.send()
.await.map_err(|_| CreateError::FileHostingError(FileHostingError::S3Error("Error uploading file to FlameAnvil!".to_string())))?
.json::<FileResponse>()
.await.map_err(|_| CreateError::FileHostingError(FileHostingError::S3Error("Error deserializing uploaded file response from FlameAnvil!".to_string())))?;
Ok(id.id)
}
pub async fn index(
&mut self,
api_token: &str,
) -> Result<(), reqwest::Error> {
let (game_versions, game_version_types, bukkit_game_versions, bukkit_game_versions_types) = futures::future::try_join4(
reqwest::get(format!("https://minecraft.curseforge.com/api/game/versions?token={api_token}")),
reqwest::get(format!("https://minecraft.curseforge.com/api/game/version-types?token={api_token}")),
reqwest::get(format!("https://dev.bukkit.org/api/game/versions?token={api_token}")),
reqwest::get(format!("https://dev.bukkit.org/api/game/version-types?token={api_token}"))
).await?;
let (
game_versions,
game_version_types,
bukkit_game_versions,
bukkit_game_versions_types,
) = futures::future::try_join4(
game_versions.json::<Vec<FlameGameVersion>>(),
game_version_types.json::<Vec<FlameGameVersionType>>(),
bukkit_game_versions.json::<Vec<FlameGameVersion>>(),
bukkit_game_versions_types.json::<Vec<FlameGameVersionType>>(),
)
.await?;
let mod_loader_types = game_version_types
.iter()
.filter(|x| x.slug == *"modloader")
.map(|x| x.id)
.collect::<Vec<_>>();
let minecraft_types = game_version_types
.iter()
.filter(|x| x.slug.starts_with("minecraft"))
.map(|x| x.id)
.collect::<Vec<_>>();
let bukkit_types = bukkit_game_versions_types
.iter()
.filter(|x| x.slug.starts_with("bukkit"))
.map(|x| x.id)
.collect::<Vec<_>>();
let mod_loaders = game_versions
.iter()
.filter(|x| mod_loader_types.contains(&x.game_version_type_id))
.cloned()
.collect::<Vec<_>>();
let minecraft_versions = game_versions
.iter()
.filter(|x| minecraft_types.contains(&x.game_version_type_id))
.cloned()
.collect::<Vec<_>>();
let bukkit_versions = bukkit_game_versions
.iter()
.filter(|x| bukkit_types.contains(&x.game_version_type_id))
.cloned()
.collect::<Vec<_>>();
self.mod_loaders = mod_loaders;
self.minecraft_versions = minecraft_versions;
self.bukkit_versions = bukkit_versions;
Ok(())
}
}

View File

@ -1,3 +1,2 @@
pub mod download;
pub mod flameanvil;
pub mod payouts;

View File

@ -6,7 +6,6 @@ use crate::models::projects::{
VersionStatus,
};
use crate::models::users::UserId;
use crate::queue::flameanvil::FlameAnvilQueue;
use crate::routes::version_creation::InitialVersionData;
use crate::search::indexing::IndexingError;
use crate::util::auth::{get_user_from_headers, AuthenticationError};
@ -24,7 +23,6 @@ use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::sync::Arc;
use thiserror::Error;
use tokio::sync::Mutex;
use validator::Validate;
#[derive(Error, Debug)]
@ -266,7 +264,6 @@ pub async fn project_create(
mut payload: Multipart,
client: Data<PgPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
flame_anvil_queue: Data<Arc<Mutex<FlameAnvilQueue>>>,
) -> Result<HttpResponse, CreateError> {
let mut transaction = client.begin().await?;
let mut uploaded_files = Vec::new();
@ -276,7 +273,6 @@ pub async fn project_create(
&mut payload,
&mut transaction,
&***file_host,
&flame_anvil_queue,
&mut uploaded_files,
&client,
)
@ -334,7 +330,6 @@ pub async fn project_create_inner(
payload: &mut Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
flame_anvil_queue: &Mutex<FlameAnvilQueue>,
uploaded_files: &mut Vec<UploadedFile>,
pool: &PgPool,
) -> Result<HttpResponse, CreateError> {
@ -593,11 +588,6 @@ pub async fn project_create_inner(
all_game_versions.clone(),
version_data.primary_file.is_some(),
version_data.primary_file.as_deref() == Some(name),
version_data.version_title.clone(),
version_data.version_body.clone().unwrap_or_default(),
version_data.release_channel.clone().to_string(),
flame_anvil_queue,
None,
None,
transaction,
)

View File

@ -1473,6 +1473,18 @@ pub async fn add_gallery_item(
let id: ProjectId = project_item.inner.id.into();
let url = format!("data/{}/images/{}.{}", id, hash, &*ext.ext);
let file_url = format!("{}/{}", cdn_url, url);
if project_item
.gallery_items
.iter()
.any(|x| x.image_url == file_url)
{
return Err(ApiError::InvalidInput(
"You may not upload duplicate gallery images!".to_string(),
));
}
file_host
.upload_file(content_type, &url, bytes.freeze())
.await?;
@ -1494,7 +1506,7 @@ pub async fn add_gallery_item(
}
database::models::project_item::GalleryItem {
image_url: format!("{}/{}", cdn_url, url),
image_url: file_url,
featured: item.featured,
title: item.title,
description: item.description,

View File

@ -1,6 +1,5 @@
use crate::file_hosting::FileHost;
use crate::models::projects::SearchRequest;
use crate::queue::flameanvil::FlameAnvilQueue;
use crate::routes::project_creation::{
project_create_inner, undo_uploads, CreateError,
};
@ -16,7 +15,6 @@ use actix_web::{get, post, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
use tokio::sync::Mutex;
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ResultSearchMod {
@ -121,7 +119,6 @@ pub async fn mod_create(
mut payload: Multipart,
client: Data<PgPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
flame_anvil_queue: Data<Arc<Mutex<FlameAnvilQueue>>>,
) -> Result<HttpResponse, CreateError> {
let mut transaction = client.begin().await?;
let mut uploaded_files = Vec::new();
@ -131,7 +128,6 @@ pub async fn mod_create(
&mut payload,
&mut transaction,
&***file_host,
&flame_anvil_queue,
&mut uploaded_files,
&client,
)

View File

@ -3,12 +3,9 @@ use crate::models::ids::{ProjectId, UserId, VersionId};
use crate::models::projects::{
Dependency, GameVersion, Loader, Version, VersionFile, VersionType,
};
use crate::models::teams::Permissions;
use crate::routes::version_file::Algorithm;
use crate::routes::versions::{VersionIds, VersionListFilters};
use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use actix_web::{get, web, HttpResponse};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;

View File

@ -6,17 +6,15 @@ use crate::database::models::version_item::{
use crate::file_hosting::FileHost;
use crate::models::pack::PackFileHash;
use crate::models::projects::{
Dependency, DependencyType, GameVersion, Loader, ProjectId, Version,
VersionFile, VersionId, VersionStatus, VersionType,
Dependency, DependencyType, FileType, GameVersion, Loader, ProjectId,
Version, VersionFile, VersionId, VersionStatus, VersionType,
};
use crate::models::teams::Permissions;
use crate::queue::flameanvil::{FlameAnvilQueue, UploadFile};
use crate::routes::project_creation::{CreateError, UploadedFile};
use crate::util::auth::get_user_from_headers;
use crate::util::routes::read_from_field;
use crate::util::validate::validation_errors_to_string;
use crate::validate::plugin::PluginYmlValidator;
use crate::validate::{validate_file, ValidationResult, Validator};
use crate::validate::{validate_file, ValidationResult};
use actix::fut::ready;
use actix_multipart::{Field, Multipart};
use actix_web::web::Data;
@ -26,7 +24,6 @@ use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::sync::Arc;
use tokio::sync::Mutex;
use validator::Validate;
fn default_requested_status() -> VersionStatus {
@ -65,11 +62,13 @@ pub struct InitialVersionData {
pub primary_file: Option<String>,
#[serde(default = "default_requested_status")]
pub status: VersionStatus,
#[serde(default = "Vec::new")]
pub file_types: Vec<(String, FileType)>,
}
#[derive(Serialize, Deserialize, Clone)]
/// Per-file metadata deserialized from the multipart `data` field when
/// uploading an additional file to an existing version
/// (see `upload_file_to_version_inner`, which parses it with `serde_json::from_slice`).
struct InitialFileData {
    // TODO: hashes?
    /// Optional classification of the uploaded file; forwarded to `upload_file`
    /// and ultimately stored in the `files.file_type` column (NULL when absent).
    pub file_type: Option<FileType>,
}
// under `/api/v1/version`
@ -79,7 +78,6 @@ pub async fn version_create(
mut payload: Multipart,
client: Data<PgPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
flame_anvil_queue: Data<Arc<Mutex<FlameAnvilQueue>>>,
) -> Result<HttpResponse, CreateError> {
let mut transaction = client.begin().await?;
let mut uploaded_files = Vec::new();
@ -89,7 +87,6 @@ pub async fn version_create(
&mut payload,
&mut transaction,
&***file_host,
&flame_anvil_queue,
&mut uploaded_files,
)
.await;
@ -120,7 +117,6 @@ async fn version_create_inner(
payload: &mut Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
flame_anvil_queue: &Mutex<FlameAnvilQueue>,
uploaded_files: &mut Vec<UploadedFile>,
) -> Result<HttpResponse, CreateError> {
let cdn_url = dotenvy::var("CDN_URL")?;
@ -311,19 +307,6 @@ async fn version_create_inner(
.await?
.name;
let flame_anvil_info = sqlx::query!(
"
SELECT m.flame_anvil_project, u.flame_anvil_key
FROM mods m
INNER JOIN users u ON m.flame_anvil_user = u.id
WHERE m.id = $1
",
version.project_id as models::ProjectId,
)
.fetch_optional(&mut *transaction)
.await?
.map(|x| (x.flame_anvil_project, x.flame_anvil_key));
let version_data = initial_version_data.clone().ok_or_else(|| {
CreateError::InvalidInput("`data` field is required".to_string())
})?;
@ -345,12 +328,11 @@ async fn version_create_inner(
all_game_versions.clone(),
version_data.primary_file.is_some(),
version_data.primary_file.as_deref() == Some(name),
version_data.version_title.clone(),
version_data.version_body.clone().unwrap_or_default(),
version_data.release_channel.clone().to_string(),
flame_anvil_queue,
flame_anvil_info.clone().and_then(|x| x.0),
flame_anvil_info.and_then(|x| x.1),
version_data
.file_types
.iter()
.find(|x| x.0 == name)
.map(|x| x.1),
transaction,
)
.await?;
@ -453,6 +435,7 @@ async fn version_create_inner(
filename: file.filename.clone(),
primary: file.primary,
size: file.size,
file_type: file.file_type,
})
.collect::<Vec<_>>(),
dependencies: version_data.dependencies,
@ -473,7 +456,6 @@ pub async fn upload_file_to_version(
mut payload: Multipart,
client: Data<PgPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
flame_anvil_queue: Data<Arc<Mutex<FlameAnvilQueue>>>,
) -> Result<HttpResponse, CreateError> {
let mut transaction = client.begin().await?;
let mut uploaded_files = Vec::new();
@ -486,7 +468,6 @@ pub async fn upload_file_to_version(
client,
&mut transaction,
&***file_host,
&flame_anvil_queue,
&mut uploaded_files,
version_id,
)
@ -520,7 +501,6 @@ async fn upload_file_to_version_inner(
client: Data<PgPool>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
flame_anvil_queue: &Mutex<FlameAnvilQueue>,
uploaded_files: &mut Vec<UploadedFile>,
version_id: models::VersionId,
) -> Result<HttpResponse, CreateError> {
@ -597,13 +577,12 @@ async fn upload_file_to_version_inner(
);
}
let file_data: InitialFileData = serde_json::from_slice(&data)?;
// TODO: currently no data here, but still required
initial_file_data = Some(file_data);
continue;
}
let _file_data = initial_file_data.as_ref().ok_or_else(|| {
let file_data = initial_file_data.as_ref().ok_or_else(|| {
CreateError::InvalidInput(String::from(
"`data` field must come before file fields",
))
@ -642,12 +621,7 @@ async fn upload_file_to_version_inner(
all_game_versions.clone(),
true,
false,
version.inner.name.clone(),
version.inner.changelog.clone(),
version.inner.version_type.clone(),
flame_anvil_queue,
None,
None,
file_data.file_type,
transaction,
)
.await?;
@ -686,12 +660,7 @@ pub async fn upload_file(
all_game_versions: Vec<models::categories::GameVersion>,
ignore_primary: bool,
force_primary: bool,
version_display_name: String,
version_changelog: String,
version_type: String,
flame_anvil_queue: &Mutex<FlameAnvilQueue>,
flame_anvil_project: Option<i32>,
flame_anvil_key: Option<String>,
file_type: Option<FileType>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), CreateError> {
let (file_name, file_extension) = get_name_ext(content_disposition)?;
@ -812,42 +781,6 @@ pub async fn upload_file(
|| force_primary
|| total_files_len == 1;
if primary {
if let Some(project_id) = flame_anvil_project {
if let Some(key) = flame_anvil_key {
let mut flame_anvil_queue = flame_anvil_queue.lock().await;
let is_plugin = loaders.iter().any(|x| {
PluginYmlValidator {}
.get_supported_loaders()
.contains(&&*x.0)
});
flame_anvil_queue
.upload_file(
&key,
project_id,
UploadFile {
loaders: loaders.into_iter().map(|x| x.0).collect(),
game_versions: game_versions
.into_iter()
.map(|x| x.0)
.collect(),
display_name: version_display_name,
changelog: version_changelog,
version_type,
},
&all_game_versions,
data.to_vec(),
file_name.to_string(),
content_type.to_string(),
is_plugin,
)
.await?;
}
}
}
let file_path_encode = format!(
"data/{}/versions/{}/{}",
project_id,
@ -899,6 +832,7 @@ pub async fn upload_file(
],
primary,
size: upload_data.content_length,
file_type,
});
Ok(())

View File

@ -1,7 +1,7 @@
use super::ApiError;
use crate::database;
use crate::models;
use crate::models::projects::{Dependency, Version, VersionStatus};
use crate::models::projects::{Dependency, FileType, Version, VersionStatus};
use crate::models::teams::Permissions;
use crate::util::auth::{
get_user_from_headers, is_authorized, is_authorized_version,
@ -220,6 +220,14 @@ pub struct EditVersion {
pub primary_file: Option<(String, String)>,
pub downloads: Option<u32>,
pub status: Option<VersionStatus>,
pub file_types: Option<Vec<EditVersionFileType>>,
}
#[derive(Serialize, Deserialize)]
/// One entry of `EditVersion::file_types`: identifies an existing version file
/// by its hash and sets (or clears) that file's stored type.
pub struct EditVersionFileType {
    /// Hash algorithm used for the lookup (matched against `hashes.algorithm`).
    pub algorithm: String,
    /// The file's hash value under that algorithm (matched against `hashes.hash`).
    pub hash: String,
    /// New file type to store; `None` clears the column (sets it to NULL).
    pub file_type: Option<FileType>,
}
#[patch("{id}")]
@ -550,6 +558,40 @@ pub async fn version_edit(
.await?;
}
// Apply any requested per-file type changes. Each entry identifies a file by
// (algorithm, hash); the file must already exist or the whole edit fails.
if let Some(file_types) = &new_version.file_types {
    for file_type in file_types {
        // Resolve the (algorithm, hash) pair to a file id.
        let result = sqlx::query!(
            "
SELECT f.id id FROM hashes h
INNER JOIN files f ON h.file_id = f.id
WHERE h.algorithm = $2 AND h.hash = $1
",
            file_type.hash.as_bytes(),
            file_type.algorithm
        )
        .fetch_optional(&**pool)
        .await?
        .ok_or_else(|| {
            // Report the hash that failed to match — previously this
            // interpolated the algorithm name, which made the error
            // misleading (and needlessly cloned it).
            ApiError::InvalidInput(format!(
                "Specified file with hash {} does not exist.",
                file_type.hash
            ))
        })?;

        // Persist the new type; `None` maps to SQL NULL, clearing the column.
        sqlx::query!(
            "
UPDATE files
SET file_type = $2
WHERE (id = $1)
",
            result.id,
            file_type.file_type.as_ref().map(|x| x.as_str()),
        )
        .execute(&mut *transaction)
        .await?;
    }
}
transaction.commit().await?;
Ok(HttpResponse::NoContent().body(""))
} else {