Version Creation (#47)

* Creation Stuff

* Make it work

* Response structs + Mod ID validation

* Run code formatter

* Push local changes

* Finish up version creation - fix comments, impl file creation

* fix: Add sqlx prepare data

Co-authored-by: Aeledfyr <aeledfyr@gmail.com>
This commit is contained in:
Geometrically 2020-08-12 11:05:49 -07:00 committed by GitHub
parent 7e2f1c9a8b
commit e2bf474332
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 589 additions and 234 deletions

View File

@ -1,31 +1,5 @@
{
"db": "PostgreSQL",
"1524c0462be70077736ac70fcd037fbf75651456b692e2ce40fa2e3fc8123984": {
"query": "\n SELECT hashes.algorithm, hashes.hash FROM hashes\n WHERE hashes.file_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "algorithm",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "hash",
"type_info": "Bytea"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false
]
}
},
"15b2a2f1bbbbab4f1d99e5e428b2ffba77c83814b936fa6e10e2703b207f6e9a": {
"query": "\n INSERT INTO team_members (id, team_id, user_id, member_name, role)\n VALUES ($1, $2, $3, $4, $5)\n ",
"describe": {
@ -75,44 +49,49 @@
"nullable": []
}
},
"35272854c6aeb743218e73ccf6f34427ab72f25492dfa752f87a50e3da7204c5": {
"query": "\n SELECT v.mod_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n release_channels.channel\n FROM versions v\n INNER JOIN release_channels ON v.release_channel = release_channels.id\n WHERE v.id = $1\n ",
"320b24c5ec3c7e71a4088a2862fb02b31a3d3cfc331ccd60d73dfd49af3e53c0": {
"query": "\n SELECT *\n FROM versions\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"ordinal": 3,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 3,
"ordinal": 4,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"ordinal": 5,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"ordinal": 6,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "channel",
"type_info": "Varchar"
"ordinal": 7,
"name": "release_channel",
"type_info": "Int4"
}
],
"parameters": {
@ -121,16 +100,32 @@
]
},
"nullable": [
true,
false,
false,
false,
false,
true,
false,
false,
true
false
]
}
},
"449920c44d498adf8b771973d6034dc97e1c7f3ff4d9d23599af432f294ed564": {
"query": "\n INSERT INTO files (id, version_id, url, filename)\n VALUES ($1, $2, $3, $4)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Varchar",
"Varchar"
]
},
"nullable": []
}
},
"4c99c0840159d18e88cd6094a41117258f2337346c145d926b5b610c76b5125f": {
"query": "\n SELECT c.category\n FROM mods_categories mc\n INNER JOIN categories c ON mc.joining_category_id=c.id\n WHERE mc.joining_mod_id = $1\n ",
"describe": {
@ -164,23 +159,18 @@
"nullable": []
}
},
"59cf9d085593887595ea45246291f2cd64fc6677d551e96bdb60c09ff1eebf99": {
"query": "\n SELECT files.id, files.url, files.filename FROM files\n WHERE files.version_id = $1\n ",
"a55925860b4a46af864a8c38f942d7cdd85c00638e761b9696de0bf47335173b": {
"query": "\n SELECT mod_id, version_number\n FROM versions\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "url",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "filename",
"name": "version_number",
"type_info": "Varchar"
}
],
@ -190,48 +180,52 @@
]
},
"nullable": [
false,
false,
false
]
}
},
"5aaae159c75c9385f4d969338bce509852d4b3e3ae9d4c4e366055b5b499b19a": {
"query": "\n SELECT v.mod_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n v.release_channel\n FROM versions v\n WHERE v.id = $1\n ",
"b133dbf99fbf7b02e0e7ebd7948445bec2ce952ea0f926575fae0af07913d8b6": {
"query": "\n SELECT *\n FROM versions\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"ordinal": 3,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 3,
"ordinal": 4,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"ordinal": 5,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"ordinal": 6,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 6,
"ordinal": 7,
"name": "release_channel",
"type_info": "Int4"
}
@ -242,86 +236,13 @@
]
},
"nullable": [
true,
false,
false,
true,
false,
false,
false
]
}
},
"96d7b2c8b7b69fc370bb1a2d4a449f972eb3893dad5d6c59e498663cfc93a5c3": {
"query": "\n SELECT title, description, downloads,\n icon_url, body_url, published,\n issues_url, source_url, wiki_url,\n team_id\n FROM mods\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 3,
"name": "icon_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "body_url",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "published",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "issues_url",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "source_url",
"type_info": "Varchar"
},
{
"ordinal": 8,
"name": "wiki_url",
"type_info": "Varchar"
},
{
"ordinal": 9,
"name": "team_id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
true,
true,
true,
false
]
}
@ -406,6 +327,20 @@
"nullable": []
}
},
"cb57ae673f1a7e50cc319efddb9bdc82e2251596bcf85aea52e8def343e423b8": {
"query": "\n INSERT INTO hashes (file_id, algorithm, hash)\n VALUES ($1, $2, $3)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Varchar",
"Bytea"
]
},
"nullable": []
}
},
"ccd913bb2f3006ffe881ce2fc4ef1e721d18fe2eed6ac62627046c955129610c": {
"query": "SELECT EXISTS(SELECT 1 FROM files WHERE id=$1)",
"describe": {
@ -446,18 +381,24 @@
]
}
},
"d67e6c185460a17b65c0dc01be0f436b87acc79fc56513f1c5c4c99e9b9cb283": {
"query": "\n INSERT INTO hashes (file_id, algorithm, hash)\n VALUES ($1, $2, $3)\n ",
"e0e1671ae27b7ade3e9fa340e9f98b5388f51412fe892f904f31deb40634a0e0": {
"query": "SELECT EXISTS(SELECT 1 FROM versions WHERE version_number=$1)",
"describe": {
"columns": [],
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8",
"Varchar",
"Bytea"
"Text"
]
},
"nullable": []
"nullable": [
null
]
}
},
"e7d0a64a08df6783c942f2fcadd94dd45f8d96ad3d3736e52ce90f68d396cdab": {
@ -573,46 +514,6 @@
]
}
},
"f0dd4e10e7c5c4c27ee84be6010919a1b23cb9438ff869c1902849874c75a4af": {
"query": "\n SELECT loaders.loader FROM loaders\n INNER JOIN loaders_versions ON loaders.id = loaders_versions.loader_id\n WHERE loaders_versions.version_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "loader",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false
]
}
},
"f80ca292323952d10dbd26d3453ced5c12bdd1b71dcd3cb3ade4c7d4dc3590f6": {
"query": "\n SELECT gv.version FROM game_versions_versions gvv\n INNER JOIN game_versions gv ON gvv.game_version_id=gv.id\n WHERE gvv.joining_version_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "version",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false
]
}
},
"fb6178b27856ff583039a974173efe5d6be4e347b6cc1d4904cf750a40d1b77f": {
"query": "\n SELECT dependency_id id FROM dependencies\n WHERE dependent_id = $1\n ",
"describe": {
@ -633,19 +534,25 @@
]
}
},
"fc4675de31f0256f43ff2033115f3dac603e05d400e26ac4d852929f37f5b74d": {
"query": "\n INSERT INTO files (id, version_id, url, filename)\n VALUES ($1, $2, $3, $4)\n ",
"fcb0ceeacfa2fa0f8f1f1987e744dabb73c26ac0fb8178ad9b3b9ebb3bd0acac": {
"query": "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number=$1) AND (mod_id=$2))",
"describe": {
"columns": [],
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8",
"Int8",
"Varchar",
"Varchar"
"Text",
"Int8"
]
},
"nullable": []
"nullable": [
null
]
}
}
}

View File

@ -20,6 +20,45 @@ pub struct VersionFileBuilder {
pub hashes: Vec<HashBuilder>,
}
impl VersionFileBuilder {
    /// Inserts this file, and every hash attached to it, into the database as
    /// part of the version identified by `version_id`.
    ///
    /// A fresh `FileId` is generated first, then one row is written to `files`
    /// and one row per entry of `self.hashes` to `hashes`, all on the supplied
    /// transaction. Returns the newly generated file id.
    ///
    /// # Errors
    /// Returns `DatabaseError` if id generation or any insert fails. The caller
    /// owns the transaction and is responsible for rolling it back on error.
    pub async fn insert(
        self,
        version_id: VersionId,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ) -> Result<FileId, DatabaseError> {
        let file_id = generate_file_id(&mut *transaction).await?;

        sqlx::query!(
            "
INSERT INTO files (id, version_id, url, filename)
VALUES ($1, $2, $3, $4)
",
            file_id as FileId,
            version_id as VersionId,
            self.url,
            self.filename,
        )
        .execute(&mut *transaction)
        .await?;

        // Each hash (e.g. sha1) gets its own row keyed by the new file id.
        for hash in self.hashes {
            sqlx::query!(
                "
INSERT INTO hashes (file_id, algorithm, hash)
VALUES ($1, $2, $3)
",
                file_id as FileId,
                hash.algorithm,
                hash.hash,
            )
            .execute(&mut *transaction)
            .await?;
        }

        Ok(file_id)
    }
}
pub struct HashBuilder {
pub algorithm: String,
pub hash: Vec<u8>,
@ -44,33 +83,7 @@ impl VersionBuilder {
version.insert(&mut *transaction).await?;
for file in self.files {
let file_id = generate_file_id(&mut *transaction).await?;
sqlx::query!(
"
INSERT INTO files (id, version_id, url, filename)
VALUES ($1, $2, $3, $4)
",
file_id as FileId,
self.version_id as VersionId,
file.url,
file.filename,
)
.execute(&mut *transaction)
.await?;
for hash in file.hashes {
sqlx::query!(
"
INSERT INTO hashes (file_id, algorithm, hash)
VALUES ($1, $2, $3)
",
file_id as FileId,
hash.algorithm,
hash.hash,
)
.execute(&mut *transaction)
.await?;
}
file.insert(self.version_id, transaction);
}
for dependency in self.dependencies {

View File

@ -178,6 +178,8 @@ async fn main() -> std::io::Result<()> {
.service(routes::index_get)
.service(routes::mod_search)
.service(routes::mod_create)
.service(routes::version_create)
.service(routes::upload_file_to_version)
.default_service(web::get().to(routes::not_found))
})
.bind(dotenv::var("BIND_ADDR").unwrap())?

View File

@ -1,5 +1,5 @@
use super::ids::Base62Id;
use super::teams::Team;
use super::teams::TeamId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@ -20,10 +20,8 @@ pub struct VersionId(pub u64);
pub struct Mod {
/// The ID of the mod, encoded as a base62 string.
pub id: ModId,
// TODO: send partial team structure to reduce requests, but avoid sending
// unnecessary info
/// The team of people that has ownership of this mod.
pub team: Team,
pub team: TeamId,
/// The title or name of the mod.
pub title: String,
/// A short description of the mod.

View File

@ -2,8 +2,11 @@ mod index;
mod mod_creation;
mod mods;
mod not_found;
mod version_creation;
pub use self::index::index_get;
pub use self::mod_creation::mod_create;
pub use self::mods::mod_search;
pub use self::not_found::not_found;
pub use self::version_creation::upload_file_to_version;
pub use self::version_creation::version_create;

View File

@ -1,8 +1,9 @@
use crate::database::models;
use crate::file_hosting::{FileHost, FileHostingError};
use crate::models::error::ApiError;
use crate::models::mods::{GameVersion, ModId, VersionId, VersionType};
use crate::models::mods::{ModId, VersionId, VersionType};
use crate::models::teams::TeamMember;
use crate::routes::version_creation::InitialVersionData;
use crate::search::indexing::queue::CreationQueue;
use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
@ -70,18 +71,6 @@ impl actix_web::ResponseError for CreateError {
}
}
#[derive(Serialize, Deserialize, Clone)]
struct InitialVersionData {
pub file_parts: Vec<String>,
pub version_number: String,
pub version_title: String,
pub version_body: String,
pub dependencies: Vec<VersionId>,
pub game_versions: Vec<GameVersion>,
pub release_channel: VersionType,
pub loaders: Vec<String>,
}
#[derive(Serialize, Deserialize, Clone)]
struct ModCreateData {
/// The title or name of the mod.
@ -106,12 +95,12 @@ struct ModCreateData {
pub wiki_url: Option<String>,
}
struct UploadedFile {
file_id: String,
file_name: String,
pub struct UploadedFile {
pub file_id: String,
pub file_name: String,
}
async fn undo_uploads(
pub async fn undo_uploads(
file_host: &dyn FileHost,
uploaded_files: &[UploadedFile],
) -> Result<(), CreateError> {
@ -423,10 +412,30 @@ async fn mod_create_inner(
indexing_queue.add(index_mod);
let response = crate::models::mods::Mod {
id: mod_id,
team: team_id.into(),
title: mod_builder.title.clone(),
description: mod_builder.description.clone(),
body_url: mod_builder.body_url.clone(),
published: now,
downloads: 0,
categories: create_data.categories.clone(),
versions: mod_builder
.initial_versions
.iter()
.map(|v| v.version_id.into())
.collect::<Vec<_>>(),
icon_url: mod_builder.icon_url.clone(),
issues_url: mod_builder.issues_url.clone(),
source_url: mod_builder.source_url.clone(),
wiki_url: mod_builder.wiki_url.clone(),
};
let _mod_id = mod_builder.insert(&mut *transaction).await?;
// TODO: respond with the new mod info, or with just the new mod id.
Ok(HttpResponse::Ok().into())
Ok(HttpResponse::Ok().json(response))
}
async fn process_icon_upload(

View File

@ -0,0 +1,423 @@
use crate::database::models;
use crate::database::models::version_item::{VersionBuilder, VersionFileBuilder};
use crate::file_hosting::FileHost;
use crate::models::mods::{
GameVersion, ModId, ModLoader, Version, VersionFile, VersionId, VersionType,
};
use crate::routes::mod_creation::{CreateError, UploadedFile};
use actix_multipart::{Field, Multipart};
use actix_web::web::Data;
use actix_web::{post, HttpResponse};
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
/// The JSON payload expected in the multipart `data` field when creating a
/// new version via `version_create`.
#[derive(Serialize, Deserialize, Clone)]
pub struct InitialVersionData {
    /// The mod this version is being created for; checked against the `mods`
    /// table before any file uploads are accepted.
    pub mod_id: ModId,
    // NOTE(review): not referenced anywhere in this handler — confirm whether
    // clients still need to send it.
    pub file_parts: Vec<String>,
    /// Must be unique per mod; enforced with an EXISTS query before insert.
    pub version_number: String,
    pub version_title: String,
    /// Changelog text; uploaded to the CDN as `body.md` for this version.
    pub version_body: String,
    pub dependencies: Vec<VersionId>,
    pub game_versions: Vec<GameVersion>,
    pub release_channel: VersionType,
    pub loaders: Vec<ModLoader>,
}
/// The JSON payload expected in the multipart `data` field when uploading an
/// additional file to an existing version. Currently carries no fields, but
/// the `data` part itself is still required by the handler.
#[derive(Serialize, Deserialize, Clone)]
struct InitialFileData {
    // TODO: hashes?
}
/// Creates a new version of an existing mod from a multipart payload: a JSON
/// `data` field (`InitialVersionData`) followed by one or more `.jar` files.
///
/// All database writes happen inside a single transaction. On failure the
/// transaction is rolled back and any files already pushed to the file host
/// are reverted via `undo_uploads`.
// NOTE(review): the route pattern has no leading '/' — confirm it matches the
// convention used by the other routes registered in main.rs.
#[post("api/v1/version")]
pub async fn version_create(
    payload: Multipart,
    client: Data<PgPool>,
    file_host: Data<std::sync::Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, CreateError> {
    let mut transaction = client.begin().await?;
    let mut uploaded_files = Vec::new();

    let result = version_create_inner(
        payload,
        &mut transaction,
        &***file_host,
        &mut uploaded_files,
    )
    .await;

    if result.is_err() {
        // Clean up CDN uploads first, then roll back the database changes.
        let undo_result = super::mod_creation::undo_uploads(&***file_host, &uploaded_files).await;
        let rollback_result = transaction.rollback().await;

        // Surface the upload-cleanup error in preference to the rollback error;
        // if both succeed, the original handler error falls through in `result`.
        if let Err(e) = undo_result {
            return Err(e);
        }
        if let Err(e) = rollback_result {
            return Err(e.into());
        }
    } else {
        transaction.commit().await?;
    }

    result
}
/// Transaction-scoped body of `version_create`.
///
/// Walks the multipart stream: the `data` field must arrive before any file
/// fields. Handling `data` validates the mod id and version-number uniqueness,
/// uploads the changelog body to the CDN, and creates a `VersionBuilder`;
/// every subsequent `.jar` field is uploaded to the CDN and appended to that
/// builder. Finally the builder is inserted and the new `Version` is returned
/// as JSON.
///
/// Every successful CDN upload is recorded in `uploaded_files` so the caller
/// can undo them if this function returns an error.
async fn version_create_inner(
    mut payload: Multipart,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    file_host: &dyn FileHost,
    uploaded_files: &mut Vec<UploadedFile>,
) -> Result<HttpResponse, CreateError> {
    let cdn_url = dotenv::var("CDN_URL")?;

    // Filled in when the `data` field is seen; both stay `None` until then.
    let mut initial_version_data = None;
    let mut version_builder = None;

    while let Some(item) = payload.next().await {
        let mut field: Field = item.map_err(CreateError::MultipartError)?;
        let content_disposition = field.content_disposition().ok_or_else(|| {
            CreateError::MissingValueError("Missing content disposition".to_string())
        })?;
        let name = content_disposition
            .get_name()
            .ok_or_else(|| CreateError::MissingValueError("Missing content name".to_string()))?;

        if name == "data" {
            // Buffer the whole JSON body of the `data` part.
            let mut data = Vec::new();
            while let Some(chunk) = field.next().await {
                data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
            }
            let version_create_data: InitialVersionData = serde_json::from_slice(&data)?;
            initial_version_data = Some(version_create_data);
            // Re-borrow immutably so the Option retains ownership for later use.
            let version_create_data = initial_version_data.as_ref().unwrap();
            // TODO: get mod_id from path (POST `/api/v1/mod/{mod_id}/version`)
            let mod_id: ModId = version_create_data.mod_id;

            // The referenced mod must already exist.
            let results = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)",
                models::ModId::from(mod_id) as models::ModId
            )
            .fetch_one(&mut *transaction)
            .await?;
            if !results.exists.unwrap_or(false) {
                return Err(CreateError::InvalidInput(
                    "An invalid mod id was supplied".to_string(),
                ));
            }

            // Version numbers must be unique within a mod. `unwrap_or(true)`
            // treats a NULL EXISTS result as a conflict (fail closed).
            let results = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number=$1) AND (mod_id=$2))",
                version_create_data.version_number,
                models::ModId::from(mod_id) as models::ModId,
            )
            .fetch_one(&mut *transaction)
            .await?;
            if results.exists.unwrap_or(true) {
                return Err(CreateError::InvalidInput(
                    "A version with that version_number already exists".to_string(),
                ));
            }

            let version_id: VersionId = models::generate_version_id(transaction).await?.into();

            // Upload the changelog body to the CDN before building the version.
            let body_url = format!("data/{}/changelogs/{}/body.md", mod_id, version_id);
            let uploaded_text = file_host
                .upload_file(
                    "text/plain",
                    &body_url,
                    version_create_data.version_body.clone().into_bytes(),
                )
                .await?;
            // Record the upload so it can be undone on a later failure.
            uploaded_files.push(UploadedFile {
                file_id: uploaded_text.file_id.clone(),
                file_name: uploaded_text.file_name.clone(),
            });

            // TODO: do a real lookup for the channels
            let release_channel = match version_create_data.release_channel {
                VersionType::Release => models::ChannelId(1),
                VersionType::Beta => models::ChannelId(3),
                VersionType::Alpha => models::ChannelId(5),
            };

            version_builder = Some(VersionBuilder {
                version_id: version_id.into(),
                mod_id: mod_id.into(),
                name: version_create_data.version_title.clone(),
                version_number: version_create_data.version_number.clone(),
                changelog_url: Some(format!("{}/{}", cdn_url, body_url)),
                files: Vec::with_capacity(1),
                dependencies: version_create_data
                    .dependencies
                    .iter()
                    .map(|x| (*x).into())
                    .collect::<Vec<_>>(),
                // TODO: add game_versions and loaders info
                game_versions: vec![],
                loaders: vec![],
                release_channel,
            });

            // The `data` part carries no file payload; move to the next field.
            continue;
        }

        // Everything past this point handles file fields only.
        let file_name = content_disposition.get_filename().ok_or_else(|| {
            CreateError::MissingValueError("Missing content file name".to_string())
        })?;
        let file_extension = if let Some(last_period) = file_name.rfind('.') {
            file_name.get((last_period + 1)..).unwrap_or("")
        } else {
            return Err(CreateError::MissingValueError(
                "Missing content file extension".to_string(),
            ));
        };

        // Only jar files are accepted; other extensions are silently skipped.
        if &*file_extension == "jar" {
            // Files may only follow a successfully parsed `data` field.
            let version = version_builder.as_mut().ok_or_else(|| {
                CreateError::InvalidInput(String::from("`data` field must come before file fields"))
            })?;

            let mut data = Vec::new();
            while let Some(chunk) = field.next().await {
                data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
            }

            let upload_data = file_host
                .upload_file(
                    "application/java-archive",
                    &format!(
                        "{}/{}/{}",
                        ModId::from(version.mod_id),
                        version.version_number,
                        file_name
                    ),
                    data.to_vec(),
                )
                .await?;
            uploaded_files.push(UploadedFile {
                file_id: upload_data.file_id.clone(),
                file_name: upload_data.file_name.clone(),
            });

            // Add the newly uploaded file to the existing or new version
            // TODO: Malware scan + file validation
            version
                .files
                .push(models::version_item::VersionFileBuilder {
                    filename: file_name.to_string(),
                    url: format!("{}/{}", cdn_url, upload_data.file_name),
                    hashes: vec![models::version_item::HashBuilder {
                        algorithm: "sha1".to_string(),
                        // This is an invalid cast - the database expects the hash's
                        // bytes, but this is the string version.
                        hash: upload_data.content_sha1.into_bytes(),
                    }],
                });
        }
    }

    // Both of these are set by the `data` branch; if either is missing the
    // payload never contained a `data` field.
    let version_data_safe = initial_version_data
        .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?;
    let version_builder_safe = version_builder
        .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?;

    // Build the API response from the builder before it is consumed by insert.
    let response = Version {
        id: version_builder_safe.version_id.into(),
        mod_id: version_builder_safe.mod_id.into(),
        name: version_builder_safe.name.clone(),
        version_number: version_builder_safe.version_number.clone(),
        changelog_url: version_builder_safe.changelog_url.clone(),
        date_published: chrono::Utc::now(),
        downloads: 0,
        version_type: version_data_safe.release_channel,
        files: version_builder_safe
            .files
            .iter()
            .map(|file| VersionFile {
                hashes: file
                    .hashes
                    .iter()
                    .map(|hash| crate::models::mods::FileHash {
                        algorithm: hash.algorithm.clone(),
                        // This is a hack since the hashes are currently stored as ASCII
                        // in the database, but represented here as a Vec<u8>. At some
                        // point we need to change the hash to be the real bytes in the
                        // database and add more processing here.
                        hash: String::from_utf8(hash.hash.clone()).unwrap(),
                    })
                    .collect(),
                url: file.url.clone(),
            })
            .collect::<Vec<_>>(),
        dependencies: version_data_safe.dependencies,
        game_versions: version_data_safe.game_versions,
        loaders: version_data_safe.loaders,
    };

    version_builder_safe.insert(transaction).await?;

    Ok(HttpResponse::Ok().json(response))
}
// TODO: file deletion, listing, etc
/// Uploads an additional file to an already existing version.
///
/// Mirrors `version_create`: all database writes run in one transaction, and
/// on failure both the transaction and any completed CDN uploads are undone.
// NOTE(review): route pattern has no leading '/' — confirm against the other
// registered routes.
#[post("api/v1/version/{version_id}/file")]
pub async fn upload_file_to_version(
    url_data: actix_web::web::Path<(VersionId,)>,
    payload: Multipart,
    client: Data<PgPool>,
    file_host: Data<std::sync::Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, CreateError> {
    let mut transaction = client.begin().await?;
    let mut uploaded_files = Vec::new();

    // Convert the public (base62) version id into its database form.
    let version_id = models::VersionId::from(url_data.into_inner().0);

    let result = upload_file_to_version_inner(
        payload,
        &mut transaction,
        &***file_host,
        &mut uploaded_files,
        version_id,
    )
    .await;

    if result.is_err() {
        // Clean up CDN uploads first, then roll back the database changes.
        let undo_result = super::mod_creation::undo_uploads(&***file_host, &uploaded_files).await;
        let rollback_result = transaction.rollback().await;

        // The upload-cleanup error takes precedence over the rollback error.
        if let Err(e) = undo_result {
            return Err(e);
        }
        if let Err(e) = rollback_result {
            return Err(e.into());
        }
    } else {
        transaction.commit().await?;
    }

    result
}
async fn upload_file_to_version_inner(
mut payload: Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>,
version_id: models::VersionId,
) -> Result<HttpResponse, CreateError> {
let cdn_url = dotenv::var("CDN_URL")?;
let mut initial_file_data: Option<InitialFileData> = None;
let mut file_builder: Option<VersionFileBuilder> = None;
let result = sqlx::query!(
"
SELECT mod_id, version_number
FROM versions
WHERE id = $1
",
version_id as models::VersionId,
)
.fetch_optional(&mut *transaction)
.await?;
let version = match result {
Some(v) => v,
None => {
return Err(CreateError::InvalidInput(
"An invalid version id was supplied".to_string(),
));
}
};
let mod_id = ModId(version.mod_id as u64);
let version_number = version.version_number;
while let Some(item) = payload.next().await {
let mut field: Field = item.map_err(CreateError::MultipartError)?;
let content_disposition = field.content_disposition().ok_or_else(|| {
CreateError::MissingValueError("Missing content disposition".to_string())
})?;
let name = content_disposition
.get_name()
.ok_or_else(|| CreateError::MissingValueError("Missing content name".to_string()))?;
if name == "data" {
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
let file_data: InitialFileData = serde_json::from_slice(&data)?;
// TODO: currently no data here, but still required
initial_file_data = Some(file_data);
}
let file_name = content_disposition.get_filename().ok_or_else(|| {
CreateError::MissingValueError("Missing content file name".to_string())
})?;
let file_extension = if let Some(last_period) = file_name.rfind('.') {
file_name.get((last_period + 1)..).unwrap_or("")
} else {
return Err(CreateError::MissingValueError(
"Missing content file extension".to_string(),
));
};
if &*file_extension == "jar" {
let _file_data = initial_file_data.as_ref().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
})?;
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
let upload_data = file_host
.upload_file(
"application/java-archive",
&format!("{}/{}/{}", mod_id, version_number, file_name),
data.to_vec(),
)
.await?;
uploaded_files.push(UploadedFile {
file_id: upload_data.file_id.clone(),
file_name: upload_data.file_name.clone(),
});
// TODO: Malware scan + file validation
file_builder = Some(models::version_item::VersionFileBuilder {
filename: file_name.to_string(),
url: format!("{}/{}", cdn_url, upload_data.file_name),
hashes: vec![models::version_item::HashBuilder {
algorithm: "sha1".to_string(),
// This is an invalid cast - the database expects the hash's
// bytes, but this is the string version.
hash: upload_data.content_sha1.into_bytes(),
}],
});
break;
}
}
if let Some(file_builder) = file_builder {
file_builder.insert(version_id, &mut *transaction).await?;
} else {
return Err(CreateError::InvalidInput(
"A file must be specified".to_string(),
));
}
Ok(HttpResponse::Ok().into())
}

View File

@ -146,7 +146,7 @@ pub async fn index_curseforge(
using_fabric = true;
}
mod_categories.sort();
mod_categories.sort_unstable();
mod_categories.dedup();
mod_categories.truncate(3);