General cleanup: fix some bugs, some refactoring (#65)

* Merged mod file upload in version creation, mod creation, and
  version file add into one function; this makes sure that they are
  consistent
* Made some fields on `User` optional: `github_id`, `avatar_url`, `bio`.
    * We may not want to publicly show the `github_id` to everyone
      with access to the API
    * If we allow non-github users, some of those fields would be
      invalid; some oauth providers may not have avatars or bios
* Made CORS origins configurable via the `CORS_ORIGINS` environment variable
* Made `--reconfigure-indices` and `--reset-indices` exit after
  completion instead of starting the server
This commit is contained in:
Aeledfyr 2020-10-01 00:07:52 -05:00 committed by GitHub
parent 43a791db65
commit c4fb7b7928
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 318 additions and 561 deletions

1
.env
View File

@ -1,6 +1,7 @@
DEBUG=true
RUST_LOG=info,sqlx::query=warn
CORS_ORIGINS='["http://localhost:3000","https://modrinth.com"]'
CDN_URL=https://cdn.modrinth.com
DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth

View File

@ -0,0 +1,35 @@
-- Originally:
-- ALTER TABLE users
-- ADD COLUMN github_id bigint NOT NULL default 0,
-- ADD COLUMN username varchar(255) NOT NULL default 'username',
-- ADD COLUMN name varchar(255) NOT NULL default 'John Doe',
-- ADD COLUMN email varchar(255) NULL default 'johndoe@modrinth.com',
-- ADD COLUMN avatar_url varchar(500) NOT NULL default '...',
-- ADD COLUMN bio varchar(160) NOT NULL default 'I make mods!',
-- ADD COLUMN created timestamptz default CURRENT_TIMESTAMP NOT NULL

-- We don't want garbage data when users are created incorrectly;
-- we just want it to fail.
-- Allow github_id, avatar_url, and bio to be NULL, and drop the
-- placeholder defaults everywhere so incomplete inserts fail loudly
-- instead of silently storing filler values.
ALTER TABLE users
    ALTER COLUMN github_id DROP NOT NULL,
    ALTER COLUMN github_id DROP DEFAULT,
    ALTER COLUMN avatar_url DROP NOT NULL,
    ALTER COLUMN avatar_url DROP DEFAULT,
    ALTER COLUMN username DROP DEFAULT,
    ALTER COLUMN name DROP DEFAULT,
    ALTER COLUMN email DROP DEFAULT,
    ALTER COLUMN bio DROP DEFAULT,
    ALTER COLUMN bio DROP NOT NULL;

View File

@ -266,118 +266,6 @@
"nullable": []
}
},
"351af9c9c1c05556bdd8c373f406a66c9358c51dc4222f8abc5095fbf2458471": {
"query": "\n SELECT u.id, u.name, u.email,\n u.avatar_url, u.username, u.bio,\n u.created\n FROM users u\n WHERE u.github_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "email",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "avatar_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "created",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false
]
}
},
"35272854c6aeb743218e73ccf6f34427ab72f25492dfa752f87a50e3da7204c5": {
"query": "\n SELECT v.mod_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n release_channels.channel\n FROM versions v\n INNER JOIN release_channels ON v.release_channel = release_channels.id\n WHERE v.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "channel",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
false
]
}
},
"398ac436f5fe2f6a66544204b9ff01ae1ea1204edf03ffc16de657a861cfe0ba": {
"query": "\n DELETE FROM categories\n WHERE category = $1\n ",
"describe": {
@ -543,62 +431,6 @@
]
}
},
"5aaae159c75c9385f4d969338bce509852d4b3e3ae9d4c4e366055b5b499b19a": {
"query": "\n SELECT v.mod_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n v.release_channel\n FROM versions v\n WHERE v.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "release_channel",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
false
]
}
},
"6562c876826ad3091a14eb50fa1f961a971c1d1bb158fc3dcb55d469a73facc6": {
"query": "\n SELECT v.mod_id, v.author_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n v.release_channel\n FROM versions v\n WHERE v.id = $1\n ",
"describe": {
@ -873,32 +705,6 @@
"nullable": []
}
},
"a55925860b4a46af864a8c38f942d7cdd85c00638e761b9696de0bf47335173b": {
"query": "\n SELECT mod_id, version_number\n FROM versions\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "version_number",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false
]
}
},
"a5d47fb171b0a1ba322125e7cedebf5af9c5831c319bbc4f8f087cb63322bee3": {
"query": "\n SELECT files.id, files.url, files.filename FROM files\n WHERE files.version_id = $1\n ",
"describe": {
@ -994,12 +800,12 @@
]
},
"nullable": [
false,
false,
true,
false,
true,
true,
false,
false,
true,
false,
false
]
@ -1523,33 +1329,14 @@
false,
false,
true,
true,
false,
false,
false,
true,
false,
false
]
}
},
"eaea3f606f926d7e1fc51a9798ce3c6448f0f02d55ce48bb38e84dc1bdced740": {
"query": "\n INSERT INTO versions (\n id, mod_id, name, version_number,\n changelog_url, date_published,\n downloads, release_channel\n )\n VALUES (\n $1, $2, $3, $4,\n $5, $6,\n $7, $8\n )\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Varchar",
"Varchar",
"Varchar",
"Timestamptz",
"Int4",
"Int4"
]
},
"nullable": []
}
},
"ebf2d1fbcd12816799b60be6e8dec606eadd96edc26a840a411b44a19dc0497c": {
"query": "\n SELECT loaders.loader FROM versions\n INNER JOIN loaders_versions lv ON lv.version_id = versions.id\n INNER JOIN loaders ON loaders.id = lv.loader_id\n WHERE versions.mod_id = $1\n ",
"describe": {
@ -1664,62 +1451,6 @@
]
}
},
"f772d6c3d287da99e00390517ea56cf3190658781da471bef58230e82b892b8c": {
"query": "\n SELECT u.github_id, u.name, u.email,\n u.avatar_url, u.username, u.bio,\n u.created\n FROM users u\n WHERE u.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "github_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "email",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "avatar_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "created",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false
]
}
},
"f7bea04e8e279e27a24de1bdf3c413daa8677994df5131494b28691ed6611efc": {
"query": "\n SELECT url,expires FROM states\n WHERE id = $1\n ",
"describe": {

View File

@ -58,7 +58,7 @@ where
match res {
Some(result) => Ok(User {
id: UserId::from(result.id),
github_id: result.github_id as u64,
github_id: result.github_id.map(|i| i as u64),
username: result.username,
name: result.name,
email: result.email,

View File

@ -2,12 +2,12 @@ use super::ids::UserId;
pub struct User {
pub id: UserId,
pub github_id: i64,
pub github_id: Option<i64>,
pub username: String,
pub name: String,
pub email: Option<String>,
pub avatar_url: String,
pub bio: String,
pub avatar_url: Option<String>,
pub bio: Option<String>,
pub created: chrono::DateTime<chrono::Utc>,
pub role: String,
}
@ -33,8 +33,8 @@ impl User {
&self.username,
&self.name,
self.email.as_ref(),
&self.avatar_url,
&self.bio,
self.avatar_url.as_ref(),
self.bio.as_ref(),
self.created,
)
.execute(&mut *transaction)
@ -99,7 +99,7 @@ impl User {
if let Some(row) = result {
Ok(Some(User {
id: UserId(row.id),
github_id: github_id as i64,
github_id: Some(github_id as i64),
name: row.name,
email: row.email,
avatar_url: row.avatar_url,

View File

@ -43,7 +43,7 @@ pub async fn run_migrations(uri: &str) -> Result<(), sqlx::Error> {
for migration in migrator.iter() {
if migration.version > version {
let elapsed = conn.apply(migration).await?;
let _elapsed = conn.apply(migration).await?;
} else {
conn.validate(migration).await?;
}

View File

@ -38,6 +38,23 @@ async fn main() -> std::io::Result<()> {
check_env_vars();
if config.reset_indices {
info!("Resetting indices");
search::indexing::reset_indices().await.unwrap();
return Ok(());
} else if config.reconfigure_indices {
info!("Reconfiguring indices");
search::indexing::reconfigure_indices().await.unwrap();
return Ok(());
}
// Allow manually skipping the initial indexing for quicker iteration
// and startup times.
let skip_initial = config.skip_first_index;
if skip_initial {
info!("Skipping initial indexing");
}
database::check_for_migrations()
.await
.expect("An error occurred while running migrations.");
@ -65,21 +82,6 @@ async fn main() -> std::io::Result<()> {
Arc::new(file_hosting::MockHost::new())
};
if config.reset_indices {
info!("Resetting indices");
search::indexing::reset_indices().await.unwrap();
} else if config.reconfigure_indices {
info!("Reconfiguring indices");
search::indexing::reconfigure_indices().await.unwrap();
}
// Allow manually skipping the initial indexing for quicker iteration
// and startup times.
let skip_initial = config.skip_first_index;
if skip_initial {
info!("Skipping initial indexing");
}
let mut scheduler = scheduler::Scheduler::new();
// The interval in seconds at which the local database is indexed
@ -171,21 +173,26 @@ async fn main() -> std::io::Result<()> {
});
}
let allowed_origins = dotenv::var("CORS_ORIGINS")
.ok()
.and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
.unwrap_or_else(|| vec![String::from("http://localhost")]);
info!("Starting Actix HTTP server!");
// Init App
HttpServer::new(move || {
let mut cors = Cors::new()
.allowed_methods(vec!["GET", "POST"])
.allowed_headers(vec![http::header::AUTHORIZATION, http::header::ACCEPT])
.allowed_header(http::header::CONTENT_TYPE)
.max_age(3600);
for allowed_origin in &allowed_origins {
cors = cors.allowed_origin(allowed_origin);
}
App::new()
.wrap(
Cors::new()
.allowed_origin("http://localhost:3000")
.allowed_origin("https://modrinth.com")
.allowed_methods(vec!["GET", "POST"])
.allowed_headers(vec![http::header::AUTHORIZATION, http::header::ACCEPT])
.allowed_header(http::header::CONTENT_TYPE)
.max_age(3600)
.finish(),
)
.wrap(cors.finish())
.wrap(Logger::default())
.wrap(Logger::new("%a %{User-Agent}i"))
.data(pool.clone())
@ -221,6 +228,15 @@ fn check_env_vars() {
)
}
}
if dotenv::var("CORS_ORIGINS")
.ok()
.and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
.is_none()
{
warn!("Variable `CORS_ORIGINS` missing in dotenv or not a json array of strings");
}
check_var::<String>("CDN_URL");
check_var::<String>("DATABASE_URL");
check_var::<String>("MEILISEARCH_ADDR");

View File

@ -89,7 +89,7 @@ pub struct VersionFile {
pub hashes: std::collections::HashMap<String, String>,
/// A direct link to the file for downloading it.
pub url: String,
/// A direct link to the file for downloading it.
/// The filename of the file.
pub filename: String,
}
@ -101,14 +101,13 @@ pub enum VersionType {
Alpha,
}
impl ToString for VersionType {
fn to_string(&self) -> String {
impl std::fmt::Display for VersionType {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
VersionType::Release => "release",
VersionType::Beta => "beta",
VersionType::Alpha => "alpha",
VersionType::Release => write!(fmt, "release"),
VersionType::Beta => write!(fmt, "beta"),
VersionType::Alpha => write!(fmt, "alpha"),
}
.to_string()
}
}
@ -122,6 +121,8 @@ pub struct GameVersion(pub String);
#[serde(transparent)]
pub struct ModLoader(pub String);
// These fields must always succeed parsing; deserialize errors aren't
// processed correctly (don't return JSON errors)
#[derive(Serialize, Deserialize)]
pub struct SearchRequest {
pub query: Option<String>,
@ -133,5 +134,5 @@ pub struct SearchRequest {
pub version: Option<String>,
pub offset: Option<String>,
pub index: Option<String>,
pub limit: Option<usize>,
pub limit: Option<String>,
}

View File

@ -2,7 +2,6 @@ use super::ids::Base62Id;
use crate::models::users::UserId;
use serde::{Deserialize, Serialize};
//TODO Implement Item for teams
/// The ID of a team
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
@ -26,6 +25,6 @@ pub struct TeamMember {
pub user_id: UserId,
/// The name of the user
pub name: String,
///The role of the use in the team
/// The role of the user in the team
pub role: String,
}

View File

@ -9,29 +9,30 @@ pub struct UserId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct User {
pub id: UserId,
pub github_id: u64,
pub github_id: Option<u64>,
pub username: String,
pub name: String,
pub email: Option<String>,
pub avatar_url: String,
pub bio: String,
pub avatar_url: Option<String>,
pub bio: Option<String>,
pub created: chrono::DateTime<chrono::Utc>,
pub role: Role,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Role {
Developer,
Moderator,
Admin,
}
impl ToString for Role {
fn to_string(&self) -> String {
impl std::fmt::Display for Role {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Role::Developer => String::from("developer"),
Role::Moderator => String::from("moderator"),
Role::Admin => String::from("admin"),
Role::Developer => write!(fmt, "developer"),
Role::Moderator => write!(fmt, "moderator"),
Role::Admin => write!(fmt, "admin"),
}
}
}

View File

@ -182,12 +182,12 @@ pub async fn auth_callback(
User {
id: user_id,
github_id: user.id as i64,
github_id: Some(user.id as i64),
username: user.login,
name: user.name,
email: user.email,
avatar_url: user.avatar_url,
bio: user.bio,
avatar_url: Some(user.avatar_url),
bio: Some(user.bio),
created: Utc::now(),
role: Role::Developer.to_string(),
}

View File

@ -35,9 +35,7 @@ pub fn versions_config(cfg: &mut web::ServiceConfig) {
web::scope("version")
.service(versions::version_get)
.service(versions::version_delete)
.service(
web::scope("{version_id}").service(version_creation::upload_file_to_version),
),
.service(version_creation::upload_file_to_version),
);
}

View File

@ -44,6 +44,8 @@ pub enum CreateError {
InvalidLoader(String),
#[error("Invalid category: {0}")]
InvalidCategory(String),
#[error("Invalid file type for version file: {0}")]
InvalidFileType(String),
#[error("Authentication Error: {0}")]
Unauthorized(#[from] AuthenticationError),
}
@ -63,6 +65,7 @@ impl actix_web::ResponseError for CreateError {
CreateError::InvalidGameVersion(..) => StatusCode::BAD_REQUEST,
CreateError::InvalidLoader(..) => StatusCode::BAD_REQUEST,
CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST,
CreateError::InvalidFileType(..) => StatusCode::BAD_REQUEST,
CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED,
}
}
@ -82,6 +85,7 @@ impl actix_web::ResponseError for CreateError {
CreateError::InvalidGameVersion(..) => "invalid_input",
CreateError::InvalidLoader(..) => "invalid_input",
CreateError::InvalidCategory(..) => "invalid_input",
CreateError::InvalidFileType(..) => "invalid_input",
CreateError::Unauthorized(..) => "unauthorized",
},
description: &self.to_string(),
@ -204,16 +208,12 @@ async fn mod_create_inner(
continue;
}
let file_name = content_disposition.get_filename().ok_or_else(|| {
CreateError::MissingValueError("Missing content file name".to_string())
let create_data = mod_create_data.as_ref().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
})?;
let file_extension = if let Some(last_period) = file_name.rfind('.') {
file_name.get((last_period + 1)..).unwrap_or("")
} else {
return Err(CreateError::MissingValueError(
"Missing content file extension".to_string(),
));
};
let (file_name, file_extension) =
super::version_creation::get_name_ext(&content_disposition)?;
if name == "icon" {
icon_url = process_icon_upload(
@ -229,134 +229,103 @@ async fn mod_create_inner(
continue;
}
if &*file_extension == "jar" {
let create_data = mod_create_data.as_ref().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
let version_data = create_data
.initial_versions
.iter()
.find(|x| x.file_parts.iter().any(|n| n == name))
.ok_or_else(|| {
CreateError::InvalidInput(format!(
"File `{}` (field {}) isn't specified in the versions data",
file_name, name
))
})?;
let version_data = create_data
.initial_versions
.iter()
.find(|x| x.file_parts.iter().any(|n| n == name))
.ok_or_else(|| {
CreateError::InvalidInput(format!(
"Jar file `{}` (field {}) isn't specified in the versions data",
file_name, name
))
})?;
// If a version has already been created for this version, add the
// file to it instead of creating a new version.
// Versions must have at least one jar file to be uploaded
// If a version has already been created for this version, add the
// file to it instead of creating a new version.
let created_version = if let Some(created_version) = created_versions
.iter_mut()
.find(|x| x.version_number == version_data.version_number)
{
created_version
} else {
let version_id: VersionId = models::generate_version_id(transaction).await?.into();
let created_version = if let Some(created_version) = created_versions
.iter_mut()
.find(|x| x.version_number == version_data.version_number)
{
created_version
} else {
let version_id: VersionId = models::generate_version_id(transaction).await?.into();
let body_url = format!("data/{}/changelogs/{}/body.md", mod_id, version_id);
let body_url = format!("data/{}/changelogs/{}/body.md", mod_id, version_id);
let uploaded_text = file_host
.upload_file(
"text/plain",
&body_url,
version_data.version_body.clone().into_bytes(),
)
.await?;
uploaded_files.push(UploadedFile {
file_id: uploaded_text.file_id.clone(),
file_name: uploaded_text.file_name.clone(),
});
// TODO: do a real lookup for the channels
let release_channel = match version_data.release_channel {
VersionType::Release => models::ChannelId(1),
VersionType::Beta => models::ChannelId(3),
VersionType::Alpha => models::ChannelId(5),
};
let mut game_versions = Vec::with_capacity(version_data.game_versions.len());
for v in &version_data.game_versions {
let id = models::categories::GameVersion::get_id(&v.0, &mut *transaction)
.await?
.ok_or_else(|| CreateError::InvalidGameVersion(v.0.clone()))?;
game_versions.push(id);
}
let mut loaders = Vec::with_capacity(version_data.loaders.len());
for l in &version_data.loaders {
let id = models::categories::Loader::get_id(&l.0, &mut *transaction)
.await?
.ok_or_else(|| CreateError::InvalidLoader(l.0.clone()))?;
loaders.push(id);
}
let version = models::version_item::VersionBuilder {
version_id: version_id.into(),
mod_id: mod_id.into(),
author_id: user.id.into(),
name: version_data.version_title.clone(),
version_number: version_data.version_number.clone(),
changelog_url: Some(format!("{}/{}", cdn_url, body_url)),
files: Vec::with_capacity(1),
dependencies: version_data
.dependencies
.iter()
.map(|x| (*x).into())
.collect::<Vec<_>>(),
game_versions,
loaders,
release_channel,
};
created_versions.push(version);
created_versions.last_mut().unwrap()
};
// Upload the new jar file
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
let upload_data = file_host
let uploaded_text = file_host
.upload_file(
"application/java-archive",
&format!(
"{}/{}/{}",
create_data.mod_namespace.replace(".", "/"),
version_data.version_number,
file_name
),
data.to_vec(),
"text/plain",
&body_url,
version_data.version_body.clone().into_bytes(),
)
.await?;
uploaded_files.push(UploadedFile {
file_id: upload_data.file_id.clone(),
file_name: upload_data.file_name.clone(),
file_id: uploaded_text.file_id.clone(),
file_name: uploaded_text.file_name.clone(),
});
// Add the newly uploaded file to the existing or new version
// TODO: do a real lookup for the channels
let release_channel = match version_data.release_channel {
VersionType::Release => models::ChannelId(1),
VersionType::Beta => models::ChannelId(3),
VersionType::Alpha => models::ChannelId(5),
};
// TODO: Malware scan + file validation
created_version
.files
.push(models::version_item::VersionFileBuilder {
filename: file_name.to_string(),
url: format!("{}/{}", cdn_url, upload_data.file_name),
hashes: vec![models::version_item::HashBuilder {
algorithm: "sha1".to_string(),
// This is an invalid cast - the database expects the hash's
// bytes, but this is the string version.
hash: upload_data.content_sha1.into_bytes(),
}],
});
}
let mut game_versions = Vec::with_capacity(version_data.game_versions.len());
for v in &version_data.game_versions {
let id = models::categories::GameVersion::get_id(&v.0, &mut *transaction)
.await?
.ok_or_else(|| CreateError::InvalidGameVersion(v.0.clone()))?;
game_versions.push(id);
}
let mut loaders = Vec::with_capacity(version_data.loaders.len());
for l in &version_data.loaders {
let id = models::categories::Loader::get_id(&l.0, &mut *transaction)
.await?
.ok_or_else(|| CreateError::InvalidLoader(l.0.clone()))?;
loaders.push(id);
}
let version = models::version_item::VersionBuilder {
version_id: version_id.into(),
mod_id: mod_id.into(),
author_id: user.id.into(),
name: version_data.version_title.clone(),
version_number: version_data.version_number.clone(),
changelog_url: Some(format!("{}/{}", cdn_url, body_url)),
files: Vec::with_capacity(1),
dependencies: version_data
.dependencies
.iter()
.map(|x| (*x).into())
.collect::<Vec<_>>(),
game_versions,
loaders,
release_channel,
};
created_versions.push(version);
created_versions.last_mut().unwrap()
};
// Upload the new jar file
let file_builder = super::version_creation::upload_file(
&mut field,
file_host,
uploaded_files,
&cdn_url,
&content_disposition,
mod_id,
&version_data.version_number,
)
.await?;
// Add the newly uploaded file to the existing or new version
created_version.files.push(file_builder);
}
let create_data = if let Some(create_data) = mod_create_data {

View File

@ -1,10 +1,10 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::models::users::{Role, UserId};
use crate::routes::ApiError;
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use sqlx::PgPool;
#[post("mod")]
#[get("user")]
pub async fn user_auth_get(
req: HttpRequest,
pool: web::Data<PgPool>,
@ -35,7 +35,7 @@ pub async fn user_get(
if let Some(data) = user_data {
let response = crate::models::users::User {
id: data.id.into(),
github_id: data.github_id as u64,
github_id: data.github_id.map(|i| i as u64),
username: data.username,
name: data.name,
email: None,

View File

@ -200,61 +200,23 @@ async fn version_create_inner(
continue;
}
let file_name = content_disposition.get_filename().ok_or_else(|| {
CreateError::MissingValueError("Missing content file name".to_string())
let version = version_builder.as_mut().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
})?;
let file_extension = if let Some(last_period) = file_name.rfind('.') {
file_name.get((last_period + 1)..).unwrap_or("")
} else {
return Err(CreateError::MissingValueError(
"Missing content file extension".to_string(),
));
};
if &*file_extension == "jar" {
let version = version_builder.as_mut().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
})?;
let file_builder = upload_file(
&mut field,
file_host,
uploaded_files,
&cdn_url,
&content_disposition,
ModId::from(mod_id),
&version.version_number,
)
.await?;
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
let upload_data = file_host
.upload_file(
"application/java-archive",
&format!(
"{}/{}/{}",
ModId::from(version.mod_id),
version.version_number,
file_name
),
data.to_vec(),
)
.await?;
uploaded_files.push(UploadedFile {
file_id: upload_data.file_id.clone(),
file_name: upload_data.file_name.clone(),
});
// Add the newly uploaded file to the existing or new version
// TODO: Malware scan + file validation
version
.files
.push(models::version_item::VersionFileBuilder {
filename: file_name.to_string(),
url: format!("{}/{}", cdn_url, upload_data.file_name),
hashes: vec![models::version_item::HashBuilder {
algorithm: "sha1".to_string(),
// This is an invalid cast - the database expects the hash's
// bytes, but this is the string version.
hash: upload_data.content_sha1.into_bytes(),
}],
});
}
// Add the newly uploaded file to the existing or new version
version.files.push(file_builder);
}
let version_data_safe = initial_version_data
@ -307,7 +269,7 @@ async fn version_create_inner(
// TODO: file deletion, listing, etc
// under /api/v1/mod/{mod_id}/version/{version_id}
#[post("file")]
#[post("{version_id}/file")]
pub async fn upload_file_to_version(
req: HttpRequest,
url_data: actix_web::web::Path<(ModId, VersionId)>,
@ -362,7 +324,7 @@ async fn upload_file_to_version_inner(
let cdn_url = dotenv::var("CDN_URL")?;
let mut initial_file_data: Option<InitialFileData> = None;
let mut file_builder: Option<VersionFileBuilder> = None;
let mut file_builders: Vec<VersionFileBuilder> = Vec::new();
let user = get_user_from_headers(req.headers(), &mut *transaction).await?;
@ -416,64 +378,108 @@ async fn upload_file_to_version_inner(
// TODO: currently no data here, but still required
initial_file_data = Some(file_data);
continue;
}
let file_name = content_disposition.get_filename().ok_or_else(|| {
CreateError::MissingValueError("Missing content file name".to_string())
let _file_data = initial_file_data.as_ref().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
})?;
let file_extension = if let Some(last_period) = file_name.rfind('.') {
file_name.get((last_period + 1)..).unwrap_or("")
} else {
return Err(CreateError::MissingValueError(
"Missing content file extension".to_string(),
));
};
if &*file_extension == "jar" {
let _file_data = initial_file_data.as_ref().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
})?;
let file_builder = upload_file(
&mut field,
file_host,
uploaded_files,
&cdn_url,
&content_disposition,
mod_id,
&version_number,
)
.await?;
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
let upload_data = file_host
.upload_file(
"application/java-archive",
&format!("{}/{}/{}", mod_id, version_number, file_name),
data.to_vec(),
)
.await?;
uploaded_files.push(UploadedFile {
file_id: upload_data.file_id.clone(),
file_name: upload_data.file_name.clone(),
});
// TODO: Malware scan + file validation
file_builder = Some(models::version_item::VersionFileBuilder {
filename: file_name.to_string(),
url: format!("{}/{}", cdn_url, upload_data.file_name),
hashes: vec![models::version_item::HashBuilder {
algorithm: "sha1".to_string(),
// This is an invalid cast - the database expects the hash's
// bytes, but this is the string version.
hash: upload_data.content_sha1.into_bytes(),
}],
});
break;
}
// TODO: Malware scan + file validation
file_builders.push(file_builder);
}
if let Some(file_builder) = file_builder {
file_builder.insert(version_id, &mut *transaction).await?;
} else {
if file_builders.is_empty() {
return Err(CreateError::InvalidInput(
"A file must be specified".to_string(),
"At least one file must be specified".to_string(),
));
} else {
for file_builder in file_builders {
file_builder.insert(version_id, &mut *transaction).await?;
}
}
Ok(HttpResponse::Ok().into())
}
// This function is used for adding a file to a version, uploading the initial
// files for a version, and for uploading the initial version files for a mod
//
// `uploaded_files` records every file pushed to the file host — presumably so
// the caller can delete them if the surrounding request later fails; confirm
// against call sites.
pub async fn upload_file(
    field: &mut Field,
    file_host: &dyn FileHost,
    uploaded_files: &mut Vec<UploadedFile>,
    cdn_url: &str,
    content_disposition: &actix_web::http::header::ContentDisposition,
    mod_id: crate::models::ids::ModId,
    version_number: &str,
) -> Result<models::version_item::VersionFileBuilder, CreateError> {
    // Derive the filename/extension from the multipart headers, then map the
    // extension to a MIME type; unsupported extensions are rejected here with
    // `InvalidFileType`.
    let (file_name, file_extension) = get_name_ext(content_disposition)?;
    let content_type = mod_file_type(file_extension)
        .ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?;

    // Buffer the entire multipart field in memory before uploading.
    let mut data = Vec::new();
    while let Some(chunk) = field.next().await {
        data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
    }

    // Files are stored under `{mod_id}/{version_number}/{file_name}` on the
    // file host.
    let upload_data = file_host
        .upload_file(
            content_type,
            &format!("{}/{}/{}", mod_id, version_number, file_name),
            data.to_vec(),
        )
        .await?;

    // Track the upload before building the result, so it is recorded even if
    // later steps of the surrounding request fail.
    uploaded_files.push(UploadedFile {
        file_id: upload_data.file_id.clone(),
        file_name: upload_data.file_name.clone(),
    });

    // TODO: Malware scan + file validation
    Ok(models::version_item::VersionFileBuilder {
        filename: file_name.to_string(),
        url: format!("{}/{}", cdn_url, upload_data.file_name),
        hashes: vec![models::version_item::HashBuilder {
            algorithm: "sha1".to_string(),
            // This is an invalid cast - the database expects the hash's
            // bytes, but this is the string version.
            hash: upload_data.content_sha1.into_bytes(),
        }],
    })
}
// Currently we only support jar mods; this may change in the future (datapacks?)
// Maps a mod file extension to its MIME content type; returns `None` for
// any extension we do not accept.
fn mod_file_type(ext: &str) -> Option<&str> {
    if ext == "jar" {
        Some("application/java-archive")
    } else {
        None
    }
}
// Extracts the `(file_name, file_extension)` pair from a multipart
// Content-Disposition header. Errors if the filename is absent or has no
// '.'-separated extension.
pub fn get_name_ext(
    content_disposition: &actix_web::http::header::ContentDisposition,
) -> Result<(&str, &str), CreateError> {
    // A filename is mandatory for uploaded files.
    let file_name = match content_disposition.get_filename() {
        Some(name) => name,
        None => {
            return Err(CreateError::MissingValueError(
                "Missing content file name".to_string(),
            ))
        }
    };

    // The extension is everything after the final '.'; a dotless name is
    // treated as having no extension and rejected.
    let dot_index = file_name.rfind('.').ok_or_else(|| {
        CreateError::MissingValueError("Missing content file extension".to_string())
    })?;
    let file_extension = file_name.get((dot_index + 1)..).unwrap_or("");

    Ok((file_name, file_extension))
}

View File

@ -141,7 +141,7 @@ pub async fn search_for_mod(info: &SearchRequest) -> Result<SearchResults, Searc
let offset = info.offset.as_deref().unwrap_or("0").parse()?;
let index = info.index.as_deref().unwrap_or("relevance");
let limit = info.limit.unwrap_or(10);
let limit = info.limit.as_deref().unwrap_or("10").parse()?;
let search_query: &str = info
.query
.as_deref()