Rustic cleanups, dedups, and making the code easier to read in general (#251)

* typos :help_me:

* (part 1/?) massive cleanup to make the code more Rust-ic and cut down heap allocations.

* (part 2/?) massive cleanup to make the code more Rust-ic and cut down heap allocations.

* (part 3/?) cut down some pretty major heap allocations here - more `Bytes` and `BytesMut`s, fewer `Vec<u8>`s

also, I don't really understand why you need to `to_vec` when you don't really use it again afterwards - see the `Bytes` sketch after this list

* (part 4/?) deduplicate error handling in backblaze logic

* (part 5/?) fixes, cleanups, refactors, and reformatting

* (part 6/?) cleanups and refactors

* remove loads of `as_str` in types that already are `Display`

* Revert "remove loads of `as_str` in types that already are `Display`"

This reverts commit 4f974310cfb167ceba03001d81388db4f0fbb509.

* reformat and move routes util to the util module

* use streams

* Run prepare + formatting issues
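
A quick sketch (not from this diff) of why the `Bytes` switch cuts allocations: cloning a `Vec<u8>` allocates and copies the whole buffer, while cloning a `Bytes` only bumps a reference count, and `BytesMut::freeze` hands its buffer over without copying:

```rust
use bytes::{Bytes, BytesMut};

fn main() {
    // Vec<u8>: every clone is a fresh heap allocation plus a memcpy.
    let v: Vec<u8> = vec![0u8; 1024];
    let _v2 = v.clone(); // copies all 1024 bytes

    // Bytes: clones share one refcounted buffer; no copy.
    let mut buf = BytesMut::with_capacity(1024);
    buf.extend_from_slice(&[0u8; 1024]);
    let b: Bytes = buf.freeze(); // zero-copy handoff to the shared type
    let _b2 = b.clone();         // just a refcount bump
}
```

(Presumably also why `Cargo.toml` pins `bytes = "0.5.6"`: `.freeze()` on actix-web's `web::BytesMut` only yields the `Bytes` type the `FileHost` trait expects if both sides use the same major version of the crate.)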

Co-authored-by: Jai A <jaiagr+gpg@pm.me>
Co-authored-by: Geometrically <18202329+Geometrically@users.noreply.github.com>
commit 13187de97d (parent 0010119440)
Leo Chen, 2021-10-12 11:26:59 +08:00, committed by GitHub
53 changed files with 997 additions and 1129 deletions

.idea/vcs.xml (generated)

@ -1,12 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GithubSharedProjectSettings">
<option name="branchProtectionPatterns">
<list>
<option value="master" />
</list>
</option>
</component>
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>

Cargo.lock (generated)

@ -1969,6 +1969,7 @@ dependencies = [
"async-trait",
"base64 0.13.0",
"bitflags",
"bytes 0.5.6",
"chrono",
"dotenv",
"env_logger",

Cargo.toml

@ -59,3 +59,5 @@ sentry-actix = "0.22.0"
actix-web-prom = {git = "https://github.com/nlopes/actix-web-prom", branch = "master"}
prometheus = "0.12.0"
bytes = "0.5.6"

sqlx-data.json (generated)

@ -3131,6 +3131,153 @@
]
}
},
"8d491f3ccbddbd1e1bbea62d04090b2214d10182e3bfac7d8374ac183514f352": {
"query": "\n SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published,\n m.updated updated,\n m.team_id team_id, m.license license, m.slug slug,\n s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,\n STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,\n STRING_AGG(DISTINCT mg.image_url, ',') gallery\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id\n LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN statuses s ON s.id = m.status\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN licenses l ON m.license = l.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $2\n INNER JOIN users u ON tm.user_id = u.id\n WHERE m.id = $1\n GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "project_type",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 5,
"name": "follows",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "icon_url",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "published",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "updated",
"type_info": "Timestamptz"
},
{
"ordinal": 9,
"name": "team_id",
"type_info": "Int8"
},
{
"ordinal": 10,
"name": "license",
"type_info": "Int4"
},
{
"ordinal": 11,
"name": "slug",
"type_info": "Varchar"
},
{
"ordinal": 12,
"name": "status_name",
"type_info": "Varchar"
},
{
"ordinal": 13,
"name": "client_side_type",
"type_info": "Varchar"
},
{
"ordinal": 14,
"name": "server_side_type",
"type_info": "Varchar"
},
{
"ordinal": 15,
"name": "short",
"type_info": "Varchar"
},
{
"ordinal": 16,
"name": "project_type_name",
"type_info": "Varchar"
},
{
"ordinal": 17,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 18,
"name": "categories",
"type_info": "Text"
},
{
"ordinal": 19,
"name": "loaders",
"type_info": "Text"
},
{
"ordinal": 20,
"name": "versions",
"type_info": "Text"
},
{
"ordinal": 21,
"name": "gallery",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8",
"Text"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false,
false,
false,
false,
true,
false,
false,
false,
false,
false,
false,
null,
null,
null,
null
]
}
},
"8f706d78ac4235ea04c59e2c220a4791e1d08fdf287b783b4aaef36fd2445467": {
"query": "\n DELETE FROM loaders\n WHERE loader = $1\n ",
"describe": {
@ -5075,153 +5222,6 @@
"nullable": []
}
},
"dc70fb063947058851923f72ae1618e876c51335c0c6fdb82f097cb0bd68ccd7": {
"query": "\n SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published,\n m.updated updated,\n m.team_id team_id, m.license license, m.slug slug,\n s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,\n STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,\n STRING_AGG(DISTINCT mg.image_url, ',') gallery\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id\n LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN statuses s ON s.id = m.status\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN licenses l ON m.license = l.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $2\n INNER JOIN users u ON tm.user_id = u.id\n WHERE m.id = $1\n GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "project_type",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 5,
"name": "follows",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "icon_url",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "published",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "updated",
"type_info": "Timestamptz"
},
{
"ordinal": 9,
"name": "team_id",
"type_info": "Int8"
},
{
"ordinal": 10,
"name": "license",
"type_info": "Int4"
},
{
"ordinal": 11,
"name": "slug",
"type_info": "Varchar"
},
{
"ordinal": 12,
"name": "status_name",
"type_info": "Varchar"
},
{
"ordinal": 13,
"name": "client_side_type",
"type_info": "Varchar"
},
{
"ordinal": 14,
"name": "server_side_type",
"type_info": "Varchar"
},
{
"ordinal": 15,
"name": "short",
"type_info": "Varchar"
},
{
"ordinal": 16,
"name": "project_type_name",
"type_info": "Varchar"
},
{
"ordinal": 17,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 18,
"name": "categories",
"type_info": "Text"
},
{
"ordinal": 19,
"name": "loaders",
"type_info": "Text"
},
{
"ordinal": 20,
"name": "versions",
"type_info": "Text"
},
{
"ordinal": 21,
"name": "gallery",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8",
"Text"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false,
false,
false,
false,
true,
false,
false,
false,
false,
false,
false,
null,
null,
null,
null
]
}
},
"dd616640be8807405d0d98a55f8fa23a50186a6a87a64bcb872750c9f9404763": {
"query": "\n SELECT c.category category\n FROM mods_categories mc\n INNER JOIN categories c ON mc.joining_category_id = c.id\n WHERE mc.joining_mod_id = $1\n ",
"describe": {


@ -13,15 +13,13 @@ pub async fn connect() -> Result<PgPool, sqlx::Error> {
.min_connections(
dotenv::var("DATABASE_MIN_CONNECTIONS")
.ok()
.map(|x| x.parse::<u32>().ok())
.flatten()
.and_then(|x| x.parse().ok())
.unwrap_or(16),
)
.max_connections(
dotenv::var("DATABASE_MAX_CONNECTIONS")
.ok()
.map(|x| x.parse::<u32>().ok())
.flatten()
.and_then(|x| x.parse().ok())
.unwrap_or(16),
)
.connect(&database_url)
@ -30,7 +28,8 @@ pub async fn connect() -> Result<PgPool, sqlx::Error> {
Ok(pool)
}
pub async fn check_for_migrations() -> Result<(), sqlx::Error> {
let uri = &*dotenv::var("DATABASE_URL").expect("`DATABASE_URL` not in .env");
let uri = dotenv::var("DATABASE_URL").expect("`DATABASE_URL` not in .env");
let uri = uri.as_str();
if !Postgres::database_exists(uri).await? {
info!("Creating database...");
Postgres::create_database(uri).await?;
@ -50,7 +49,7 @@ pub async fn run_migrations(uri: &str) -> Result<(), sqlx::Error> {
let (version, dirty) = conn.version().await?.unwrap_or((0, false));
if dirty {
panic!("The database is dirty ! Please check your database status.");
panic!("The database is dirty! Please check your database status.");
}
for migration in migrator.iter() {
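
An aside on the `.map(|x| x.parse::<u32>().ok()).flatten()` → `.and_then(|x| x.parse().ok())` change above: for `Option`, the two forms are equivalent by definition. A minimal check:

```rust
fn main() {
    let s = Some("42");
    let a: Option<u32> = s.map(|x| x.parse().ok()).flatten();
    let b: Option<u32> = s.and_then(|x| x.parse().ok());
    assert_eq!(a, b); // Option::and_then is map + flatten in one step
}
```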


@ -1,5 +1,8 @@
use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
use bytes::Bytes;
use reqwest::Response;
use serde::Deserialize;
use sha2::Digest;
mod authorization;
@ -31,7 +34,7 @@ impl FileHost for BackblazeHost {
&self,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
@ -79,3 +82,14 @@ impl FileHost for BackblazeHost {
})
}
}
pub async fn process_response<T>(response: Response) -> Result<T, FileHostingError>
where
T: for<'de> Deserialize<'de>,
{
if response.status().is_success() {
Ok(response.json().await?)
} else {
Err(FileHostingError::BackblazeError(response.json().await?))
}
}
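
A note on the bound: `for<'de> Deserialize<'de>` is a higher-ranked bound saying the type deserializes from data of any lifetime, i.e. it owns its contents. It is the same constraint as `serde::de::DeserializeOwned` (which `reqwest::Response::json` itself requires), so an equivalent spelling would be:

```rust
use serde::de::DeserializeOwned;

// Same function as above, with the more common alias for the bound.
pub async fn process_response<T: DeserializeOwned>(
    response: Response,
) -> Result<T, FileHostingError> {
    if response.status().is_success() {
        Ok(response.json().await?)
    } else {
        Err(FileHostingError::BackblazeError(response.json().await?))
    }
}
```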


@ -44,11 +44,7 @@ pub async fn authorize_account(
.send()
.await?;
if response.status().is_success() {
Ok(response.json().await?)
} else {
Err(FileHostingError::BackblazeError(response.json().await?))
}
super::process_response(response).await
}
pub async fn get_upload_url(
@ -71,9 +67,5 @@ pub async fn get_upload_url(
.send()
.await?;
if response.status().is_success() {
Ok(response.json().await?)
} else {
Err(FileHostingError::BackblazeError(response.json().await?))
}
super::process_response(response).await
}


@ -34,9 +34,5 @@ pub async fn delete_file_version(
.send()
.await?;
if response.status().is_success() {
Ok(response.json().await?)
} else {
Err(FileHostingError::BackblazeError(response.json().await?))
}
super::process_response(response).await
}


@ -1,5 +1,6 @@
use super::authorization::UploadUrlData;
use crate::file_hosting::FileHostingError;
use bytes::Bytes;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone)]
@ -21,7 +22,7 @@ pub async fn upload_file(
url_data: &UploadUrlData,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
let response = reqwest::Client::new()
.post(&url_data.upload_url)
@ -40,9 +41,5 @@ pub async fn upload_file(
.send()
.await?;
if response.status().is_success() {
Ok(response.json().await?)
} else {
Err(FileHostingError::BackblazeError(response.json().await?))
}
super::process_response(response).await
}


@ -1,5 +1,6 @@
use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
use bytes::{Buf, Bytes};
use sha2::Digest;
pub struct MockHost(());
@ -16,15 +17,15 @@ impl FileHost for MockHost {
&self,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
let path = std::path::Path::new(&dotenv::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::create_dir_all(path.parent().ok_or(FileHostingError::InvalidFilename)?)?;
let content_sha1 = sha1::Sha1::from(&file_bytes).hexdigest();
let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
let content_sha1 = sha1::Sha1::from(file_bytes.bytes()).hexdigest();
let content_sha512 = format!("{:x}", sha2::Sha512::digest(file_bytes.bytes()));
std::fs::write(path, &file_bytes)?;
std::fs::write(path, file_bytes.bytes())?;
Ok(UploadFileData {
file_id: String::from("MOCK_FILE_ID"),
file_name: file_name.to_string(),


@ -6,6 +6,7 @@ mod mock;
mod s3_host;
pub use backblaze::BackblazeHost;
use bytes::Bytes;
pub use mock::MockHost;
use s3::creds::AwsCredsError;
use s3::S3Error;
@ -51,7 +52,7 @@ pub trait FileHost {
&self,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError>;
async fn delete_file_version(


@ -1,5 +1,6 @@
use crate::file_hosting::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
use bytes::{Buf, Bytes};
use s3::bucket::Bucket;
use s3::creds::Credentials;
use s3::region::Region;
@ -38,15 +39,15 @@ impl FileHost for S3Host {
&self,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
let content_sha1 = sha1::Sha1::from(&file_bytes).hexdigest();
let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
let content_sha512 = format!("{:x}", sha2::Sha512::digest(file_bytes.bytes()));
self.bucket
.put_object_with_content_type(
format!("/{}", file_name),
file_bytes.as_slice(),
file_bytes.bytes(),
content_type,
)
.await?;
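
On `file_bytes.bytes()` in the mock and S3 hosts: in bytes 0.5, `Buf::bytes()` returns the current contiguous chunk, which for `Bytes` is the entire buffer; since `Bytes` also derefs to `[u8]`, plain slicing would work as well. (bytes 1.0 later renamed this method to `Buf::chunk()`.)

```rust
use bytes::{Buf, Bytes}; // bytes = "0.5"

fn main() {
    let b = Bytes::from_static(b"hello");
    assert_eq!(b.bytes(), &b"hello"[..]); // Buf::bytes(): the whole contiguous buffer
    assert_eq!(&b[..], &b"hello"[..]);    // Deref<Target = [u8]> works too
}
```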


@ -1,10 +1,10 @@
pub mod scheduler;
pub mod pod;
pub mod scheduler;
pub mod status;
use lazy_static::lazy_static;
use std::sync::atomic::AtomicBool;
lazy_static!{
pub static ref SEARCH_READY: AtomicBool = AtomicBool::new(false);
}
lazy_static! {
pub static ref SEARCH_READY: AtomicBool = AtomicBool::new(false);
}


@ -1,4 +1,4 @@
use std::sync::{RwLock, Arc};
use std::sync::{Arc, RwLock};
#[derive(Clone, Debug)]
pub struct PodInfo {
@ -12,7 +12,7 @@ impl PodInfo {
Self {
pod_name: dotenv::var("POD_NAME").unwrap_or("DEV".to_string()),
node_name: dotenv::var("NODE_NAME").unwrap_or("self-hosted".to_string()),
pod_id: Arc::new(RwLock::new(None))
pod_id: Arc::new(RwLock::new(None)),
}
}
pub fn get_id(&self) -> String {
@ -30,4 +30,4 @@ impl PodInfo {
fn generate_id(&self) -> String {
base64::encode(format!("{}-{}", self.node_name, self.pod_name))
}
}
}


@ -1,18 +1,18 @@
use crate::scheduler::Scheduler;
use sqlx::{Pool, Postgres};
use prometheus::{opts, IntGaugeVec};
use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform};
use actix_web::Error;
use prometheus::{opts, IntGaugeVec};
use std::pin::Pin;
use std::future::{Future};
use std::task::{Context, Poll};
use futures::future::{ok, Ready};
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use crate::health::pod::PodInfo;
use actix_web::http::{HeaderName, HeaderValue};
use actix_web_prom::{PrometheusMetrics};
use actix_web_prom::PrometheusMetrics;
pub struct HealthCounters {
pod: PodInfo,
@ -35,13 +35,16 @@ impl HealthCounters {
pub fn register(&self, builder: &mut PrometheusMetrics) {
builder
.registry
.register(Box::new(self.opened_db_conn.clone())).unwrap();
.register(Box::new(self.opened_db_conn.clone()))
.unwrap();
builder
.registry
.register(Box::new(self.idle_db_conn.clone())).unwrap();
.register(Box::new(self.idle_db_conn.clone()))
.unwrap();
builder
.registry
.register(Box::new(self.current_requests.clone())).unwrap();
.register(Box::new(self.current_requests.clone()))
.unwrap();
}
pub fn schedule(&self, pool: Pool<Postgres>, scheduler: &mut Scheduler) {
let this = self.clone();
@ -69,10 +72,10 @@ impl Clone for HealthCounters {
}
impl<S, B> Transform<S> for HealthCounters
where
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
B: 'static,
where
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
B: 'static,
{
type Request = ServiceRequest;
type Response = ServiceResponse<B>;
@ -82,23 +85,23 @@ impl<S, B> Transform<S> for HealthCounters
type Future = Ready<Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
ok(MonitoringMiddleware { service, counters: self.clone() })
ok(MonitoringMiddleware {
service,
counters: self.clone(),
})
}
}
pub struct MonitoringMiddleware<S> {
service: S,
counters: HealthCounters,
}
impl<S, B> Service for MonitoringMiddleware<S>
where
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
B: 'static,
where
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
B: 'static,
{
type Request = ServiceRequest;
type Response = ServiceResponse<B>;
@ -112,7 +115,10 @@ impl<S, B> Service for MonitoringMiddleware<S>
fn call(&mut self, req: ServiceRequest) -> Self::Future {
// The request has started.
let pattern_or_path = req.match_pattern().unwrap_or("unknown".to_string());
let counter = self.counters.current_requests.with_label_values(&[&*pattern_or_path,req.method().as_str()]);
let counter = self
.counters
.current_requests
.with_label_values(&[&*pattern_or_path, req.method().as_str()]);
counter.inc();
let pod = self.counters.pod.clone();
let fut = self.service.call(req);
@ -120,8 +126,11 @@ impl<S, B> Service for MonitoringMiddleware<S>
let mut res: Self::Response = fut.await?;
// The request finished, remove a counter
counter.dec();
res.headers_mut().insert(HeaderName::from_static("x-server"), HeaderValue::from_str(&*pod.get_id()).unwrap());
res.headers_mut().insert(
HeaderName::from_static("x-server"),
HeaderValue::from_str(&*pod.get_id()).unwrap(),
);
Ok(res)
})
}
}
}


@ -1,16 +1,14 @@
use sqlx::{PgPool};
use actix_web::web;
use sqlx::PgPool;
pub async fn test_database(postgres: web::Data<PgPool>) -> Result<(), sqlx::Error> {
let mut transaction = postgres.acquire().await?;
let result = sqlx::query(
"
SELECT 1
"
).execute(&mut transaction)
.await;
match result {
Ok(_) => Ok(()),
Err(e) => Err(e)
}
}
sqlx::query(
"
SELECT 1
",
)
.execute(&mut transaction)
.await
.map(|_| ())
}


@ -1,28 +1,27 @@
use crate::file_hosting::S3Host;
use crate::health::scheduler::HealthCounters;
use crate::util::env::{parse_strings_from_var, parse_var};
use actix_cors::Cors;
use actix_ratelimit::errors::ARError;
use actix_ratelimit::{MemoryStore, MemoryStoreActor, RateLimiter};
use actix_web::{http, web, App, HttpServer};
use actix_web_prom::PrometheusMetricsBuilder;
use env_logger::Env;
use gumdrop::Options;
use log::{error, info, warn};
use rand::Rng;
use search::indexing::index_projects;
use search::indexing::IndexingSettings;
use std::sync::Arc;
use std::collections::HashMap;
use std::sync::atomic::Ordering;
use crate::health::pod::PodInfo;
use crate::health::scheduler::HealthCounters;
use actix_web_prom::{PrometheusMetricsBuilder};
use std::sync::Arc;
mod database;
mod file_hosting;
mod health;
mod models;
mod routes;
mod scheduler;
mod search;
mod health;
mod util;
mod validate;
@ -112,18 +111,16 @@ async fn main() -> std::io::Result<()> {
let storage_backend = dotenv::var("STORAGE_BACKEND").unwrap_or_else(|_| "local".to_string());
let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> = if storage_backend == "backblaze"
{
Arc::new(
let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> = match storage_backend.as_str() {
"backblaze" => Arc::new(
file_hosting::BackblazeHost::new(
&dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
&dotenv::var("BACKBLAZE_KEY").unwrap(),
&dotenv::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await,
)
} else if storage_backend == "s3" {
Arc::new(
),
"s3" => Arc::new(
S3Host::new(
&*dotenv::var("S3_BUCKET_NAME").unwrap(),
&*dotenv::var("S3_REGION").unwrap(),
@ -132,30 +129,24 @@ async fn main() -> std::io::Result<()> {
&*dotenv::var("S3_SECRET").unwrap(),
)
.unwrap(),
)
} else if storage_backend == "local" {
Arc::new(file_hosting::MockHost::new())
} else {
panic!("Invalid storage backend specified. Aborting startup!")
),
"local" => Arc::new(file_hosting::MockHost::new()),
_ => panic!("Invalid storage backend specified. Aborting startup!"),
};
let mut scheduler = scheduler::Scheduler::new();
// The interval in seconds at which the local database is indexed
// for searching. Defaults to 1 hour if unset.
let local_index_interval = std::time::Duration::from_secs(
dotenv::var("LOCAL_INDEX_INTERVAL")
.ok()
.map(|i| i.parse().unwrap())
.unwrap_or(3600),
);
let local_index_interval =
std::time::Duration::from_secs(parse_var("LOCAL_INDEX_INTERVAL").unwrap_or(3600));
let pool_ref = pool.clone();
let thread_search_config = search_config.clone();
let mut skip = skip_initial;
let pool_ref = pool.clone();
let search_config_ref = search_config.clone();
scheduler.run(local_index_interval, move || {
let pool_ref = pool_ref.clone();
let thread_search_config = thread_search_config.clone();
let search_config_ref = search_config_ref.clone();
let local_skip = skip;
if skip {
skip = false;
@ -166,7 +157,7 @@ async fn main() -> std::io::Result<()> {
}
info!("Indexing local database");
let settings = IndexingSettings { index_local: true };
let result = index_projects(pool_ref, settings, &thread_search_config).await;
let result = index_projects(pool_ref, settings, &search_config_ref).await;
if let Err(e) = result {
warn!("Local project indexing failed: {:?}", e);
}
@ -219,12 +210,12 @@ async fn main() -> std::io::Result<()> {
let indexing_queue = Arc::new(search::indexing::queue::CreationQueue::new());
let queue_ref = indexing_queue.clone();
let thread_search_config = search_config.clone();
let mut skip = skip_initial;
let queue_ref = indexing_queue.clone();
let search_config_ref = search_config.clone();
scheduler.run(std::time::Duration::from_secs(15 * 60), move || {
let queue = queue_ref.clone();
let thread_search_config = thread_search_config.clone();
let queue_ref = queue_ref.clone();
let search_config_ref = search_config_ref.clone();
let local_skip = skip;
if skip {
skip = false;
@ -234,7 +225,7 @@ async fn main() -> std::io::Result<()> {
return;
}
info!("Indexing created project queue");
let result = search::indexing::queue::index_queue(&*queue, &thread_search_config).await;
let result = queue_ref.index(&search_config_ref).await;
if let Err(e) = result {
warn!("Indexing created projects failed: {:?}", e);
}
@ -250,12 +241,6 @@ async fn main() -> std::io::Result<()> {
};
let store = MemoryStore::new();
// Generate pod id
let pod = PodInfo::new();
// Init prometheus cluster
let mut labels = HashMap::new();
labels.insert("pod".to_string(), pod.pod_name);
labels.insert("node".to_string(), pod.node_name);
// Get prometheus service
let mut prometheus = PrometheusMetricsBuilder::new("api")
@ -275,8 +260,8 @@ async fn main() -> std::io::Result<()> {
.wrap(health.clone())
.wrap(
Cors::default()
.allowed_methods(vec!["GET", "POST", "DELETE", "PATCH", "PUT"])
.allowed_headers(vec![http::header::AUTHORIZATION, http::header::ACCEPT])
.allowed_methods(["GET", "POST", "DELETE", "PATCH", "PUT"])
.allowed_headers([http::header::AUTHORIZATION, http::header::ACCEPT])
.allowed_header(http::header::CONTENT_TYPE)
.allow_any_origin()
.max_age(3600),
@ -288,12 +273,8 @@ async fn main() -> std::io::Result<()> {
RateLimiter::new(MemoryStoreActor::from(store.clone()).start())
.with_identifier(|req| {
let connection_info = req.connection_info();
let ip = String::from(
if dotenv::var("CLOUDFLARE_INTEGRATION")
.ok()
.map(|i| i.parse().unwrap())
.unwrap_or(false)
{
let ip =
String::from(if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
if let Some(header) = req.headers().get("CF-Connecting-IP") {
header.to_str().map_err(|_| ARError::IdentificationError)?
} else {
@ -305,13 +286,10 @@ async fn main() -> std::io::Result<()> {
connection_info
.remote_addr()
.ok_or(ARError::IdentificationError)?
},
);
});
let ignore_ips = dotenv::var("RATE_LIMIT_IGNORE_IPS")
.ok()
.and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
.unwrap_or_else(Vec::new);
let ignore_ips =
parse_strings_from_var("RATE_LIMIT_IGNORE_IPS").unwrap_or_default();
if ignore_ips.contains(&ip) {
// At an even distribution of numbers, this will allow at the most
@ -348,28 +326,19 @@ async fn main() -> std::io::Result<()> {
fn check_env_vars() -> bool {
let mut failed = false;
fn check_var<T: std::str::FromStr>(var: &str) -> bool {
if dotenv::var(var)
.ok()
.and_then(|s| s.parse::<T>().ok())
.is_none()
{
fn check_var<T: std::str::FromStr>(var: &'static str) -> bool {
let check = parse_var::<T>(var).is_none();
if check {
warn!(
"Variable `{}` missing in dotenv or not of type `{}`",
var,
std::any::type_name::<T>()
);
true
} else {
false
}
check
}
if dotenv::var("RATE_LIMIT_IGNORE_IPS")
.ok()
.and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
.is_none()
{
if parse_strings_from_var("RATE_LIMIT_IGNORE_IPS").is_none() {
warn!("Variable `RATE_LIMIT_IGNORE_IPS` missing in dotenv or not a json array of strings");
failed |= true;
}
@ -384,24 +353,31 @@ fn check_env_vars() -> bool {
failed |= check_var::<String>("STORAGE_BACKEND");
let storage_backend = dotenv::var("STORAGE_BACKEND").ok();
if storage_backend.as_deref() == Some("backblaze") {
failed |= check_var::<String>("BACKBLAZE_KEY_ID");
failed |= check_var::<String>("BACKBLAZE_KEY");
failed |= check_var::<String>("BACKBLAZE_BUCKET_ID");
} else if storage_backend.as_deref() == Some("s3") {
failed |= check_var::<String>("S3_ACCESS_TOKEN");
failed |= check_var::<String>("S3_SECRET");
failed |= check_var::<String>("S3_URL");
failed |= check_var::<String>("S3_REGION");
failed |= check_var::<String>("S3_BUCKET_NAME");
} else if storage_backend.as_deref() == Some("local") {
failed |= check_var::<String>("MOCK_FILE_PATH");
} else if let Some(backend) = storage_backend {
warn!("Variable `STORAGE_BACKEND` contains an invalid value: {}. Expected \"backblaze\", \"s3\", or \"local\".", backend);
failed |= true;
match storage_backend.as_deref() {
Some("backblaze") => {
failed |= check_var::<String>("BACKBLAZE_KEY_ID");
failed |= check_var::<String>("BACKBLAZE_KEY");
failed |= check_var::<String>("BACKBLAZE_BUCKET_ID");
}
Some("s3") => {
failed |= check_var::<String>("S3_ACCESS_TOKEN");
failed |= check_var::<String>("S3_SECRET");
failed |= check_var::<String>("S3_URL");
failed |= check_var::<String>("S3_REGION");
failed |= check_var::<String>("S3_BUCKET_NAME");
}
Some("local") => {
failed |= check_var::<String>("MOCK_FILE_PATH");
}
Some(backend) => {
warn!("Variable `STORAGE_BACKEND` contains an invalid value: {}. Expected \"backblaze\", \"s3\", or \"local\".", backend);
failed |= true;
}
_ => {
warn!("Variable `STORAGE_BACKEND` is not set!");
failed |= true;
}
}
failed |= check_var::<usize>("LOCAL_INDEX_INTERVAL");
failed |= check_var::<usize>("VERSION_INDEX_INTERVAL");


@ -16,13 +16,9 @@ pub use super::users::UserId;
/// This method panics if `n` is 0 or greater than 11, since a `u64`
/// can only represent up to 11 character base62 strings
#[allow(dead_code)]
#[inline]
pub fn random_base62(n: usize) -> u64 {
use rand::Rng;
assert!(n > 0 && n <= 11);
let mut rng = rand::thread_rng();
// gen_range is [low, high): max value is `MULTIPLES[n] - 1`,
// which is n characters long when encoded
rng.gen_range(MULTIPLES[n - 1], MULTIPLES[n])
random_base62_rng(&mut rand::thread_rng(), n)
}
/// Generates a random 64 bit integer that is exactly `n` characters
@ -35,6 +31,8 @@ pub fn random_base62(n: usize) -> u64 {
pub fn random_base62_rng<R: rand::RngCore>(rng: &mut R, n: usize) -> u64 {
use rand::Rng;
assert!(n > 0 && n <= 11);
// gen_range is [low, high): max value is `MULTIPLES[n] - 1`,
// which is n characters long when encoded
rng.gen_range(MULTIPLES[n - 1], MULTIPLES[n])
}
@ -50,7 +48,7 @@ const MULTIPLES: [u64; 12] = [
62 * 62 * 62 * 62 * 62 * 62 * 62 * 62,
62 * 62 * 62 * 62 * 62 * 62 * 62 * 62 * 62,
62 * 62 * 62 * 62 * 62 * 62 * 62 * 62 * 62 * 62,
std::u64::MAX,
u64::MAX,
];
/// An ID encoded as base62 for use in the API.
@ -63,7 +61,7 @@ pub struct Base62Id(pub u64);
/// An error decoding a number from base62.
#[derive(Error, Debug)]
pub enum DecodingError {
/// Encountered a non base62 character in base62 string
/// Encountered a non-base62 character in a base62 string
#[error("Invalid character {0:?} in base62 encoding")]
InvalidBase62(char),
/// Encountered integer overflow when decoding a base62 id.
@ -154,13 +152,8 @@ pub mod base62_impl {
}
}
const BASE62_CHARS: [u8; 62] = [
b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'A', b'B', b'C', b'D', b'E',
b'F', b'G', b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O', b'P', b'Q', b'R', b'S', b'T',
b'U', b'V', b'W', b'X', b'Y', b'Z', b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i',
b'j', b'k', b'l', b'm', b'n', b'o', b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', b'x',
b'y', b'z',
];
const BASE62_CHARS: [u8; 62] =
*b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
pub fn to_base62(mut num: u64) -> String {
let length = (num as f64).log(62.0).ceil() as usize;
@ -189,7 +182,7 @@ pub mod base62_impl {
return Err(DecodingError::InvalidBase62(c));
}
// We don't want this panicing or wrapping on integer overflow
// We don't want this panicking or wrapping on integer overflow
if let Some(n) = num.checked_mul(62).and_then(|n| n.checked_add(next_digit)) {
num = n;
} else {
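
A quick check of the `n <= 11` assertion documented above: `62^10` still fits in a `u64`, while `62^11` does not - `u64::MAX` itself encodes as an 11-character base62 string.

```rust
fn main() {
    // 62^10 ≈ 8.39e17 <= u64::MAX ≈ 1.84e19 < 62^11 ≈ 5.20e19
    assert!(62u128.pow(10) <= u64::MAX as u128);
    assert!(62u128.pow(11) > u64::MAX as u128);
}
```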


@ -4,4 +4,4 @@ pub mod notifications;
pub mod projects;
pub mod reports;
pub mod teams;
pub mod users;
pub mod users;


@ -22,9 +22,37 @@ pub struct Notification {
pub actions: Vec<NotificationAction>,
}
use crate::database::models::notification_item::Notification as DBNotification;
use crate::database::models::notification_item::NotificationAction as DBNotificationAction;
impl From<DBNotification> for Notification {
fn from(notif: DBNotification) -> Self {
Self {
id: notif.id.into(),
user_id: notif.user_id.into(),
type_: notif.notification_type,
title: notif.title,
text: notif.text,
link: notif.link,
read: notif.read,
created: notif.created,
actions: notif.actions.into_iter().map(Into::into).collect(),
}
}
}
#[derive(Serialize, Deserialize)]
pub struct NotificationAction {
pub title: String,
/// The route to call when this notification action is called. Formatted HTTP Method, route
pub action_route: (String, String),
}
impl From<DBNotificationAction> for NotificationAction {
fn from(act: DBNotificationAction) -> Self {
Self {
title: act.title,
action_route: (act.action_route_method, act.action_route),
}
}
}
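
Implementing `From<DBNotification>` also provides the reciprocal `Into` through std's blanket impl, which is why call sites later in this PR can write `.map(Notification::from)` and `.map(Into::into)` interchangeably. A toy illustration of the same mechanism:

```rust
struct DbRow(String);
struct ApiView(String);

impl From<DbRow> for ApiView {
    fn from(r: DbRow) -> Self {
        ApiView(r.0)
    }
}

fn main() {
    // std's blanket impl: impl<T, U> Into<U> for T where U: From<T>
    let rows = vec![DbRow("a".into())];
    let _explicit: Vec<ApiView> = rows.into_iter().map(ApiView::from).collect();
    let more = vec![DbRow("b".into())];
    let _concise: Vec<ApiView> = more.into_iter().map(Into::into).collect();
}
```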


@ -1,6 +1,8 @@
use super::ids::Base62Id;
use super::teams::TeamId;
use super::users::UserId;
use crate::database::models::project_item::QueryProject;
use crate::database::models::version_item::QueryVersion;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use validator::Validate;
@ -80,6 +82,70 @@ pub struct Project {
pub gallery: Vec<GalleryItem>,
}
impl From<QueryProject> for Project {
fn from(data: QueryProject) -> Self {
let m = data.inner;
Self {
id: m.id.into(),
slug: m.slug,
project_type: data.project_type,
team: m.team_id.into(),
title: m.title,
description: m.description,
body: m.body,
body_url: m.body_url,
published: m.published,
updated: m.updated,
status: data.status,
moderator_message: if let Some(message) = m.moderation_message {
Some(ModeratorMessage {
message,
body: m.moderation_message_body,
})
} else {
None
},
license: License {
id: data.license_id,
name: data.license_name,
url: m.license_url,
},
client_side: data.client_side,
server_side: data.server_side,
downloads: m.downloads as u32,
followers: m.follows as u32,
categories: data.categories,
versions: data.versions.into_iter().map(|v| v.into()).collect(),
icon_url: m.icon_url,
issues_url: m.issues_url,
source_url: m.source_url,
wiki_url: m.wiki_url,
discord_url: m.discord_url,
donation_urls: Some(
data.donation_urls
.into_iter()
.map(|d| DonationLink {
id: d.platform_short,
platform: d.platform_name,
url: d.url,
})
.collect(),
),
gallery: data
.gallery_items
.into_iter()
.map(|x| GalleryItem {
url: x.image_url,
featured: x.featured,
title: x.title,
description: x.description,
created: x.created,
})
.collect(),
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GalleryItem {
pub url: String,
@ -146,7 +212,7 @@ pub struct DonationLink {
pub url: String,
}
/// A status decides the visbility of a project in search, URLs, and the whole site itself.
/// A status decides the visibility of a project in search, URLs, and the whole site itself.
/// Approved - Project is displayed on search, and accessible by URL
/// Rejected - Project is not displayed on search, and not accessible by URL (Temporary state, project can reapply)
/// Draft - Project is not displayed on search, and not accessible by URL
@ -248,6 +314,61 @@ pub struct Version {
pub loaders: Vec<Loader>,
}
impl From<QueryVersion> for Version {
fn from(data: QueryVersion) -> Version {
Version {
id: data.id.into(),
project_id: data.project_id.into(),
author_id: data.author_id.into(),
featured: data.featured,
name: data.name,
version_number: data.version_number,
changelog: data.changelog,
changelog_url: data.changelog_url,
date_published: data.date_published,
downloads: data.downloads as u32,
version_type: match data.version_type.as_str() {
"release" => VersionType::Release,
"beta" => VersionType::Beta,
"alpha" => VersionType::Alpha,
_ => VersionType::Release,
},
files: data
.files
.into_iter()
.map(|f| {
VersionFile {
url: f.url,
filename: f.filename,
// FIXME: Hashes are currently stored as an ascii byte slice instead
// of as an actual byte array in the database
hashes: f
.hashes
.into_iter()
.map(|(k, v)| Some((k, String::from_utf8(v).ok()?)))
.collect::<Option<_>>()
.unwrap_or_default(),
primary: f.primary,
}
})
.collect(),
dependencies: data
.dependencies
.into_iter()
.map(|d| Dependency {
version_id: d.version_id.map(|i| VersionId(i.0 as u64)),
project_id: d.project_id.map(|i| ProjectId(i.0 as u64)),
dependency_type: DependencyType::from_str(d.dependency_type.as_str()),
})
.collect(),
game_versions: data.game_versions.into_iter().map(GameVersion).collect(),
loaders: data.loaders.into_iter().map(Loader).collect(),
}
}
}
/// A single project file, with a url for the file and the file's hash
#[derive(Serialize, Deserialize)]
pub struct VersionFile {
@ -284,11 +405,7 @@ pub enum VersionType {
impl std::fmt::Display for VersionType {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
VersionType::Release => write!(fmt, "release"),
VersionType::Beta => write!(fmt, "beta"),
VersionType::Alpha => write!(fmt, "alpha"),
}
fmt.write_str(self.as_str())
}
}
@ -313,11 +430,7 @@ pub enum DependencyType {
impl std::fmt::Display for DependencyType {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
DependencyType::Required => write!(fmt, "required"),
DependencyType::Optional => write!(fmt, "optional"),
DependencyType::Incompatible => write!(fmt, "incompatible"),
}
fmt.write_str(self.as_str())
}
}


@ -1,4 +1,5 @@
use super::ids::Base62Id;
use crate::database::models::team_item::QueryTeamMember;
use crate::models::users::User;
use serde::{Deserialize, Serialize};
@ -57,3 +58,19 @@ pub struct TeamMember {
/// Whether the user has joined the team or is just invited to it
pub accepted: bool,
}
impl TeamMember {
pub fn from(data: QueryTeamMember, override_permissions: bool) -> Self {
Self {
team_id: data.team_id.into(),
user: data.user.into(),
role: data.role,
permissions: if override_permissions {
None
} else {
Some(data.permissions)
},
accepted: data.accepted,
}
}
}


@ -21,6 +21,23 @@ pub struct User {
pub role: Role,
}
use crate::database::models::user_item::User as DBUser;
impl From<DBUser> for User {
fn from(data: DBUser) -> Self {
Self {
id: data.id.into(),
github_id: data.github_id.map(|i| i as u64),
username: data.username,
name: data.name,
email: None,
avatar_url: data.avatar_url,
bio: data.bio,
created: data.created,
role: Role::from_string(&*data.role),
}
}
}
#[derive(Serialize, Deserialize, PartialEq, Eq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum Role {
@ -31,11 +48,7 @@ pub enum Role {
impl std::fmt::Display for Role {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Role::Developer => write!(fmt, "developer"),
Role::Moderator => write!(fmt, "moderator"),
Role::Admin => write!(fmt, "admin"),
}
fmt.write_str(self.as_str())
}
}
@ -48,6 +61,14 @@ impl Role {
}
}
pub fn as_str(&self) -> &'static str {
match self {
Role::Developer => "developer",
Role::Moderator => "moderator",
Role::Admin => "admin",
}
}
pub fn is_mod(&self) -> bool {
match self {
Role::Developer => false,


@ -1,9 +1,9 @@
use crate::health::status::test_database;
use crate::health::SEARCH_READY;
use actix_web::web::Data;
use actix_web::{get, HttpResponse};
use serde_json::json;
use crate::health::status::test_database;
use actix_web::web::Data;
use sqlx::PgPool;
use crate::health::SEARCH_READY;
use std::sync::atomic::Ordering;
#[get("/health")]
@ -15,17 +15,17 @@ pub async fn health_get(client: Data<PgPool>) -> HttpResponse {
"ready": false,
"reason": "Database connection error"
});
return HttpResponse::InternalServerError().json(data)
return HttpResponse::InternalServerError().json(data);
}
if !SEARCH_READY.load(Ordering::Acquire) {
let data = json!({
"ready": false,
"reason": "Indexing is not finished"
});
return HttpResponse::InternalServerError().json(data)
return HttpResponse::InternalServerError().json(data);
}
HttpResponse::Ok().json(json!({
"ready": true,
"reason": "Everything is OK"
}))
}
}


@ -1,15 +1,14 @@
use actix_web::web;
mod v1;
pub use v1::v1_config;
mod auth;
mod health;
mod index;
mod maven;
mod moderation;
mod not_found;
mod notifications;
mod project_creation;
pub(crate) mod project_creation;
mod projects;
mod reports;
mod tags;
@ -18,15 +17,15 @@ mod users;
mod version_creation;
mod version_file;
mod versions;
mod health;
pub use auth::config as auth_config;
pub use tags::config as tags_config;
pub use self::index::index_get;
pub use self::health::health_get;
pub use self::index::index_get;
pub use self::not_found::not_found;
use crate::file_hosting::FileHostingError;
use actix_web::web;
pub fn v2_config(cfg: &mut web::ServiceConfig) {
cfg.service(


@ -1,6 +1,6 @@
use super::ApiError;
use crate::database;
use crate::models::projects::{Project, ProjectStatus};
use crate::models::projects::ProjectStatus;
use crate::util::auth::check_is_moderator_from_headers;
use actix_web::{get, web, HttpRequest, HttpResponse};
use serde::Deserialize;
@ -43,10 +43,10 @@ pub async fn get_projects(
.try_collect::<Vec<database::models::ProjectId>>()
.await?;
let projects: Vec<Project> = database::Project::get_many_full(project_ids, &**pool)
let projects: Vec<_> = database::Project::get_many_full(project_ids, &**pool)
.await?
.into_iter()
.map(super::projects::convert_project)
.map(crate::models::projects::Project::from)
.collect();
Ok(HttpResponse::Ok().json(projects))


@ -1,6 +1,6 @@
use crate::database;
use crate::models::ids::NotificationId;
use crate::models::notifications::{Notification, NotificationAction};
use crate::models::notifications::Notification;
use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
@ -20,22 +20,25 @@ pub async fn notifications_get(
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&*ids.ids)?
.into_iter()
.map(|x| x.into())
.collect();
// TODO: this is really confusingly named.
use database::models::notification_item::Notification as DBNotification;
use database::models::NotificationId as DBNotificationId;
let notifications_data =
let notification_ids: Vec<DBNotificationId> =
serde_json::from_str::<Vec<NotificationId>>(ids.ids.as_str())?
.into_iter()
.map(DBNotificationId::from)
.collect();
let notifications_data: Vec<DBNotification> =
database::models::notification_item::Notification::get_many(notification_ids, &**pool)
.await?;
let mut notifications: Vec<Notification> = Vec::new();
for notification in notifications_data {
if notification.user_id == user.id.into() || user.role.is_mod() {
notifications.push(convert_notification(notification));
}
}
let notifications: Vec<Notification> = notifications_data
.into_iter()
.filter(|n| n.user_id == user.id.into() || user.role.is_mod())
.map(Notification::from)
.collect();
Ok(HttpResponse::Ok().json(notifications))
}
@ -55,7 +58,7 @@ pub async fn notification_get(
if let Some(data) = notification_data {
if user.id == data.user_id.into() || user.role.is_mod() {
Ok(HttpResponse::Ok().json(convert_notification(data)))
Ok(HttpResponse::Ok().json(Notification::from(data)))
} else {
Ok(HttpResponse::NotFound().body(""))
}
@ -64,29 +67,6 @@ pub async fn notification_get(
}
}
pub fn convert_notification(
notif: database::models::notification_item::Notification,
) -> Notification {
Notification {
id: notif.id.into(),
user_id: notif.user_id.into(),
type_: notif.notification_type,
title: notif.title,
text: notif.text,
link: notif.link,
read: notif.read,
created: notif.created,
actions: notif
.actions
.into_iter()
.map(|x| NotificationAction {
title: x.title,
action_route: (x.action_route_method, x.action_route),
})
.collect(),
}
}
#[delete("{id}")]
pub async fn notification_delete(
req: HttpRequest,


@ -8,6 +8,7 @@ use crate::models::users::UserId;
use crate::routes::version_creation::InitialVersionData;
use crate::search::indexing::IndexingError;
use crate::util::auth::{get_user_from_headers, AuthenticationError};
use crate::util::routes::read_from_field;
use crate::util::validate::validation_errors_to_string;
use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
@ -255,7 +256,6 @@ pub async fn project_create(
result
}
/*
Project Creation Steps:
@ -449,18 +449,12 @@ pub async fn project_create_inner(
}
if let Some(item) = gallery_items.iter().find(|x| x.item == name) {
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
const FILE_SIZE_CAP: usize = 5 * (1 << 20);
if data.len() >= FILE_SIZE_CAP {
return Err(CreateError::InvalidInput(String::from(
"Gallery image exceeds the maximum of 5MiB.",
)));
} else {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
}
let data = read_from_field(
&mut field,
5 * (1 << 20),
"Gallery image exceeds the maximum of 5MiB.",
)
.await?;
let hash = sha1::Sha1::from(&data).hexdigest();
let (_, file_extension) =
@ -470,7 +464,7 @@ pub async fn project_create_inner(
let url = format!("data/{}/images/{}.{}", project_id, hash, file_extension);
let upload_data = file_host
.upload_file(content_type, &url, data.to_vec())
.upload_file(content_type, &url, data.freeze())
.await?;
uploaded_files.push(UploadedFile {
@ -804,22 +798,13 @@ async fn process_icon_upload(
cdn_url: &str,
) -> Result<String, CreateError> {
if let Some(content_type) = crate::util::ext::get_image_content_type(file_extension) {
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
if data.len() >= 262144 {
return Err(CreateError::InvalidInput(String::from(
"Icons must be smaller than 256KiB",
)));
} else {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
}
let data = read_from_field(&mut field, 262144, "Icons must be smaller than 256KiB").await?;
let upload_data = file_host
.upload_file(
content_type,
&format!("data/{}/icon.{}", project_id, file_extension),
data,
data.freeze(),
)
.await?;
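
`read_from_field` (and the `read_from_payload` sibling used later in this PR) comes from the new `util::routes` module, which this section doesn't show. A sketch reconstructed from the loops it replaces - the exact signature and error type are assumptions:

```rust
// Hypothetical reconstruction of `util::routes::read_from_field`.
use actix_multipart::Field;
use bytes::BytesMut;
use futures::StreamExt;

pub async fn read_from_field(
    field: &mut Field,
    cap: usize,
    err_msg: &str,
) -> Result<BytesMut, CreateError> {
    let mut bytes = BytesMut::new();
    while let Some(chunk) = field.next().await {
        if bytes.len() >= cap {
            // mirrors the inline size-cap check this helper replaces
            return Err(CreateError::InvalidInput(err_msg.to_string()));
        }
        bytes.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
    }
    Ok(bytes)
}
```

Callers then pass `bytes.freeze()` to `upload_file`, turning the accumulated `BytesMut` into a shared `Bytes` without copying.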


@ -2,14 +2,14 @@ use crate::database;
use crate::file_hosting::FileHost;
use crate::models;
use crate::models::projects::{
DonationLink, GalleryItem, License, ModeratorMessage, ProjectId, ProjectStatus, SearchRequest,
SideType,
DonationLink, Project, ProjectId, ProjectStatus, SearchRequest, SideType,
};
use crate::models::teams::Permissions;
use crate::routes::ApiError;
use crate::search::indexing::queue::CreationQueue;
use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::get_user_from_headers;
use crate::util::auth::{get_user_from_headers, is_authorized};
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::web::Data;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
@ -48,36 +48,16 @@ pub async fn projects_get(
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let mut projects = Vec::new();
for project_data in projects_data {
let mut authorized = !project_data.status.is_hidden();
if let Some(user) = &user_option {
if !authorized {
if user.role.is_mod() {
authorized = true;
} else {
let user_id: database::models::ids::UserId = user.id.into();
let project_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
project_data.inner.team_id as database::models::ids::TeamId,
user_id as database::models::ids::UserId,
)
.fetch_one(&**pool)
.await?
.exists;
authorized = project_exists.unwrap_or(false);
}
let projects: Vec<_> = futures::stream::iter(projects_data)
.filter_map(|data| async {
if is_authorized(&data, &user_option, &pool).await.ok()? {
Some(Project::from(data))
} else {
None
}
}
if authorized {
projects.push(convert_project(project_data));
}
}
})
.collect()
.await;
Ok(HttpResponse::Ok().json(projects))
}
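
The `futures::stream::iter(...).filter_map(...)` shape above exists because the predicate is async (`is_authorized` queries the database), and `Iterator::filter` can't await. The general pattern, as a toy:

```rust
use futures::StreamExt;

async fn keep(x: &u32) -> bool {
    *x % 2 == 0 // stand-in for an async check like `is_authorized`
}

async fn filter_async(items: Vec<u32>) -> Vec<u32> {
    futures::stream::iter(items)
        .filter_map(|x| async move { keep(&x).await.then(|| x) })
        .collect()
        .await
}
```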
@ -97,37 +77,11 @@ pub async fn project_get(
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
if let Some(data) = project_data {
let mut authorized = !data.status.is_hidden();
if let Some(user) = user_option {
if !authorized {
if user.role.is_mod() {
authorized = true;
} else {
let user_id: database::models::ids::UserId = user.id.into();
let project_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
data.inner.team_id as database::models::ids::TeamId,
user_id as database::models::ids::UserId,
)
.fetch_one(&**pool)
.await?
.exists;
authorized = project_exists.unwrap_or(false);
}
}
if is_authorized(&data, &user_option, &pool).await? {
return Ok(HttpResponse::Ok().json(Project::from(data)));
}
if authorized {
return Ok(HttpResponse::Ok().json(convert_project(data)));
}
Ok(HttpResponse::NotFound().body(""))
} else {
Ok(HttpResponse::NotFound().body(""))
}
Ok(HttpResponse::NotFound().body(""))
}
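
`is_authorized` is defined in `util::auth`, outside this section; a reconstruction from the inlined checks it replaces (visible status, moderator role, or team membership) - the signature is an assumption:

```rust
// Hypothetical reconstruction of `util::auth::is_authorized`.
use actix_web::web;
use sqlx::PgPool;

pub async fn is_authorized(
    project: &database::models::project_item::QueryProject,
    user_option: &Option<crate::models::users::User>,
    pool: &web::Data<PgPool>,
) -> Result<bool, ApiError> {
    let mut authorized = !project.status.is_hidden();
    if let Some(user) = user_option {
        if !authorized {
            if user.role.is_mod() {
                authorized = true;
            } else {
                // fall back to a team-membership check, as the old inline code did
                let user_id: database::models::ids::UserId = user.id.into();
                let exists = sqlx::query!(
                    "SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
                    project.inner.team_id as database::models::ids::TeamId,
                    user_id as database::models::ids::UserId,
                )
                .fetch_one(&***pool)
                .await?
                .exists;
                authorized = exists.unwrap_or(false);
            }
        }
    }
    Ok(authorized)
}
```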
#[derive(Serialize)]
@ -189,12 +143,12 @@ pub async fn dependency_list(
let projects = projects_result?
.into_iter()
.map(convert_project)
.collect::<Vec<models::projects::Project>>();
.map(models::projects::Project::from)
.collect::<Vec<_>>();
let versions = versions_result?
.into_iter()
.map(super::versions::convert_version)
.collect::<Vec<models::projects::Version>>();
.map(models::projects::Version::from)
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(DependencyInfo { projects, versions }))
} else {
@ -202,71 +156,6 @@ pub async fn dependency_list(
}
}
pub fn convert_project(
data: database::models::project_item::QueryProject,
) -> models::projects::Project {
let m = data.inner;
models::projects::Project {
id: m.id.into(),
slug: m.slug,
project_type: data.project_type,
team: m.team_id.into(),
title: m.title,
description: m.description,
body: m.body,
body_url: m.body_url,
published: m.published,
updated: m.updated,
status: data.status,
moderator_message: if let Some(message) = m.moderation_message {
Some(ModeratorMessage {
message,
body: m.moderation_message_body,
})
} else {
None
},
license: License {
id: data.license_id,
name: data.license_name,
url: m.license_url,
},
client_side: data.client_side,
server_side: data.server_side,
downloads: m.downloads as u32,
followers: m.follows as u32,
categories: data.categories,
versions: data.versions.into_iter().map(|v| v.into()).collect(),
icon_url: m.icon_url,
issues_url: m.issues_url,
source_url: m.source_url,
wiki_url: m.wiki_url,
discord_url: m.discord_url,
donation_urls: Some(
data.donation_urls
.into_iter()
.map(|d| DonationLink {
id: d.platform_short,
platform: d.platform_name,
url: d.url,
})
.collect(),
),
gallery: data
.gallery_items
.into_iter()
.map(|x| GalleryItem {
url: x.image_url,
featured: x.featured,
title: x.title,
description: x.description,
created: x.created,
})
.collect(),
}
}
/// A project returned from the API
#[derive(Serialize, Deserialize, Validate)]
pub struct EditProject {
@ -476,7 +365,7 @@ pub async fn project_edit(
if let Ok(webhook_url) = dotenv::var("MODERATION_DISCORD_WEBHOOK") {
crate::util::webhook::send_discord_webhook(
convert_project(project_item.clone()),
Project::from(project_item.clone()),
webhook_url,
)
.await
@ -959,30 +848,15 @@ pub async fn project_icon_edit(
}
}
let mut bytes = web::BytesMut::new();
while let Some(item) = payload.next().await {
if bytes.len() >= 262144 {
return Err(ApiError::InvalidInputError(String::from(
"Icons must be smaller than 256KiB",
)));
} else {
bytes.extend_from_slice(&item.map_err(|_| {
ApiError::InvalidInputError(
"Unable to parse bytes in payload sent!".to_string(),
)
})?);
}
}
let bytes =
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
let hash = sha1::Sha1::from(&bytes).hexdigest();
let project_id: ProjectId = project_item.id.into();
let upload_data = file_host
.upload_file(
content_type,
&format!("data/{}/{}.{}", project_id, hash, ext.ext),
bytes.to_vec(),
bytes.freeze(),
)
.await?;
@ -1126,29 +1000,18 @@ pub async fn add_gallery_item(
}
}
let mut bytes = web::BytesMut::new();
while let Some(item) = payload.next().await {
const FILE_SIZE_CAP: usize = 5 * (1 << 20);
if bytes.len() >= FILE_SIZE_CAP {
return Err(ApiError::InvalidInputError(String::from(
"Gallery image exceeds the maximum of 5MiB.",
)));
} else {
bytes.extend_from_slice(&item.map_err(|_| {
ApiError::InvalidInputError(
"Unable to parse bytes in payload sent!".to_string(),
)
})?);
}
}
let bytes = read_from_payload(
&mut payload,
5 * (1 << 20),
"Gallery image exceeds the maximum of 5MiB.",
)
.await?;
let hash = sha1::Sha1::from(&bytes).hexdigest();
let id: ProjectId = project_item.id.into();
let url = format!("data/{}/images/{}.{}", id, hash, &*ext.ext);
file_host
.upload_file(content_type, &url, bytes.to_vec())
.upload_file(content_type, &url, bytes.freeze())
.await?;
let mut transaction = pool.begin().await?;


@ -1,5 +1,4 @@
use crate::database::models::notification_item::{NotificationActionBuilder, NotificationBuilder};
use crate::database::models::team_item::QueryTeamMember;
use crate::database::models::TeamMember;
use crate::models::ids::ProjectId;
use crate::models::teams::{Permissions, TeamId};
@ -32,19 +31,19 @@ pub async fn team_members_get_project(
.map_err(ApiError::DatabaseError)?;
if team_member.is_some() {
let team_members: Vec<crate::models::teams::TeamMember> = members_data
let team_members: Vec<_> = members_data
.into_iter()
.map(|data| convert_team_member(data, false))
.map(|data| crate::models::teams::TeamMember::from(data, false))
.collect();
return Ok(HttpResponse::Ok().json(team_members));
}
}
let team_members: Vec<crate::models::teams::TeamMember> = members_data
let team_members: Vec<_> = members_data
.into_iter()
.filter(|x| x.accepted)
.map(|data| convert_team_member(data, true))
.map(|data| crate::models::teams::TeamMember::from(data, true))
.collect();
Ok(HttpResponse::Ok().json(team_members))
@ -53,23 +52,6 @@ pub async fn team_members_get_project(
}
}
pub fn convert_team_member(
data: QueryTeamMember,
override_permissions: bool,
) -> crate::models::teams::TeamMember {
crate::models::teams::TeamMember {
team_id: data.team_id.into(),
user: super::users::convert_user(data.user),
role: data.role,
permissions: if override_permissions {
None
} else {
Some(data.permissions)
},
accepted: data.accepted,
}
}
#[get("{id}/members")]
pub async fn team_members_get(
req: HttpRequest,
@ -87,19 +69,19 @@ pub async fn team_members_get(
.map_err(ApiError::DatabaseError)?;
if team_member.is_some() {
let team_members: Vec<crate::models::teams::TeamMember> = members_data
let team_members: Vec<_> = members_data
.into_iter()
.map(|data| convert_team_member(data, false))
.map(|data| crate::models::teams::TeamMember::from(data, false))
.collect();
return Ok(HttpResponse::Ok().json(team_members));
}
}
let team_members: Vec<crate::models::teams::TeamMember> = members_data
let team_members: Vec<_> = members_data
.into_iter()
.filter(|x| x.accepted)
.map(|data| convert_team_member(data, true))
.map(|data| crate::models::teams::TeamMember::from(data, true))
.collect();
Ok(HttpResponse::Ok().json(team_members))


@ -3,12 +3,11 @@ use crate::file_hosting::FileHost;
use crate::models::notifications::Notification;
use crate::models::projects::{Project, ProjectStatus};
use crate::models::users::{Role, UserId};
use crate::routes::notifications::convert_notification;
use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
use futures::StreamExt;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
@ -42,7 +41,7 @@ pub async fn users_get(
let users_data = User::get_many(user_ids, &**pool).await?;
let users: Vec<crate::models::users::User> = users_data.into_iter().map(convert_user).collect();
let users: Vec<crate::models::users::User> = users_data.into_iter().map(From::from).collect();
Ok(HttpResponse::Ok().json(users))
}
@ -68,27 +67,13 @@ pub async fn user_get(
}
if let Some(data) = user_data {
let response = convert_user(data);
let response: crate::models::users::User = data.into();
Ok(HttpResponse::Ok().json(response))
} else {
Ok(HttpResponse::NotFound().body(""))
}
}
pub fn convert_user(data: crate::database::models::user_item::User) -> crate::models::users::User {
crate::models::users::User {
id: data.id.into(),
github_id: data.github_id.map(|i| i as u64),
username: data.username,
name: data.name,
email: None,
avatar_url: data.avatar_url,
bio: data.bio,
created: data.created,
role: Role::from_string(&*data.role),
}
}
#[get("{user_id}/projects")]
pub async fn projects_list(
req: HttpRequest,
@ -114,11 +99,11 @@ pub async fn projects_list(
User::get_projects(id, ProjectStatus::Approved.as_str(), &**pool).await?
};
let response = crate::database::Project::get_many_full(project_data, &**pool)
let response: Vec<_> = crate::database::Project::get_many_full(project_data, &**pool)
.await?
.into_iter()
.map(super::projects::convert_project)
.collect::<Vec<Project>>();
.map(Project::from)
.collect();
Ok(HttpResponse::Ok().json(response))
} else {
@ -337,26 +322,15 @@ pub async fn user_icon_edit(
}
}
let mut bytes = web::BytesMut::new();
while let Some(item) = payload.next().await {
if bytes.len() >= 262144 {
return Err(ApiError::InvalidInputError(String::from(
"Icons must be smaller than 256KiB",
)));
} else {
bytes.extend_from_slice(&item.map_err(|_| {
ApiError::InvalidInputError(
"Unable to parse bytes in payload sent!".to_string(),
)
})?);
}
}
let bytes =
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB")
.await?;
let upload_data = file_host
.upload_file(
content_type,
&format!("user/{}/icon.{}", user_id, ext.ext),
bytes.to_vec(),
bytes.freeze(),
)
.await?;
@ -468,11 +442,11 @@ pub async fn user_follows(
.try_collect::<Vec<crate::database::models::ProjectId>>()
.await?;
let projects = crate::database::Project::get_many_full(project_ids, &**pool)
let projects: Vec<_> = crate::database::Project::get_many_full(project_ids, &**pool)
.await?
.into_iter()
.map(super::projects::convert_project)
.collect::<Vec<Project>>();
.map(Project::from)
.collect();
Ok(HttpResponse::Ok().json(projects))
} else {
@ -502,7 +476,7 @@ pub async fn user_notifications(
crate::database::models::notification_item::Notification::get_many_user(id, &**pool)
.await?
.into_iter()
.map(convert_notification)
.map(Into::into)
.collect();
notifications.sort_by(|a, b| b.created.cmp(&a.created));


@ -34,10 +34,10 @@ pub async fn get_mods(
.try_collect::<Vec<database::models::ProjectId>>()
.await?;
let projects: Vec<Project> = database::Project::get_many_full(project_ids, &**pool)
let projects: Vec<_> = database::Project::get_many_full(project_ids, &**pool)
.await?
.into_iter()
.map(crate::routes::projects::convert_project)
.map(Project::from)
.collect();
Ok(HttpResponse::Ok().json(projects))


@ -1,10 +1,10 @@
use crate::file_hosting::FileHost;
use crate::models::projects::SearchRequest;
use crate::routes::project_creation::{project_create_inner, undo_uploads, CreateError};
use crate::routes::projects::{convert_project, ProjectIds};
use crate::routes::projects::ProjectIds;
use crate::routes::ApiError;
use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::get_user_from_headers;
use crate::util::auth::{get_user_from_headers, is_authorized};
use crate::{database, models};
use actix_multipart::Multipart;
use actix_web::web;
@ -98,37 +98,14 @@ pub async fn mods_get(
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let mut projects = Vec::new();
let mut projects = Vec::with_capacity(projects_data.len());
for project_data in projects_data {
let mut authorized = !project_data.status.is_hidden();
if let Some(user) = &user_option {
if !authorized {
if user.role.is_mod() {
authorized = true;
} else {
let user_id: database::models::ids::UserId = user.id.into();
let project_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
project_data.inner.team_id as database::models::ids::TeamId,
user_id as database::models::ids::UserId,
)
.fetch_one(&**pool)
.await?
.exists;
authorized = project_exists.unwrap_or(false);
}
}
}
if authorized {
projects.push(convert_project(project_data));
// can't use `map` and `collect` here since `is_authorized` must be async
for proj in projects_data {
if is_authorized(&proj, &user_option, &pool).await? {
projects.push(crate::models::projects::Project::from(proj))
}
}
Ok(HttpResponse::Ok().json(projects))
}
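The comment above is accurate for plain iterators; if a combinator style is still wanted, futures' stream adapters can await inside the pipeline. A sketch, using the `is_authorized` signature introduced later in this commit:

use futures::stream::{self, StreamExt, TryStreamExt};

let projects: Vec<crate::models::projects::Project> = stream::iter(projects_data)
    .then(|proj| async {
        Ok::<_, ApiError>((is_authorized(&proj, &user_option, &pool).await?, proj))
    })
    .try_filter_map(|(authorized, proj)| async move {
        Ok(authorized.then(|| crate::models::projects::Project::from(proj)))
    })
    .try_collect()
    .await?;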


@ -2,7 +2,7 @@ use crate::file_hosting::FileHost;
use crate::models::ids::{ProjectId, UserId, VersionId};
use crate::models::projects::{Dependency, GameVersion, Loader, Version, VersionFile, VersionType};
use crate::models::teams::Permissions;
use crate::routes::versions::{convert_version, VersionIds, VersionListFilters};
use crate::routes::versions::{VersionIds, VersionListFilters};
use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use crate::{database, models, Pepper};
@ -91,7 +91,7 @@ pub async fn version_list(
.map(|featured| featured == version.featured)
.unwrap_or(true)
})
.map(convert_version)
.map(Version::from)
.map(convert_to_legacy)
.collect::<Vec<_>>();
@ -118,16 +118,14 @@ pub async fn version_list(
version.game_versions.contains(&filter.0.version)
&& version.loaders.contains(&filter.1.loader)
})
.map(|version| {
response.push(convert_to_legacy(convert_version(version.clone())))
})
.map(|version| response.push(convert_to_legacy(Version::from(version.clone()))))
.unwrap_or(());
});
if response.is_empty() {
versions
.into_iter()
.for_each(|version| response.push(convert_to_legacy(convert_version(version))));
.for_each(|version| response.push(convert_to_legacy(Version::from(version))));
}
}
@ -154,7 +152,7 @@ pub async fn versions_get(
let mut versions = Vec::new();
for version_data in versions_data {
versions.push(convert_to_legacy(convert_version(version_data)));
versions.push(convert_to_legacy(Version::from(version_data)));
}
Ok(HttpResponse::Ok().json(versions))
@ -169,7 +167,7 @@ pub async fn version_get(
let version_data = database::models::Version::get_full(id.into(), &**pool).await?;
if let Some(data) = version_data {
Ok(HttpResponse::Ok().json(convert_to_legacy(convert_version(data))))
Ok(HttpResponse::Ok().json(convert_to_legacy(Version::from(data))))
} else {
Ok(HttpResponse::NotFound().body(""))
}
@ -214,7 +212,7 @@ pub async fn get_version_from_hash(
.await?;
if let Some(data) = version_data {
Ok(HttpResponse::Ok().json(super::versions::convert_version(data)))
Ok(HttpResponse::Ok().json(crate::models::projects::Version::from(data)))
} else {
Ok(HttpResponse::NotFound().body(""))
}


@ -8,6 +8,7 @@ use crate::models::projects::{
use crate::models::teams::Permissions;
use crate::routes::project_creation::{CreateError, UploadedFile};
use crate::util::auth::get_user_from_headers;
use crate::util::routes::read_from_field;
use crate::util::validate::validation_errors_to_string;
use crate::validate::{validate_file, ValidationResult};
use actix_multipart::{Field, Multipart};
@ -587,20 +588,10 @@ pub async fn upload_file(
let content_type = crate::util::ext::project_file_type(file_extension)
.ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?;
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
// Project file size limit of 100MiB
const FILE_SIZE_CAP: usize = 100 * (1 << 20);
if data.len() >= FILE_SIZE_CAP {
return Err(CreateError::InvalidInput(
String::from("Project file exceeds the maximum of 100MiB. Contact a moderator or admin to request permission to upload larger files.")
));
} else {
let bytes = chunk.map_err(CreateError::MultipartError)?;
data.append(&mut bytes.to_vec());
}
}
let data = read_from_field(
field, 100 * (1 << 20),
"Project file exceeds the maximum of 100MiB. Contact a moderator or admin to request permission to upload larger files."
).await?;
let hash = sha1::Sha1::from(&data).hexdigest();
let exists = sqlx::query!(
@ -623,7 +614,7 @@ pub async fn upload_file(
}
let validation_result = validate_file(
data.as_slice(),
&data,
file_extension,
project_type,
loaders,
@ -638,7 +629,7 @@ pub async fn upload_file(
"data/{}/versions/{}/{}",
project_id, version_number, file_name
),
data.to_vec(),
data.freeze(),
)
.await?;


@ -1,9 +1,11 @@
use super::ApiError;
use crate::database::models::version_item::QueryVersion;
use crate::file_hosting::FileHost;
use crate::models;
use crate::models::projects::{GameVersion, Loader};
use crate::models::projects::{GameVersion, Loader, Version};
use crate::models::teams::Permissions;
use crate::util::auth::get_user_from_headers;
use crate::util::routes::ok_or_not_found;
use crate::{database, Pepper};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
@ -51,7 +53,7 @@ pub async fn get_version_from_hash(
.await?;
if let Some(data) = version_data {
Ok(HttpResponse::Ok().json(super::versions::convert_version(data)))
Ok(HttpResponse::Ok().json(models::projects::Version::from(data)))
} else {
Ok(HttpResponse::NotFound().body(""))
}
@ -361,11 +363,7 @@ pub async fn get_update_from_hash(
if let Some(version_id) = version_ids.last() {
let version_data = database::models::Version::get_full(*version_id, &**pool).await?;
if let Some(data) = version_data {
Ok(HttpResponse::Ok().json(super::versions::convert_version(data)))
} else {
Ok(HttpResponse::NotFound().body(""))
}
ok_or_not_found::<QueryVersion, Version>(version_data)
} else {
Ok(HttpResponse::NotFound().body(""))
}
@ -414,14 +412,16 @@ pub async fn get_versions_from_hashes(
)
.await?;
let mut response = HashMap::new();
for row in result {
if let Some(version) = versions_data.iter().find(|x| x.id.0 == row.version_id) {
response.insert(row.hash, super::versions::convert_version(version.clone()));
}
}
let response: Vec<_> = result
.into_iter()
.filter_map(|row| {
versions_data
.clone()
.into_iter()
.find(|x| x.id.0 == row.version_id)
.map(|v| (row.hash, crate::models::projects::Version::from(v)))
})
.collect();
Ok(HttpResponse::Ok().json(response))
}
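One shape change to be aware of: the removed code inserted into a `HashMap` keyed by hash, which serializes as a JSON object, while a `Vec` of tuples serializes as an array of pairs. If the object shape must stay, the same pipeline can collect into a map; cloning only the matched element also avoids the per-row `versions_data.clone()`. A sketch:

use std::collections::HashMap;

let response: HashMap<_, _> = result
    .into_iter()
    .filter_map(|row| {
        versions_data
            .iter()
            .find(|x| x.id.0 == row.version_id)
            .map(|v| (row.hash, crate::models::projects::Version::from(v.clone())))
    })
    .collect();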
@ -542,7 +542,7 @@ pub async fn update_files(
if let Some(version) = versions.iter().find(|x| x.id.0 == row.version_id) {
response.insert(
row.hash.clone(),
super::versions::convert_version(version.clone()),
models::projects::Version::from(version.clone()),
);
}
}


@ -1,7 +1,7 @@
use super::ApiError;
use crate::database;
use crate::models;
use crate::models::projects::{Dependency, DependencyType};
use crate::models::projects::{Dependency, Version};
use crate::models::teams::Permissions;
use crate::util::auth::get_user_from_headers;
use crate::util::validate::validation_errors_to_string;
@ -55,7 +55,7 @@ pub async fn version_list(
.map(|featured| featured == version.featured)
.unwrap_or(true)
})
.map(convert_version)
.map(Version::from)
.collect::<Vec<_>>();
versions.sort_by(|a, b| b.date_published.cmp(&a.date_published));
@ -83,14 +83,14 @@ pub async fn version_list(
version.game_versions.contains(&filter.0.version)
&& version.loaders.contains(&filter.1.loader)
})
.map(|version| response.push(convert_version(version.clone())))
.map(|version| response.push(Version::from(version.clone())))
.unwrap_or(());
});
if response.is_empty() {
versions
.into_iter()
.for_each(|version| response.push(convert_version(version)));
.for_each(|version| response.push(Version::from(version)));
}
}
@ -119,12 +119,10 @@ pub async fn versions_get(
.collect();
let versions_data = database::models::Version::get_many_full(version_ids, &**pool).await?;
let mut versions = Vec::new();
for version_data in versions_data {
versions.push(convert_version(version_data));
}
let versions = versions_data
.into_iter()
.map(Version::from)
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(versions))
}
@ -137,77 +135,12 @@ pub async fn version_get(
let version_data = database::models::Version::get_full(id.into(), &**pool).await?;
if let Some(data) = version_data {
Ok(HttpResponse::Ok().json(convert_version(data)))
Ok(HttpResponse::Ok().json(models::projects::Version::from(data)))
} else {
Ok(HttpResponse::NotFound().body(""))
}
}
pub fn convert_version(
data: database::models::version_item::QueryVersion,
) -> models::projects::Version {
use models::projects::VersionType;
models::projects::Version {
id: data.id.into(),
project_id: data.project_id.into(),
author_id: data.author_id.into(),
featured: data.featured,
name: data.name,
version_number: data.version_number,
changelog: data.changelog,
changelog_url: data.changelog_url,
date_published: data.date_published,
downloads: data.downloads as u32,
version_type: match data.version_type.as_str() {
"release" => VersionType::Release,
"beta" => VersionType::Beta,
"alpha" => VersionType::Alpha,
_ => VersionType::Release,
},
files: data
.files
.into_iter()
.map(|f| {
models::projects::VersionFile {
url: f.url,
filename: f.filename,
// FIXME: Hashes are currently stored as an ascii byte slice instead
// of as an actual byte array in the database
hashes: f
.hashes
.into_iter()
.map(|(k, v)| Some((k, String::from_utf8(v).ok()?)))
.collect::<Option<_>>()
.unwrap_or_else(Default::default),
primary: f.primary,
}
})
.collect(),
dependencies: data
.dependencies
.into_iter()
.map(|d| Dependency {
version_id: d.version_id.map(|x| x.into()),
project_id: d.project_id.map(|x| x.into()),
dependency_type: DependencyType::from_str(&*d.dependency_type),
})
.collect(),
game_versions: data
.game_versions
.into_iter()
.map(models::projects::GameVersion)
.collect(),
loaders: data
.loaders
.into_iter()
.map(models::projects::Loader)
.collect(),
}
}
#[derive(Serialize, Deserialize, Validate)]
pub struct EditVersion {
#[validate(length(min = 3, max = 256))]


@ -36,12 +36,8 @@ pub fn schedule_versions(
pool: sqlx::Pool<sqlx::Postgres>,
skip_initial: bool,
) {
let version_index_interval = std::time::Duration::from_secs(
dotenv::var("VERSION_INDEX_INTERVAL")
.ok()
.map(|i| i.parse().unwrap())
.unwrap_or(1800),
);
let version_index_interval =
std::time::Duration::from_secs(parse_var("VERSION_INDEX_INTERVAL").unwrap_or(1800));
let mut skip = skip_initial;
scheduler.run(version_index_interval, move || {
@ -74,6 +70,7 @@ pub enum VersionIndexingError {
DatabaseError(#[from] crate::database::models::DatabaseError),
}
use crate::util::env::parse_var;
use serde::Deserialize;
#[derive(Deserialize)]


@ -3,7 +3,6 @@ use log::info;
use super::IndexingError;
use crate::database::models::ProjectId;
use crate::models::projects::ProjectStatus;
use crate::search::UploadSearchProject;
use sqlx::postgres::PgPool;
@ -12,6 +11,8 @@ pub async fn index_local(pool: PgPool) -> Result<Vec<UploadSearchProject>, Index
info!("Indexing local projects!");
Ok(
sqlx::query!(
//FIXME: there must be a way to reduce the duplicate lines between this query and the one in `query_one` here...
//region query
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.published published,
@ -39,19 +40,22 @@ pub async fn index_local(pool: PgPool) -> Result<Vec<UploadSearchProject>, Index
WHERE s.status = $1
GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;
",
ProjectStatus::Approved.as_str(),
//endregion query
crate::models::projects::ProjectStatus::Approved.as_str(),
crate::models::teams::OWNER_ROLE,
)
.fetch_many(&pool)
.try_filter_map(|e| async {
Ok(e.right().map(|m| {
let mut categories = m.categories.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>()).unwrap_or_default();
categories.append(&mut m.loaders.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>()).unwrap_or_default());
let mut categories = split_to_strings(m.categories);
categories.append(&mut split_to_strings(m.loaders));
let versions = split_to_strings(m.versions);
let versions : Vec<String> = m.versions.map(|x| x.split(',').map(|x| x.to_string()).collect()).unwrap_or_default();
let project_id : crate::models::projects::ProjectId = ProjectId(m.id).into();
let project_id: crate::models::projects::ProjectId = ProjectId(m.id).into();
// TODO: Cleanup - This method has a lot of code in common with the method below.
// But, since the macro returns a (de facto) unnamed struct,
// we cannot reuse the code easily. Ugh.
UploadSearchProject {
project_id: format!("{}", project_id),
title: m.title,
@ -76,64 +80,53 @@ pub async fn index_local(pool: PgPool) -> Result<Vec<UploadSearchProject>, Index
}
}))
})
.try_collect::<Vec<UploadSearchProject>>()
.try_collect::<Vec<_>>()
.await?
)
}
pub async fn query_one(
id: ProjectId,
exec: &mut sqlx::PgConnection,
) -> Result<UploadSearchProject, IndexingError> {
let m = sqlx::query!(
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.published published,
m.updated updated,
m.team_id team_id, m.license license, m.slug slug,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,
STRING_AGG(DISTINCT mg.image_url, ',') gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
LEFT OUTER JOIN versions v ON v.mod_id = m.id
LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id
LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id
LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id
LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id
INNER JOIN statuses s ON s.id = m.status
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN side_types cs ON m.client_side = cs.id
INNER JOIN side_types ss ON m.server_side = ss.id
INNER JOIN licenses l ON m.license = l.id
INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $2
INNER JOIN users u ON tm.user_id = u.id
WHERE m.id = $1
GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;
",
id as ProjectId,
crate::models::teams::OWNER_ROLE,
)
//region query
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.published published,
m.updated updated,
m.team_id team_id, m.license license, m.slug slug,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,
STRING_AGG(DISTINCT mg.image_url, ',') gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
LEFT OUTER JOIN versions v ON v.mod_id = m.id
LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id
LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id
LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id
LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id
INNER JOIN statuses s ON s.id = m.status
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN side_types cs ON m.client_side = cs.id
INNER JOIN side_types ss ON m.server_side = ss.id
INNER JOIN licenses l ON m.license = l.id
INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $2
INNER JOIN users u ON tm.user_id = u.id
WHERE m.id = $1
GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;
",
//endregion query
id as ProjectId,
crate::models::teams::OWNER_ROLE
)
.fetch_one(exec)
.await?;
let mut categories = m
.categories
.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>())
.unwrap_or_default();
categories.append(
&mut m
.loaders
.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>())
.unwrap_or_default(),
);
let versions: Vec<String> = m
.versions
.map(|x| x.split(',').map(|x| x.to_string()).collect())
.unwrap_or_default();
let mut categories = split_to_strings(m.categories);
categories.append(&mut split_to_strings(m.loaders));
let versions = split_to_strings(m.versions);
let project_id: crate::models::projects::ProjectId = ProjectId(m.id).into();
@ -160,9 +153,11 @@ pub async fn query_one(
server_side: m.server_side_type,
slug: m.slug,
project_type: m.project_type_name,
gallery: m
.gallery
.map(|x| x.split(',').map(|x| x.to_string()).collect())
.unwrap_or_default(),
gallery: split_to_strings(m.gallery),
})
}
fn split_to_strings(s: Option<String>) -> Vec<String> {
s.map(|x| x.split(',').map(ToString::to_string).collect())
.unwrap_or_default()
}
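The helper centralizes the four `Option<String>`-splitting sites (categories, loaders, versions, gallery); given the definition above:

assert_eq!(split_to_strings(Some("fabric,forge".into())), ["fabric", "forge"]);
assert!(split_to_strings(None).is_empty());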


@ -16,7 +16,7 @@ pub enum IndexingError {
#[error("Error while connecting to the MeiliSearch database")]
IndexDBError(#[from] meilisearch_sdk::errors::Error),
#[error("Error while serializing or deserializing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
SerdeError(#[from] serde_json::Error),
#[error("Error while parsing a timestamp: {0}")]
ParseDateError(#[from] chrono::format::ParseError),
#[error("Database Error: {0}")]
@ -40,6 +40,7 @@ pub struct IndexingSettings {
impl IndexingSettings {
#[allow(dead_code)]
pub fn from_env() -> Self {
//FIXME: what?
let index_local = true;
Self { index_local }
@ -64,7 +65,7 @@ pub async fn index_projects(
}
pub async fn reset_indices(config: &SearchConfig) -> Result<(), IndexingError> {
let client = Client::new(&*config.address, &*config.key);
let client = config.make_client();
client.delete_index("relevance_projects").await?;
client.delete_index("downloads_projects").await?;
@ -74,48 +75,28 @@ pub async fn reset_indices(config: &SearchConfig) -> Result<(), IndexingError> {
Ok(())
}
async fn update_index_helper<'a>(
client: &'a Client<'a>,
name: &'static str,
rule: &'static str,
) -> Result<Index<'a>, IndexingError> {
update_index(&client, name, {
let mut rules = default_rules();
rules.push_back(rule);
rules.into()
})
.await
}
pub async fn reconfigure_indices(config: &SearchConfig) -> Result<(), IndexingError> {
let client = Client::new(&*config.address, &*config.key);
let client = config.make_client();
// Relevance Index
update_index(&client, "relevance_projects", {
let mut relevance_rules = default_rules();
relevance_rules.push_back("desc(downloads)".to_string());
relevance_rules.into()
})
.await?;
// Downloads Index
update_index(&client, "downloads_projects", {
let mut downloads_rules = default_rules();
downloads_rules.push_front("desc(downloads)".to_string());
downloads_rules.into()
})
.await?;
// Follows Index
update_index(&client, "follows_projects", {
let mut follows_rules = default_rules();
follows_rules.push_front("desc(follows)".to_string());
follows_rules.into()
})
.await?;
// Updated Index
update_index(&client, "updated_projects", {
let mut updated_rules = default_rules();
updated_rules.push_front("desc(modified_timestamp)".to_string());
updated_rules.into()
})
.await?;
// Created Index
update_index(&client, "newest_projects", {
let mut newest_rules = default_rules();
newest_rules.push_front("desc(created_timestamp)".to_string());
newest_rules.into()
})
.await?;
update_index_helper(&client, "relevance_projects", "desc(downloads)").await?;
update_index_helper(&client, "downloads_projects", "desc(downloads)").await?;
update_index_helper(&client, "follows_projects", "desc(follows)").await?;
update_index_helper(&client, "updated_projects", "desc(modified_timestamp)").await?;
update_index_helper(&client, "newest_projects", "desc(created_timestamp)").await?;
Ok(())
}
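One subtlety the deduplication changes: the removed code used `push_back` only for the relevance index and `push_front` for the sorted indices, whereas `update_index_helper` (and `create_and_add_to_index` below) always append with `push_back`, so the `desc(...)` rule now ranks after the default rules everywhere. If front placement matters for the sorted indices, the helper could take the position explicitly; a sketch on top of `default_rules()`:

enum RulePos { Front, Back }

fn rules_with(rule: &'static str, pos: RulePos) -> Vec<&'static str> {
    let mut rules = default_rules();
    match pos {
        RulePos::Front => rules.push_front(rule),
        RulePos::Back => rules.push_back(rule),
    }
    rules.into()
}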
@ -123,7 +104,7 @@ pub async fn reconfigure_indices(config: &SearchConfig) -> Result<(), IndexingEr
async fn update_index<'a>(
client: &'a Client<'a>,
name: &'a str,
rules: Vec<String>,
rules: Vec<&'static str>,
) -> Result<Index<'a>, IndexingError> {
let index = match client.get_index(name).await {
Ok(index) => index,
@ -143,8 +124,8 @@ async fn update_index<'a>(
async fn create_index<'a>(
client: &'a Client<'a>,
name: &'a str,
rules: impl FnOnce() -> Vec<String>,
name: &'static str,
rules: impl FnOnce() -> Vec<&'static str>,
) -> Result<Index<'a>, IndexingError> {
match client.get_index(name).await {
// TODO: update index settings on startup (or delete old indices on startup)
@ -176,127 +157,109 @@ async fn add_to_index(index: Index<'_>, mods: &[UploadSearchProject]) -> Result<
Ok(())
}
async fn create_and_add_to_index<'a>(
client: &'a Client<'a>,
projects: &'a Vec<UploadSearchProject>,
name: &'static str,
rule: &'static str,
) -> Result<(), IndexingError> {
let index = create_index(&client, name, || {
let mut relevance_rules = default_rules();
relevance_rules.push_back(rule);
relevance_rules.into()
})
.await?;
add_to_index(index, projects).await?;
Ok(())
}
pub async fn add_projects(
projects: Vec<UploadSearchProject>,
config: &SearchConfig,
) -> Result<(), IndexingError> {
let client = Client::new(&*config.address, &*config.key);
let client = config.make_client();
// Relevance Index
let relevance_index = create_index(&client, "relevance_projects", || {
let mut relevance_rules = default_rules();
relevance_rules.push_back("desc(downloads)".to_string());
relevance_rules.into()
})
create_and_add_to_index(&client, &projects, "relevance_projects", "desc(downloads)").await?;
create_and_add_to_index(&client, &projects, "downloads_projects", "desc(downloads)").await?;
create_and_add_to_index(&client, &projects, "follows_projects", "desc(follows)").await?;
create_and_add_to_index(
&client,
&projects,
"updated_projects",
"desc(modified_timestamp)",
)
.await?;
add_to_index(relevance_index, &projects).await?;
// Downloads Index
let downloads_index = create_index(&client, "downloads_projects", || {
let mut downloads_rules = default_rules();
downloads_rules.push_front("desc(downloads)".to_string());
downloads_rules.into()
})
create_and_add_to_index(
&client,
&projects,
"newest_projects",
"desc(created_timestamp)",
)
.await?;
add_to_index(downloads_index, &projects).await?;
// Follows Index
let follows_index = create_index(&client, "follows_projects", || {
let mut follows_rules = default_rules();
follows_rules.push_front("desc(follows)".to_string());
follows_rules.into()
})
.await?;
add_to_index(follows_index, &projects).await?;
// Updated Index
let updated_index = create_index(&client, "updated_projects", || {
let mut updated_rules = default_rules();
updated_rules.push_front("desc(modified_timestamp)".to_string());
updated_rules.into()
})
.await?;
add_to_index(updated_index, &projects).await?;
// Created Index
let newest_index = create_index(&client, "newest_projects", || {
let mut newest_rules = default_rules();
newest_rules.push_front("desc(created_timestamp)".to_string());
newest_rules.into()
})
.await?;
add_to_index(newest_index, &projects).await?;
Ok(())
}
//region Utils
fn default_rules() -> VecDeque<String> {
fn default_rules() -> VecDeque<&'static str> {
vec![
"typo".to_string(),
"words".to_string(),
"proximity".to_string(),
"attribute".to_string(),
"wordsPosition".to_string(),
"exactness".to_string(),
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
]
.into()
}
fn default_settings() -> Settings {
let displayed_attributes = vec![
"project_id".to_string(),
"project_type".to_string(),
"slug".to_string(),
"author".to_string(),
"title".to_string(),
"description".to_string(),
"categories".to_string(),
"versions".to_string(),
"downloads".to_string(),
"follows".to_string(),
"icon_url".to_string(),
"date_created".to_string(),
"date_modified".to_string(),
"latest_version".to_string(),
"license".to_string(),
"client_side".to_string(),
"server_side".to_string(),
"gallery".to_string(),
];
let searchable_attributes = vec![
"title".to_string(),
"description".to_string(),
"categories".to_string(),
"versions".to_string(),
"author".to_string(),
];
let stop_words: Vec<String> = Vec::new();
let synonyms: HashMap<String, Vec<String>> = HashMap::new();
Settings::new()
.with_displayed_attributes(displayed_attributes)
.with_searchable_attributes(searchable_attributes)
.with_stop_words(stop_words)
.with_synonyms(synonyms)
.with_attributes_for_faceting(vec![
String::from("categories"),
String::from("host"),
String::from("versions"),
String::from("license"),
String::from("client_side"),
String::from("server_side"),
String::from("project_type"),
])
.with_displayed_attributes(DEFAULT_DISPLAYED_ATTRIBUTES)
.with_searchable_attributes(DEFAULT_SEARCHABLE_ATTRIBUTES)
.with_stop_words(Vec::<String>::new())
.with_synonyms(HashMap::<String, Vec<String>>::new())
.with_attributes_for_faceting(DEFAULT_ATTRIBUTES_FOR_FACETING)
}
const DEFAULT_DISPLAYED_ATTRIBUTES: &[&str] = &[
"project_id",
"project_type",
"slug",
"author",
"title",
"description",
"categories",
"versions",
"downloads",
"follows",
"icon_url",
"date_created",
"date_modified",
"latest_version",
"license",
"client_side",
"server_side",
"gallery",
];
const DEFAULT_SEARCHABLE_ATTRIBUTES: &[&str] =
&["title", "description", "categories", "versions", "author"];
const DEFAULT_ATTRIBUTES_FOR_FACETING: &[&str] = &[
"categories",
"host",
"versions",
"license",
"client_side",
"server_side",
"project_type",
];
//endregion
// This shouldn't be relied on for proper sorting, but it makes an
// attempt at getting proper sorting for mojang's versions.
// This isn't currenly used, but I wrote it and it works, so I'm
// attempt at getting proper sorting for Mojang's versions.
// This isn't currently used, but I wrote it and it works, so I'm
// keeping this mess in case someone needs it in the future.
#[allow(dead_code)]
pub fn sort_projects(a: &str, b: &str) -> std::cmp::Ordering {
@ -346,7 +309,7 @@ pub fn sort_projects(a: &str, b: &str) -> std::cmp::Ordering {
(false, false) => a.0.cmp(&b.0),
(true, false) => Ordering::Greater,
(false, true) => Ordering::Less,
(true, true) => Ordering::Equal, // unreachable
(true, true) => unreachable!(),
}
}
}


@ -16,7 +16,6 @@ impl CreationQueue {
queue: Mutex::new(Vec::with_capacity(10)),
}
}
pub fn add(&self, search_project: UploadSearchProject) {
// Can only panic if mutex is poisoned
self.queue.lock().unwrap().push(search_project);
@ -24,12 +23,8 @@ impl CreationQueue {
pub fn take(&self) -> Vec<UploadSearchProject> {
std::mem::replace(&mut *self.queue.lock().unwrap(), Vec::with_capacity(10))
}
}
pub async fn index_queue(
queue: &CreationQueue,
config: &SearchConfig,
) -> Result<(), IndexingError> {
let queue = queue.take();
add_projects(queue, config).await
pub async fn index(&self, config: &SearchConfig) -> Result<(), IndexingError> {
let queue = self.take();
add_projects(queue, config).await
}
}


@ -17,7 +17,7 @@ pub enum SearchError {
#[error("MeiliSearch Error: {0}")]
MeiliSearchError(#[from] meilisearch_sdk::errors::Error),
#[error("Error while serializing or deserializing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
SerdeError(#[from] serde_json::Error),
#[error("Error while parsing an integer: {0}")]
IntParsingError(#[from] std::num::ParseIntError),
#[error("Environment Error")]
@ -31,7 +31,7 @@ impl actix_web::ResponseError for SearchError {
match self {
SearchError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
SearchError::MeiliSearchError(..) => StatusCode::BAD_REQUEST,
SearchError::SerDeError(..) => StatusCode::BAD_REQUEST,
SearchError::SerdeError(..) => StatusCode::BAD_REQUEST,
SearchError::IntParsingError(..) => StatusCode::BAD_REQUEST,
SearchError::InvalidIndex(..) => StatusCode::BAD_REQUEST,
}
@ -42,7 +42,7 @@ impl actix_web::ResponseError for SearchError {
error: match self {
SearchError::EnvError(..) => "environment_error",
SearchError::MeiliSearchError(..) => "meilisearch_error",
SearchError::SerDeError(..) => "invalid_input",
SearchError::SerdeError(..) => "invalid_input",
SearchError::IntParsingError(..) => "invalid_input",
SearchError::InvalidIndex(..) => "invalid_input",
},
@ -57,7 +57,13 @@ pub struct SearchConfig {
pub key: String,
}
/// A project document used for uploading projects to meilisearch's indices.
impl SearchConfig {
pub fn make_client(&self) -> Client {
Client::new(self.address.as_str(), self.key.as_str())
}
}
/// A project document used for uploading projects to MeiliSearch's indices.
/// This contains some extra data that is not returned by search results.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UploadSearchProject {


@ -1,7 +1,12 @@
use crate::database;
use crate::database::models;
use crate::database::models::project_item::QueryProject;
use crate::models::users::{Role, User, UserId};
use crate::routes::ApiError;
use actix_web::http::HeaderMap;
use actix_web::web;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use thiserror::Error;
#[derive(Error, Debug)]
@ -11,7 +16,7 @@ pub enum AuthenticationError {
#[error("Database Error: {0}")]
DatabaseError(#[from] crate::database::models::DatabaseError),
#[error("Error while parsing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
SerdeError(#[from] serde_json::Error),
#[error("Error while communicating to GitHub OAuth2: {0}")]
GithubError(#[from] reqwest::Error),
#[error("Invalid Authentication Credentials")]
@ -65,7 +70,7 @@ where
avatar_url: result.avatar_url,
bio: result.bio,
created: result.created,
role: Role::from_string(&*result.role),
role: Role::from_string(&result.role),
}),
None => Err(AuthenticationError::InvalidCredentialsError),
}
@ -116,3 +121,33 @@ where
_ => Err(AuthenticationError::InvalidCredentialsError),
}
}
pub async fn is_authorized(
project_data: &QueryProject,
user_option: &Option<User>,
pool: &web::Data<PgPool>,
) -> Result<bool, ApiError> {
let mut authorized = !project_data.status.is_hidden();
if let Some(user) = &user_option {
if !authorized {
if user.role.is_mod() {
authorized = true;
} else {
let user_id: database::models::ids::UserId = user.id.into();
let project_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
project_data.inner.team_id as database::models::ids::TeamId,
user_id as database::models::ids::UserId,
)
.fetch_one(&***pool)
.await?
.exists;
authorized = project_exists.unwrap_or(false);
}
}
}
Ok(authorized)
}

src/util/env.rs (new file)

@ -0,0 +1,10 @@
use std::str::FromStr;
pub fn parse_var<T: FromStr>(var: &'static str) -> Option<T> {
dotenv::var(var).ok().and_then(|i| i.parse().ok())
}
pub fn parse_strings_from_var(var: &'static str) -> Option<Vec<String>> {
dotenv::var(var)
.ok()
.and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
}
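These are the helpers behind the `VERSION_INDEX_INTERVAL` cleanup in the scheduler above; the target type comes from inference at the call site:

// Assuming VERSION_INDEX_INTERVAL is set in the environment or .env file:
let interval_secs: u64 = parse_var("VERSION_INDEX_INTERVAL").unwrap_or(1800);
let interval = std::time::Duration::from_secs(interval_secs);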


@ -1,20 +1,14 @@
pub fn get_image_content_type(extension: &str) -> Option<&'static str> {
let content_type = match &*extension {
"bmp" => "image/bmp",
"gif" => "image/gif",
"jpeg" | "jpg" | "jpe" => "image/jpeg",
"png" => "image/png",
"svg" | "svgz" => "image/svg+xml",
"webp" => "image/webp",
"rgb" => "image/x-rgb",
"mp4" => "video/mp4",
_ => "",
};
if !content_type.is_empty() {
Some(content_type)
} else {
None
match extension {
"bmp" => Some("image/bmp"),
"gif" => Some("image/gif"),
"jpeg" | "jpg" | "jpe" => Some("image/jpeg"),
"png" => Some("image/png"),
"svg" | "svgz" => Some("image/svg+xml"),
"webp" => Some("image/webp"),
"rgb" => Some("image/x-rgb"),
"mp4" => Some("video/mp4"),
_ => None,
}
}


@ -1,4 +1,6 @@
pub mod auth;
pub mod env;
pub mod ext;
pub mod routes;
pub mod validate;
pub mod webhook;

src/util/routes.rs (new file)

@ -0,0 +1,53 @@
use crate::routes::project_creation::CreateError;
use crate::routes::ApiError;
use actix_multipart::Field;
use actix_web::web::Payload;
use actix_web::HttpResponse;
use bytes::BytesMut;
use futures::StreamExt;
use serde::Serialize;
pub async fn read_from_payload(
payload: &mut Payload,
cap: usize,
err_msg: &'static str,
) -> Result<BytesMut, ApiError> {
let mut bytes = BytesMut::new();
while let Some(item) = payload.next().await {
if bytes.len() >= cap {
return Err(ApiError::InvalidInputError(String::from(err_msg)));
} else {
bytes.extend_from_slice(&item.map_err(|_| {
ApiError::InvalidInputError("Unable to parse bytes in payload sent!".to_string())
})?);
}
}
Ok(bytes)
}
pub async fn read_from_field(
field: &mut Field,
cap: usize,
err_msg: &'static str,
) -> Result<BytesMut, CreateError> {
let mut bytes = BytesMut::new();
while let Some(chunk) = field.next().await {
if bytes.len() >= cap {
return Err(CreateError::InvalidInput(String::from(err_msg)));
} else {
bytes.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
}
Ok(bytes)
}
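The two readers differ only in the stream type and the error constructor, and both check the cap before appending, so the final buffer can still overshoot `cap` by up to one chunk. A generic sketch that deduplicates them and enforces the cap exactly, assuming the caller first maps stream errors into the target error type (e.g. with `TryStreamExt::map_err`):

use bytes::{Bytes, BytesMut};
use futures::{Stream, StreamExt};

pub async fn read_capped<S, E>(
    mut stream: S,
    cap: usize,
    too_large: impl Fn() -> E,
) -> Result<BytesMut, E>
where
    S: Stream<Item = Result<Bytes, E>> + Unpin,
{
    let mut bytes = BytesMut::new();
    while let Some(chunk) = stream.next().await {
        let chunk = chunk?;
        if bytes.len() + chunk.len() > cap {
            return Err(too_large());
        }
        bytes.extend_from_slice(&chunk);
    }
    Ok(bytes)
}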
pub(crate) fn ok_or_not_found<T, U>(version_data: Option<T>) -> Result<HttpResponse, ApiError>
where
U: From<T> + Serialize,
{
if let Some(data) = version_data {
Ok(HttpResponse::Ok().json(U::from(data)))
} else {
Ok(HttpResponse::NotFound().body(""))
}
}
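Since only `T` appears in an argument position, callers generally have to name the target model with a turbofish, as in the `get_update_from_hash` hunk above:

ok_or_not_found::<QueryVersion, Version>(version_data)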


@ -51,5 +51,5 @@ pub fn validation_errors_to_string(errors: ValidationErrors, adder: Option<Strin
}
}
"".to_string()
String::new()
}


@ -15,7 +15,7 @@ struct DiscordEmbed {
#[derive(Serialize)]
struct DiscordEmbedField {
pub name: String,
pub name: &'static str,
pub value: String,
pub inline: bool,
}
@ -36,36 +36,36 @@ pub async fn send_discord_webhook(
) -> Result<(), reqwest::Error> {
let mut fields = vec![
DiscordEmbedField {
name: "id".to_string(),
name: "id",
value: project.id.to_string(),
inline: true,
},
DiscordEmbedField {
name: "project_type".to_string(),
value: project.project_type.to_string(),
name: "project_type",
value: project.project_type.clone(),
inline: true,
},
DiscordEmbedField {
name: "client_side".to_string(),
name: "client_side",
value: project.client_side.to_string(),
inline: true,
},
DiscordEmbedField {
name: "server_side".to_string(),
name: "server_side",
value: project.server_side.to_string(),
inline: true,
},
DiscordEmbedField {
name: "categories".to_string(),
name: "categories",
value: project.categories.join(", "),
inline: true,
},
];
if let Some(slug) = project.slug.clone() {
if let Some(ref slug) = project.slug {
fields.push(DiscordEmbedField {
name: "slug".to_string(),
value: slug,
name: "slug",
value: slug.clone(),
inline: true,
});
}
@ -82,7 +82,7 @@ pub async fn send_discord_webhook(
title: project.title,
description: project.description,
timestamp: project.published,
color: 6137157,
color: 0x5DA545,
fields,
image: DiscordEmbedImage {
url: project.icon_url,
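The decimal and hex literals are the same embed color; the hex form just makes the RGB channels readable:

assert_eq!(6137157, 0x5DA545); // #5DA545 — presumably the site's brand green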


@ -3,24 +3,24 @@ use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use zip::ZipArchive;
pub struct FabricValidator {}
pub struct FabricValidator;
impl super::Validator for FabricValidator {
fn get_file_extensions<'a>(&self) -> &'a [&'a str] {
fn get_file_extensions(&self) -> &[&str] {
&["jar", "zip"]
}
fn get_project_types<'a>(&self) -> &'a [&'a str] {
fn get_project_types(&self) -> &[&str] {
&["mod"]
}
fn get_supported_loaders<'a>(&self) -> &'a [&'a str] {
fn get_supported_loaders(&self) -> &[&str] {
&["fabric"]
}
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 18w49a, the first fabric version
SupportedGameVersions::PastDate(DateTime::<Utc>::from_utc(
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1543969469, 0),
Utc,
))
@ -31,9 +31,7 @@ impl super::Validator for FabricValidator {
archive: &mut ZipArchive<Cursor<&[u8]>>,
) -> Result<ValidationResult, ValidationError> {
archive.by_name("fabric.mod.json").map_err(|_| {
ValidationError::InvalidInputError(
"No fabric.mod.json present for Fabric file.".to_string(),
)
ValidationError::InvalidInputError("No fabric.mod.json present for Fabric file.".into())
})?;
if !archive
@ -41,7 +39,7 @@ impl super::Validator for FabricValidator {
.any(|name| name.ends_with("refmap.json") || name.ends_with(".class"))
{
return Ok(ValidationResult::Warning(
"Fabric mod file is a source file!".to_string(),
"Fabric mod file is a source file!".into(),
));
}


@ -3,18 +3,18 @@ use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use zip::ZipArchive;
pub struct ForgeValidator {}
pub struct ForgeValidator;
impl super::Validator for ForgeValidator {
fn get_file_extensions<'a>(&self) -> &'a [&'a str] {
fn get_file_extensions(&self) -> &[&str] {
&["jar", "zip"]
}
fn get_project_types<'a>(&self) -> &'a [&'a str] {
fn get_project_types(&self) -> &[&str] {
&["mod"]
}
fn get_supported_loaders<'a>(&self) -> &'a [&'a str] {
fn get_supported_loaders(&self) -> &[&str] {
&["forge"]
}
@ -31,12 +31,12 @@ impl super::Validator for ForgeValidator {
archive: &mut ZipArchive<Cursor<&[u8]>>,
) -> Result<ValidationResult, ValidationError> {
archive.by_name("META-INF/mods.toml").map_err(|_| {
ValidationError::InvalidInputError("No mods.toml present for Forge file.".to_string())
ValidationError::InvalidInputError("No mods.toml present for Forge file.".into())
})?;
if !archive.file_names().any(|name| name.ends_with(".class")) {
return Ok(ValidationResult::Warning(
"Forge mod file is a source file!".to_string(),
"Forge mod file is a source file!".into(),
));
}
@ -46,26 +46,26 @@ impl super::Validator for ForgeValidator {
}
}
pub struct LegacyForgeValidator {}
pub struct LegacyForgeValidator;
impl super::Validator for LegacyForgeValidator {
fn get_file_extensions<'a>(&self) -> &'a [&'a str] {
fn get_file_extensions(&self) -> &[&str] {
&["jar", "zip"]
}
fn get_project_types<'a>(&self) -> &'a [&'a str] {
fn get_project_types(&self) -> &[&str] {
&["mod"]
}
fn get_supported_loaders<'a>(&self) -> &'a [&'a str] {
fn get_supported_loaders(&self) -> &[&str] {
&["forge"]
}
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Times between versions 1.5.2 to 1.12.2, which all use the legacy way of defining mods
SupportedGameVersions::Range(
DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(1366818300, 0), Utc),
DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(1505810340, 0), Utc),
DateTime::from_utc(NaiveDateTime::from_timestamp(1366818300, 0), Utc),
DateTime::from_utc(NaiveDateTime::from_timestamp(1505810340, 0), Utc),
)
}
@ -74,12 +74,12 @@ impl super::Validator for LegacyForgeValidator {
archive: &mut ZipArchive<Cursor<&[u8]>>,
) -> Result<ValidationResult, ValidationError> {
archive.by_name("mcmod.info").map_err(|_| {
ValidationError::InvalidInputError("No mcmod.info present for Forge file.".to_string())
ValidationError::InvalidInputError("No mcmod.info present for Forge file.".into())
})?;
if !archive.file_names().any(|name| name.ends_with(".class")) {
return Ok(ValidationResult::Warning(
"Forge mod file is a source file!".to_string(),
"Forge mod file is a source file!".into(),
));
}


@ -18,9 +18,9 @@ pub enum ValidationError {
#[error("IO Error: {0}")]
IoError(#[from] std::io::Error),
#[error("Error while validating JSON: {0}")]
SerDeError(#[from] serde_json::Error),
SerdeError(#[from] serde_json::Error),
#[error("Invalid Input: {0}")]
InvalidInputError(String),
InvalidInputError(std::borrow::Cow<'static, str>),
}
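`Cow<'static, str>` keeps the constant messages allocation-free (the `.into()` calls elsewhere in this commit borrow) while still accepting the `format!` path. A minimal illustration:

use std::borrow::Cow;

fn main() {
    let constant: Cow<'static, str> = "Pack manifest is missing.".into(); // borrowed, no allocation
    let formatted: Cow<'static, str> =
        format!("File extension {} is invalid for input file", "exe").into(); // owned
    assert!(matches!(constant, Cow::Borrowed(_)));
    assert!(matches!(formatted, Cow::Owned(_)));
}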
#[derive(Eq, PartialEq)]
@ -28,7 +28,7 @@ pub enum ValidationResult {
/// File should be marked as primary
Pass,
/// File should not be marked primary; the reason is carried in the variant's string
Warning(String),
Warning(&'static str),
}
pub enum SupportedGameVersions {
@ -39,9 +39,9 @@ pub enum SupportedGameVersions {
}
pub trait Validator: Sync {
fn get_file_extensions<'a>(&self) -> &'a [&'a str];
fn get_project_types<'a>(&self) -> &'a [&'a str];
fn get_supported_loaders<'a>(&self) -> &'a [&'a str];
fn get_file_extensions(&self) -> &[&str];
fn get_project_types(&self) -> &[&str];
fn get_supported_loaders(&self) -> &[&str];
fn get_supported_game_versions(&self) -> SupportedGameVersions;
fn validate(
&self,
@ -50,10 +50,10 @@ pub trait Validator: Sync {
}
static VALIDATORS: [&dyn Validator; 4] = [
&PackValidator {},
&FabricValidator {},
&ForgeValidator {},
&LegacyForgeValidator {},
&PackValidator,
&FabricValidator,
&ForgeValidator,
&LegacyForgeValidator,
];
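`PackValidator {}` → `PackValidator` works because these are unit structs: the bare name is already the value, and `&PackValidator` still coerces to `&dyn Validator` inside the static. A self-contained toy of the same shape:

trait Speak: Sync {
    fn word(&self) -> &'static str;
}

struct Quiet; // a unit struct: no braces needed to construct one

impl Speak for Quiet {
    fn word(&self) -> &'static str {
        "..."
    }
}

static SPEAKERS: [&dyn Speak; 1] = [&Quiet];

fn main() {
    assert_eq!(SPEAKERS[0].word(), "...");
}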
/// The return value is whether this file should be marked as primary or not, based on the analysis of the file
@ -89,10 +89,13 @@ pub fn validate_file(
}
if visited {
Err(ValidationError::InvalidInputError(format!(
"File extension {} is invalid for input file",
file_extension
)))
Err(ValidationError::InvalidInputError(
format!(
"File extension {} is invalid for input file",
file_extension
)
.into(),
))
} else {
Ok(ValidationResult::Pass)
}


@ -1,4 +1,3 @@
use crate::models::projects::SideType;
use crate::validate::{SupportedGameVersions, ValidationError, ValidationResult};
use serde::{Deserialize, Serialize};
use std::io::{Cursor, Read};
@ -6,28 +5,13 @@ use zip::ZipArchive;
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PackFormat {
pub game: String,
pub struct PackFormat<'a> {
pub game: &'a str,
pub format_version: i32,
pub version_id: String,
pub name: String,
pub summary: Option<String>,
pub dependencies: std::collections::HashMap<PackDependency, String>,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
pub path: String,
pub hashes: std::collections::HashMap<String, String>,
pub env: Environment,
pub downloads: Vec<String>,
}
#[derive(Serialize, Deserialize)]
pub struct Environment {
pub client: SideType,
pub server: SideType,
pub version_id: &'a str,
pub name: &'a str,
pub summary: Option<&'a str>,
pub dependencies: std::collections::HashMap<PackDependency, &'a str>,
}
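With `&'a str` fields, serde borrows straight from the JSON buffer: `serde_json::from_str(&contents)` below allocates nothing for these strings, as long as `contents` outlives `pack` and the strings contain no escape sequences (escaped strings cannot be borrowed, so `from_str` errors on a plain `&str` field in that case). A minimal illustration:

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct Probe<'a> {
    game: &'a str,
    version_id: &'a str,
}

fn main() {
    let contents = String::from(r#"{"game":"minecraft","versionId":"1.0.0"}"#);
    let probe: Probe = serde_json::from_str(&contents).unwrap(); // zero-copy borrow
    assert_eq!(probe.game, "minecraft");
}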
#[derive(Serialize, Deserialize, Clone, Hash, PartialEq, Eq)]
@ -40,12 +24,12 @@ pub enum PackDependency {
impl std::fmt::Display for PackDependency {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(fmt, "{}", self.as_str())
fmt.write_str(self.as_str())
}
}
impl PackDependency {
// These are constant, so this can remove unneccessary allocations (`to_string`)
// These are constant, so this can remove unnecessary allocations (`to_string`)
pub fn as_str(&self) -> &'static str {
match self {
PackDependency::Forge => "forge",
@ -55,18 +39,18 @@ impl PackDependency {
}
}
pub struct PackValidator {}
pub struct PackValidator;
impl super::Validator for PackValidator {
fn get_file_extensions<'a>(&self) -> &'a [&'a str] {
fn get_file_extensions(&self) -> &[&str] {
&["zip"]
}
fn get_project_types<'a>(&self) -> &'a [&'a str] {
fn get_project_types(&self) -> &[&str] {
&["modpack"]
}
fn get_supported_loaders<'a>(&self) -> &'a [&'a str] {
fn get_supported_loaders(&self) -> &[&str] {
&["forge", "fabric"]
}
@ -78,20 +62,19 @@ impl super::Validator for PackValidator {
&self,
archive: &mut ZipArchive<Cursor<&[u8]>>,
) -> Result<ValidationResult, ValidationError> {
let mut file = archive.by_name("index.json").map_err(|_| {
ValidationError::InvalidInputError("Pack manifest is missing.".to_string())
})?;
let mut file = archive
.by_name("index.json")
.map_err(|_| ValidationError::InvalidInputError("Pack manifest is missing.".into()))?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let pack: PackFormat = serde_json::from_str(&*contents)?;
let pack: PackFormat = serde_json::from_str(&contents)?;
if pack.game != *"minecraft" {
return Err(ValidationError::InvalidInputError(format!(
"Game {0} does not exist!",
pack.game
)));
if pack.game != "minecraft" {
return Err(ValidationError::InvalidInputError(
format!("Game {0} does not exist!", pack.game).into(),
));
}
Ok(ValidationResult::Pass)