Fix clippy errors + lint, use turbo CI

Jai A 2024-10-18 16:07:35 -07:00
parent 663ab83b08
commit 8dd955563e
No known key found for this signature in database
GPG Key ID: 9A9F9B7250E9883C
186 changed files with 10615 additions and 6433 deletions

View File

@ -1,40 +0,0 @@
name: CI
on:
push:
branches: ["main"]
pull_request:
types: [opened, synchronize]
merge_group:
types: [ checks_requested ]
env:
CARGO_TERM_COLOR: always
SQLX_OFFLINE: true
jobs:
build:
name: Build and Lint (Rust)
runs-on: ubuntu-22.04
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Get build cache
id: cache-build
uses: actions/cache@v2
with:
path: target/**
key: ${{ runner.os }}-rust-cache
- name: Install build dependencies
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
- name: Build
run: cargo build
- name: Lint
run: cargo clippy --all-targets --all-features -- -D warnings

View File

@ -10,7 +10,7 @@ on:
jobs:
build:
name: Build, Test, and Lint (Turbo)
name: Build, Test, and Lint
runs-on: ubuntu-22.04
steps:
@ -66,5 +66,8 @@ jobs:
- name: Lint
run: pnpm lint
- name: Start docker compose
run: docker-compose up -d
- name: Test
run: pnpm test
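
The standalone Rust workflow deleted above is replaced by per-package scripts that the Turbo-driven workflow invokes through pnpm. The repository's actual turbo.json is not part of this excerpt, so the snippet below is only a hedged sketch of how the new build/lint/test scripts could be registered (Turborepo 2.x uses a "tasks" key; 1.x calls it "pipeline"):

{
  "$schema": "https://turbo.build/schema.json",
  "tasks": {
    "build": { "dependsOn": ["^build"] },
    "lint": {},
    "test": { "dependsOn": ["build"] }
  }
}

Assuming the root package.json delegates to turbo, the workflow's pnpm lint and pnpm test steps would then fan out to every workspace package, including the new Rust packages added below.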

Cargo.lock generated (63 lines changed)
View File

@ -183,7 +183,7 @@ dependencies = [
"futures-core",
"futures-util",
"mio 1.0.2",
"socket2 0.5.7",
"socket2",
"tokio",
"tracing",
]
@ -246,7 +246,7 @@ dependencies = [
"serde_json",
"serde_urlencoded",
"smallvec",
"socket2 0.5.7",
"socket2",
"time",
"url",
]
@ -407,6 +407,12 @@ dependencies = [
"derive_arbitrary",
]
[[package]]
name = "arc-swap"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
[[package]]
name = "argon2"
version = "0.5.3"
@ -1711,7 +1717,7 @@ dependencies = [
"openssl-probe",
"openssl-sys",
"schannel",
"socket2 0.5.7",
"socket2",
"windows-sys 0.52.0",
]
@ -1861,11 +1867,10 @@ dependencies = [
[[package]]
name = "deadpool"
version = "0.10.0"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490"
checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed"
dependencies = [
"async-trait",
"deadpool-runtime",
"num_cpus",
"tokio",
@ -1873,9 +1878,9 @@ dependencies = [
[[package]]
name = "deadpool-redis"
version = "0.14.0"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36f2381b0e993d06a1f6d49f486b33bc4004085bf980340fc05726bacc681fff"
checksum = "bfae6799b68a735270e4344ee3e834365f707c72da09c9a8bb89b45cc3351395"
dependencies = [
"deadpool",
"redis",
@ -3371,7 +3376,7 @@ dependencies = [
"httpdate",
"itoa 1.0.11",
"pin-project-lite",
"socket2 0.5.7",
"socket2",
"tokio",
"tower-service",
"tracing",
@ -3474,7 +3479,7 @@ dependencies = [
"http-body 1.0.1",
"hyper 1.4.1",
"pin-project-lite",
"socket2 0.5.7",
"socket2",
"tokio",
"tower-service",
"tracing",
@ -4212,7 +4217,7 @@ dependencies = [
"nom",
"percent-encoding",
"quoted_printable",
"socket2 0.5.7",
"socket2",
"tokio",
"url",
]
@ -4824,6 +4829,16 @@ dependencies = [
"winapi",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
"num-integer",
"num-traits",
]
[[package]]
name = "num-bigint-dig"
version = "0.8.4"
@ -5499,7 +5514,7 @@ dependencies = [
"bincode",
"either",
"fnv",
"itertools 0.11.0",
"itertools 0.12.1",
"lazy_static",
"nom",
"quick-xml 0.31.0",
@ -5893,7 +5908,7 @@ dependencies = [
"quinn-udp",
"rustc-hash",
"rustls 0.23.13",
"socket2 0.5.7",
"socket2",
"thiserror",
"tokio",
"tracing",
@ -5924,7 +5939,7 @@ checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b"
dependencies = [
"libc",
"once_cell",
"socket2 0.5.7",
"socket2",
"tracing",
"windows-sys 0.59.0",
]
@ -6085,22 +6100,24 @@ dependencies = [
[[package]]
name = "redis"
version = "0.24.0"
version = "0.27.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c580d9cbbe1d1b479e8d67cf9daf6a62c957e6846048408b80b43ac3f6af84cd"
checksum = "81cccf17a692ce51b86564334614d72dcae1def0fd5ecebc9f02956da74352b5"
dependencies = [
"ahash 0.8.11",
"arc-swap",
"async-trait",
"bytes",
"combine",
"futures-util",
"itoa 1.0.11",
"num-bigint",
"percent-encoding",
"pin-project-lite",
"r2d2",
"ryu",
"sha1_smol",
"socket2 0.4.10",
"socket2",
"tokio",
"tokio-util",
"url",
@ -7325,16 +7342,6 @@ dependencies = [
"serde",
]
[[package]]
name = "socket2"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "socket2"
version = "0.5.7"
@ -8650,7 +8657,7 @@ dependencies = [
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2 0.5.7",
"socket2",
"tokio-macros",
"tracing",
"windows-sys 0.52.0",

View File

@ -0,0 +1,10 @@
{
"name": "@modrinth/app-playground",
"scripts": {
"build": "cargo build --release",
"lint": "cargo fmt --check && cargo clippy --all-targets --all-features -- -D warnings",
"fix": "cargo fmt && cargo clippy --fix",
"dev": "cargo run",
"test": "cargo test"
}
}

View File

@ -3,7 +3,10 @@
"scripts": {
"build": "tauri build",
"tauri": "tauri",
"dev": "tauri dev"
"dev": "tauri dev",
"test": "cargo test",
"lint": "cargo fmt --check && cargo clippy --all-targets --all-features -- -D warnings",
"fix": "cargo fmt && cargo clippy --fix"
},
"devDependencies": {
"@tauri-apps/cli": "2.0.0-rc.16"
@ -12,4 +15,4 @@
"@modrinth/app-frontend": "workspace:*",
"@modrinth/app-lib": "workspace:*"
}
}
}

View File

@ -88,8 +88,8 @@ rust_decimal = { version = "1.33.1", features = [
"serde-with-float",
"serde-with-str",
] }
redis = { version = "0.24.0", features = ["tokio-comp", "ahash", "r2d2"]}
deadpool-redis = "0.14.0"
redis = { version = "0.27.5", features = ["tokio-comp", "ahash", "r2d2"]}
deadpool-redis = "0.18.0"
clickhouse = { version = "0.11.2", features = ["uuid", "time"] }
uuid = { version = "1.2.2", features = ["v4", "fast-rng", "serde"] }
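
Bumping redis to 0.27.5 and deadpool-redis to 0.18 is what adds the arc-swap and num-bigint entries to Cargo.lock above and drops the duplicate socket2 0.4 entry. For orientation only, here is a minimal sketch of pool construction with these crates' documented API; labrinth's own RedisPool wrapper is what the rest of this diff actually touches, and it is not reproduced here:

use deadpool_redis::{redis::cmd, Config, Runtime};

// Hypothetical helper; the real project configures its pool elsewhere.
async fn ping(url: &str) -> Result<(), Box<dyn std::error::Error>> {
    let cfg = Config::from_url(url);                        // e.g. "redis://localhost"
    let pool = cfg.create_pool(Some(Runtime::Tokio1))?;     // tokio-backed pool
    let mut conn = pool.get().await?;                       // checked-out connection
    let _: () = cmd("PING").query_async(&mut conn).await?;  // plain redis command
    Ok(())
}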

View File

@ -0,0 +1,10 @@
{
"name": "@modrinth/labrinth",
"scripts": {
"build": "cargo build --release",
"lint": "cargo fmt --check && cargo clippy --all-targets --all-features -- -D warnings",
"fix": "cargo fmt && cargo clippy --fix",
"dev": "cargo run",
"test": "cargo test"
}
}
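
From this point on, the hunks are mostly mechanical: cargo fmt re-wrapping labrinth's Rust sources to a narrower line width, plus the signature and match-arm splits that come with it. The formatter settings themselves are not shown in this excerpt; the roughly 80-column wrapping below is consistent with a rustfmt.toml along these lines, given purely as a hypothetical sketch:

# Hypothetical rustfmt.toml; not included in this diff.
max_width = 80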

View File

@ -10,11 +10,17 @@ use itertools::Itertools;
use sqlx::PgPool;
pub trait ValidateAuthorized {
fn validate_authorized(&self, user_option: Option<&User>) -> Result<(), ApiError>;
fn validate_authorized(
&self,
user_option: Option<&User>,
) -> Result<(), ApiError>;
}
pub trait ValidateAllAuthorized {
fn validate_all_authorized(self, user_option: Option<&User>) -> Result<(), ApiError>;
fn validate_all_authorized(
self,
user_option: Option<&User>,
) -> Result<(), ApiError>;
}
impl<'a, T, A> ValidateAllAuthorized for T
@ -22,7 +28,10 @@ where
T: IntoIterator<Item = &'a A>,
A: ValidateAuthorized + 'a,
{
fn validate_all_authorized(self, user_option: Option<&User>) -> Result<(), ApiError> {
fn validate_all_authorized(
self,
user_option: Option<&User>,
) -> Result<(), ApiError> {
self.into_iter()
.try_for_each(|c| c.validate_authorized(user_option))
}
@ -34,9 +43,14 @@ pub async fn is_visible_project(
pool: &PgPool,
hide_unlisted: bool,
) -> Result<bool, ApiError> {
filter_visible_project_ids(vec![project_data], user_option, pool, hide_unlisted)
.await
.map(|x| !x.is_empty())
filter_visible_project_ids(
vec![project_data],
user_option,
pool,
hide_unlisted,
)
.await
.map(|x| !x.is_empty())
}
pub async fn is_team_member_project(
@ -99,8 +113,10 @@ pub async fn filter_visible_project_ids(
// For hidden projects, return a filtered list of projects for which we are enlisted on the team
if !check_projects.is_empty() {
return_projects
.extend(filter_enlisted_projects_ids(check_projects, user_option, pool).await?);
return_projects.extend(
filter_enlisted_projects_ids(check_projects, user_option, pool)
.await?,
);
}
Ok(return_projects)
@ -143,7 +159,8 @@ pub async fn filter_enlisted_projects_ids(
.fetch(pool)
.map_ok(|row| {
for x in projects.iter() {
let bool = Some(x.id.0) == row.id && Some(x.team_id.0) == row.team_id;
let bool =
Some(x.id.0) == row.id && Some(x.team_id.0) == row.team_id;
if bool {
return_projects.push(x.id);
}
@ -195,7 +212,10 @@ pub async fn filter_visible_versions(
}
impl ValidateAuthorized for models::OAuthClient {
fn validate_authorized(&self, user_option: Option<&User>) -> Result<(), ApiError> {
fn validate_authorized(
&self,
user_option: Option<&User>,
) -> Result<(), ApiError> {
if let Some(user) = user_option {
return if user.role.is_mod() || user.id == self.created_by.into() {
Ok(())
@ -240,7 +260,8 @@ pub async fn filter_visible_version_ids(
// Then, get enlisted versions (Versions that are a part of a project we are a member of)
let enlisted_version_ids =
filter_enlisted_version_ids(versions.clone(), user_option, pool, redis).await?;
filter_enlisted_version_ids(versions.clone(), user_option, pool, redis)
.await?;
// Return versions that are not hidden, we are a mod of, or we are enlisted on the team of
for version in versions {
@ -248,7 +269,8 @@ pub async fn filter_visible_version_ids(
// - it's not hidden and we can see the project
// - we are a mod
// - we are enlisted on the team of the mod
if (!version.status.is_hidden() && visible_project_ids.contains(&version.project_id))
if (!version.status.is_hidden()
&& visible_project_ids.contains(&version.project_id))
|| user_option
.as_ref()
.map(|x| x.role.is_mod())
@ -292,7 +314,8 @@ pub async fn filter_enlisted_version_ids(
.as_ref()
.map(|x| x.role.is_mod())
.unwrap_or(false)
|| (user_option.is_some() && authorized_project_ids.contains(&version.project_id))
|| (user_option.is_some()
&& authorized_project_ids.contains(&version.project_id))
{
return_versions.push(version.id);
}
@ -307,7 +330,9 @@ pub async fn is_visible_collection(
) -> Result<bool, ApiError> {
let mut authorized = !collection_data.status.is_hidden();
if let Some(user) = &user_option {
if !authorized && (user.role.is_mod() || user.id == collection_data.user_id.into()) {
if !authorized
&& (user.role.is_mod() || user.id == collection_data.user_id.into())
{
authorized = true;
}
}

View File

@ -16,7 +16,11 @@ pub enum MailError {
Smtp(#[from] lettre::transport::smtp::Error),
}
pub fn send_email_raw(to: String, subject: String, body: String) -> Result<(), MailError> {
pub fn send_email_raw(
to: String,
subject: String,
body: String,
) -> Result<(), MailError> {
let email = Message::builder()
.from(Mailbox::new(
Some("Modrinth".to_string()),

View File

@ -5,8 +5,9 @@ pub mod templates;
pub mod validate;
pub use crate::auth::email::send_email;
pub use checks::{
filter_enlisted_projects_ids, filter_enlisted_version_ids, filter_visible_collections,
filter_visible_project_ids, filter_visible_projects,
filter_enlisted_projects_ids, filter_enlisted_version_ids,
filter_visible_collections, filter_visible_project_ids,
filter_visible_projects,
};
use serde::{Deserialize, Serialize};
// pub use pat::{generate_pat, PersonalAccessToken};
@ -55,16 +56,22 @@ impl actix_web::ResponseError for AuthenticationError {
match self {
AuthenticationError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::Sqlx(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::Database(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::Database(..) => {
StatusCode::INTERNAL_SERVER_ERROR
}
AuthenticationError::SerDe(..) => StatusCode::BAD_REQUEST,
AuthenticationError::Reqwest(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::Reqwest(..) => {
StatusCode::INTERNAL_SERVER_ERROR
}
AuthenticationError::InvalidCredentials => StatusCode::UNAUTHORIZED,
AuthenticationError::Decoding(..) => StatusCode::BAD_REQUEST,
AuthenticationError::Mail(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::InvalidAuthMethod => StatusCode::UNAUTHORIZED,
AuthenticationError::InvalidClientId => StatusCode::UNAUTHORIZED,
AuthenticationError::Url => StatusCode::BAD_REQUEST,
AuthenticationError::FileHosting(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::FileHosting(..) => {
StatusCode::INTERNAL_SERVER_ERROR
}
AuthenticationError::DuplicateUser => StatusCode::BAD_REQUEST,
AuthenticationError::SocketError => StatusCode::BAD_REQUEST,
}
@ -99,7 +106,9 @@ impl AuthenticationError {
}
}
#[derive(Serialize, Deserialize, Default, Eq, PartialEq, Clone, Copy, Debug)]
#[derive(
Serialize, Deserialize, Default, Eq, PartialEq, Clone, Copy, Debug,
)]
#[serde(rename_all = "lowercase")]
pub enum AuthProvider {
#[default]

View File

@ -77,12 +77,16 @@ impl actix_web::ResponseError for OAuthError {
| OAuthErrorType::OnlySupportsAuthorizationCodeGrant(_)
| OAuthErrorType::RedirectUriChanged(_)
| OAuthErrorType::UnauthorizedClient => StatusCode::BAD_REQUEST,
OAuthErrorType::ClientAuthenticationFailed => StatusCode::UNAUTHORIZED,
OAuthErrorType::ClientAuthenticationFailed => {
StatusCode::UNAUTHORIZED
}
}
}
fn error_response(&self) -> HttpResponse {
if let Some(ValidatedRedirectUri(mut redirect_uri)) = self.valid_redirect_uri.clone() {
if let Some(ValidatedRedirectUri(mut redirect_uri)) =
self.valid_redirect_uri.clone()
{
redirect_uri = format!(
"{}?error={}&error_description={}",
redirect_uri,
@ -114,7 +118,9 @@ pub enum OAuthErrorType {
ClientMissingRedirectURI {
client_id: crate::database::models::OAuthClientId,
},
#[error("The provided redirect URI did not match any configured in the client")]
#[error(
"The provided redirect URI did not match any configured in the client"
)]
RedirectUriNotConfigured(String),
#[error("The provided scope was malformed or did not correspond to known scopes ({0})")]
FailedScopeParse(bitflags::parser::ParseError),
@ -159,14 +165,20 @@ impl OAuthErrorType {
// IETF RFC 6749 4.1.2.1 (https://datatracker.ietf.org/doc/html/rfc6749#autoid-38)
// And 5.2 (https://datatracker.ietf.org/doc/html/rfc6749#section-5.2)
match self {
Self::RedirectUriNotConfigured(_) | Self::ClientMissingRedirectURI { client_id: _ } => {
"invalid_uri"
Self::RedirectUriNotConfigured(_)
| Self::ClientMissingRedirectURI { client_id: _ } => "invalid_uri",
Self::AuthenticationError(_) | Self::InvalidAcceptFlowId => {
"server_error"
}
Self::RedirectUriChanged(_) | Self::MalformedId(_) => {
"invalid_request"
}
Self::AuthenticationError(_) | Self::InvalidAcceptFlowId => "server_error",
Self::RedirectUriChanged(_) | Self::MalformedId(_) => "invalid_request",
Self::FailedScopeParse(_) | Self::ScopesTooBroad => "invalid_scope",
Self::InvalidClientId(_) | Self::ClientAuthenticationFailed => "invalid_client",
Self::InvalidAuthCode | Self::OnlySupportsAuthorizationCodeGrant(_) => "invalid_grant",
Self::InvalidClientId(_) | Self::ClientAuthenticationFailed => {
"invalid_client"
}
Self::InvalidAuthCode
| Self::OnlySupportsAuthorizationCodeGrant(_) => "invalid_grant",
Self::UnauthorizedClient => "unauthorized_client",
Self::AccessDenied => "access_denied",
}

View File

@ -84,18 +84,19 @@ pub async fn init_oauth(
client.id,
)?;
let requested_scopes = oauth_info
.scope
.as_ref()
.map_or(Ok(client.max_scopes), |s| {
Scopes::parse_from_oauth_scopes(s).map_err(|e| {
OAuthError::redirect(
OAuthErrorType::FailedScopeParse(e),
&oauth_info.state,
&redirect_uri,
)
})
})?;
let requested_scopes =
oauth_info
.scope
.as_ref()
.map_or(Ok(client.max_scopes), |s| {
Scopes::parse_from_oauth_scopes(s).map_err(|e| {
OAuthError::redirect(
OAuthErrorType::FailedScopeParse(e),
&oauth_info.state,
&redirect_uri,
)
})
})?;
if !client.max_scopes.contains(requested_scopes) {
return Err(OAuthError::redirect(
@ -108,9 +109,13 @@ pub async fn init_oauth(
let existing_authorization =
OAuthClientAuthorization::get(client.id, user.id.into(), &**pool)
.await
.map_err(|e| OAuthError::redirect(e, &oauth_info.state, &redirect_uri))?;
let redirect_uris =
OAuthRedirectUris::new(oauth_info.redirect_uri.clone(), redirect_uri.clone());
.map_err(|e| {
OAuthError::redirect(e, &oauth_info.state, &redirect_uri)
})?;
let redirect_uris = OAuthRedirectUris::new(
oauth_info.redirect_uri.clone(),
redirect_uri.clone(),
);
match existing_authorization {
Some(existing_authorization)
if existing_authorization.scopes.contains(requested_scopes) =>
@ -130,14 +135,17 @@ pub async fn init_oauth(
let flow_id = Flow::InitOAuthAppApproval {
user_id: user.id.into(),
client_id: client.id,
existing_authorization_id: existing_authorization.map(|a| a.id),
existing_authorization_id: existing_authorization
.map(|a| a.id),
scopes: requested_scopes,
redirect_uris,
state: oauth_info.state.clone(),
}
.insert(Duration::minutes(30), &redis)
.await
.map_err(|e| OAuthError::redirect(e, &oauth_info.state, &redirect_uri))?;
.map_err(|e| {
OAuthError::redirect(e, &oauth_info.state, &redirect_uri)
})?;
let access_request = OAuthClientAccessRequest {
client_id: client.id.into(),
@ -169,7 +177,15 @@ pub async fn accept_client_scopes(
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, OAuthError> {
accept_or_reject_client_scopes(true, req, accept_body, pool, redis, session_queue).await
accept_or_reject_client_scopes(
true,
req,
accept_body,
pool,
redis,
session_queue,
)
.await
}
#[post("reject")]
@ -180,7 +196,8 @@ pub async fn reject_client_scopes(
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, OAuthError> {
accept_or_reject_client_scopes(false, req, body, pool, redis, session_queue).await
accept_or_reject_client_scopes(false, req, body, pool, redis, session_queue)
.await
}
#[derive(Serialize, Deserialize)]
@ -231,13 +248,17 @@ pub async fn request_token(
{
// https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3
if req_client_id != client_id.into() {
return Err(OAuthError::error(OAuthErrorType::UnauthorizedClient));
return Err(OAuthError::error(
OAuthErrorType::UnauthorizedClient,
));
}
if original_redirect_uri != req_params.redirect_uri {
return Err(OAuthError::error(OAuthErrorType::RedirectUriChanged(
req_params.redirect_uri.clone(),
)));
return Err(OAuthError::error(
OAuthErrorType::RedirectUriChanged(
req_params.redirect_uri.clone(),
),
));
}
if req_params.grant_type != "authorization_code" {
@ -251,7 +272,8 @@ pub async fn request_token(
let scopes = scopes - Scopes::restricted();
let mut transaction = pool.begin().await?;
let token_id = generate_oauth_access_token_id(&mut transaction).await?;
let token_id =
generate_oauth_access_token_id(&mut transaction).await?;
let token = generate_access_token();
let token_hash = OAuthAccessToken::hash_token(&token);
let time_until_expiration = OAuthAccessToken {
@ -323,7 +345,9 @@ pub async fn accept_or_reject_client_scopes(
}) = flow
{
if current_user.id != user_id.into() {
return Err(OAuthError::error(AuthenticationError::InvalidCredentials));
return Err(OAuthError::error(
AuthenticationError::InvalidCredentials,
));
}
if accept {
@ -331,10 +355,19 @@ pub async fn accept_or_reject_client_scopes(
let auth_id = match existing_authorization_id {
Some(id) => id,
None => generate_oauth_client_authorization_id(&mut transaction).await?,
None => {
generate_oauth_client_authorization_id(&mut transaction)
.await?
}
};
OAuthClientAuthorization::upsert(auth_id, client_id, user_id, scopes, &mut transaction)
.await?;
OAuthClientAuthorization::upsert(
auth_id,
client_id,
user_id,
scopes,
&mut transaction,
)
.await?;
transaction.commit().await?;
@ -402,14 +435,17 @@ async fn init_oauth_code_flow(
}
.insert(Duration::minutes(10), redis)
.await
.map_err(|e| OAuthError::redirect(e, &state, &redirect_uris.validated.clone()))?;
.map_err(|e| {
OAuthError::redirect(e, &state, &redirect_uris.validated.clone())
})?;
let mut redirect_params = vec![format!("code={code}")];
if let Some(state) = state {
redirect_params.push(format!("state={state}"));
}
let redirect_uri = append_params_to_uri(&redirect_uris.validated.0, &redirect_params);
let redirect_uri =
append_params_to_uri(&redirect_uris.validated.0, &redirect_params);
// IETF RFC 6749 Section 4.1.2 (https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.2)
Ok(HttpResponse::Ok()

View File

@ -18,17 +18,20 @@ impl ValidatedRedirectUri {
validate_against: impl IntoIterator<Item = &'a str> + Clone,
client_id: OAuthClientId,
) -> Result<Self, OAuthError> {
if let Some(first_client_redirect_uri) = validate_against.clone().into_iter().next() {
if let Some(first_client_redirect_uri) =
validate_against.clone().into_iter().next()
{
if let Some(to_validate) = to_validate {
if validate_against
.into_iter()
.any(|uri| same_uri_except_query_components(uri, to_validate))
{
if validate_against.into_iter().any(|uri| {
same_uri_except_query_components(uri, to_validate)
}) {
Ok(ValidatedRedirectUri(to_validate.clone()))
} else {
Err(OAuthError::error(OAuthErrorType::RedirectUriNotConfigured(
to_validate.clone(),
)))
Err(OAuthError::error(
OAuthErrorType::RedirectUriNotConfigured(
to_validate.clone(),
),
))
}
} else {
Ok(ValidatedRedirectUri(first_client_redirect_uri.to_string()))
@ -55,20 +58,26 @@ mod tests {
fn validate_for_none_returns_first_valid_uri() {
let validate_against = vec!["https://modrinth.com/a"];
let validated =
ValidatedRedirectUri::validate(&None, validate_against.clone(), OAuthClientId(0))
.unwrap();
let validated = ValidatedRedirectUri::validate(
&None,
validate_against.clone(),
OAuthClientId(0),
)
.unwrap();
assert_eq!(validate_against[0], validated.0);
}
#[test]
fn validate_for_valid_uri_returns_first_matching_uri_ignoring_query_params() {
fn validate_for_valid_uri_returns_first_matching_uri_ignoring_query_params()
{
let validate_against = vec![
"https://modrinth.com/a?q3=p3&q4=p4",
"https://modrinth.com/a/b/c?q1=p1&q2=p2",
];
let to_validate = "https://modrinth.com/a/b/c?query0=param0&query1=param1".to_string();
let to_validate =
"https://modrinth.com/a/b/c?query0=param0&query1=param1"
.to_string();
let validated = ValidatedRedirectUri::validate(
&Some(to_validate.clone()),
@ -85,10 +94,15 @@ mod tests {
let validate_against = vec!["https://modrinth.com/a"];
let to_validate = "https://modrinth.com/a/b".to_string();
let validated =
ValidatedRedirectUri::validate(&Some(to_validate), validate_against, OAuthClientId(0));
let validated = ValidatedRedirectUri::validate(
&Some(to_validate),
validate_against,
OAuthClientId(0),
);
assert!(validated
.is_err_and(|e| matches!(e.error_type, OAuthErrorType::RedirectUriNotConfigured(_))));
assert!(validated.is_err_and(|e| matches!(
e.error_type,
OAuthErrorType::RedirectUriNotConfigured(_)
)));
}
}

View File

@ -21,10 +21,15 @@ where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
// Fetch DB user record and minos user from headers
let (scopes, db_user) =
get_user_record_from_bearer_token(req, None, executor, redis, session_queue)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let (scopes, db_user) = get_user_record_from_bearer_token(
req,
None,
executor,
redis,
session_queue,
)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let user = User::from_full(db_user);
@ -58,31 +63,37 @@ where
let possible_user = match token.split_once('_') {
Some(("mrp", _)) => {
let pat =
crate::database::models::pat_item::PersonalAccessToken::get(token, executor, redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
crate::database::models::pat_item::PersonalAccessToken::get(
token, executor, redis,
)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if pat.expires < Utc::now() {
return Err(AuthenticationError::InvalidCredentials);
}
let user = user_item::User::get_id(pat.user_id, executor, redis).await?;
let user =
user_item::User::get_id(pat.user_id, executor, redis).await?;
session_queue.add_pat(pat.id).await;
user.map(|x| (pat.scopes, x))
}
Some(("mra", _)) => {
let session =
crate::database::models::session_item::Session::get(token, executor, redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let session = crate::database::models::session_item::Session::get(
token, executor, redis,
)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if session.expires < Utc::now() {
return Err(AuthenticationError::InvalidCredentials);
}
let user = user_item::User::get_id(session.user_id, executor, redis).await?;
let user =
user_item::User::get_id(session.user_id, executor, redis)
.await?;
let rate_limit_ignore = dotenvy::var("RATE_LIMIT_IGNORE_KEY")?;
if !req
@ -111,7 +122,9 @@ where
return Err(AuthenticationError::InvalidCredentials);
}
let user = user_item::User::get_id(access_token.user_id, executor, redis).await?;
let user =
user_item::User::get_id(access_token.user_id, executor, redis)
.await?;
session_queue.add_oauth_access_token(access_token.id).await;
@ -119,7 +132,8 @@ where
}
Some(("github", _)) | Some(("gho", _)) | Some(("ghp", _)) => {
let user = AuthProvider::GitHub.get_user(token).await?;
let id = AuthProvider::GitHub.get_user_id(&user.id, executor).await?;
let id =
AuthProvider::GitHub.get_user_id(&user.id, executor).await?;
let user = user_item::User::get_id(
id.ok_or_else(|| AuthenticationError::InvalidCredentials)?,
@ -135,7 +149,9 @@ where
Ok(possible_user)
}
pub fn extract_authorization_header(req: &HttpRequest) -> Result<&str, AuthenticationError> {
pub fn extract_authorization_header(
req: &HttpRequest,
) -> Result<&str, AuthenticationError> {
let headers = req.headers();
let token_val: Option<&HeaderValue> = headers.get(AUTHORIZATION);
token_val
@ -154,9 +170,15 @@ pub async fn check_is_moderator_from_headers<'a, 'b, E>(
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let user = get_user_from_headers(req, executor, redis, session_queue, required_scopes)
.await?
.1;
let user = get_user_from_headers(
req,
executor,
redis,
session_queue,
required_scopes,
)
.await?
.1;
if user.role.is_mod() {
Ok(user)

View File

@ -6,7 +6,8 @@ mod fetch;
pub use fetch::*;
pub async fn init_client() -> clickhouse::error::Result<clickhouse::Client> {
init_client_with_database(&dotenvy::var("CLICKHOUSE_DATABASE").unwrap()).await
init_client_with_database(&dotenvy::var("CLICKHOUSE_DATABASE").unwrap())
.await
}
pub async fn init_client_with_database(
@ -16,9 +17,12 @@ pub async fn init_client_with_database(
let mut http_connector = HttpConnector::new();
http_connector.enforce_http(false); // allow https URLs
let tls_connector = native_tls::TlsConnector::builder().build().unwrap().into();
let https_connector = HttpsConnector::from((http_connector, tls_connector));
let hyper_client = hyper::client::Client::builder().build(https_connector);
let tls_connector =
native_tls::TlsConnector::builder().build().unwrap().into();
let https_connector =
HttpsConnector::from((http_connector, tls_connector));
let hyper_client =
hyper::client::Client::builder().build(https_connector);
clickhouse::Client::with_http_client(hyper_client)
.with_url(dotenvy::var("CLICKHOUSE_URL").unwrap())

View File

@ -86,7 +86,10 @@ impl Category {
Ok(result.map(|r| CategoryId(r.id)))
}
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<Category>, DatabaseError>
pub async fn list<'a, E>(
exec: E,
redis: &RedisPool,
) -> Result<Vec<Category>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -128,7 +131,10 @@ impl Category {
}
impl LinkPlatform {
pub async fn get_id<'a, E>(id: &str, exec: E) -> Result<Option<LinkPlatformId>, DatabaseError>
pub async fn get_id<'a, E>(
id: &str,
exec: E,
) -> Result<Option<LinkPlatformId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -145,7 +151,10 @@ impl LinkPlatform {
Ok(result.map(|r| LinkPlatformId(r.id)))
}
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<LinkPlatform>, DatabaseError>
pub async fn list<'a, E>(
exec: E,
redis: &RedisPool,
) -> Result<Vec<LinkPlatform>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -174,7 +183,12 @@ impl LinkPlatform {
.await?;
redis
.set_serialized_to_json(TAGS_NAMESPACE, "link_platform", &result, None)
.set_serialized_to_json(
TAGS_NAMESPACE,
"link_platform",
&result,
None,
)
.await?;
Ok(result)
@ -182,7 +196,10 @@ impl LinkPlatform {
}
impl ReportType {
pub async fn get_id<'a, E>(name: &str, exec: E) -> Result<Option<ReportTypeId>, DatabaseError>
pub async fn get_id<'a, E>(
name: &str,
exec: E,
) -> Result<Option<ReportTypeId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -199,7 +216,10 @@ impl ReportType {
Ok(result.map(|r| ReportTypeId(r.id)))
}
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<String>, DatabaseError>
pub async fn list<'a, E>(
exec: E,
redis: &RedisPool,
) -> Result<Vec<String>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -224,7 +244,12 @@ impl ReportType {
.await?;
redis
.set_serialized_to_json(TAGS_NAMESPACE, "report_type", &result, None)
.set_serialized_to_json(
TAGS_NAMESPACE,
"report_type",
&result,
None,
)
.await?;
Ok(result)
@ -232,7 +257,10 @@ impl ReportType {
}
impl ProjectType {
pub async fn get_id<'a, E>(name: &str, exec: E) -> Result<Option<ProjectTypeId>, DatabaseError>
pub async fn get_id<'a, E>(
name: &str,
exec: E,
) -> Result<Option<ProjectTypeId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -249,7 +277,10 @@ impl ProjectType {
Ok(result.map(|r| ProjectTypeId(r.id)))
}
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<String>, DatabaseError>
pub async fn list<'a, E>(
exec: E,
redis: &RedisPool,
) -> Result<Vec<String>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -274,7 +305,12 @@ impl ProjectType {
.await?;
redis
.set_serialized_to_json(TAGS_NAMESPACE, "project_type", &result, None)
.set_serialized_to_json(
TAGS_NAMESPACE,
"project_type",
&result,
None,
)
.await?;
Ok(result)

View File

@ -122,9 +122,12 @@ impl ChargeItem {
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ChargeItem>, DatabaseError> {
let user_id = user_id.0;
let res = select_charges_with_predicate!("WHERE user_id = $1 ORDER BY due DESC", user_id)
.fetch_all(exec)
.await?;
let res = select_charges_with_predicate!(
"WHERE user_id = $1 ORDER BY due DESC",
user_id
)
.fetch_all(exec)
.await?;
Ok(res
.into_iter()

View File

@ -212,7 +212,10 @@ impl Collection {
Ok(val)
}
pub async fn clear_cache(id: CollectionId, redis: &RedisPool) -> Result<(), DatabaseError> {
pub async fn clear_cache(
id: CollectionId,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
redis.delete(COLLECTIONS_NAMESPACE, id.0).await?;

View File

@ -68,12 +68,20 @@ impl Flow {
.collect::<String>();
redis
.set_serialized_to_json(FLOWS_NAMESPACE, &flow, &self, Some(expires.num_seconds()))
.set_serialized_to_json(
FLOWS_NAMESPACE,
&flow,
&self,
Some(expires.num_seconds()),
)
.await?;
Ok(flow)
}
pub async fn get(id: &str, redis: &RedisPool) -> Result<Option<Flow>, DatabaseError> {
pub async fn get(
id: &str,
redis: &RedisPool,
) -> Result<Option<Flow>, DatabaseError> {
let mut redis = redis.connect().await?;
redis.get_deserialized_from_json(FLOWS_NAMESPACE, id).await
@ -95,7 +103,10 @@ impl Flow {
Ok(flow)
}
pub async fn remove(id: &str, redis: &RedisPool) -> Result<Option<()>, DatabaseError> {
pub async fn remove(
id: &str,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
let mut redis = redis.connect().await?;
redis.delete(FLOWS_NAMESPACE, id).await?;

View File

@ -264,25 +264,35 @@ generate_ids!(
ChargeId
);
#[derive(Copy, Clone, Debug, PartialEq, Eq, Type, Hash, Serialize, Deserialize)]
#[derive(
Copy, Clone, Debug, PartialEq, Eq, Type, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct UserId(pub i64);
#[derive(Copy, Clone, Debug, Type, Eq, Hash, PartialEq, Serialize, Deserialize)]
#[derive(
Copy, Clone, Debug, Type, Eq, Hash, PartialEq, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct TeamId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct TeamMemberId(pub i64);
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(
Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct OrganizationId(pub i64);
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(
Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct ProjectId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct ProjectTypeId(pub i32);
@ -292,16 +302,30 @@ pub struct StatusId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct GameId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct LinkPlatformId(pub i32);
#[derive(
Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize, PartialOrd, Ord,
Copy,
Clone,
Debug,
Type,
PartialEq,
Eq,
Hash,
Serialize,
Deserialize,
PartialOrd,
Ord,
)]
#[sqlx(transparent)]
pub struct VersionId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
@ -319,11 +343,15 @@ pub struct ReportId(pub i64);
#[sqlx(transparent)]
pub struct ReportTypeId(pub i32);
#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize, Serialize)]
#[derive(
Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize, Serialize,
)]
#[sqlx(transparent)]
pub struct FileId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize, Serialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Deserialize, Serialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct PatId(pub i64);
@ -337,64 +365,102 @@ pub struct NotificationActionId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq)]
#[sqlx(transparent)]
pub struct ThreadId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ThreadMessageId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct SessionId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ImageId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash, PartialOrd, Ord,
Copy,
Clone,
Debug,
Type,
Serialize,
Deserialize,
Eq,
PartialEq,
Hash,
PartialOrd,
Ord,
)]
#[sqlx(transparent)]
pub struct LoaderFieldId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderFieldEnumId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderFieldEnumValueId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthClientId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthClientAuthorizationId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthRedirectUriId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthAccessTokenId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct PayoutId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ProductId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ProductPriceId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct UserSubscriptionId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ChargeId(pub i64);

View File

@ -223,7 +223,10 @@ impl Image {
Ok(val)
}
pub async fn clear_cache(id: ImageId, redis: &RedisPool) -> Result<(), DatabaseError> {
pub async fn clear_cache(
id: ImageId,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
redis.delete(IMAGES_NAMESPACE, id.0).await?;

View File

@ -12,7 +12,9 @@ use serde_json::json;
use crate::database::redis::RedisPool;
use super::{
loader_fields::{LoaderFieldEnum, LoaderFieldEnumValue, VersionField, VersionFieldValue},
loader_fields::{
LoaderFieldEnum, LoaderFieldEnumValue, VersionField, VersionFieldValue,
},
DatabaseError, LoaderFieldEnumValueId,
};
@ -44,13 +46,17 @@ impl MinecraftGameVersion {
E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
{
let mut exec = exec.acquire().await?;
let game_version_enum = LoaderFieldEnum::get(Self::FIELD_NAME, &mut *exec, redis)
.await?
.ok_or_else(|| {
DatabaseError::SchemaError("Could not find game version enum.".to_string())
})?;
let game_version_enum =
LoaderFieldEnum::get(Self::FIELD_NAME, &mut *exec, redis)
.await?
.ok_or_else(|| {
DatabaseError::SchemaError(
"Could not find game version enum.".to_string(),
)
})?;
let game_version_enum_values =
LoaderFieldEnumValue::list(game_version_enum.id, &mut *exec, redis).await?;
LoaderFieldEnumValue::list(game_version_enum.id, &mut *exec, redis)
.await?;
let game_versions = game_version_enum_values
.into_iter()
@ -105,7 +111,9 @@ impl MinecraftGameVersion {
Ok(game_versions)
}
pub fn from_enum_value(loader_field_enum_value: LoaderFieldEnumValue) -> MinecraftGameVersion {
pub fn from_enum_value(
loader_field_enum_value: LoaderFieldEnumValue,
) -> MinecraftGameVersion {
MinecraftGameVersion {
id: loader_field_enum_value.id,
version: loader_field_enum_value.value,
@ -157,7 +165,10 @@ impl<'a> MinecraftGameVersionBuilder<'a> {
})
}
pub fn created(self, created: &'a DateTime<Utc>) -> MinecraftGameVersionBuilder<'a> {
pub fn created(
self,
created: &'a DateTime<Utc>,
) -> MinecraftGameVersionBuilder<'a> {
Self {
date: Some(created),
..self
@ -172,11 +183,12 @@ impl<'a> MinecraftGameVersionBuilder<'a> {
where
E: sqlx::Executor<'b, Database = sqlx::Postgres> + Copy,
{
let game_versions_enum = LoaderFieldEnum::get("game_versions", exec, redis)
.await?
.ok_or(DatabaseError::SchemaError(
"Missing loaders field: 'game_versions'".to_string(),
))?;
let game_versions_enum =
LoaderFieldEnum::get("game_versions", exec, redis)
.await?
.ok_or(DatabaseError::SchemaError(
"Missing loaders field: 'game_versions'".to_string(),
))?;
// Get enum id for game versions
let metadata = json!({

View File

@ -43,7 +43,10 @@ impl Game {
.find(|x| x.slug == slug))
}
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<Game>, DatabaseError>
pub async fn list<'a, E>(
exec: E,
redis: &RedisPool,
) -> Result<Vec<Game>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -72,7 +75,12 @@ impl Game {
.await?;
redis
.set_serialized_to_json(GAMES_LIST_NAMESPACE, "games", &result, None)
.set_serialized_to_json(
GAMES_LIST_NAMESPACE,
"games",
&result,
None,
)
.await?;
Ok(result)
@ -99,7 +107,8 @@ impl Loader {
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let mut redis = redis.connect().await?;
let cached_id: Option<i32> = redis.get_deserialized_from_json(LOADER_ID, name).await?;
let cached_id: Option<i32> =
redis.get_deserialized_from_json(LOADER_ID, name).await?;
if let Some(cached_id) = cached_id {
return Ok(Some(LoaderId(cached_id)));
}
@ -124,7 +133,10 @@ impl Loader {
Ok(result)
}
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<Loader>, DatabaseError>
pub async fn list<'a, E>(
exec: E,
redis: &RedisPool,
) -> Result<Vec<Loader>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -169,7 +181,12 @@ impl Loader {
.await?;
redis
.set_serialized_to_json(LOADERS_LIST_NAMESPACE, "all", &result, None)
.set_serialized_to_json(
LOADERS_LIST_NAMESPACE,
"all",
&result,
None,
)
.await?;
Ok(result)
@ -198,7 +215,10 @@ pub enum LoaderFieldType {
ArrayBoolean,
}
impl LoaderFieldType {
pub fn build(field_type_name: &str, loader_field_enum: Option<i32>) -> Option<LoaderFieldType> {
pub fn build(
field_type_name: &str,
loader_field_enum: Option<i32>,
) -> Option<LoaderFieldType> {
Some(match (field_type_name, loader_field_enum) {
("integer", _) => LoaderFieldType::Integer,
("text", _) => LoaderFieldType::Text,
@ -207,7 +227,9 @@ impl LoaderFieldType {
("array_text", _) => LoaderFieldType::ArrayText,
("array_boolean", _) => LoaderFieldType::ArrayBoolean,
("enum", Some(id)) => LoaderFieldType::Enum(LoaderFieldEnumId(id)),
("array_enum", Some(id)) => LoaderFieldType::ArrayEnum(LoaderFieldEnumId(id)),
("array_enum", Some(id)) => {
LoaderFieldType::ArrayEnum(LoaderFieldEnumId(id))
}
_ => return None,
})
}
@ -303,7 +325,10 @@ impl QueryVersionField {
self
}
pub fn with_enum_value(mut self, enum_value: LoaderFieldEnumValueId) -> Self {
pub fn with_enum_value(
mut self,
enum_value: LoaderFieldEnumValueId,
) -> Self {
self.enum_value = Some(enum_value);
self
}
@ -359,7 +384,8 @@ impl LoaderField {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let found_loader_fields = Self::get_fields_per_loader(loader_ids, exec, redis).await?;
let found_loader_fields =
Self::get_fields_per_loader(loader_ids, exec, redis).await?;
let result = found_loader_fields
.into_values()
.flatten()
@ -464,7 +490,12 @@ impl LoaderField {
.collect();
redis
.set_serialized_to_json(LOADER_FIELDS_NAMESPACE_ALL, "", &result, None)
.set_serialized_to_json(
LOADER_FIELDS_NAMESPACE_ALL,
"",
&result,
None,
)
.await?;
Ok(result)
@ -482,7 +513,10 @@ impl LoaderFieldEnum {
let mut redis = redis.connect().await?;
let cached_enum = redis
.get_deserialized_from_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name)
.get_deserialized_from_json(
LOADER_FIELD_ENUMS_ID_NAMESPACE,
enum_name,
)
.await?;
if let Some(cached_enum) = cached_enum {
return Ok(cached_enum);
@ -507,7 +541,12 @@ impl LoaderFieldEnum {
});
redis
.set_serialized_to_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name, &result, None)
.set_serialized_to_json(
LOADER_FIELD_ENUMS_ID_NAMESPACE,
enum_name,
&result,
None,
)
.await?;
Ok(result)
@ -540,7 +579,9 @@ impl LoaderFieldEnumValue {
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let get_enum_id = |x: &LoaderField| match x.field_type {
LoaderFieldType::Enum(id) | LoaderFieldType::ArrayEnum(id) => Some(id),
LoaderFieldType::Enum(id) | LoaderFieldType::ArrayEnum(id) => {
Some(id)
}
_ => None,
};
@ -556,7 +597,10 @@ impl LoaderFieldEnumValue {
let mut res = HashMap::new();
for lf in loader_fields {
if let Some(id) = get_enum_id(lf) {
res.insert(lf.id, values.get(&id).unwrap_or(&Vec::new()).to_vec());
res.insert(
lf.id,
values.get(&id).unwrap_or(&Vec::new()).to_vec(),
);
}
}
Ok(res)
@ -566,7 +610,10 @@ impl LoaderFieldEnumValue {
loader_field_enum_ids: &[LoaderFieldEnumId],
exec: E,
redis: &RedisPool,
) -> Result<HashMap<LoaderFieldEnumId, Vec<LoaderFieldEnumValue>>, DatabaseError>
) -> Result<
HashMap<LoaderFieldEnumId, Vec<LoaderFieldEnumValue>>,
DatabaseError,
>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -665,34 +712,33 @@ impl VersionField {
VersionFieldValue::Text(s) => {
query_version_fields.push(base.clone().with_string_value(s))
}
VersionFieldValue::Boolean(b) => {
query_version_fields.push(base.clone().with_int_value(if b { 1 } else { 0 }))
}
VersionFieldValue::Boolean(b) => query_version_fields
.push(base.clone().with_int_value(if b { 1 } else { 0 })),
VersionFieldValue::ArrayInteger(v) => {
for i in v {
query_version_fields.push(base.clone().with_int_value(i));
query_version_fields
.push(base.clone().with_int_value(i));
}
}
VersionFieldValue::ArrayText(v) => {
for s in v {
query_version_fields.push(base.clone().with_string_value(s));
query_version_fields
.push(base.clone().with_string_value(s));
}
}
VersionFieldValue::ArrayBoolean(v) => {
for b in v {
query_version_fields.push(base.clone().with_int_value(if b {
1
} else {
0
}));
query_version_fields.push(
base.clone().with_int_value(if b { 1 } else { 0 }),
);
}
}
VersionFieldValue::Enum(_, v) => {
query_version_fields.push(base.clone().with_enum_value(v.id))
}
VersionFieldValue::Enum(_, v) => query_version_fields
.push(base.clone().with_enum_value(v.id)),
VersionFieldValue::ArrayEnum(_, v) => {
for ev in v {
query_version_fields.push(base.clone().with_enum_value(ev.id));
query_version_fields
.push(base.clone().with_enum_value(ev.id));
}
}
};
@ -740,7 +786,8 @@ impl VersionField {
value: serde_json::Value,
enum_variants: Vec<LoaderFieldEnumValue>,
) -> Result<VersionField, String> {
let value = VersionFieldValue::parse(&loader_field, value, enum_variants)?;
let value =
VersionFieldValue::parse(&loader_field, value, enum_variants)?;
// Ensure, if applicable, that the value is within the min/max bounds
let countable = match &value {
@ -802,11 +849,13 @@ impl VersionField {
query_loader_fields
.iter()
.flat_map(|q| {
let loader_field_type =
match LoaderFieldType::build(&q.field_type, q.enum_type.map(|l| l.0)) {
Some(lft) => lft,
None => return vec![],
};
let loader_field_type = match LoaderFieldType::build(
&q.field_type,
q.enum_type.map(|l| l.0),
) {
Some(lft) => lft,
None => return vec![],
};
let loader_field = LoaderField {
id: q.id,
field: q.field.clone(),
@ -908,7 +957,8 @@ impl VersionFieldValue {
Ok(match field_type {
LoaderFieldType::Integer => VersionFieldValue::Integer(
serde_json::from_value(value).map_err(|_| incorrect_type_error("integer"))?,
serde_json::from_value(value)
.map_err(|_| incorrect_type_error("integer"))?,
),
LoaderFieldType::Text => VersionFieldValue::Text(
value
@ -928,7 +978,9 @@ impl VersionFieldValue {
}),
LoaderFieldType::ArrayText => VersionFieldValue::ArrayText({
let array_values: Vec<String> = serde_json::from_value(value)
.map_err(|_| incorrect_type_error("array of strings"))?;
.map_err(|_| {
incorrect_type_error("array of strings")
})?;
array_values.into_iter().collect()
}),
LoaderFieldType::ArrayBoolean => VersionFieldValue::ArrayBoolean({
@ -937,8 +989,12 @@ impl VersionFieldValue {
array_values.into_iter().map(|v| v != 0).collect()
}),
LoaderFieldType::Enum(id) => VersionFieldValue::Enum(*id, {
let enum_value = value.as_str().ok_or_else(|| incorrect_type_error("enum"))?;
if let Some(ev) = enum_array.into_iter().find(|v| v.value == enum_value) {
let enum_value = value
.as_str()
.ok_or_else(|| incorrect_type_error("enum"))?;
if let Some(ev) =
enum_array.into_iter().find(|v| v.value == enum_value)
{
ev
} else {
return Err(format!(
@ -946,21 +1002,27 @@ impl VersionFieldValue {
));
}
}),
LoaderFieldType::ArrayEnum(id) => VersionFieldValue::ArrayEnum(*id, {
let array_values: Vec<String> = serde_json::from_value(value)
.map_err(|_| incorrect_type_error("array of enums"))?;
let mut enum_values = vec![];
for av in array_values {
if let Some(ev) = enum_array.iter().find(|v| v.value == av) {
enum_values.push(ev.clone());
} else {
return Err(format!(
LoaderFieldType::ArrayEnum(id) => {
VersionFieldValue::ArrayEnum(*id, {
let array_values: Vec<String> =
serde_json::from_value(value).map_err(|_| {
incorrect_type_error("array of enums")
})?;
let mut enum_values = vec![];
for av in array_values {
if let Some(ev) =
enum_array.iter().find(|v| v.value == av)
{
enum_values.push(ev.clone());
} else {
return Err(format!(
"Provided value '{av}' is not a valid variant for {field_name}"
));
}
}
}
enum_values
}),
enum_values
})
}
})
}
@ -1046,141 +1108,180 @@ impl VersionFieldValue {
)));
}
let mut value = match field_type {
// Singleton fields
// If there are multiple, we assume multiple versions are being concatenated
LoaderFieldType::Integer => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Integer(
qvf.int_value
.ok_or(did_not_exist_error(field_name, "int_value"))?,
),
))
})
.collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,
LoaderFieldType::Text => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Text(
qvf.string_value
.ok_or(did_not_exist_error(field_name, "string_value"))?,
),
))
})
.collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,
LoaderFieldType::Boolean => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Boolean(
qvf.int_value
.ok_or(did_not_exist_error(field_name, "int_value"))?
!= 0,
),
))
})
.collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,
LoaderFieldType::Enum(id) => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Enum(*id, {
let enum_id = qvf
.enum_value
.ok_or(did_not_exist_error(field_name, "enum_value"))?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(field_name, "enum_value"))?;
LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev.metadata.clone().unwrap_or_default(),
}
}),
))
})
.collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,
let mut value =
match field_type {
// Singleton fields
// If there are multiple, we assume multiple versions are being concatenated
LoaderFieldType::Integer => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Integer(qvf.int_value.ok_or(
did_not_exist_error(field_name, "int_value"),
)?),
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Text => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Text(qvf.string_value.ok_or(
did_not_exist_error(field_name, "string_value"),
)?),
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Boolean => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Boolean(
qvf.int_value.ok_or(did_not_exist_error(
field_name,
"int_value",
))? != 0,
),
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Enum(id) => qvfs
.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
VersionFieldValue::Enum(*id, {
let enum_id = qvf.enum_value.ok_or(
did_not_exist_error(
field_name,
"enum_value",
),
)?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(
field_name,
"enum_value",
))?;
LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev
.metadata
.clone()
.unwrap_or_default(),
}
}),
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
// Array fields
// We concatenate into one array
LoaderFieldType::ArrayInteger => vec![(
version_id,
VersionFieldValue::ArrayInteger(
qvfs.into_iter()
.map(|qvf| {
qvf.int_value
.ok_or(did_not_exist_error(field_name, "int_value"))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayText => vec![(
version_id,
VersionFieldValue::ArrayText(
qvfs.into_iter()
.map(|qvf| {
qvf.string_value
.ok_or(did_not_exist_error(field_name, "string_value"))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayBoolean => vec![(
version_id,
VersionFieldValue::ArrayBoolean(
qvfs.into_iter()
.map(|qvf| {
Ok::<bool, DatabaseError>(
qvf.int_value
.ok_or(did_not_exist_error(field_name, "int_value"))?
!= 0,
)
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayEnum(id) => vec![(
version_id,
VersionFieldValue::ArrayEnum(
*id,
qvfs.into_iter()
.map(|qvf| {
let enum_id = qvf
.enum_value
.ok_or(did_not_exist_error(field_name, "enum_value"))?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(field_name, "enum_value"))?;
Ok::<_, DatabaseError>(LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev.metadata.clone().unwrap_or_default(),
// Array fields
// We concatenate into one array
LoaderFieldType::ArrayInteger => vec![(
version_id,
VersionFieldValue::ArrayInteger(
qvfs.into_iter()
.map(|qvf| {
qvf.int_value.ok_or(did_not_exist_error(
field_name,
"int_value",
))
})
})
.collect::<Result<_, _>>()?,
),
)],
};
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayText => vec![(
version_id,
VersionFieldValue::ArrayText(
qvfs.into_iter()
.map(|qvf| {
qvf.string_value.ok_or(did_not_exist_error(
field_name,
"string_value",
))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayBoolean => vec![(
version_id,
VersionFieldValue::ArrayBoolean(
qvfs.into_iter()
.map(|qvf| {
Ok::<bool, DatabaseError>(
qvf.int_value.ok_or(
did_not_exist_error(
field_name,
"int_value",
),
)? != 0,
)
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayEnum(id) => vec![(
version_id,
VersionFieldValue::ArrayEnum(
*id,
qvfs.into_iter()
.map(|qvf| {
let enum_id = qvf.enum_value.ok_or(
did_not_exist_error(
field_name,
"enum_value",
),
)?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(
field_name,
"enum_value",
))?;
Ok::<_, DatabaseError>(LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev
.metadata
.clone()
.unwrap_or_default(),
})
})
.collect::<Result<_, _>>()?,
),
)],
};
// Sort arrayenums by ordering, then by created
for (_, v) in value.iter_mut() {
if let VersionFieldValue::ArrayEnum(_, v) = v {
v.sort_by(|a, b| a.ordering.cmp(&b.ordering).then(a.created.cmp(&b.created)));
v.sort_by(|a, b| {
a.ordering.cmp(&b.ordering).then(a.created.cmp(&b.created))
});
}
}
@ -1190,7 +1291,9 @@ impl VersionFieldValue {
// Serialize to internal value, such as for converting to user-facing JSON
pub fn serialize_internal(&self) -> serde_json::Value {
match self {
VersionFieldValue::Integer(i) => serde_json::Value::Number((*i).into()),
VersionFieldValue::Integer(i) => {
serde_json::Value::Number((*i).into())
}
VersionFieldValue::Text(s) => serde_json::Value::String(s.clone()),
VersionFieldValue::Boolean(b) => serde_json::Value::Bool(*b),
VersionFieldValue::ArrayInteger(v) => serde_json::Value::Array(
@ -1203,10 +1306,12 @@ impl VersionFieldValue {
.map(|s| serde_json::Value::String(s.clone()))
.collect(),
),
VersionFieldValue::ArrayBoolean(v) => {
serde_json::Value::Array(v.iter().map(|b| serde_json::Value::Bool(*b)).collect())
VersionFieldValue::ArrayBoolean(v) => serde_json::Value::Array(
v.iter().map(|b| serde_json::Value::Bool(*b)).collect(),
),
VersionFieldValue::Enum(_, v) => {
serde_json::Value::String(v.value.clone())
}
VersionFieldValue::Enum(_, v) => serde_json::Value::String(v.value.clone()),
VersionFieldValue::ArrayEnum(_, v) => serde_json::Value::Array(
v.iter()
.map(|v| serde_json::Value::String(v.value.clone()))
@ -1222,11 +1327,17 @@ impl VersionFieldValue {
VersionFieldValue::Integer(i) => vec![i.to_string()],
VersionFieldValue::Text(s) => vec![s.clone()],
VersionFieldValue::Boolean(b) => vec![b.to_string()],
VersionFieldValue::ArrayInteger(v) => v.iter().map(|i| i.to_string()).collect(),
VersionFieldValue::ArrayInteger(v) => {
v.iter().map(|i| i.to_string()).collect()
}
VersionFieldValue::ArrayText(v) => v.clone(),
VersionFieldValue::ArrayBoolean(v) => v.iter().map(|b| b.to_string()).collect(),
VersionFieldValue::ArrayBoolean(v) => {
v.iter().map(|b| b.to_string()).collect()
}
VersionFieldValue::Enum(_, v) => vec![v.value.clone()],
VersionFieldValue::ArrayEnum(_, v) => v.iter().map(|v| v.value.clone()).collect(),
VersionFieldValue::ArrayEnum(_, v) => {
v.iter().map(|v| v.value.clone()).collect()
}
}
}

View File

@ -46,7 +46,8 @@ impl NotificationBuilder {
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let notification_ids =
generate_many_notification_ids(users.len(), &mut *transaction).await?;
generate_many_notification_ids(users.len(), &mut *transaction)
.await?;
let body = serde_json::value::to_value(&self.body)?;
let bodies = notification_ids
@ -97,7 +98,8 @@ impl Notification {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();
let notification_ids_parsed: Vec<i64> =
notification_ids.iter().map(|x| x.0).collect();
sqlx::query!(
"
SELECT n.id, n.user_id, n.name, n.text, n.link, n.created, n.read, n.type notification_type, n.body,
@ -153,7 +155,10 @@ impl Notification {
let mut redis = redis.connect().await?;
let cached_notifications: Option<Vec<Notification>> = redis
.get_deserialized_from_json(USER_NOTIFICATIONS_NAMESPACE, &user_id.0.to_string())
.get_deserialized_from_json(
USER_NOTIFICATIONS_NAMESPACE,
&user_id.0.to_string(),
)
.await?;
if let Some(notifications) = cached_notifications {
@ -227,7 +232,8 @@ impl Notification {
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();
let notification_ids_parsed: Vec<i64> =
notification_ids.iter().map(|x| x.0).collect();
let affected_users = sqlx::query!(
"
@ -243,7 +249,11 @@ impl Notification {
.try_collect::<Vec<_>>()
.await?;
Notification::clear_user_notifications_cache(affected_users.iter(), redis).await?;
Notification::clear_user_notifications_cache(
affected_users.iter(),
redis,
)
.await?;
Ok(Some(()))
}
@ -261,7 +271,8 @@ impl Notification {
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();
let notification_ids_parsed: Vec<i64> =
notification_ids.iter().map(|x| x.0).collect();
sqlx::query!(
"
@ -286,7 +297,11 @@ impl Notification {
.try_collect::<Vec<_>>()
.await?;
Notification::clear_user_notifications_cache(affected_users.iter(), redis).await?;
Notification::clear_user_notifications_cache(
affected_users.iter(),
redis,
)
.await?;
Ok(Some(()))
}
@ -298,11 +313,9 @@ impl Notification {
let mut redis = redis.connect().await?;
redis
.delete_many(
user_ids
.into_iter()
.map(|id| (USER_NOTIFICATIONS_NAMESPACE, Some(id.0.to_string()))),
)
.delete_many(user_ids.into_iter().map(|id| {
(USER_NOTIFICATIONS_NAMESPACE, Some(id.0.to_string()))
}))
.await?;
Ok(())

View File

@ -91,10 +91,12 @@ impl OAuthClient {
) -> Result<Vec<OAuthClient>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
let ids_ref: &[i64] = &ids;
let results =
select_clients_with_predicate!("WHERE clients.id = ANY($1::bigint[])", ids_ref)
.fetch_all(exec)
.await?;
let results = select_clients_with_predicate!(
"WHERE clients.id = ANY($1::bigint[])",
ids_ref
)
.fetch_all(exec)
.await?;
Ok(results.into_iter().map(|r| r.into()).collect_vec())
}
@ -104,9 +106,12 @@ impl OAuthClient {
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<OAuthClient>, DatabaseError> {
let user_id_param = user_id.0;
let clients = select_clients_with_predicate!("WHERE created_by = $1", user_id_param)
.fetch_all(exec)
.await?;
let clients = select_clients_with_predicate!(
"WHERE created_by = $1",
user_id_param
)
.fetch_all(exec)
.await?;
Ok(clients.into_iter().map(|r| r.into()).collect())
}
@ -153,7 +158,8 @@ impl OAuthClient {
.execute(&mut **transaction)
.await?;
Self::insert_redirect_uris(&self.redirect_uris, &mut **transaction).await?;
Self::insert_redirect_uris(&self.redirect_uris, &mut **transaction)
.await?;
Ok(())
}
@ -231,7 +237,9 @@ impl OAuthClient {
impl From<ClientQueryResult> for OAuthClient {
fn from(r: ClientQueryResult) -> Self {
let redirects = if let (Some(ids), Some(uris)) = (r.uri_ids.as_ref(), r.uri_vals.as_ref()) {
let redirects = if let (Some(ids), Some(uris)) =
(r.uri_ids.as_ref(), r.uri_vals.as_ref())
{
ids.iter()
.zip(uris.iter())
.map(|(id, uri)| OAuthRedirectUri {

View File

@ -1,4 +1,7 @@
use super::{DatabaseError, OAuthAccessTokenId, OAuthClientAuthorizationId, OAuthClientId, UserId};
use super::{
DatabaseError, OAuthAccessTokenId, OAuthClientAuthorizationId,
OAuthClientId, UserId,
};
use crate::models::pats::Scopes;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

View File

@ -1,4 +1,6 @@
use crate::{database::redis::RedisPool, models::ids::base62_impl::parse_base62};
use crate::{
database::redis::RedisPool, models::ids::base62_impl::parse_base62,
};
use dashmap::DashMap;
use futures::TryStreamExt;
use std::fmt::{Debug, Display};
@ -100,7 +102,11 @@ impl Organization {
Self::get_many(&ids, exec, redis).await
}
pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug>(
pub async fn get_many<
'a,
E,
T: Display + Hash + Eq + PartialEq + Clone + Debug,
>(
organization_strings: &[T],
exec: E,
redis: &RedisPool,

View File

@ -55,7 +55,11 @@ impl PersonalAccessToken {
Ok(())
}
pub async fn get<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug>(
pub async fn get<
'a,
E,
T: Display + Hash + Eq + PartialEq + Clone + Debug,
>(
id: T,
exec: E,
redis: &RedisPool,
@ -83,7 +87,11 @@ impl PersonalAccessToken {
PersonalAccessToken::get_many(&ids, exec, redis).await
}
pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug>(
pub async fn get_many<
'a,
E,
T: Display + Hash + Eq + PartialEq + Clone + Debug,
>(
pat_strings: &[T],
exec: E,
redis: &RedisPool,
@ -151,7 +159,10 @@ impl PersonalAccessToken {
let mut redis = redis.connect().await?;
let res = redis
.get_deserialized_from_json::<Vec<i64>>(PATS_USERS_NAMESPACE, &user_id.0.to_string())
.get_deserialized_from_json::<Vec<i64>>(
PATS_USERS_NAMESPACE,
&user_id.0.to_string(),
)
.await?;
if let Some(res) = res {
@ -194,13 +205,18 @@ impl PersonalAccessToken {
}
redis
.delete_many(clear_pats.into_iter().flat_map(|(id, token, user_id)| {
[
(PATS_NAMESPACE, id.map(|i| i.0.to_string())),
(PATS_TOKENS_NAMESPACE, token),
(PATS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())),
]
}))
.delete_many(clear_pats.into_iter().flat_map(
|(id, token, user_id)| {
[
(PATS_NAMESPACE, id.map(|i| i.0.to_string())),
(PATS_TOKENS_NAMESPACE, token),
(
PATS_USERS_NAMESPACE,
user_id.map(|i| i.0.to_string()),
),
]
},
))
.await?;
Ok(())

View File

@ -48,7 +48,10 @@ impl Payout {
Ok(())
}
pub async fn get<'a, 'b, E>(id: PayoutId, executor: E) -> Result<Option<Payout>, DatabaseError>
pub async fn get<'a, 'b, E>(
id: PayoutId,
executor: E,
) -> Result<Option<Payout>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{

View File

@ -1,4 +1,6 @@
use crate::database::models::{product_item, DatabaseError, ProductId, ProductPriceId};
use crate::database::models::{
product_item, DatabaseError, ProductId, ProductPriceId,
};
use crate::database::redis::RedisPool;
use crate::models::billing::{Price, ProductMetadata};
use dashmap::DashMap;
@ -61,9 +63,12 @@ impl ProductItem {
) -> Result<Vec<ProductItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
let ids_ref: &[i64] = &ids;
let results = select_products_with_predicate!("WHERE id = ANY($1::bigint[])", ids_ref)
.fetch_all(exec)
.await?;
let results = select_products_with_predicate!(
"WHERE id = ANY($1::bigint[])",
ids_ref
)
.fetch_all(exec)
.await?;
Ok(results
.into_iter()
@ -95,7 +100,10 @@ pub struct QueryProduct {
}
impl QueryProduct {
pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<QueryProduct>, DatabaseError>
pub async fn list<'a, E>(
exec: E,
redis: &RedisPool,
) -> Result<Vec<QueryProduct>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -201,9 +209,12 @@ impl ProductPriceItem {
) -> Result<Vec<ProductPriceItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
let ids_ref: &[i64] = &ids;
let results = select_prices_with_predicate!("WHERE id = ANY($1::bigint[])", ids_ref)
.fetch_all(exec)
.await?;
let results = select_prices_with_predicate!(
"WHERE id = ANY($1::bigint[])",
ids_ref
)
.fetch_all(exec)
.await?;
Ok(results
.into_iter()
@ -228,20 +239,25 @@ impl ProductPriceItem {
let ids_ref: &[i64] = &ids;
use futures_util::TryStreamExt;
let prices = select_prices_with_predicate!("WHERE product_id = ANY($1::bigint[])", ids_ref)
.fetch(exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProductId, Vec<ProductPriceItem>>, x| {
if let Ok(item) = <ProductPriceResult as TryInto<ProductPriceItem>>::try_into(x)
{
acc.entry(item.product_id).or_default().push(item);
}
let prices = select_prices_with_predicate!(
"WHERE product_id = ANY($1::bigint[])",
ids_ref
)
.fetch(exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProductId, Vec<ProductPriceItem>>, x| {
if let Ok(item) = <ProductPriceResult as TryInto<
ProductPriceItem,
>>::try_into(x)
{
acc.entry(item.product_id).or_default().push(item);
}
async move { Ok(acc) }
},
)
.await?;
async move { Ok(acc) }
},
)
.await?;
Ok(prices)
}

View File

@ -1,5 +1,6 @@
use super::loader_fields::{
QueryLoaderField, QueryLoaderFieldEnumValue, QueryVersionField, VersionField,
QueryLoaderField, QueryLoaderFieldEnumValue, QueryVersionField,
VersionField,
};
use super::{ids::*, User};
use crate::database::models;
@ -72,15 +73,15 @@ impl GalleryItem {
project_id: ProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
let (project_ids, image_urls, raw_image_urls, featureds, names, descriptions, orderings): (
Vec<_>,
Vec<_>,
Vec<_>,
Vec<_>,
Vec<_>,
Vec<_>,
Vec<_>,
) = items
let (
project_ids,
image_urls,
raw_image_urls,
featureds,
names,
descriptions,
orderings,
): (Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>) = items
.into_iter()
.map(|gi| {
(
@ -128,7 +129,11 @@ impl ModCategory {
items: Vec<Self>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
let (project_ids, category_ids, is_additionals): (Vec<_>, Vec<_>, Vec<_>) = items
let (project_ids, category_ids, is_additionals): (
Vec<_>,
Vec<_>,
Vec<_>,
) = items
.into_iter()
.map(|mc| (mc.project_id.0, mc.category_id.0, mc.is_additional))
.multiunzip();
@ -223,9 +228,19 @@ impl ProjectBuilder {
version.insert(&mut *transaction).await?;
}
LinkUrl::insert_many_projects(link_urls, self.project_id, &mut *transaction).await?;
LinkUrl::insert_many_projects(
link_urls,
self.project_id,
&mut *transaction,
)
.await?;
GalleryItem::insert_many(gallery_items, self.project_id, &mut *transaction).await?;
GalleryItem::insert_many(
gallery_items,
self.project_id,
&mut *transaction,
)
.await?;
let project_id = self.project_id;
let mod_categories = categories
@ -323,7 +338,8 @@ impl Project {
let project = Self::get_id(id, &mut **transaction, redis).await?;
if let Some(project) = project {
Project::clear_cache(id, project.inner.slug, Some(true), redis).await?;
Project::clear_cache(id, project.inner.slug, Some(true), redis)
.await?;
sqlx::query!(
"
@ -389,7 +405,8 @@ impl Project {
.await?;
for version in project.versions {
super::Version::remove_full(version, redis, transaction).await?;
super::Version::remove_full(version, redis, transaction)
.await?;
}
sqlx::query!(
@ -422,7 +439,8 @@ impl Project {
.execute(&mut **transaction)
.await?;
models::TeamMember::clear_cache(project.inner.team_id, redis).await?;
models::TeamMember::clear_cache(project.inner.team_id, redis)
.await?;
let affected_user_ids = sqlx::query!(
"
@ -476,9 +494,13 @@ impl Project {
where
E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
{
Project::get_many(&[crate::models::ids::ProjectId::from(id)], executor, redis)
.await
.map(|x| x.into_iter().next())
Project::get_many(
&[crate::models::ids::ProjectId::from(id)],
executor,
redis,
)
.await
.map(|x| x.into_iter().next())
}
pub async fn get_many_ids<'a, E>(
@ -496,7 +518,11 @@ impl Project {
Project::get_many(&ids, exec, redis).await
}
pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug>(
pub async fn get_many<
'a,
E,
T: Display + Hash + Eq + PartialEq + Clone + Debug,
>(
project_strings: &[T],
exec: E,
redis: &RedisPool,
@ -837,11 +863,15 @@ impl Project {
id: ProjectId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>, DatabaseError>
) -> Result<
Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>,
DatabaseError,
>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
type Dependencies = Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>;
type Dependencies =
Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>;
let mut redis = redis.connect().await?;
@ -881,7 +911,12 @@ impl Project {
.await?;
redis
.set_serialized_to_json(PROJECTS_DEPENDENCIES_NAMESPACE, id.0, &dependencies, None)
.set_serialized_to_json(
PROJECTS_DEPENDENCIES_NAMESPACE,
id.0,
&dependencies,
None,
)
.await?;
Ok(dependencies)
}

View File

@ -56,7 +56,10 @@ impl Report {
Ok(())
}
pub async fn get<'a, E>(id: ReportId, exec: E) -> Result<Option<QueryReport>, sqlx::Error>
pub async fn get<'a, E>(
id: ReportId,
exec: E,
) -> Result<Option<QueryReport>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -74,7 +77,8 @@ impl Report {
{
use futures::stream::TryStreamExt;
let report_ids_parsed: Vec<i64> = report_ids.iter().map(|x| x.0).collect();
let report_ids_parsed: Vec<i64> =
report_ids.iter().map(|x| x.0).collect();
let reports = sqlx::query!(
"
SELECT r.id, rt.name, r.mod_id, r.version_id, r.user_id, r.body, r.reporter, r.created, t.id thread_id, r.closed
@ -133,8 +137,11 @@ impl Report {
.await?;
if let Some(thread_id) = thread_id {
crate::database::models::Thread::remove_full(ThreadId(thread_id.id), transaction)
.await?;
crate::database::models::Thread::remove_full(
ThreadId(thread_id.id),
transaction,
)
.await?;
}
sqlx::query!(

View File

@ -82,7 +82,11 @@ pub struct Session {
}
impl Session {
pub async fn get<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug>(
pub async fn get<
'a,
E,
T: Display + Hash + Eq + PartialEq + Clone + Debug,
>(
id: T,
exec: E,
redis: &RedisPool,
@ -103,9 +107,13 @@ impl Session {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
Session::get_many(&[crate::models::ids::SessionId::from(id)], executor, redis)
.await
.map(|x| x.into_iter().next())
Session::get_many(
&[crate::models::ids::SessionId::from(id)],
executor,
redis,
)
.await
.map(|x| x.into_iter().next())
}
pub async fn get_many_ids<'a, E>(
@ -123,7 +131,11 @@ impl Session {
Session::get_many(&ids, exec, redis).await
}
pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug>(
pub async fn get_many<
'a,
E,
T: Display + Hash + Eq + PartialEq + Clone + Debug,
>(
session_strings: &[T],
exec: E,
redis: &RedisPool,
@ -226,14 +238,23 @@ impl Session {
.await?;
redis
.set_serialized_to_json(SESSIONS_USERS_NAMESPACE, user_id.0, &db_sessions, None)
.set_serialized_to_json(
SESSIONS_USERS_NAMESPACE,
user_id.0,
&db_sessions,
None,
)
.await?;
Ok(db_sessions)
}
pub async fn clear_cache(
clear_sessions: Vec<(Option<SessionId>, Option<String>, Option<UserId>)>,
clear_sessions: Vec<(
Option<SessionId>,
Option<String>,
Option<UserId>,
)>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
@ -243,17 +264,18 @@ impl Session {
}
redis
.delete_many(
clear_sessions
.into_iter()
.flat_map(|(id, session, user_id)| {
[
(SESSIONS_NAMESPACE, id.map(|i| i.0.to_string())),
(SESSIONS_IDS_NAMESPACE, session),
(SESSIONS_USERS_NAMESPACE, user_id.map(|i| i.0.to_string())),
]
}),
)
.delete_many(clear_sessions.into_iter().flat_map(
|(id, session, user_id)| {
[
(SESSIONS_NAMESPACE, id.map(|i| i.0.to_string())),
(SESSIONS_IDS_NAMESPACE, session),
(
SESSIONS_USERS_NAMESPACE,
user_id.map(|i| i.0.to_string()),
),
]
},
))
.await?;
Ok(())
}

View File

@ -149,10 +149,12 @@ impl Team {
// Only one of project_id or organization_id will be set
let mut team_association_id = None;
if let Some(pid) = t.pid {
team_association_id = Some(TeamAssociationId::Project(ProjectId(pid)));
team_association_id =
Some(TeamAssociationId::Project(ProjectId(pid)));
}
if let Some(oid) = t.oid {
team_association_id = Some(TeamAssociationId::Organization(OrganizationId(oid)));
team_association_id =
Some(TeamAssociationId::Organization(OrganizationId(oid)));
}
return Ok(team_association_id);
}
@ -257,7 +259,10 @@ impl TeamMember {
Ok(val.into_iter().flatten().collect())
}
pub async fn clear_cache(id: TeamId, redis: &RedisPool) -> Result<(), super::DatabaseError> {
pub async fn clear_cache(
id: TeamId,
redis: &RedisPool,
) -> Result<(), super::DatabaseError> {
let mut redis = redis.connect().await?;
redis.delete(TEAMS_NAMESPACE, id.0).await?;
Ok(())
@ -354,11 +359,14 @@ impl TeamMember {
user_id,
role: m.role,
is_owner: m.is_owner,
permissions: ProjectPermissions::from_bits(m.permissions as u64)
.unwrap_or_default(),
organization_permissions: m
.organization_permissions
.map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()),
permissions: ProjectPermissions::from_bits(
m.permissions as u64,
)
.unwrap_or_default(),
organization_permissions: m.organization_permissions.map(|p| {
OrganizationPermissions::from_bits(p as u64)
.unwrap_or_default()
}),
accepted: m.accepted,
payouts_split: m.payouts_split,
ordering: m.ordering,
@ -574,11 +582,14 @@ impl TeamMember {
user_id,
role: m.role,
is_owner: m.is_owner,
permissions: ProjectPermissions::from_bits(m.permissions as u64)
.unwrap_or_default(),
organization_permissions: m
.organization_permissions
.map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()),
permissions: ProjectPermissions::from_bits(
m.permissions as u64,
)
.unwrap_or_default(),
organization_permissions: m.organization_permissions.map(|p| {
OrganizationPermissions::from_bits(p as u64)
.unwrap_or_default()
}),
accepted: m.accepted,
payouts_split: m.payouts_split,
ordering: m.ordering,
@ -623,11 +634,14 @@ impl TeamMember {
user_id,
role: m.role,
is_owner: m.is_owner,
permissions: ProjectPermissions::from_bits(m.permissions as u64)
.unwrap_or_default(),
organization_permissions: m
.organization_permissions
.map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()),
permissions: ProjectPermissions::from_bits(
m.permissions as u64,
)
.unwrap_or_default(),
organization_permissions: m.organization_permissions.map(|p| {
OrganizationPermissions::from_bits(p as u64)
.unwrap_or_default()
}),
accepted: m.accepted,
payouts_split: m.payouts_split,
ordering: m.ordering,
@ -666,11 +680,14 @@ impl TeamMember {
user_id,
role: m.role,
is_owner: m.is_owner,
permissions: ProjectPermissions::from_bits(m.permissions as u64)
.unwrap_or_default(),
organization_permissions: m
.organization_permissions
.map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()),
permissions: ProjectPermissions::from_bits(
m.permissions as u64,
)
.unwrap_or_default(),
organization_permissions: m.organization_permissions.map(|p| {
OrganizationPermissions::from_bits(p as u64)
.unwrap_or_default()
}),
accepted: m.accepted,
payouts_split: m.payouts_split,
ordering: m.ordering,
@ -695,10 +712,15 @@ impl TeamMember {
Self::get_from_user_id(project.team_id, user_id, executor).await?;
let organization =
Organization::get_associated_organization_project_id(project.id, executor).await?;
Organization::get_associated_organization_project_id(
project.id, executor,
)
.await?;
let organization_team_member = if let Some(organization) = &organization {
Self::get_from_user_id(organization.team_id, user_id, executor).await?
let organization_team_member = if let Some(organization) = &organization
{
Self::get_from_user_id(organization.team_id, user_id, executor)
.await?
} else {
None
};
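
The `from_bits(...).unwrap_or_default()` calls above decode permission bitmasks stored as plain database integers. A minimal sketch of the pattern, assuming the `bitflags` crate; the flag names are illustrative stand-ins, not labrinth's real permission set:

```rust
bitflags::bitflags! {
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub struct DemoPermissions: u64 {
        const UPLOAD_VERSION = 1 << 0;
        const DELETE_VERSION = 1 << 1;
    }
}

// Unknown bits (e.g. rows written by a newer schema) make `from_bits`
// return `None`; collapsing that to the empty set mirrors the
// `unwrap_or_default()` calls above.
fn decode_permissions(raw: i64) -> DemoPermissions {
    DemoPermissions::from_bits(raw as u64).unwrap_or(DemoPermissions::empty())
}
```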

View File

@ -112,7 +112,10 @@ impl ThreadBuilder {
}
impl Thread {
pub async fn get<'a, E>(id: ThreadId, exec: E) -> Result<Option<Thread>, sqlx::Error>
pub async fn get<'a, E>(
id: ThreadId,
exec: E,
) -> Result<Option<Thread>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -130,7 +133,8 @@ impl Thread {
{
use futures::stream::TryStreamExt;
let thread_ids_parsed: Vec<i64> = thread_ids.iter().map(|x| x.0).collect();
let thread_ids_parsed: Vec<i64> =
thread_ids.iter().map(|x| x.0).collect();
let threads = sqlx::query!(
"
SELECT t.id, t.thread_type, t.mod_id, t.report_id,
@ -225,7 +229,8 @@ impl ThreadMessage {
{
use futures::stream::TryStreamExt;
let message_ids_parsed: Vec<i64> = message_ids.iter().map(|x| x.0).collect();
let message_ids_parsed: Vec<i64> =
message_ids.iter().map(|x| x.0).collect();
let messages = sqlx::query!(
"
SELECT tm.id, tm.author_id, tm.thread_id, tm.body, tm.created, tm.hide_identity
@ -261,7 +266,8 @@ impl ThreadMessage {
WHERE id = $1
",
id as ThreadMessageId,
serde_json::to_value(MessageBody::Deleted { private }).unwrap_or(serde_json::json!({}))
serde_json::to_value(MessageBody::Deleted { private })
.unwrap_or(serde_json::json!({}))
)
.execute(&mut **transaction)
.await?;

View File

@ -135,7 +135,11 @@ impl User {
User::get_many(&ids, exec, redis).await
}
pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug>(
pub async fn get_many<
'a,
E,
T: Display + Hash + Eq + PartialEq + Clone + Debug,
>(
users_strings: &[T],
exec: E,
redis: &RedisPool,
@ -213,7 +217,10 @@ impl User {
Ok(val)
}
pub async fn get_email<'a, E>(email: &str, exec: E) -> Result<Option<UserId>, sqlx::Error>
pub async fn get_email<'a, E>(
email: &str,
exec: E,
) -> Result<Option<UserId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -268,7 +275,12 @@ impl User {
.await?;
redis
.set_serialized_to_json(USERS_PROJECTS_NAMESPACE, user_id.0, &db_projects, None)
.set_serialized_to_json(
USERS_PROJECTS_NAMESPACE,
user_id.0,
&db_projects,
None,
)
.await?;
Ok(db_projects)
@ -323,7 +335,10 @@ impl User {
Ok(projects)
}
pub async fn get_follows<'a, E>(user_id: UserId, exec: E) -> Result<Vec<ProjectId>, sqlx::Error>
pub async fn get_follows<'a, E>(
user_id: UserId,
exec: E,
) -> Result<Vec<ProjectId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -344,7 +359,10 @@ impl User {
Ok(projects)
}
pub async fn get_reports<'a, E>(user_id: UserId, exec: E) -> Result<Vec<ReportId>, sqlx::Error>
pub async fn get_reports<'a, E>(
user_id: UserId,
exec: E,
) -> Result<Vec<ReportId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -417,9 +435,9 @@ impl User {
redis
.delete_many(
user_ids
.iter()
.map(|id| (USERS_PROJECTS_NAMESPACE, Some(id.0.to_string()))),
user_ids.iter().map(|id| {
(USERS_PROJECTS_NAMESPACE, Some(id.0.to_string()))
}),
)
.await?;
@ -434,9 +452,11 @@ impl User {
let user = Self::get_id(id, &mut **transaction, redis).await?;
if let Some(delete_user) = user {
User::clear_caches(&[(id, Some(delete_user.username))], redis).await?;
User::clear_caches(&[(id, Some(delete_user.username))], redis)
.await?;
let deleted_user: UserId = crate::models::users::DELETED_USER.into();
let deleted_user: UserId =
crate::models::users::DELETED_USER.into();
sqlx::query!(
"
@ -509,7 +529,8 @@ impl User {
.await?;
for collection_id in user_collections {
models::Collection::remove(collection_id, transaction, redis).await?;
models::Collection::remove(collection_id, transaction, redis)
.await?;
}
let report_threads = sqlx::query!(

View File

@ -1,5 +1,9 @@
use crate::database::models::{DatabaseError, ProductPriceId, UserId, UserSubscriptionId};
use crate::models::billing::{PriceDuration, SubscriptionMetadata, SubscriptionStatus};
use crate::database::models::{
DatabaseError, ProductPriceId, UserId, UserSubscriptionId,
};
use crate::models::billing::{
PriceDuration, SubscriptionMetadata, SubscriptionStatus,
};
use chrono::{DateTime, Utc};
use itertools::Itertools;
use std::convert::{TryFrom, TryInto};
@ -69,10 +73,12 @@ impl UserSubscriptionItem {
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
let ids_ref: &[i64] = &ids;
let results =
select_user_subscriptions_with_predicate!("WHERE us.id = ANY($1::bigint[])", ids_ref)
.fetch_all(exec)
.await?;
let results = select_user_subscriptions_with_predicate!(
"WHERE us.id = ANY($1::bigint[])",
ids_ref
)
.fetch_all(exec)
.await?;
Ok(results
.into_iter()
@ -85,9 +91,12 @@ impl UserSubscriptionItem {
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let user_id = user_id.0;
let results = select_user_subscriptions_with_predicate!("WHERE us.user_id = $1", user_id)
.fetch_all(exec)
.await?;
let results = select_user_subscriptions_with_predicate!(
"WHERE us.user_id = $1",
user_id
)
.fetch_all(exec)
.await?;
Ok(results
.into_iter()

View File

@ -220,7 +220,12 @@ impl VersionBuilder {
file.insert(version_id, transaction).await?;
}
DependencyBuilder::insert_many(dependencies, self.version_id, transaction).await?;
DependencyBuilder::insert_many(
dependencies,
self.version_id,
transaction,
)
.await?;
let loader_versions = loaders
.iter()
@ -898,13 +903,20 @@ impl Version {
redis
.delete_many(
iter::once((VERSIONS_NAMESPACE, Some(version.inner.id.0.to_string()))).chain(
version.files.iter().flat_map(|file| {
iter::once((
VERSIONS_NAMESPACE,
Some(version.inner.id.0.to_string()),
))
.chain(version.files.iter().flat_map(
|file| {
file.hashes.iter().map(|(algo, hash)| {
(VERSION_FILES_NAMESPACE, Some(format!("{}_{}", algo, hash)))
(
VERSION_FILES_NAMESPACE,
Some(format!("{}_{}", algo, hash)),
)
})
}),
),
},
)),
)
.await?;
Ok(())
@ -1016,7 +1028,11 @@ mod tests {
Utc::now().checked_sub_months(Months::new(months)).unwrap()
}
fn get_version(id: i64, ordering: Option<i32>, date_published: DateTime<Utc>) -> Version {
fn get_version(
id: i64,
ordering: Option<i32>,
date_published: DateTime<Utc>,
) -> Version {
Version {
id: VersionId(id),
ordering,

View File

@ -6,7 +6,8 @@ use std::time::Duration;
pub async fn connect() -> Result<PgPool, sqlx::Error> {
info!("Initializing database connection");
let database_url = dotenvy::var("DATABASE_URL").expect("`DATABASE_URL` not in .env");
let database_url =
dotenvy::var("DATABASE_URL").expect("`DATABASE_URL` not in .env");
let pool = PgPoolOptions::new()
.min_connections(
dotenvy::var("DATABASE_MIN_CONNECTIONS")

View File

@ -32,18 +32,20 @@ impl RedisPool {
// testing pool uses a hashmap to mimic redis behaviour for very small data sizes (i.e. tests)
// PANICS: production pool will panic if the Redis URL is not set
pub fn new(meta_namespace: Option<String>) -> Self {
let redis_pool = Config::from_url(dotenvy::var("REDIS_URL").expect("Redis URL not set"))
.builder()
.expect("Error building Redis pool")
.max_size(
dotenvy::var("DATABASE_MAX_CONNECTIONS")
.ok()
.and_then(|x| x.parse().ok())
.unwrap_or(10000),
)
.runtime(Runtime::Tokio1)
.build()
.expect("Redis connection failed");
let redis_pool = Config::from_url(
dotenvy::var("REDIS_URL").expect("Redis URL not set"),
)
.builder()
.expect("Error building Redis pool")
.max_size(
dotenvy::var("DATABASE_MAX_CONNECTIONS")
.ok()
.and_then(|x| x.parse().ok())
.unwrap_or(10000),
)
.runtime(Runtime::Tokio1)
.build()
.expect("Redis connection failed");
RedisPool {
pool: redis_pool,
@ -68,7 +70,14 @@ impl RedisPool {
F: FnOnce(Vec<K>) -> Fut,
Fut: Future<Output = Result<DashMap<K, T>, DatabaseError>>,
T: Serialize + DeserializeOwned,
K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize + Debug,
K: Display
+ Hash
+ Eq
+ PartialEq
+ Clone
+ DeserializeOwned
+ Serialize
+ Debug,
{
Ok(self
.get_cached_keys_raw(namespace, keys, closure)
@ -88,15 +97,28 @@ impl RedisPool {
F: FnOnce(Vec<K>) -> Fut,
Fut: Future<Output = Result<DashMap<K, T>, DatabaseError>>,
T: Serialize + DeserializeOwned,
K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize + Debug,
K: Display
+ Hash
+ Eq
+ PartialEq
+ Clone
+ DeserializeOwned
+ Serialize
+ Debug,
{
self.get_cached_keys_raw_with_slug(namespace, None, false, keys, |ids| async move {
Ok(closure(ids)
.await?
.into_iter()
.map(|(key, val)| (key, (None::<String>, val)))
.collect())
})
self.get_cached_keys_raw_with_slug(
namespace,
None,
false,
keys,
|ids| async move {
Ok(closure(ids)
.await?
.into_iter()
.map(|(key, val)| (key, (None::<String>, val)))
.collect())
},
)
.await
}
@ -113,7 +135,13 @@ impl RedisPool {
Fut: Future<Output = Result<DashMap<K, (Option<S>, T)>, DatabaseError>>,
T: Serialize + DeserializeOwned,
I: Display + Hash + Eq + PartialEq + Clone + Debug,
K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize,
K: Display
+ Hash
+ Eq
+ PartialEq
+ Clone
+ DeserializeOwned
+ Serialize,
S: Display + Clone + DeserializeOwned + Serialize + Debug,
{
Ok(self
@ -143,7 +171,13 @@ impl RedisPool {
Fut: Future<Output = Result<DashMap<K, (Option<S>, T)>, DatabaseError>>,
T: Serialize + DeserializeOwned,
I: Display + Hash + Eq + PartialEq + Clone + Debug,
K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize,
K: Display
+ Hash
+ Eq
+ PartialEq
+ Clone
+ DeserializeOwned
+ Serialize,
S: Display + Clone + DeserializeOwned + Serialize + Debug,
{
let connection = self.connect().await?.connection;
@ -158,7 +192,8 @@ impl RedisPool {
}
let get_cached_values =
|ids: DashMap<String, I>, mut connection: deadpool_redis::Connection| async move {
|ids: DashMap<String, I>,
mut connection: deadpool_redis::Connection| async move {
let slug_ids = if let Some(slug_namespace) = slug_namespace {
cmd("MGET")
.arg(
@ -176,7 +211,7 @@ impl RedisPool {
})
.collect::<Vec<_>>(),
)
.query_async::<_, Vec<Option<String>>>(&mut connection)
.query_async::<Vec<Option<String>>>(&mut connection)
.await?
.into_iter()
.flatten()
@ -195,15 +230,23 @@ impl RedisPool {
.map(|x| x.to_string())
}))
.chain(slug_ids)
.map(|x| format!("{}_{namespace}:{x}", self.meta_namespace))
.map(|x| {
format!(
"{}_{namespace}:{x}",
self.meta_namespace
)
})
.collect::<Vec<_>>(),
)
.query_async::<_, Vec<Option<String>>>(&mut connection)
.query_async::<Vec<Option<String>>>(&mut connection)
.await?
.into_iter()
.filter_map(|x| {
x.and_then(|val| serde_json::from_str::<RedisValue<T, K, S>>(&val).ok())
.map(|val| (val.key.clone(), val))
x.and_then(|val| {
serde_json::from_str::<RedisValue<T, K, S>>(&val)
.ok()
})
.map(|val| (val.key.clone(), val))
})
.collect::<HashMap<_, _>>();
@ -213,11 +256,14 @@ impl RedisPool {
let current_time = Utc::now();
let mut expired_values = HashMap::new();
let (cached_values_raw, mut connection, ids) = get_cached_values(ids, connection).await?;
let (cached_values_raw, mut connection, ids) =
get_cached_values(ids, connection).await?;
let mut cached_values = cached_values_raw
.into_iter()
.filter_map(|(key, val)| {
if Utc.timestamp_opt(val.iat + ACTUAL_EXPIRY, 0).unwrap() < current_time {
if Utc.timestamp_opt(val.iat + ACTUAL_EXPIRY, 0).unwrap()
< current_time
{
expired_values.insert(val.key.to_string(), val);
None
@ -244,7 +290,8 @@ impl RedisPool {
if !ids.is_empty() {
let mut pipe = redis::pipe();
let fetch_ids = ids.iter().map(|x| x.key().clone()).collect::<Vec<_>>();
let fetch_ids =
ids.iter().map(|x| x.key().clone()).collect::<Vec<_>>();
fetch_ids.iter().for_each(|key| {
pipe.atomic().set_options(
@ -257,7 +304,7 @@ impl RedisPool {
);
});
let results = pipe
.query_async::<_, Vec<Option<i32>>>(&mut connection)
.query_async::<Vec<Option<i32>>>(&mut connection)
.await?;
for (idx, key) in fetch_ids.into_iter().enumerate() {
@ -288,12 +335,22 @@ impl RedisPool {
#[allow(clippy::type_complexity)]
let mut fetch_tasks: Vec<
Pin<Box<dyn Future<Output = Result<HashMap<K, RedisValue<T, K, S>>, DatabaseError>>>>,
Pin<
Box<
dyn Future<
Output = Result<
HashMap<K, RedisValue<T, K, S>>,
DatabaseError,
>,
>,
>,
>,
> = Vec::new();
if !ids.is_empty() {
fetch_tasks.push(Box::pin(async {
let fetch_ids = ids.iter().map(|x| x.value().clone()).collect::<Vec<_>>();
let fetch_ids =
ids.iter().map(|x| x.value().clone()).collect::<Vec<_>>();
let vals = closure(fetch_ids).await?;
let mut return_values = HashMap::new();
@ -309,7 +366,10 @@ impl RedisPool {
};
pipe.atomic().set_ex(
format!("{}_{namespace}:{key}", self.meta_namespace),
format!(
"{}_{namespace}:{key}",
self.meta_namespace
),
serde_json::to_string(&value)?,
DEFAULT_EXPIRY as u64,
);
@ -347,23 +407,29 @@ impl RedisPool {
let base62 = to_base62(value);
ids.remove(&base62);
pipe.atomic()
.del(format!("{}_{namespace}:{base62}/lock", self.meta_namespace));
pipe.atomic().del(format!(
"{}_{namespace}:{base62}/lock",
self.meta_namespace
));
}
pipe.atomic()
.del(format!("{}_{namespace}:{key}/lock", self.meta_namespace));
pipe.atomic().del(format!(
"{}_{namespace}:{key}/lock",
self.meta_namespace
));
return_values.insert(key, value);
}
}
for (key, _) in ids {
pipe.atomic()
.del(format!("{}_{namespace}:{key}/lock", self.meta_namespace));
pipe.atomic().del(format!(
"{}_{namespace}:{key}/lock",
self.meta_namespace
));
}
pipe.query_async(&mut connection).await?;
pipe.query_async::<()>(&mut connection).await?;
Ok(return_values)
}));
@ -373,7 +439,8 @@ impl RedisPool {
fetch_tasks.push(Box::pin(async {
let mut connection = self.pool.get().await?;
let mut interval = tokio::time::interval(Duration::from_millis(100));
let mut interval =
tokio::time::interval(Duration::from_millis(100));
let start = Utc::now();
loop {
let results = cmd("MGET")
@ -381,11 +448,15 @@ impl RedisPool {
subscribe_ids
.iter()
.map(|x| {
format!("{}_{namespace}:{}/lock", self.meta_namespace, x.key())
format!(
"{}_{namespace}:{}/lock",
self.meta_namespace,
x.key()
)
})
.collect::<Vec<_>>(),
)
.query_async::<_, Vec<Option<String>>>(&mut connection)
.query_async::<Vec<Option<String>>>(&mut connection)
.await?;
if results.into_iter().all(|x| x.is_none()) {
@ -399,7 +470,8 @@ impl RedisPool {
interval.tick().await;
}
let (return_values, _, _) = get_cached_values(subscribe_ids, connection).await?;
let (return_values, _, _) =
get_cached_values(subscribe_ids, connection).await?;
Ok(return_values)
}));
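
The subscribe branch above polls the per-key `/lock` sentinels with `MGET` every 100ms until the task holding them has populated the cache. A simplified, self-contained sketch of that wait loop; it omits the timeout the real code tracks via `start`, and the connection type is an assumption:

```rust
use std::time::Duration;

// Poll the "<key>/lock" sentinels every 100ms; once they are all gone the
// task that held them has written the cache and we can re-read it.
async fn wait_for_locks(
    conn: &mut redis::aio::MultiplexedConnection,
    lock_keys: &[String],
) -> redis::RedisResult<()> {
    let mut interval = tokio::time::interval(Duration::from_millis(100));
    loop {
        let locks: Vec<Option<String>> = redis::cmd("MGET")
            .arg(lock_keys)
            .query_async(conn)
            .await?;
        if locks.iter().all(|x| x.is_none()) {
            return Ok(());
        }
        interval.tick().await;
    }
}
```
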
@ -436,7 +508,7 @@ impl RedisConnection {
]
.as_slice(),
);
redis_execute(&mut cmd, &mut self.connection).await?;
redis_execute::<()>(&mut cmd, &mut self.connection).await?;
Ok(())
}
@ -468,7 +540,8 @@ impl RedisConnection {
let mut cmd = cmd("GET");
redis_args(
&mut cmd,
vec![format!("{}_{}:{}", self.meta_namespace, namespace, id)].as_slice(),
vec![format!("{}_{}:{}", self.meta_namespace, namespace, id)]
.as_slice(),
);
let res = redis_execute(&mut cmd, &mut self.connection).await?;
Ok(res)
@ -488,16 +561,21 @@ impl RedisConnection {
.and_then(|x| serde_json::from_str(&x).ok()))
}
pub async fn delete<T1>(&mut self, namespace: &str, id: T1) -> Result<(), DatabaseError>
pub async fn delete<T1>(
&mut self,
namespace: &str,
id: T1,
) -> Result<(), DatabaseError>
where
T1: Display,
{
let mut cmd = cmd("DEL");
redis_args(
&mut cmd,
vec![format!("{}_{}:{}", self.meta_namespace, namespace, id)].as_slice(),
vec![format!("{}_{}:{}", self.meta_namespace, namespace, id)]
.as_slice(),
);
redis_execute(&mut cmd, &mut self.connection).await?;
redis_execute::<()>(&mut cmd, &mut self.connection).await?;
Ok(())
}
@ -511,14 +589,15 @@ impl RedisConnection {
if let Some(id) = id {
redis_args(
&mut cmd,
[format!("{}_{}:{}", self.meta_namespace, namespace, id)].as_slice(),
[format!("{}_{}:{}", self.meta_namespace, namespace, id)]
.as_slice(),
);
any = true;
}
}
if any {
redis_execute(&mut cmd, &mut self.connection).await?;
redis_execute::<()>(&mut cmd, &mut self.connection).await?;
}
Ok(())
@ -547,6 +626,6 @@ pub async fn redis_execute<T>(
where
T: redis::FromRedisValue,
{
let res = cmd.query_async::<_, T>(redis).await?;
let res = cmd.query_async::<T>(redis).await?;
Ok(res)
}
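
Several hunks in this file drop the connection type parameter from the `query_async` turbofish, so `query_async::<_, T>` becomes `query_async::<T>`. A minimal sketch of the updated call shape, assuming a redis 0.27-style API where the connection is passed as `impl ConnectionLike`; the key name is illustrative:

```rust
// Illustrative key; only the turbofish shape is the point here.
async fn read_key(
    conn: &mut redis::aio::MultiplexedConnection,
) -> redis::RedisResult<Option<String>> {
    redis::cmd("GET")
        .arg("labrinth_demo:123")
        .query_async::<Option<String>>(conn)
        .await
}
```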

View File

@ -16,10 +16,12 @@ pub struct BackblazeHost {
impl BackblazeHost {
pub async fn new(key_id: &str, key: &str, bucket_id: &str) -> Self {
let authorization_data = authorization::authorize_account(key_id, key).await.unwrap();
let upload_url_data = authorization::get_upload_url(&authorization_data, bucket_id)
.await
.unwrap();
let authorization_data =
authorization::authorize_account(key_id, key).await.unwrap();
let upload_url_data =
authorization::get_upload_url(&authorization_data, bucket_id)
.await
.unwrap();
BackblazeHost {
upload_url_data,
@ -38,8 +40,13 @@ impl FileHost for BackblazeHost {
) -> Result<UploadFileData, FileHostingError> {
let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
let upload_data =
upload::upload_file(&self.upload_url_data, content_type, file_name, file_bytes).await?;
let upload_data = upload::upload_file(
&self.upload_url_data,
content_type,
file_name,
file_bytes,
)
.await?;
Ok(UploadFileData {
file_id: upload_data.file_id,
file_name: upload_data.file_name,
@ -74,8 +81,12 @@ impl FileHost for BackblazeHost {
file_id: &str,
file_name: &str,
) -> Result<DeleteFileData, FileHostingError> {
let delete_data =
delete::delete_file_version(&self.authorization_data, file_id, file_name).await?;
let delete_data = delete::delete_file_version(
&self.authorization_data,
file_id,
file_name,
)
.await?;
Ok(DeleteFileData {
file_id: delete_data.file_id,
file_name: delete_data.file_name,
@ -83,7 +94,9 @@ impl FileHost for BackblazeHost {
}
}
pub async fn process_response<T>(response: Response) -> Result<T, FileHostingError>
pub async fn process_response<T>(
response: Response,
) -> Result<T, FileHostingError>
where
T: for<'de> Deserialize<'de>,
{

View File

@ -56,7 +56,13 @@ pub async fn get_upload_url(
bucket_id: &str,
) -> Result<UploadUrlData, FileHostingError> {
let response = reqwest::Client::new()
.post(format!("{}/b2api/v2/b2_get_upload_url", authorization_data.api_url).to_string())
.post(
format!(
"{}/b2api/v2/b2_get_upload_url",
authorization_data.api_url
)
.to_string(),
)
.header(reqwest::header::CONTENT_TYPE, "application/json")
.header(
reqwest::header::AUTHORIZATION,

View File

@ -21,9 +21,12 @@ impl FileHost for MockHost {
file_name: &str,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
let path = std::path::Path::new(&dotenvy::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::create_dir_all(path.parent().ok_or(FileHostingError::InvalidFilename)?)?;
let path =
std::path::Path::new(&dotenvy::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::create_dir_all(
path.parent().ok_or(FileHostingError::InvalidFilename)?,
)?;
let content_sha1 = sha1::Sha1::from(&file_bytes).hexdigest();
let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
@ -45,8 +48,9 @@ impl FileHost for MockHost {
file_id: &str,
file_name: &str,
) -> Result<DeleteFileData, FileHostingError> {
let path = std::path::Path::new(&dotenvy::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
let path =
std::path::Path::new(&dotenvy::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
if path.exists() {
std::fs::remove_file(path)?;
}

View File

@ -1,4 +1,6 @@
use crate::file_hosting::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use crate::file_hosting::{
DeleteFileData, FileHost, FileHostingError, UploadFileData,
};
use async_trait::async_trait;
use bytes::Bytes;
use chrono::Utc;
@ -31,12 +33,23 @@ impl S3Host {
endpoint: url.to_string(),
}
},
Credentials::new(Some(access_token), Some(secret), None, None, None).map_err(|_| {
FileHostingError::S3Error("Error while creating credentials".to_string())
Credentials::new(
Some(access_token),
Some(secret),
None,
None,
None,
)
.map_err(|_| {
FileHostingError::S3Error(
"Error while creating credentials".to_string(),
)
})?,
)
.map_err(|_| {
FileHostingError::S3Error("Error while creating Bucket instance".to_string())
FileHostingError::S3Error(
"Error while creating Bucket instance".to_string(),
)
})?;
Ok(S3Host { bucket })
@ -55,10 +68,16 @@ impl FileHost for S3Host {
let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
self.bucket
.put_object_with_content_type(format!("/{file_name}"), &file_bytes, content_type)
.put_object_with_content_type(
format!("/{file_name}"),
&file_bytes,
content_type,
)
.await
.map_err(|_| {
FileHostingError::S3Error("Error while uploading file to S3".to_string())
FileHostingError::S3Error(
"Error while uploading file to S3".to_string(),
)
})?;
Ok(UploadFileData {
@ -82,7 +101,9 @@ impl FileHost for S3Host {
.delete_object(format!("/{file_name}"))
.await
.map_err(|_| {
FileHostingError::S3Error("Error while deleting file from S3".to_string())
FileHostingError::S3Error(
"Error while deleting file from S3".to_string(),
)
})?;
Ok(DeleteFileData {

View File

@ -6,7 +6,8 @@ use actix_web::web;
use database::redis::RedisPool;
use log::{info, warn};
use queue::{
analytics::AnalyticsQueue, payouts::PayoutsQueue, session::AuthQueue, socket::ActiveSockets,
analytics::AnalyticsQueue, payouts::PayoutsQueue, session::AuthQueue,
socket::ActiveSockets,
};
use sqlx::Postgres;
use tokio::sync::RwLock;
@ -74,7 +75,8 @@ pub fn app_setup(
dotenvy::var("BIND_ADDR").unwrap()
);
let automated_moderation_queue = web::Data::new(AutomatedModerationQueue::default());
let automated_moderation_queue =
web::Data::new(AutomatedModerationQueue::default());
{
let automated_moderation_queue_ref = automated_moderation_queue.clone();
@ -110,8 +112,9 @@ pub fn app_setup(
// The interval in seconds at which the local database is indexed
// for searching. Defaults to 1 hour if unset.
let local_index_interval =
std::time::Duration::from_secs(parse_var("LOCAL_INDEX_INTERVAL").unwrap_or(3600));
let local_index_interval = std::time::Duration::from_secs(
parse_var("LOCAL_INDEX_INTERVAL").unwrap_or(3600),
);
let pool_ref = pool.clone();
let search_config_ref = search_config.clone();
@ -122,7 +125,12 @@ pub fn app_setup(
let search_config_ref = search_config_ref.clone();
async move {
info!("Indexing local database");
let result = index_projects(pool_ref, redis_pool_ref.clone(), &search_config_ref).await;
let result = index_projects(
pool_ref,
redis_pool_ref.clone(),
&search_config_ref,
)
.await;
if let Err(e) = result {
warn!("Local project indexing failed: {:?}", e);
}
@ -172,7 +180,11 @@ pub fn app_setup(
}
});
scheduler::schedule_versions(&mut scheduler, pool.clone(), redis_pool.clone());
scheduler::schedule_versions(
&mut scheduler,
pool.clone(),
redis_pool.clone(),
);
let session_queue = web::Data::new(AuthQueue::new());
@ -258,14 +270,20 @@ pub fn app_setup(
});
}
let stripe_client = stripe::Client::new(dotenvy::var("STRIPE_API_KEY").unwrap());
let stripe_client =
stripe::Client::new(dotenvy::var("STRIPE_API_KEY").unwrap());
{
let pool_ref = pool.clone();
let redis_ref = redis_pool.clone();
let stripe_client_ref = stripe_client.clone();
actix_rt::spawn(async move {
routes::internal::billing::task(stripe_client_ref, pool_ref, redis_ref).await;
routes::internal::billing::task(
stripe_client_ref,
pool_ref,
redis_ref,
)
.await;
});
}
@ -274,12 +292,14 @@ pub fn app_setup(
let redis_ref = redis_pool.clone();
actix_rt::spawn(async move {
routes::internal::billing::subscription_task(pool_ref, redis_ref).await;
routes::internal::billing::subscription_task(pool_ref, redis_ref)
.await;
});
}
let ip_salt = Pepper {
pepper: models::ids::Base62Id(models::ids::random_base62(11)).to_string(),
pepper: models::ids::Base62Id(models::ids::random_base62(11))
.to_string(),
};
let payouts_queue = web::Data::new(PayoutsQueue::new());
@ -304,23 +324,22 @@ pub fn app_setup(
}
}
pub fn app_config(cfg: &mut web::ServiceConfig, labrinth_config: LabrinthConfig) {
cfg.app_data(
web::FormConfig::default()
.error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()),
)
.app_data(
web::PathConfig::default()
.error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()),
)
.app_data(
web::QueryConfig::default()
.error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()),
)
.app_data(
web::JsonConfig::default()
.error_handler(|err, _req| routes::ApiError::Validation(err.to_string()).into()),
)
pub fn app_config(
cfg: &mut web::ServiceConfig,
labrinth_config: LabrinthConfig,
) {
cfg.app_data(web::FormConfig::default().error_handler(|err, _req| {
routes::ApiError::Validation(err.to_string()).into()
}))
.app_data(web::PathConfig::default().error_handler(|err, _req| {
routes::ApiError::Validation(err.to_string()).into()
}))
.app_data(web::QueryConfig::default().error_handler(|err, _req| {
routes::ApiError::Validation(err.to_string()).into()
}))
.app_data(web::JsonConfig::default().error_handler(|err, _req| {
routes::ApiError::Validation(err.to_string()).into()
}))
.app_data(web::Data::new(labrinth_config.redis_pool.clone()))
.app_data(web::Data::new(labrinth_config.pool.clone()))
.app_data(web::Data::new(labrinth_config.file_host.clone()))

View File

@ -21,7 +21,8 @@ pub struct Pepper {
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
dotenvy::dotenv().ok();
env_logger::Builder::from_env(Env::default().default_filter_or("info")).init();
env_logger::Builder::from_env(Env::default().default_filter_or("info"))
.init();
if check_env_vars() {
error!("Some environment variables are missing!");
@ -56,35 +57,38 @@ async fn main() -> std::io::Result<()> {
// Redis connector
let redis_pool = RedisPool::new(None);
let storage_backend = dotenvy::var("STORAGE_BACKEND").unwrap_or_else(|_| "local".to_string());
let storage_backend =
dotenvy::var("STORAGE_BACKEND").unwrap_or_else(|_| "local".to_string());
let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> = match storage_backend.as_str() {
"backblaze" => Arc::new(
file_hosting::BackblazeHost::new(
&dotenvy::var("BACKBLAZE_KEY_ID").unwrap(),
&dotenvy::var("BACKBLAZE_KEY").unwrap(),
&dotenvy::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await,
),
"s3" => Arc::new(
S3Host::new(
&dotenvy::var("S3_BUCKET_NAME").unwrap(),
&dotenvy::var("S3_REGION").unwrap(),
&dotenvy::var("S3_URL").unwrap(),
&dotenvy::var("S3_ACCESS_TOKEN").unwrap(),
&dotenvy::var("S3_SECRET").unwrap(),
)
.unwrap(),
),
"local" => Arc::new(file_hosting::MockHost::new()),
_ => panic!("Invalid storage backend specified. Aborting startup!"),
};
let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> =
match storage_backend.as_str() {
"backblaze" => Arc::new(
file_hosting::BackblazeHost::new(
&dotenvy::var("BACKBLAZE_KEY_ID").unwrap(),
&dotenvy::var("BACKBLAZE_KEY").unwrap(),
&dotenvy::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await,
),
"s3" => Arc::new(
S3Host::new(
&dotenvy::var("S3_BUCKET_NAME").unwrap(),
&dotenvy::var("S3_REGION").unwrap(),
&dotenvy::var("S3_URL").unwrap(),
&dotenvy::var("S3_ACCESS_TOKEN").unwrap(),
&dotenvy::var("S3_SECRET").unwrap(),
)
.unwrap(),
),
"local" => Arc::new(file_hosting::MockHost::new()),
_ => panic!("Invalid storage backend specified. Aborting startup!"),
};
info!("Initializing clickhouse connection");
let mut clickhouse = clickhouse::init_client().await.unwrap();
let maxmind_reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap());
let maxmind_reader =
Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap());
let prometheus = PrometheusMetricsBuilder::new("labrinth")
.endpoint("/metrics")

View File

@ -3,8 +3,8 @@ use serde::{Deserialize, Serialize};
use crate::models::{
ids::{
NotificationId, OrganizationId, ProjectId, ReportId, TeamId, ThreadId, ThreadMessageId,
UserId, VersionId,
NotificationId, OrganizationId, ProjectId, ReportId, TeamId, ThreadId,
ThreadMessageId, UserId, VersionId,
},
notifications::{Notification, NotificationAction, NotificationBody},
projects::ProjectStatus,
@ -78,11 +78,21 @@ pub enum LegacyNotificationBody {
impl LegacyNotification {
pub fn from(notification: Notification) -> Self {
let type_ = match &notification.body {
NotificationBody::ProjectUpdate { .. } => Some("project_update".to_string()),
NotificationBody::TeamInvite { .. } => Some("team_invite".to_string()),
NotificationBody::OrganizationInvite { .. } => Some("organization_invite".to_string()),
NotificationBody::StatusChange { .. } => Some("status_change".to_string()),
NotificationBody::ModeratorMessage { .. } => Some("moderator_message".to_string()),
NotificationBody::ProjectUpdate { .. } => {
Some("project_update".to_string())
}
NotificationBody::TeamInvite { .. } => {
Some("team_invite".to_string())
}
NotificationBody::OrganizationInvite { .. } => {
Some("organization_invite".to_string())
}
NotificationBody::StatusChange { .. } => {
Some("status_change".to_string())
}
NotificationBody::ModeratorMessage { .. } => {
Some("moderator_message".to_string())
}
NotificationBody::LegacyMarkdown {
notification_type, ..
} => notification_type.clone(),

View File

@ -9,8 +9,8 @@ use crate::database::models::{version_item, DatabaseError};
use crate::database::redis::RedisPool;
use crate::models::ids::{ProjectId, VersionId};
use crate::models::projects::{
Dependency, License, Link, Loader, ModeratorMessage, MonetizationStatus, Project,
ProjectStatus, Version, VersionFile, VersionStatus, VersionType,
Dependency, License, Link, Loader, ModeratorMessage, MonetizationStatus,
Project, ProjectStatus, Version, VersionFile, VersionStatus, VersionType,
};
use crate::models::threads::ThreadId;
use crate::routes::v2_reroute::{self, capitalize_first};
@ -87,12 +87,13 @@ impl LegacyProject {
.cloned()
.unwrap_or("project".to_string()); // Default to 'project' if none are found
let project_type = if og_project_type == "datapack" || og_project_type == "plugin" {
// These are not supported in V2, so we'll just use 'mod' instead
"mod".to_string()
} else {
og_project_type.clone()
};
let project_type =
if og_project_type == "datapack" || og_project_type == "plugin" {
// These are not supported in V2, so we'll just use 'mod' instead
"mod".to_string()
} else {
og_project_type.clone()
};
(project_type, og_project_type)
}
@ -102,7 +103,10 @@ impl LegacyProject {
// - This can be any version, because the fields are ones that used to be on the project itself.
// - It's conceivable that certain V3 projects with many different versions may not have the same fields on all of them.
// It's safe to use a db version_item for this as the only info is side types, game versions, and loader fields (for loaders), which used to be public on project anyway.
pub fn from(data: Project, versions_item: Option<version_item::QueryVersion>) -> Self {
pub fn from(
data: Project,
versions_item: Option<version_item::QueryVersion>,
) -> Self {
let mut client_side = LegacySideType::Unknown;
let mut server_side = LegacySideType::Unknown;
@ -110,7 +114,8 @@ impl LegacyProject {
// We'll prioritize 'modpack' first, and if neither are found, use the first one.
// If there are no project types, default to 'project'
let project_types = data.project_types;
let (mut project_type, og_project_type) = Self::get_project_type(&project_types);
let (mut project_type, og_project_type) =
Self::get_project_type(&project_types);
let mut loaders = data.loaders;
@ -128,16 +133,22 @@ impl LegacyProject {
let fields = versions_item
.version_fields
.iter()
.map(|f| (f.field_name.clone(), f.value.clone().serialize_internal()))
.map(|f| {
(f.field_name.clone(), f.value.clone().serialize_internal())
})
.collect::<HashMap<_, _>>();
(client_side, server_side) =
v2_reroute::convert_side_types_v2(&fields, Some(&*og_project_type));
(client_side, server_side) = v2_reroute::convert_side_types_v2(
&fields,
Some(&*og_project_type),
);
// - if loader is mrpack, this is a modpack
// the loaders are whatever the corresponding loader fields are
if loaders.contains(&"mrpack".to_string()) {
project_type = "modpack".to_string();
if let Some(mrpack_loaders) = data.fields.iter().find(|f| f.0 == "mrpack_loaders") {
if let Some(mrpack_loaders) =
data.fields.iter().find(|f| f.0 == "mrpack_loaders")
{
let values = mrpack_loaders
.1
.iter()
@ -227,7 +238,8 @@ impl LegacyProject {
.iter()
.filter_map(|p| p.versions.first().map(|i| (*i).into()))
.collect();
let example_versions = version_item::Version::get_many(&version_ids, exec, redis).await?;
let example_versions =
version_item::Version::get_many(&version_ids, exec, redis).await?;
let mut legacy_projects = Vec::new();
for project in data {
let version_item = example_versions
@ -308,7 +320,9 @@ pub struct LegacyVersion {
impl From<Version> for LegacyVersion {
fn from(data: Version) -> Self {
let mut game_versions = Vec::new();
if let Some(value) = data.fields.get("game_versions").and_then(|v| v.as_array()) {
if let Some(value) =
data.fields.get("game_versions").and_then(|v| v.as_array())
{
for gv in value {
if let Some(game_version) = gv.as_str() {
game_versions.push(game_version.to_string());
@ -318,14 +332,17 @@ impl From<Version> for LegacyVersion {
// - if loader is mrpack, this is a modpack
// the v2 loaders are whatever the corresponding loader fields are
let mut loaders = data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
let mut loaders =
data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
if loaders.contains(&"mrpack".to_string()) {
if let Some((_, mrpack_loaders)) = data
.fields
.into_iter()
.find(|(key, _)| key == "mrpack_loaders")
{
if let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders) {
if let Ok(mrpack_loaders) =
serde_json::from_value(mrpack_loaders)
{
loaders = mrpack_loaders;
}
}
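
The reformatted branches above implement the V2 compatibility rule spelled out in the comments: a project whose loaders include `mrpack` is surfaced as a `modpack`, and its loaders are replaced by the `mrpack_loaders` field when that field parses as a list of strings. A condensed standalone sketch of that rule:

```rust
// Hypothetical free function; in the diff this logic lives inside
// `LegacyProject::from` and `From<Version> for LegacyVersion`.
fn remap_mrpack(
    mut project_type: String,
    mut loaders: Vec<String>,
    mrpack_loaders: Option<serde_json::Value>,
) -> (String, Vec<String>) {
    if loaders.iter().any(|l| l == "mrpack") {
        project_type = "modpack".to_string();
        if let Some(value) = mrpack_loaders {
            if let Ok(parsed) = serde_json::from_value::<Vec<String>>(value) {
                loaders = parsed;
            }
        }
    }
    (project_type, loaders)
}
```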

View File

@ -92,14 +92,16 @@ impl LegacyResultSearchProject {
.cloned()
.unwrap_or("project".to_string()); // Default to 'project' if none are found
let project_type = if og_project_type == "datapack" || og_project_type == "plugin" {
// These are not supported in V2, so we'll just use 'mod' instead
"mod".to_string()
} else {
og_project_type.clone()
};
let project_type =
if og_project_type == "datapack" || og_project_type == "plugin" {
// These are not supported in V2, so we'll just use 'mod' instead
"mod".to_string()
} else {
og_project_type.clone()
};
let project_loader_fields = result_search_project.project_loader_fields.clone();
let project_loader_fields =
result_search_project.project_loader_fields.clone();
let get_one_bool_loader_field = |key: &str| {
project_loader_fields
.get(key)
@ -110,17 +112,20 @@ impl LegacyResultSearchProject {
};
let singleplayer = get_one_bool_loader_field("singleplayer");
let client_only = get_one_bool_loader_field("client_only").unwrap_or(false);
let server_only = get_one_bool_loader_field("server_only").unwrap_or(false);
let client_only =
get_one_bool_loader_field("client_only").unwrap_or(false);
let server_only =
get_one_bool_loader_field("server_only").unwrap_or(false);
let client_and_server = get_one_bool_loader_field("client_and_server");
let (client_side, server_side) = v2_reroute::convert_side_types_v2_bools(
singleplayer,
client_only,
server_only,
client_and_server,
Some(&*og_project_type),
);
let (client_side, server_side) =
v2_reroute::convert_side_types_v2_bools(
singleplayer,
client_only,
server_only,
client_and_server,
Some(&*og_project_type),
);
let client_side = client_side.to_string();
let server_side = server_side.to_string();

View File

@ -1,4 +1,6 @@
use crate::models::ids::{ImageId, ProjectId, ReportId, ThreadId, ThreadMessageId};
use crate::models::ids::{
ImageId, ProjectId, ReportId, ThreadId, ThreadMessageId,
};
use crate::models::projects::ProjectStatus;
use crate::models::users::{User, UserId};
use chrono::{DateTime, Utc};
@ -57,8 +59,12 @@ pub enum LegacyThreadType {
impl From<crate::models::v3::threads::ThreadType> for LegacyThreadType {
fn from(t: crate::models::v3::threads::ThreadType) -> Self {
match t {
crate::models::v3::threads::ThreadType::Report => LegacyThreadType::Report,
crate::models::v3::threads::ThreadType::Project => LegacyThreadType::Project,
crate::models::v3::threads::ThreadType::Report => {
LegacyThreadType::Report
}
crate::models::v3::threads::ThreadType::Project => {
LegacyThreadType::Project
}
crate::models::v3::threads::ThreadType::DirectMessage => {
LegacyThreadType::DirectMessage
}

View File

@ -106,7 +106,9 @@ pub struct UserSubscription {
impl From<crate::database::models::user_subscription_item::UserSubscriptionItem>
for UserSubscription
{
fn from(x: crate::database::models::user_subscription_item::UserSubscriptionItem) -> Self {
fn from(
x: crate::database::models::user_subscription_item::UserSubscriptionItem,
) -> Self {
Self {
id: x.id.into(),
user_id: x.user_id.into(),

View File

@ -13,7 +13,9 @@ pub use super::teams::TeamId;
pub use super::threads::ThreadId;
pub use super::threads::ThreadMessageId;
pub use super::users::UserId;
pub use crate::models::billing::{ChargeId, ProductId, ProductPriceId, UserSubscriptionId};
pub use crate::models::billing::{
ChargeId, ProductId, ProductPriceId, UserSubscriptionId,
};
use thiserror::Error;
/// Generates a random 64 bit integer that is exactly `n` characters
@ -41,7 +43,11 @@ pub fn random_base62_rng<R: rand::RngCore>(rng: &mut R, n: usize) -> u64 {
random_base62_rng_range(rng, n, n)
}
pub fn random_base62_rng_range<R: rand::RngCore>(rng: &mut R, n_min: usize, n_max: usize) -> u64 {
pub fn random_base62_rng_range<R: rand::RngCore>(
rng: &mut R,
n_min: usize,
n_max: usize,
) -> u64 {
use rand::Rng;
assert!(n_min > 0 && n_max <= 11 && n_min <= n_max);
// gen_range is [low, high): max value is `MULTIPLES[n] - 1`,
@ -155,7 +161,10 @@ pub mod base62_impl {
impl<'de> Visitor<'de> for Base62Visitor {
type Value = Base62Id;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
fn expecting(
&self,
formatter: &mut std::fmt::Formatter,
) -> std::fmt::Result {
formatter.write_str("a base62 string id")
}
@ -211,7 +220,9 @@ pub mod base62_impl {
}
// We don't want this panicking or wrapping on integer overflow
if let Some(n) = num.checked_mul(62).and_then(|n| n.checked_add(next_digit)) {
if let Some(n) =
num.checked_mul(62).and_then(|n| n.checked_add(next_digit))
{
num = n;
} else {
return Err(DecodingError::Overflow);
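For context on the hunk above: base62 decoding multiplies the accumulator by 62 per character, so the checked_mul/checked_add pair turns a would-be overflow into DecodingError::Overflow instead of a panic or silent wrap. A minimal standalone sketch of that pattern (the alphabet and names here are illustrative, not the crate's own implementation):

const BASE62: &[u8] =
    b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

// Returns None on an unknown character or on u64 overflow.
fn decode_base62(input: &str) -> Option<u64> {
    let mut num: u64 = 0;
    for byte in input.bytes() {
        let digit = BASE62.iter().position(|&c| c == byte)? as u64;
        num = num.checked_mul(62)?.checked_add(digit)?;
    }
    Some(num)
}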

View File

@ -90,7 +90,9 @@ impl ImageContext {
match self {
ImageContext::Project { project_id } => project_id.map(|x| x.0),
ImageContext::Version { version_id } => version_id.map(|x| x.0),
ImageContext::ThreadMessage { thread_message_id } => thread_message_id.map(|x| x.0),
ImageContext::ThreadMessage { thread_message_id } => {
thread_message_id.map(|x| x.0)
}
ImageContext::Report { report_id } => report_id.map(|x| x.0),
ImageContext::Unknown => None,
}

View File

@ -3,7 +3,9 @@ use super::ids::OrganizationId;
use super::users::UserId;
use crate::database::models::notification_item::Notification as DBNotification;
use crate::database::models::notification_item::NotificationAction as DBNotificationAction;
use crate::models::ids::{ProjectId, ReportId, TeamId, ThreadId, ThreadMessageId, VersionId};
use crate::models::ids::{
ProjectId, ReportId, TeamId, ThreadId, ThreadMessageId, VersionId,
};
use crate::models::projects::ProjectStatus;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

View File

@ -93,7 +93,11 @@ impl From<DBOAuthClient> for OAuthClient {
name: value.name,
icon_url: value.icon_url,
max_scopes: value.max_scopes,
redirect_uris: value.redirect_uris.into_iter().map(|r| r.into()).collect(),
redirect_uris: value
.redirect_uris
.into_iter()
.map(|r| r.into())
.collect(),
created_by: value.created_by.into(),
created: value.created,
url: value.url,

View File

@ -1,4 +1,6 @@
use crate::{models::v2::projects::LegacySideType, util::env::parse_strings_from_var};
use crate::{
models::v2::projects::LegacySideType, util::env::parse_strings_from_var,
};
use serde::{Deserialize, Serialize};
use validator::Validate;
@ -29,7 +31,9 @@ pub struct PackFile {
pub file_size: u32,
}
fn validate_download_url(values: &[String]) -> Result<(), validator::ValidationError> {
fn validate_download_url(
values: &[String],
) -> Result<(), validator::ValidationError> {
for value in values {
let url = url::Url::parse(value)
.ok()
@ -39,7 +43,8 @@ fn validate_download_url(values: &[String]) -> Result<(), validator::ValidationE
return Err(validator::ValidationError::new("invalid URL"));
}
let domains = parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS").unwrap_or_default();
let domains = parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS")
.unwrap_or_default();
if !domains.contains(
&url.domain()
.ok_or_else(|| validator::ValidationError::new("invalid URL"))?

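For context, validate_download_url above is a validator-style check: every external download URL must parse and its domain must appear in the WHITELISTED_MODPACK_DOMAINS list. A simplified standalone sketch of that shape (the helper name is illustrative and the real function may do more):

use validator::ValidationError;

fn check_download_domain(
    value: &str,
    whitelist: &[String],
) -> Result<(), ValidationError> {
    let url = url::Url::parse(value)
        .map_err(|_| ValidationError::new("invalid URL"))?;
    // Reject URLs without a domain, then enforce the allow-list.
    let domain = url
        .domain()
        .ok_or_else(|| ValidationError::new("invalid URL"))?;
    if whitelist.iter().any(|d| d == domain) {
        Ok(())
    } else {
        Err(ValidationError::new("invalid URL"))
    }
}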
View File

@ -131,7 +131,9 @@ impl Scopes {
self.intersects(Self::restricted())
}
pub fn parse_from_oauth_scopes(scopes: &str) -> Result<Scopes, bitflags::parser::ParseError> {
pub fn parse_from_oauth_scopes(
scopes: &str,
) -> Result<Scopes, bitflags::parser::ParseError> {
let scopes = scopes.replace(['+', ' '], "|").replace("%20", "|");
bitflags::parser::from_str(&scopes)
}
@ -187,7 +189,9 @@ mod test {
#[test]
fn test_parse_from_oauth_scopes_well_formed() {
let raw = "USER_READ_EMAIL SESSION_READ ORGANIZATION_CREATE";
let expected = Scopes::USER_READ_EMAIL | Scopes::SESSION_READ | Scopes::ORGANIZATION_CREATE;
let expected = Scopes::USER_READ_EMAIL
| Scopes::SESSION_READ
| Scopes::ORGANIZATION_CREATE;
let parsed = Scopes::parse_from_oauth_scopes(raw).unwrap();
@ -224,7 +228,8 @@ mod test {
#[test]
fn test_parse_from_oauth_scopes_url_encoded() {
let raw = urlencoding::encode("PAT_WRITE COLLECTION_DELETE").to_string();
let raw =
urlencoding::encode("PAT_WRITE COLLECTION_DELETE").to_string();
let expected = Scopes::PAT_WRITE | Scopes::COLLECTION_DELETE;
let parsed = Scopes::parse_from_oauth_scopes(&raw).unwrap();
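For context on parse_from_oauth_scopes: OAuth clients may send scopes separated by '+', spaces, or a literal "%20", while bitflags' text parser expects '|' between flag names, hence the replace chain before from_str. A small self-contained sketch with an illustrative flags type (DemoScopes stands in for the real Scopes):

use bitflags::bitflags;

bitflags! {
    struct DemoScopes: u64 {
        const USER_READ_EMAIL = 1 << 0;
        const SESSION_READ = 1 << 1;
        const ORGANIZATION_CREATE = 1 << 2;
    }
}

// Normalise the separators, then let bitflags parse the flag names.
fn parse_scopes(raw: &str) -> Result<DemoScopes, bitflags::parser::ParseError> {
    let raw = raw.replace(['+', ' '], "|").replace("%20", "|");
    bitflags::parser::from_str(&raw)
}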

View File

@ -128,7 +128,9 @@ pub fn from_duplicate_version_fields(
let mut fields: HashMap<String, Vec<serde_json::Value>> = HashMap::new();
for vf in version_fields {
// We use a string directly, so we can remove duplicates
let serialized = if let Some(inner_array) = vf.value.serialize_internal().as_array() {
let serialized = if let Some(inner_array) =
vf.value.serialize_internal().as_array()
{
inner_array.clone()
} else {
vec![vf.value.serialize_internal()]
@ -151,7 +153,8 @@ pub fn from_duplicate_version_fields(
impl From<QueryProject> for Project {
fn from(data: QueryProject) -> Self {
let fields = from_duplicate_version_fields(data.aggregate_version_fields);
let fields =
from_duplicate_version_fields(data.aggregate_version_fields);
let m = data.inner;
Self {
id: m.id.into(),
@ -655,7 +658,9 @@ pub struct Version {
pub fields: HashMap<String, serde_json::Value>,
}
pub fn skip_nulls<'de, D>(deserializer: D) -> Result<HashMap<String, serde_json::Value>, D::Error>
pub fn skip_nulls<'de, D>(
deserializer: D,
) -> Result<HashMap<String, serde_json::Value>, D::Error>
where
D: serde::Deserializer<'de>,
{
@ -708,7 +713,9 @@ impl From<QueryVersion> for Version {
version_id: d.version_id.map(|i| VersionId(i.0 as u64)),
project_id: d.project_id.map(|i| ProjectId(i.0 as u64)),
file_name: d.file_name,
dependency_type: DependencyType::from_string(d.dependency_type.as_str()),
dependency_type: DependencyType::from_string(
d.dependency_type.as_str(),
),
})
.collect(),
loaders: data.loaders.into_iter().map(Loader).collect(),

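A note on skip_nulls above: it is a custom serde deserializer, presumably wired up via #[serde(deserialize_with = ...)] on the fields map, so explicit JSON nulls are dropped instead of kept as Value::Null. A hedged sketch of one way such a helper can look (an illustration, not necessarily the exact body in this file):

use std::collections::HashMap;
use serde::{Deserialize, Deserializer};

fn skip_nulls<'de, D>(
    deserializer: D,
) -> Result<HashMap<String, serde_json::Value>, D::Error>
where
    D: Deserializer<'de>,
{
    // Deserialize the map as usual, then filter out null values.
    let map = HashMap::<String, serde_json::Value>::deserialize(deserializer)?;
    Ok(map.into_iter().filter(|(_, v)| !v.is_null()).collect())
}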
View File

@ -118,7 +118,8 @@ impl OrganizationPermissions {
}
if role.is_mod() {
return Some(
OrganizationPermissions::EDIT_DETAILS | OrganizationPermissions::ADD_PROJECT,
OrganizationPermissions::EDIT_DETAILS
| OrganizationPermissions::ADD_PROJECT,
);
}
None

View File

@ -93,7 +93,11 @@ impl ThreadType {
}
impl Thread {
pub fn from(data: crate::database::models::Thread, users: Vec<User>, user: &User) -> Self {
pub fn from(
data: crate::database::models::Thread,
users: Vec<User>,
user: &User,
) -> Self {
let thread_type = data.type_;
Thread {
@ -107,7 +111,8 @@ impl Thread {
.filter(|x| {
if let MessageBody::Text { private, .. } = x.body {
!private || user.role.is_mod()
} else if let MessageBody::Deleted { private, .. } = x.body {
} else if let MessageBody::Deleted { private, .. } = x.body
{
!private || user.role.is_mod()
} else {
true
@ -121,7 +126,10 @@ impl Thread {
}
impl ThreadMessage {
pub fn from(data: crate::database::models::ThreadMessage, user: &User) -> Self {
pub fn from(
data: crate::database::models::ThreadMessage,
user: &User,
) -> Self {
Self {
id: data.id.into(),
author_id: if data.hide_identity && !user.role.is_mod() {

View File

@ -36,12 +36,14 @@ impl AnalyticsQueue {
fn strip_ip(ip: Ipv6Addr) -> u64 {
if let Some(ip) = ip.to_ipv4_mapped() {
let octets = ip.octets();
u64::from_be_bytes([octets[0], octets[1], octets[2], octets[3], 0, 0, 0, 0])
u64::from_be_bytes([
octets[0], octets[1], octets[2], octets[3], 0, 0, 0, 0,
])
} else {
let octets = ip.octets();
u64::from_be_bytes([
octets[0], octets[1], octets[2], octets[3], octets[4], octets[5], octets[6],
octets[7],
octets[0], octets[1], octets[2], octets[3], octets[4],
octets[5], octets[6], octets[7],
])
}
}
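The strip_ip hunk above keeps only a prefix of the client address before it is stored: the four octets of an IPv4-mapped address, or the upper 64 bits of a real IPv6 address. A standalone illustration with made-up addresses (std only; the helper name is not the crate's):

use std::net::{Ipv4Addr, Ipv6Addr};

fn upper_bits(ip: Ipv6Addr) -> u64 {
    if let Some(v4) = ip.to_ipv4_mapped() {
        let o = v4.octets();
        u64::from_be_bytes([o[0], o[1], o[2], o[3], 0, 0, 0, 0])
    } else {
        let o = ip.octets();
        u64::from_be_bytes(o[..8].try_into().unwrap())
    }
}

fn main() {
    // A plain IPv6 address keeps only its /64 network prefix...
    let v6: Ipv6Addr = "2001:db8::1".parse().unwrap();
    assert_eq!(upper_bits(v6), 0x2001_0db8_0000_0000);
    // ...while an IPv4-mapped address keeps all four octets.
    let v4 = Ipv4Addr::new(203, 0, 113, 7).to_ipv6_mapped();
    assert_eq!(upper_bits(v4), 0xcb00_7107_0000_0000);
}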
@ -98,7 +100,8 @@ impl AnalyticsQueue {
raw_views.push((views, true));
}
let mut redis = redis.pool.get().await.map_err(DatabaseError::RedisPool)?;
let mut redis =
redis.pool.get().await.map_err(DatabaseError::RedisPool)?;
let results = cmd("MGET")
.arg(
@ -107,7 +110,7 @@ impl AnalyticsQueue {
.map(|x| format!("{}:{}-{}", VIEWS_NAMESPACE, x.0, x.1))
.collect::<Vec<_>>(),
)
.query_async::<_, Vec<Option<u32>>>(&mut redis)
.query_async::<Vec<Option<u32>>>(&mut redis)
.await
.map_err(DatabaseError::CacheError)?;
@ -115,24 +118,25 @@ impl AnalyticsQueue {
for (idx, count) in results.into_iter().enumerate() {
let key = &views_keys[idx];
let new_count = if let Some((views, monetized)) = raw_views.get_mut(idx) {
if let Some(count) = count {
if count > 3 {
*monetized = false;
continue;
}
let new_count =
if let Some((views, monetized)) = raw_views.get_mut(idx) {
if let Some(count) = count {
if count > 3 {
*monetized = false;
continue;
}
if (count + views.len() as u32) > 3 {
*monetized = false;
}
if (count + views.len() as u32) > 3 {
*monetized = false;
}
count + (views.len() as u32)
count + (views.len() as u32)
} else {
views.len() as u32
}
} else {
views.len() as u32
}
} else {
1
};
1
};
pipe.atomic().set_ex(
format!("{}:{}-{}", VIEWS_NAMESPACE, key.0, key.1),
@ -140,7 +144,7 @@ impl AnalyticsQueue {
6 * 60 * 60,
);
}
pipe.query_async(&mut *redis)
pipe.query_async::<()>(&mut *redis)
.await
.map_err(DatabaseError::CacheError)?;
@ -163,21 +167,26 @@ impl AnalyticsQueue {
let mut downloads_keys = Vec::new();
let raw_downloads = DashMap::new();
for (index, (key, download)) in downloads_queue.into_iter().enumerate() {
for (index, (key, download)) in
downloads_queue.into_iter().enumerate()
{
downloads_keys.push(key);
raw_downloads.insert(index, download);
}
let mut redis = redis.pool.get().await.map_err(DatabaseError::RedisPool)?;
let mut redis =
redis.pool.get().await.map_err(DatabaseError::RedisPool)?;
let results = cmd("MGET")
.arg(
downloads_keys
.iter()
.map(|x| format!("{}:{}-{}", DOWNLOADS_NAMESPACE, x.0, x.1))
.map(|x| {
format!("{}:{}-{}", DOWNLOADS_NAMESPACE, x.0, x.1)
})
.collect::<Vec<_>>(),
)
.query_async::<_, Vec<Option<u32>>>(&mut redis)
.query_async::<Vec<Option<u32>>>(&mut redis)
.await
.map_err(DatabaseError::CacheError)?;
@ -202,7 +211,7 @@ impl AnalyticsQueue {
6 * 60 * 60,
);
}
pipe.query_async(&mut *redis)
pipe.query_async::<()>(&mut *redis)
.await
.map_err(DatabaseError::CacheError)?;
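Besides the rustfmt churn, this file carries one API-shape change: query_async::<_, T>(&mut redis) becomes query_async::<T>(&mut redis), and reply-less pipelines spell it out as query_async::<()>. That matches a newer redis crate in which the command is generic only over the return type. A hypothetical helper mirroring the new call shape, under that assumption:

use redis::{aio::MultiplexedConnection, cmd, RedisResult};

async fn fetch_view_counts(
    conn: &mut MultiplexedConnection,
    keys: &[String],
) -> RedisResult<Vec<Option<u32>>> {
    // MGET returns one Option per key; missing keys come back as None.
    cmd("MGET")
        .arg(keys)
        .query_async::<Vec<Option<u32>>>(conn)
        .await
}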

View File

@ -46,11 +46,13 @@ impl MaxMindIndexer {
if let Ok(entries) = archive.entries() {
for mut file in entries.flatten() {
if let Ok(path) = file.header().path() {
if path.extension().and_then(|x| x.to_str()) == Some("mmdb") {
if path.extension().and_then(|x| x.to_str()) == Some("mmdb")
{
let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap();
let reader = maxminddb::Reader::from_source(buf).unwrap();
let reader =
maxminddb::Reader::from_source(buf).unwrap();
return Ok(Some(reader));
}
@ -71,10 +73,9 @@ impl MaxMindIndexer {
let maxmind = self.reader.read().await;
if let Some(ref maxmind) = *maxmind {
maxmind
.lookup::<Country>(ip.into())
.ok()
.and_then(|x| x.country.and_then(|x| x.iso_code.map(|x| x.to_string())))
maxmind.lookup::<Country>(ip.into()).ok().and_then(|x| {
x.country.and_then(|x| x.iso_code.map(|x| x.to_string()))
})
} else {
None
}

View File

@ -128,10 +128,14 @@ impl ModerationMessage {
pub fn header(&self) -> &'static str {
match self {
ModerationMessage::NoPrimaryFile => "No primary files",
ModerationMessage::PackFilesNotAllowed { .. } => "Copyrighted Content",
ModerationMessage::PackFilesNotAllowed { .. } => {
"Copyrighted Content"
}
ModerationMessage::MissingGalleryImage => "Missing Gallery Images",
ModerationMessage::MissingLicense => "Missing License",
ModerationMessage::MissingCustomLicenseUrl { .. } => "Missing License URL",
ModerationMessage::MissingCustomLicenseUrl { .. } => {
"Missing License URL"
}
ModerationMessage::NoSideTypes => "Missing Environment Information",
}
}
@ -806,7 +810,9 @@ impl ApprovalType {
pub fn from_string(string: &str) -> Option<Self> {
match string {
"yes" => Some(ApprovalType::Yes),
"with-attribution-and-source" => Some(ApprovalType::WithAttributionAndSource),
"with-attribution-and-source" => {
Some(ApprovalType::WithAttributionAndSource)
}
"with-attribution" => Some(ApprovalType::WithAttribution),
"no" => Some(ApprovalType::No),
"permanent-no" => Some(ApprovalType::PermanentNo),
@ -818,7 +824,9 @@ impl ApprovalType {
pub(crate) fn as_str(&self) -> &'static str {
match self {
ApprovalType::Yes => "yes",
ApprovalType::WithAttributionAndSource => "with-attribution-and-source",
ApprovalType::WithAttributionAndSource => {
"with-attribution-and-source"
}
ApprovalType::WithAttribution => "with-attribution",
ApprovalType::No => "no",
ApprovalType::PermanentNo => "permanent-no",

View File

@ -1,5 +1,6 @@
use crate::models::payouts::{
PayoutDecimal, PayoutInterval, PayoutMethod, PayoutMethodFee, PayoutMethodType,
PayoutDecimal, PayoutInterval, PayoutMethod, PayoutMethodFee,
PayoutMethodType,
};
use crate::models::projects::MonetizationStatus;
use crate::routes::ApiError;
@ -81,12 +82,17 @@ impl PayoutsQueue {
.form(&form)
.send()
.await
.map_err(|_| ApiError::Payments("Error while authenticating with PayPal".to_string()))?
.map_err(|_| {
ApiError::Payments(
"Error while authenticating with PayPal".to_string(),
)
})?
.json()
.await
.map_err(|_| {
ApiError::Payments(
"Error while authenticating with PayPal (deser error)".to_string(),
"Error while authenticating with PayPal (deser error)"
.to_string(),
)
})?;
@ -114,7 +120,9 @@ impl PayoutsQueue {
if credentials.expires < Utc::now() {
drop(read);
self.refresh_token().await.map_err(|_| {
ApiError::Payments("Error while authenticating with PayPal".to_string())
ApiError::Payments(
"Error while authenticating with PayPal".to_string(),
)
})?
} else {
credentials.clone()
@ -122,7 +130,9 @@ impl PayoutsQueue {
} else {
drop(read);
self.refresh_token().await.map_err(|_| {
ApiError::Payments("Error while authenticating with PayPal".to_string())
ApiError::Payments(
"Error while authenticating with PayPal".to_string(),
)
})?
};
@ -138,7 +148,10 @@ impl PayoutsQueue {
)
.header(
"Authorization",
format!("{} {}", credentials.token_type, credentials.access_token),
format!(
"{} {}",
credentials.token_type, credentials.access_token
),
);
if let Some(body) = body {
@ -149,15 +162,16 @@ impl PayoutsQueue {
.body(body);
}
let resp = request
.send()
.await
.map_err(|_| ApiError::Payments("could not communicate with PayPal".to_string()))?;
let resp = request.send().await.map_err(|_| {
ApiError::Payments("could not communicate with PayPal".to_string())
})?;
let status = resp.status();
let value = resp.json::<Value>().await.map_err(|_| {
ApiError::Payments("could not retrieve PayPal response body".to_string())
ApiError::Payments(
"could not retrieve PayPal response body".to_string(),
)
})?;
if !status.is_success() {
@ -173,14 +187,18 @@ impl PayoutsQueue {
pub error_description: String,
}
if let Ok(error) = serde_json::from_value::<PayPalError>(value.clone()) {
if let Ok(error) =
serde_json::from_value::<PayPalError>(value.clone())
{
return Err(ApiError::Payments(format!(
"error name: {}, message: {}",
error.name, error.message
)));
}
if let Ok(error) = serde_json::from_value::<PayPalIdentityError>(value) {
if let Ok(error) =
serde_json::from_value::<PayPalIdentityError>(value)
{
return Err(ApiError::Payments(format!(
"error name: {}, message: {}",
error.error, error.error_description
@ -216,15 +234,18 @@ impl PayoutsQueue {
request = request.json(&body);
}
let resp = request
.send()
.await
.map_err(|_| ApiError::Payments("could not communicate with Tremendous".to_string()))?;
let resp = request.send().await.map_err(|_| {
ApiError::Payments(
"could not communicate with Tremendous".to_string(),
)
})?;
let status = resp.status();
let value = resp.json::<Value>().await.map_err(|_| {
ApiError::Payments("could not retrieve Tremendous response body".to_string())
ApiError::Payments(
"could not retrieve Tremendous response body".to_string(),
)
})?;
if !status.is_success() {
@ -235,12 +256,15 @@ impl PayoutsQueue {
message: String,
}
let err =
serde_json::from_value::<TremendousError>(array.clone()).map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body".to_string(),
)
})?;
let err = serde_json::from_value::<TremendousError>(
array.clone(),
)
.map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body"
.to_string(),
)
})?;
return Err(ApiError::Payments(err.message));
}
@ -254,8 +278,12 @@ impl PayoutsQueue {
Ok(serde_json::from_value(value)?)
}
pub async fn get_payout_methods(&self) -> Result<Vec<PayoutMethod>, ApiError> {
async fn refresh_payout_methods(queue: &PayoutsQueue) -> Result<PayoutMethods, ApiError> {
pub async fn get_payout_methods(
&self,
) -> Result<Vec<PayoutMethod>, ApiError> {
async fn refresh_payout_methods(
queue: &PayoutsQueue,
) -> Result<PayoutMethods, ApiError> {
let mut options = queue.payout_options.write().await;
let mut methods = Vec::new();
@ -304,7 +332,11 @@ impl PayoutsQueue {
}
let response = queue
.make_tremendous_request::<(), TremendousResponse>(Method::GET, "products", None)
.make_tremendous_request::<(), TremendousResponse>(
Method::GET,
"products",
None,
)
.await?;
for product in response.products {
@ -361,7 +393,11 @@ impl PayoutsQueue {
id: product.id,
type_: PayoutMethodType::Tremendous,
name: product.name.clone(),
supported_countries: product.countries.into_iter().map(|x| x.abbr).collect(),
supported_countries: product
.countries
.into_iter()
.map(|x| x.abbr)
.collect(),
image_url: product
.images
.into_iter()
@ -412,7 +448,8 @@ impl PayoutsQueue {
methods.push(method);
}
const UPRANK_IDS: &[&str] = &["ET0ZVETV5ILN", "Q24BD9EZ332JT", "UIL1ZYJU5MKN"];
const UPRANK_IDS: &[&str] =
&["ET0ZVETV5ILN", "Q24BD9EZ332JT", "UIL1ZYJU5MKN"];
const DOWNRANK_IDS: &[&str] = &["EIPF8Q00EMM1", "OU2MWXYWPNWQ"];
methods.sort_by(|a, b| {
@ -558,7 +595,10 @@ pub async fn make_aditude_request(
Ok(json)
}
pub async fn process_payout(pool: &PgPool, client: &clickhouse::Client) -> Result<(), ApiError> {
pub async fn process_payout(
pool: &PgPool,
client: &clickhouse::Client,
) -> Result<(), ApiError> {
let start: DateTime<Utc> = DateTime::from_naive_utc_and_offset(
(Utc::now() - Duration::days(1))
.date_naive()
@ -750,8 +790,12 @@ pub async fn process_payout(pool: &PgPool, client: &clickhouse::Client) -> Resul
);
}
let aditude_res =
make_aditude_request(&["METRIC_IMPRESSIONS", "METRIC_REVENUE"], "Yesterday", "1d").await?;
let aditude_res = make_aditude_request(
&["METRIC_IMPRESSIONS", "METRIC_REVENUE"],
"Yesterday",
"1d",
)
.await?;
let aditude_amount: Decimal = aditude_res
.iter()
@ -777,8 +821,9 @@ pub async fn process_payout(pool: &PgPool, client: &clickhouse::Client) -> Resul
// Clean.io fee (ad antimalware). Per 1000 impressions.
let clean_io_fee = Decimal::from(8) / Decimal::from(1000);
let net_revenue =
aditude_amount - (clean_io_fee * Decimal::from(aditude_impressions) / Decimal::from(1000));
let net_revenue = aditude_amount
- (clean_io_fee * Decimal::from(aditude_impressions)
/ Decimal::from(1000));
let payout = net_revenue * (Decimal::from(1) - modrinth_cut);
@ -811,11 +856,13 @@ pub async fn process_payout(pool: &PgPool, client: &clickhouse::Client) -> Resul
let project_multiplier: Decimal =
Decimal::from(**value) / Decimal::from(multipliers.sum);
let sum_splits: Decimal = project.team_members.iter().map(|x| x.1).sum();
let sum_splits: Decimal =
project.team_members.iter().map(|x| x.1).sum();
if sum_splits > Decimal::ZERO {
for (user_id, split) in project.team_members {
let payout: Decimal = payout * project_multiplier * (split / sum_splits);
let payout: Decimal =
payout * project_multiplier * (split / sum_splits);
if payout > Decimal::ZERO {
insert_user_ids.push(user_id);

View File

@ -1,6 +1,8 @@
use crate::database::models::pat_item::PersonalAccessToken;
use crate::database::models::session_item::Session;
use crate::database::models::{DatabaseError, OAuthAccessTokenId, PatId, SessionId, UserId};
use crate::database::models::{
DatabaseError, OAuthAccessTokenId, PatId, SessionId, UserId,
};
use crate::database::redis::RedisPool;
use crate::routes::internal::session::SessionMetadata;
use chrono::Utc;
@ -38,7 +40,10 @@ impl AuthQueue {
self.pat_queue.lock().await.insert(id);
}
pub async fn add_oauth_access_token(&self, id: crate::database::models::OAuthAccessTokenId) {
pub async fn add_oauth_access_token(
&self,
id: crate::database::models::OAuthAccessTokenId,
) {
self.oauth_access_token_queue.lock().await.insert(id);
}
@ -56,10 +61,15 @@ impl AuthQueue {
std::mem::replace(&mut queue, HashSet::with_capacity(len))
}
pub async fn index(&self, pool: &PgPool, redis: &RedisPool) -> Result<(), DatabaseError> {
pub async fn index(
&self,
pool: &PgPool,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let session_queue = self.take_sessions().await;
let pat_queue = Self::take_hashset(&self.pat_queue).await;
let oauth_access_token_queue = Self::take_hashset(&self.oauth_access_token_queue).await;
let oauth_access_token_queue =
Self::take_hashset(&self.oauth_access_token_queue).await;
if !session_queue.is_empty()
|| !pat_queue.is_empty()
@ -104,7 +114,11 @@ impl AuthQueue {
.await?;
for (id, session, user_id) in expired_ids {
clear_cache_sessions.push((Some(id), Some(session), Some(user_id)));
clear_cache_sessions.push((
Some(id),
Some(session),
Some(user_id),
));
Session::remove(id, &mut transaction).await?;
}
@ -128,7 +142,11 @@ impl AuthQueue {
.execute(&mut *transaction)
.await?;
update_oauth_access_token_last_used(oauth_access_token_queue, &mut transaction).await?;
update_oauth_access_token_last_used(
oauth_access_token_queue,
&mut transaction,
)
.await?;
transaction.commit().await?;
PersonalAccessToken::clear_cache(clear_cache_pats, redis).await?;
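For context on take_hashset above: the queue is drained by swapping the set out from under the tokio mutex, so the lock is held only for the swap and the flush work runs on the drained copy. A minimal sketch of that pattern:

use std::collections::HashSet;
use tokio::sync::Mutex;

async fn drain<T>(queue: &Mutex<HashSet<T>>) -> HashSet<T> {
    let mut guard = queue.lock().await;
    let len = guard.len();
    // Swap in an empty set with a matching capacity hint and return the old one.
    std::mem::replace(&mut *guard, HashSet::with_capacity(len))
}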

View File

@ -65,19 +65,22 @@ pub async fn page_view_ingest(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
.await
.ok();
let user =
get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
.await
.ok();
let conn_info = req.connection_info().peer_addr().map(|x| x.to_string());
let url = Url::parse(&url_input.url)
.map_err(|_| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?;
let url = Url::parse(&url_input.url).map_err(|_| {
ApiError::InvalidInput("invalid page view URL specified!".to_string())
})?;
let domain = url
.host_str()
.ok_or_else(|| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?;
let domain = url.host_str().ok_or_else(|| {
ApiError::InvalidInput("invalid page view URL specified!".to_string())
})?;
let allowed_origins = parse_strings_from_var("CORS_ALLOWED_ORIGINS").unwrap_or_default();
let allowed_origins =
parse_strings_from_var("CORS_ALLOWED_ORIGINS").unwrap_or_default();
if !(domain.ends_with(".modrinth.com")
|| domain == "modrinth.com"
|| allowed_origins.contains(&"*".to_string()))
@ -98,11 +101,13 @@ pub async fn page_view_ingest(
})
.collect::<HashMap<String, String>>();
let ip = convert_to_ip_v6(if let Some(header) = headers.get("cf-connecting-ip") {
header
} else {
conn_info.as_deref().unwrap_or_default()
})
let ip = convert_to_ip_v6(
if let Some(header) = headers.get("cf-connecting-ip") {
header
} else {
conn_info.as_deref().unwrap_or_default()
},
)
.unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());
let mut view = PageView {
@ -135,8 +140,12 @@ pub async fn page_view_ingest(
];
if PROJECT_TYPES.contains(&segments_vec[0]) {
let project =
crate::database::models::Project::get(segments_vec[1], &**pool, &redis).await?;
let project = crate::database::models::Project::get(
segments_vec[1],
&**pool,
&redis,
)
.await?;
if let Some(project) = project {
view.project_id = project.inner.id.0 as u64;
@ -167,7 +176,9 @@ pub async fn playtime_ingest(
req: HttpRequest,
analytics_queue: web::Data<Arc<AnalyticsQueue>>,
session_queue: web::Data<AuthQueue>,
playtime_input: web::Json<HashMap<crate::models::ids::VersionId, PlaytimeInput>>,
playtime_input: web::Json<
HashMap<crate::models::ids::VersionId, PlaytimeInput>,
>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
@ -200,7 +211,8 @@ pub async fn playtime_ingest(
continue;
}
if let Some(version) = versions.iter().find(|x| id == x.inner.id.into()) {
if let Some(version) = versions.iter().find(|x| id == x.inner.id.into())
{
analytics_queue.add_playtime(Playtime {
recorded: get_current_tenths_of_ms(),
seconds: playtime.seconds as u64,

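For context on the ingest handlers above: the client address is taken from the cf-connecting-ip header when present, otherwise from the peer address, and is normalised to an IPv6 form so IPv4 and IPv6 clients share one representation. An illustrative helper (a sketch, not the handler's exact code):

use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};

fn client_ip(cf_header: Option<&str>, peer_addr: Option<&str>) -> Ipv6Addr {
    let raw = cf_header.or(peer_addr).unwrap_or_default();
    match raw.parse::<IpAddr>() {
        Ok(IpAddr::V4(v4)) => v4.to_ipv6_mapped(),
        Ok(IpAddr::V6(v6)) => v6,
        // Fall back to a mapped loopback address, as the handler does on parse errors.
        Err(_) => Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped(),
    }
}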
View File

@ -53,16 +53,25 @@ pub async fn count_download(
.find(|x| x.0.to_lowercase() == "authorization")
.map(|x| &**x.1);
let user = get_user_record_from_bearer_token(&req, token, &**pool, &redis, &session_queue)
.await
.ok()
.flatten();
let user = get_user_record_from_bearer_token(
&req,
token,
&**pool,
&redis,
&session_queue,
)
.await
.ok()
.flatten();
let project_id: crate::database::models::ids::ProjectId = download_body.project_id.into();
let project_id: crate::database::models::ids::ProjectId =
download_body.project_id.into();
let id_option = crate::models::ids::base62_impl::parse_base62(&download_body.version_name)
.ok()
.map(|x| x as i64);
let id_option = crate::models::ids::base62_impl::parse_base62(
&download_body.version_name,
)
.ok()
.map(|x| x as i64);
let (version_id, project_id) = if let Some(version) = sqlx::query!(
"
@ -95,8 +104,9 @@ pub async fn count_download(
));
};
let url = url::Url::parse(&download_body.url)
.map_err(|_| ApiError::InvalidInput("invalid download URL specified!".to_string()))?;
let url = url::Url::parse(&download_body.url).map_err(|_| {
ApiError::InvalidInput("invalid download URL specified!".to_string())
})?;
let ip = crate::routes::analytics::convert_to_ip_v6(&download_body.ip)
.unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());
@ -127,7 +137,10 @@ pub async fn count_download(
.headers
.clone()
.into_iter()
.filter(|x| !crate::routes::analytics::FILTERED_HEADERS.contains(&&*x.0.to_lowercase()))
.filter(|x| {
!crate::routes::analytics::FILTERED_HEADERS
.contains(&&*x.0.to_lowercase())
})
.collect(),
});

File diff suppressed because it is too large

View File

@ -87,7 +87,8 @@ impl TempUser {
}
}
let user_id = crate::database::models::generate_user_id(transaction).await?;
let user_id =
crate::database::models::generate_user_id(transaction).await?;
let mut username_increment: i32 = 0;
let mut username = None;
@ -103,7 +104,12 @@ impl TempUser {
}
);
let new_id = crate::database::models::User::get(&test_username, client, redis).await?;
let new_id = crate::database::models::User::get(
&test_username,
client,
redis,
)
.await?;
if new_id.is_none() {
username = Some(test_username);
@ -112,71 +118,74 @@ impl TempUser {
}
}
let (avatar_url, raw_avatar_url) = if let Some(avatar_url) = self.avatar_url {
let res = reqwest::get(&avatar_url).await?;
let headers = res.headers().clone();
let (avatar_url, raw_avatar_url) =
if let Some(avatar_url) = self.avatar_url {
let res = reqwest::get(&avatar_url).await?;
let headers = res.headers().clone();
let img_data = if let Some(content_type) = headers
.get(reqwest::header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
{
get_image_ext(content_type)
} else {
avatar_url.rsplit('.').next()
};
let img_data = if let Some(content_type) = headers
.get(reqwest::header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
{
get_image_ext(content_type)
} else {
avatar_url.rsplit('.').next()
};
if let Some(ext) = img_data {
let bytes = res.bytes().await?;
if let Some(ext) = img_data {
let bytes = res.bytes().await?;
let upload_result = upload_image_optimized(
&format!("user/{}", crate::models::users::UserId::from(user_id)),
bytes,
ext,
Some(96),
Some(1.0),
&**file_host,
)
.await;
let upload_result = upload_image_optimized(
&format!(
"user/{}",
crate::models::users::UserId::from(user_id)
),
bytes,
ext,
Some(96),
Some(1.0),
&**file_host,
)
.await;
if let Ok(upload_result) = upload_result {
(Some(upload_result.url), Some(upload_result.raw_url))
if let Ok(upload_result) = upload_result {
(Some(upload_result.url), Some(upload_result.raw_url))
} else {
(None, None)
}
} else {
(None, None)
}
} else {
(None, None)
}
} else {
(None, None)
};
};
if let Some(username) = username {
crate::database::models::User {
id: user_id,
github_id: if provider == AuthProvider::GitHub {
Some(
self.id
.clone()
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
self.id.clone().parse().map_err(|_| {
AuthenticationError::InvalidCredentials
})?,
)
} else {
None
},
discord_id: if provider == AuthProvider::Discord {
Some(
self.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
self.id.parse().map_err(|_| {
AuthenticationError::InvalidCredentials
})?,
)
} else {
None
},
gitlab_id: if provider == AuthProvider::GitLab {
Some(
self.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
self.id.parse().map_err(|_| {
AuthenticationError::InvalidCredentials
})?,
)
} else {
None
@ -188,9 +197,9 @@ impl TempUser {
},
steam_id: if provider == AuthProvider::Steam {
Some(
self.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
self.id.parse().map_err(|_| {
AuthenticationError::InvalidCredentials
})?,
)
} else {
None
@ -236,7 +245,10 @@ impl TempUser {
}
impl AuthProvider {
pub fn get_redirect_url(&self, state: String) -> Result<String, AuthenticationError> {
pub fn get_redirect_url(
&self,
state: String,
) -> Result<String, AuthenticationError> {
let self_addr = dotenvy::var("SELF_ADDR")?;
let raw_redirect_uri = format!("{}/v2/auth/callback", self_addr);
let redirect_uri = urlencoding::encode(&raw_redirect_uri);
@ -316,7 +328,8 @@ impl AuthProvider {
&self,
query: HashMap<String, String>,
) -> Result<String, AuthenticationError> {
let redirect_uri = format!("{}/v2/auth/callback", dotenvy::var("SELF_ADDR")?);
let redirect_uri =
format!("{}/v2/auth/callback", dotenvy::var("SELF_ADDR")?);
#[derive(Deserialize)]
struct AccessToken {
@ -454,22 +467,26 @@ impl AuthProvider {
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
form.insert(
"openid.assoc_handle".to_string(),
&**query
.get("openid.assoc_handle")
.ok_or_else(|| AuthenticationError::InvalidCredentials)?,
&**query.get("openid.assoc_handle").ok_or_else(|| {
AuthenticationError::InvalidCredentials
})?,
);
form.insert("openid.signed".to_string(), &**signed);
form.insert(
"openid.sig".to_string(),
&**query
.get("openid.sig")
.ok_or_else(|| AuthenticationError::InvalidCredentials)?,
&**query.get("openid.sig").ok_or_else(|| {
AuthenticationError::InvalidCredentials
})?,
);
form.insert(
"openid.ns".to_string(),
"http://specs.openid.net/auth/2.0",
);
form.insert("openid.ns".to_string(), "http://specs.openid.net/auth/2.0");
form.insert("openid.mode".to_string(), "check_authentication");
for val in signed.split(',') {
if let Some(arr_val) = query.get(&format!("openid.{}", val)) {
if let Some(arr_val) = query.get(&format!("openid.{}", val))
{
form.insert(format!("openid.{}", val), &**arr_val);
}
}
@ -484,9 +501,10 @@ impl AuthProvider {
.await?;
if res.contains("is_valid:true") {
let identity = query
.get("openid.identity")
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let identity =
query.get("openid.identity").ok_or_else(|| {
AuthenticationError::InvalidCredentials
})?;
identity
.rsplit('/')
@ -533,7 +551,10 @@ impl AuthProvider {
Ok(res)
}
pub async fn get_user(&self, token: &str) -> Result<TempUser, AuthenticationError> {
pub async fn get_user(
&self,
token: &str,
) -> Result<TempUser, AuthenticationError> {
let res = match self {
AuthProvider::GitHub => {
let response = reqwest::Client::new()
@ -549,7 +570,9 @@ impl AuthProvider {
.get("x-oauth-client-id")
.and_then(|x| x.to_str().ok());
if client_id != Some(&*dotenvy::var("GITHUB_CLIENT_ID").unwrap()) {
if client_id
!= Some(&*dotenvy::var("GITHUB_CLIENT_ID").unwrap())
{
return Err(AuthenticationError::InvalidClientId);
}
}
@ -599,9 +622,12 @@ impl AuthProvider {
id: discord_user.id,
username: discord_user.username,
email: discord_user.email,
avatar_url: discord_user
.avatar
.map(|x| format!("https://cdn.discordapp.com/avatars/{}/{}.webp", id, x)),
avatar_url: discord_user.avatar.map(|x| {
format!(
"https://cdn.discordapp.com/avatars/{}/{}.webp",
id, x
)
}),
bio: None,
country: None,
}
@ -727,7 +753,8 @@ impl AuthProvider {
.text()
.await?;
let mut response: SteamResponse = serde_json::from_str(&response)?;
let mut response: SteamResponse =
serde_json::from_str(&response)?;
if let Some(player) = response.response.players.pop() {
let username = player
@ -827,9 +854,12 @@ impl AuthProvider {
value.map(|x| crate::database::models::UserId(x.id))
}
AuthProvider::Microsoft => {
let value = sqlx::query!("SELECT id FROM users WHERE microsoft_id = $1", id)
.fetch_optional(executor)
.await?;
let value = sqlx::query!(
"SELECT id FROM users WHERE microsoft_id = $1",
id
)
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
}
@ -845,9 +875,12 @@ impl AuthProvider {
value.map(|x| crate::database::models::UserId(x.id))
}
AuthProvider::Google => {
let value = sqlx::query!("SELECT id FROM users WHERE google_id = $1", id)
.fetch_optional(executor)
.await?;
let value = sqlx::query!(
"SELECT id FROM users WHERE google_id = $1",
id
)
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
}
@ -863,9 +896,12 @@ impl AuthProvider {
value.map(|x| crate::database::models::UserId(x.id))
}
AuthProvider::PayPal => {
let value = sqlx::query!("SELECT id FROM users WHERE paypal_id = $1", id)
.fetch_optional(executor)
.await?;
let value = sqlx::query!(
"SELECT id FROM users WHERE paypal_id = $1",
id
)
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
}
@ -1024,11 +1060,15 @@ pub async fn init(
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, AuthenticationError> {
let url = url::Url::parse(&info.url).map_err(|_| AuthenticationError::Url)?;
let url =
url::Url::parse(&info.url).map_err(|_| AuthenticationError::Url)?;
let allowed_callback_urls = parse_strings_from_var("ALLOWED_CALLBACK_URLS").unwrap_or_default();
let allowed_callback_urls =
parse_strings_from_var("ALLOWED_CALLBACK_URLS").unwrap_or_default();
let domain = url.host_str().ok_or(AuthenticationError::Url)?;
if !allowed_callback_urls.iter().any(|x| domain.ends_with(x)) && domain != "modrinth.com" {
if !allowed_callback_urls.iter().any(|x| domain.ends_with(x))
&& domain != "modrinth.com"
{
return Err(AuthenticationError::Url);
}
@ -1381,7 +1421,11 @@ pub async fn delete_auth_provider(
}
transaction.commit().await?;
crate::database::models::User::clear_caches(&[(user.id.into(), None)], &redis).await?;
crate::database::models::User::clear_caches(
&[(user.id.into(), None)],
&redis,
)
.await?;
Ok(HttpResponse::NoContent().finish())
}
@ -1431,24 +1475,28 @@ pub async fn create_account_with_password(
redis: Data<RedisPool>,
new_account: web::Json<NewAccount>,
) -> Result<HttpResponse, ApiError> {
new_account
.0
.validate()
.map_err(|err| ApiError::InvalidInput(validation_errors_to_string(err, None)))?;
new_account.0.validate().map_err(|err| {
ApiError::InvalidInput(validation_errors_to_string(err, None))
})?;
if !check_turnstile_captcha(&req, &new_account.challenge).await? {
return Err(ApiError::Turnstile);
}
if crate::database::models::User::get(&new_account.username, &**pool, &redis)
.await?
.is_some()
if crate::database::models::User::get(
&new_account.username,
&**pool,
&redis,
)
.await?
.is_some()
{
return Err(ApiError::InvalidInput("Username is taken!".to_string()));
}
let mut transaction = pool.begin().await?;
let user_id = crate::database::models::generate_user_id(&mut transaction).await?;
let user_id =
crate::database::models::generate_user_id(&mut transaction).await?;
let new_account = new_account.0;
@ -1459,10 +1507,13 @@ pub async fn create_account_with_password(
if score.score() < 3 {
return Err(ApiError::InvalidInput(
if let Some(feedback) = score.feedback().clone().and_then(|x| x.warning()) {
if let Some(feedback) =
score.feedback().clone().and_then(|x| x.warning())
{
format!("Password too weak: {}", feedback)
} else {
"Specified password is too weak! Please improve its strength.".to_string()
"Specified password is too weak! Please improve its strength."
.to_string()
},
));
}
@ -1554,13 +1605,15 @@ pub async fn login_password(
}
let user = if let Some(user) =
crate::database::models::User::get(&login.username, &**pool, &redis).await?
crate::database::models::User::get(&login.username, &**pool, &redis)
.await?
{
user
} else {
let user = crate::database::models::User::get_email(&login.username, &**pool)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let user =
crate::database::models::User::get_email(&login.username, &**pool)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
crate::database::models::User::get_id(user, &**pool, &redis)
.await?
@ -1591,7 +1644,8 @@ pub async fn login_password(
})))
} else {
let mut transaction = pool.begin().await?;
let session = issue_session(req, user.id, &mut transaction, &redis).await?;
let session =
issue_session(req, user.id, &mut transaction, &redis).await?;
let res = crate::models::sessions::Session::from(session, true, None);
transaction.commit().await?;
@ -1651,7 +1705,9 @@ async fn validate_2fa_code(
Ok(true)
} else if allow_backup {
let backup_codes = crate::database::models::User::get_backup_codes(user_id, pool).await?;
let backup_codes =
crate::database::models::User::get_backup_codes(user_id, pool)
.await?;
if !backup_codes.contains(&input) {
Ok(false)
@ -1669,7 +1725,11 @@ async fn validate_2fa_code(
.execute(&mut **transaction)
.await?;
crate::database::models::User::clear_caches(&[(user_id, None)], redis).await?;
crate::database::models::User::clear_caches(
&[(user_id, None)],
redis,
)
.await?;
Ok(true)
}
@ -1690,9 +1750,10 @@ pub async fn login_2fa(
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if let Flow::Login2FA { user_id } = flow {
let user = crate::database::models::User::get_id(user_id, &**pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let user =
crate::database::models::User::get_id(user_id, &**pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let mut transaction = pool.begin().await?;
if !validate_2fa_code(
@ -1713,7 +1774,8 @@ pub async fn login_2fa(
}
Flow::remove(&login.flow, &redis).await?;
let session = issue_session(req, user_id, &mut transaction, &redis).await?;
let session =
issue_session(req, user_id, &mut transaction, &redis).await?;
let res = crate::models::sessions::Session::from(session, true, None);
transaction.commit().await?;
@ -1870,7 +1932,11 @@ pub async fn finish_2fa_flow(
}
transaction.commit().await?;
crate::database::models::User::clear_caches(&[(user.id.into(), None)], &redis).await?;
crate::database::models::User::clear_caches(
&[(user.id.into(), None)],
&redis,
)
.await?;
Ok(HttpResponse::Ok().json(serde_json::json!({
"backup_codes": codes,
@ -1895,10 +1961,15 @@ pub async fn remove_2fa(
login: web::Json<Remove2FA>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let (scopes, user) =
get_user_record_from_bearer_token(&req, None, &**pool, &redis, &session_queue)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let (scopes, user) = get_user_record_from_bearer_token(
&req,
None,
&**pool,
&redis,
&session_queue,
)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if !scopes.contains(Scopes::USER_AUTH_WRITE) {
return Err(ApiError::Authentication(
@ -1911,7 +1982,9 @@ pub async fn remove_2fa(
if !validate_2fa_code(
login.code.clone(),
user.totp_secret.ok_or_else(|| {
ApiError::InvalidInput("User does not have 2FA enabled on the account!".to_string())
ApiError::InvalidInput(
"User does not have 2FA enabled on the account!".to_string(),
)
})?,
true,
user.id,
@ -1958,7 +2031,8 @@ pub async fn remove_2fa(
}
transaction.commit().await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis).await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis)
.await?;
Ok(HttpResponse::NoContent().finish())
}
@ -1980,12 +2054,20 @@ pub async fn reset_password_begin(
return Err(ApiError::Turnstile);
}
let user = if let Some(user_id) =
crate::database::models::User::get_email(&reset_password.username, &**pool).await?
let user = if let Some(user_id) = crate::database::models::User::get_email(
&reset_password.username,
&**pool,
)
.await?
{
crate::database::models::User::get_id(user_id, &**pool, &redis).await?
} else {
crate::database::models::User::get(&reset_password.username, &**pool, &redis).await?
crate::database::models::User::get(
&reset_password.username,
&**pool,
&redis,
)
.await?
};
if let Some(user) = user {
@ -2026,9 +2108,10 @@ pub async fn change_password(
let flow = Flow::get(flow, &redis).await?;
if let Some(Flow::ForgotPassword { user_id }) = flow {
let user = crate::database::models::User::get_id(user_id, &**pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let user =
crate::database::models::User::get_id(user_id, &**pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
Some(user)
} else {
@ -2041,10 +2124,15 @@ pub async fn change_password(
let user = if let Some(user) = user {
user
} else {
let (scopes, user) =
get_user_record_from_bearer_token(&req, None, &**pool, &redis, &session_queue)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let (scopes, user) = get_user_record_from_bearer_token(
&req,
None,
&**pool,
&redis,
&session_queue,
)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if !scopes.contains(Scopes::USER_AUTH_WRITE) {
return Err(ApiError::Authentication(
@ -2060,7 +2148,10 @@ pub async fn change_password(
})?;
let hasher = Argon2::default();
hasher.verify_password(old_password.as_bytes(), &PasswordHash::new(pass)?)?;
hasher.verify_password(
old_password.as_bytes(),
&PasswordHash::new(pass)?,
)?;
}
user
@ -2068,7 +2159,9 @@ pub async fn change_password(
let mut transaction = pool.begin().await?;
let update_password = if let Some(new_password) = &change_password.new_password {
let update_password = if let Some(new_password) =
&change_password.new_password
{
let score = zxcvbn::zxcvbn(
new_password,
&[&user.username, &user.email.clone().unwrap_or_default()],
@ -2076,7 +2169,9 @@ pub async fn change_password(
if score.score() < 3 {
return Err(ApiError::InvalidInput(
if let Some(feedback) = score.feedback().clone().and_then(|x| x.warning()) {
if let Some(feedback) =
score.feedback().clone().and_then(|x| x.warning())
{
format!("Password too weak: {}", feedback)
} else {
"Specified password is too weak! Please improve its strength.".to_string()
@ -2140,7 +2235,8 @@ pub async fn change_password(
}
transaction.commit().await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis).await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis)
.await?;
Ok(HttpResponse::Ok().finish())
}
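The change_password hunks above keep the same verification flow: the stored Argon2 hash (a PHC-format string) is parsed and the old password is checked against it before any new hash is written. A minimal sketch of that step using the argon2 crate's password-hash traits:

use argon2::{Argon2, PasswordHash, PasswordVerifier};

fn verify_old_password(
    old_password: &str,
    stored_hash: &str,
) -> Result<(), argon2::password_hash::Error> {
    let parsed = PasswordHash::new(stored_hash)?;
    Argon2::default().verify_password(old_password.as_bytes(), &parsed)
}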
@ -2160,10 +2256,9 @@ pub async fn set_email(
session_queue: Data<AuthQueue>,
stripe_client: Data<stripe::Client>,
) -> Result<HttpResponse, ApiError> {
email
.0
.validate()
.map_err(|err| ApiError::InvalidInput(validation_errors_to_string(err, None)))?;
email.0.validate().map_err(|err| {
ApiError::InvalidInput(validation_errors_to_string(err, None))
})?;
let user = get_user_from_headers(
&req,
@ -2229,7 +2324,11 @@ pub async fn set_email(
)?;
transaction.commit().await?;
crate::database::models::User::clear_caches(&[(user.id.into(), None)], &redis).await?;
crate::database::models::User::clear_caches(
&[(user.id.into(), None)],
&redis,
)
.await?;
Ok(HttpResponse::Ok().finish())
}
@ -2265,7 +2364,11 @@ pub async fn resend_verify_email(
.insert(Duration::hours(24), &redis)
.await?;
send_email_verify(email, flow, "We need to verify your email address.")?;
send_email_verify(
email,
flow,
"We need to verify your email address.",
)?;
Ok(HttpResponse::NoContent().finish())
} else {
@ -2293,9 +2396,10 @@ pub async fn verify_email(
confirm_email,
}) = flow
{
let user = crate::database::models::User::get_id(user_id, &**pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let user =
crate::database::models::User::get_id(user_id, &**pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if user.email != Some(confirm_email) {
return Err(ApiError::InvalidInput(
@ -2319,12 +2423,14 @@ pub async fn verify_email(
Flow::remove(&email.flow, &redis).await?;
transaction.commit().await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis).await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis)
.await?;
Ok(HttpResponse::NoContent().finish())
} else {
Err(ApiError::InvalidInput(
"Flow does not exist. Try re-requesting the verification link.".to_string(),
"Flow does not exist. Try re-requesting the verification link."
.to_string(),
))
}
}

View File

@ -29,13 +29,18 @@ pub async fn export(
let user_id = user.id.into();
let collection_ids = crate::database::models::User::get_collections(user_id, &**pool).await?;
let collections =
crate::database::models::Collection::get_many(&collection_ids, &**pool, &redis)
.await?
.into_iter()
.map(crate::models::collections::Collection::from)
.collect::<Vec<_>>();
let collection_ids =
crate::database::models::User::get_collections(user_id, &**pool)
.await?;
let collections = crate::database::models::Collection::get_many(
&collection_ids,
&**pool,
&redis,
)
.await?
.into_iter()
.map(crate::models::collections::Collection::from)
.collect::<Vec<_>>();
let follows = crate::database::models::User::get_follows(user_id, &**pool)
.await?
@ -43,22 +48,26 @@ pub async fn export(
.map(crate::models::ids::ProjectId::from)
.collect::<Vec<_>>();
let projects = crate::database::models::User::get_projects(user_id, &**pool, &redis)
let projects =
crate::database::models::User::get_projects(user_id, &**pool, &redis)
.await?
.into_iter()
.map(crate::models::ids::ProjectId::from)
.collect::<Vec<_>>();
let org_ids =
crate::database::models::User::get_organizations(user_id, &**pool)
.await?;
let orgs =
crate::database::models::organization_item::Organization::get_many_ids(
&org_ids, &**pool, &redis,
)
.await?
.into_iter()
.map(crate::models::ids::ProjectId::from)
// TODO: add team members
.map(|x| crate::models::organizations::Organization::from(x, vec![]))
.collect::<Vec<_>>();
let org_ids = crate::database::models::User::get_organizations(user_id, &**pool).await?;
let orgs = crate::database::models::organization_item::Organization::get_many_ids(
&org_ids, &**pool, &redis,
)
.await?
.into_iter()
// TODO: add team members
.map(|x| crate::models::organizations::Organization::from(x, vec![]))
.collect::<Vec<_>>();
let notifs = crate::database::models::notification_item::Notification::get_many_user(
user_id, &**pool, &redis,
)
@ -84,34 +93,46 @@ pub async fn export(
.map(crate::models::oauth_clients::OAuthClientAuthorization::from)
.collect::<Vec<_>>();
let pat_ids = crate::database::models::pat_item::PersonalAccessToken::get_user_pats(
user_id, &**pool, &redis,
)
.await?;
let pats = crate::database::models::pat_item::PersonalAccessToken::get_many_ids(
&pat_ids, &**pool, &redis,
let pat_ids =
crate::database::models::pat_item::PersonalAccessToken::get_user_pats(
user_id, &**pool, &redis,
)
.await?;
let pats =
crate::database::models::pat_item::PersonalAccessToken::get_many_ids(
&pat_ids, &**pool, &redis,
)
.await?
.into_iter()
.map(|x| crate::models::pats::PersonalAccessToken::from(x, false))
.collect::<Vec<_>>();
let payout_ids =
crate::database::models::payout_item::Payout::get_all_for_user(
user_id, &**pool,
)
.await?;
let payouts = crate::database::models::payout_item::Payout::get_many(
&payout_ids,
&**pool,
)
.await?
.into_iter()
.map(|x| crate::models::pats::PersonalAccessToken::from(x, false))
.map(crate::models::payouts::Payout::from)
.collect::<Vec<_>>();
let payout_ids =
crate::database::models::payout_item::Payout::get_all_for_user(user_id, &**pool).await?;
let payouts = crate::database::models::payout_item::Payout::get_many(&payout_ids, &**pool)
.await?
.into_iter()
.map(crate::models::payouts::Payout::from)
.collect::<Vec<_>>();
let report_ids =
crate::database::models::user_item::User::get_reports(user_id, &**pool).await?;
let reports = crate::database::models::report_item::Report::get_many(&report_ids, &**pool)
.await?
.into_iter()
.map(crate::models::reports::Report::from)
.collect::<Vec<_>>();
crate::database::models::user_item::User::get_reports(user_id, &**pool)
.await?;
let reports = crate::database::models::report_item::Report::get_many(
&report_ids,
&**pool,
)
.await?
.into_iter()
.map(crate::models::reports::Report::from)
.collect::<Vec<_>>();
let message_ids = sqlx::query!(
"
@ -126,11 +147,14 @@ pub async fn export(
.collect::<Vec<_>>();
let messages =
crate::database::models::thread_item::ThreadMessage::get_many(&message_ids, &**pool)
.await?
.into_iter()
.map(|x| crate::models::threads::ThreadMessage::from(x, &user))
.collect::<Vec<_>>();
crate::database::models::thread_item::ThreadMessage::get_many(
&message_ids,
&**pool,
)
.await?
.into_iter()
.map(|x| crate::models::threads::ThreadMessage::from(x, &user))
.collect::<Vec<_>>();
let uploaded_images_ids = sqlx::query!(
"SELECT id FROM uploaded_images WHERE owner_id = $1",
@ -142,12 +166,15 @@ pub async fn export(
.map(|x| crate::database::models::ids::ImageId(x.id))
.collect::<Vec<_>>();
let uploaded_images =
crate::database::models::image_item::Image::get_many(&uploaded_images_ids, &**pool, &redis)
.await?
.into_iter()
.map(crate::models::images::Image::from)
.collect::<Vec<_>>();
let uploaded_images = crate::database::models::image_item::Image::get_many(
&uploaded_images_ids,
&**pool,
&redis,
)
.await?
.into_iter()
.map(crate::models::images::Image::from)
.collect::<Vec<_>>();
let subscriptions =
crate::database::models::user_subscription_item::UserSubscriptionItem::get_all_user(

View File

@ -60,11 +60,12 @@ pub async fn get_projects(
.try_collect::<Vec<database::models::ProjectId>>()
.await?;
let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &**pool, &redis)
.await?
.into_iter()
.map(crate::models::projects::Project::from)
.collect();
let projects: Vec<_> =
database::Project::get_many_ids(&project_ids, &**pool, &redis)
.await?
.into_iter()
.map(crate::models::projects::Project::from)
.collect();
Ok(HttpResponse::Ok().json(projects))
}
@ -86,7 +87,8 @@ pub async fn get_project_meta(
.await?;
let project_id = info.into_inner().0;
let project = database::models::Project::get(&project_id, &**pool, &redis).await?;
let project =
database::models::Project::get(&project_id, &**pool, &redis).await?;
if let Some(project) = project {
let rows = sqlx::query!(
@ -122,7 +124,8 @@ pub async fn get_project_meta(
check_hashes.extend(merged.flame_files.keys().cloned());
check_hashes.extend(merged.unknown_files.keys().cloned());
check_flames.extend(merged.flame_files.values().map(|x| x.id as i32));
check_flames
.extend(merged.flame_files.values().map(|x| x.id as i32));
}
}

View File

@ -44,15 +44,17 @@ pub async fn get_pats(
.await?
.1;
let pat_ids = database::models::pat_item::PersonalAccessToken::get_user_pats(
user.id.into(),
&**pool,
&redis,
let pat_ids =
database::models::pat_item::PersonalAccessToken::get_user_pats(
user.id.into(),
&**pool,
&redis,
)
.await?;
let pats = database::models::pat_item::PersonalAccessToken::get_many_ids(
&pat_ids, &**pool, &redis,
)
.await?;
let pats =
database::models::pat_item::PersonalAccessToken::get_many_ids(&pat_ids, &**pool, &redis)
.await?;
Ok(HttpResponse::Ok().json(
pats.into_iter()
@ -77,9 +79,9 @@ pub async fn create_pat(
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
info.0
.validate()
.map_err(|err| ApiError::InvalidInput(validation_errors_to_string(err, None)))?;
info.0.validate().map_err(|err| {
ApiError::InvalidInput(validation_errors_to_string(err, None))
})?;
if info.scopes.is_restricted() {
return Err(ApiError::InvalidInput(
@ -174,7 +176,10 @@ pub async fn edit_pat(
.1;
let id = id.into_inner().0;
let pat = database::models::pat_item::PersonalAccessToken::get(&id, &**pool, &redis).await?;
let pat = database::models::pat_item::PersonalAccessToken::get(
&id, &**pool, &redis,
)
.await?;
if let Some(pat) = pat {
if pat.user_id == user.id.into() {
@ -262,13 +267,19 @@ pub async fn delete_pat(
.await?
.1;
let id = id.into_inner().0;
let pat = database::models::pat_item::PersonalAccessToken::get(&id, &**pool, &redis).await?;
let pat = database::models::pat_item::PersonalAccessToken::get(
&id, &**pool, &redis,
)
.await?;
if let Some(pat) = pat {
if pat.user_id == user.id.into() {
let mut transaction = pool.begin().await?;
database::models::pat_item::PersonalAccessToken::remove(pat.id, &mut transaction)
.await?;
database::models::pat_item::PersonalAccessToken::remove(
pat.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
database::models::pat_item::PersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],

View File

@ -152,7 +152,9 @@ pub async fn list(
.and_then(|x| x.to_str().ok())
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let session_ids = DBSession::get_user_sessions(current_user.id.into(), &**pool, &redis).await?;
let session_ids =
DBSession::get_user_sessions(current_user.id.into(), &**pool, &redis)
.await?;
let sessions = DBSession::get_many_ids(&session_ids, &**pool, &redis)
.await?
.into_iter()
@ -210,19 +212,24 @@ pub async fn refresh(
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let current_user = get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
.await?
.1;
let current_user =
get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
.await?
.1;
let session = req
.headers()
.get(AUTHORIZATION)
.and_then(|x| x.to_str().ok())
.ok_or_else(|| ApiError::Authentication(AuthenticationError::InvalidCredentials))?;
.ok_or_else(|| {
ApiError::Authentication(AuthenticationError::InvalidCredentials)
})?;
let session = DBSession::get(session, &**pool, &redis).await?;
if let Some(session) = session {
if current_user.id != session.user_id.into() || session.refresh_expires < Utc::now() {
if current_user.id != session.user_id.into()
|| session.refresh_expires < Utc::now()
{
return Err(ApiError::Authentication(
AuthenticationError::InvalidCredentials,
));
@ -231,7 +238,9 @@ pub async fn refresh(
let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?;
let new_session = issue_session(req, session.user_id, &mut transaction, &redis).await?;
let new_session =
issue_session(req, session.user_id, &mut transaction, &redis)
.await?;
transaction.commit().await?;
DBSession::clear_cache(
vec![(

View File

@ -77,7 +77,9 @@ pub async fn maven_metadata(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let project_id = params.into_inner().0;
let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
let Some(project) =
database::models::Project::get(&project_id, &**pool, &redis).await?
else {
return Err(ApiError::NotFound);
};
@ -145,7 +147,11 @@ pub async fn maven_metadata(
versions: Versions {
versions: new_versions,
},
last_updated: project.inner.updated.format("%Y%m%d%H%M%S").to_string(),
last_updated: project
.inner
.updated
.format("%Y%m%d%H%M%S")
.to_string(),
},
};
@ -164,11 +170,16 @@ async fn find_version(
.ok()
.map(|x| x as i64);
let all_versions = database::models::Version::get_many(&project.versions, pool, redis).await?;
let all_versions =
database::models::Version::get_many(&project.versions, pool, redis)
.await?;
let exact_matches = all_versions
.iter()
.filter(|x| &x.inner.version_number == vcoords || Some(x.inner.id.0) == id_option)
.filter(|x| {
&x.inner.version_number == vcoords
|| Some(x.inner.id.0) == id_option
})
.collect::<Vec<_>>();
if exact_matches.len() == 1 {
@ -202,11 +213,10 @@ async fn find_version(
// For maven in particular, we will hardcode it to use GameVersions rather than generic loader fields, as this is minecraft-java exclusive
if !game_versions.is_empty() {
let version_game_versions = x
.version_fields
.clone()
.into_iter()
.find_map(|v| MinecraftGameVersion::try_from_version_field(&v).ok());
let version_game_versions =
x.version_fields.clone().into_iter().find_map(|v| {
MinecraftGameVersion::try_from_version_field(&v).ok()
});
if let Some(version_game_versions) = version_game_versions {
bool &= version_game_versions
.iter()
@ -231,7 +241,9 @@ fn find_file<'a>(
version: &'a QueryVersion,
file: &str,
) -> Option<&'a QueryFile> {
if let Some(selected_file) = version.files.iter().find(|x| x.filename == file) {
if let Some(selected_file) =
version.files.iter().find(|x| x.filename == file)
{
return Some(selected_file);
}
@ -271,7 +283,9 @@ pub async fn version_file(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let (project_id, vnum, file) = params.into_inner();
let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
let Some(project) =
database::models::Project::get(&project_id, &**pool, &redis).await?
else {
return Err(ApiError::NotFound);
};
@ -290,7 +304,8 @@ pub async fn version_file(
return Err(ApiError::NotFound);
}
let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else {
let Some(version) = find_version(&project, &vnum, &pool, &redis).await?
else {
return Err(ApiError::NotFound);
};
@ -314,7 +329,9 @@ pub async fn version_file(
return Ok(HttpResponse::Ok()
.content_type("text/xml")
.body(yaserde::ser::to_string(&respdata).map_err(ApiError::Xml)?));
} else if let Some(selected_file) = find_file(&project_id, &vnum, &version, &file) {
} else if let Some(selected_file) =
find_file(&project_id, &vnum, &version, &file)
{
return Ok(HttpResponse::TemporaryRedirect()
.append_header(("location", &*selected_file.url))
.body(""));
@ -332,7 +349,9 @@ pub async fn version_file_sha1(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let (project_id, vnum, file) = params.into_inner();
let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
let Some(project) =
database::models::Project::get(&project_id, &**pool, &redis).await?
else {
return Err(ApiError::NotFound);
};
@ -351,7 +370,8 @@ pub async fn version_file_sha1(
return Err(ApiError::NotFound);
}
let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else {
let Some(version) = find_version(&project, &vnum, &pool, &redis).await?
else {
return Err(ApiError::NotFound);
};
@ -374,7 +394,9 @@ pub async fn version_file_sha512(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let (project_id, vnum, file) = params.into_inner();
let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
let Some(project) =
database::models::Project::get(&project_id, &**pool, &redis).await?
else {
return Err(ApiError::NotFound);
};
@ -393,7 +415,8 @@ pub async fn version_file_sha512(
return Err(ApiError::NotFound);
}
let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else {
let Some(version) = find_version(&project, &vnum, &pool, &redis).await?
else {
return Err(ApiError::NotFound);
};
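Side note on the Maven stamp built above: the handler formats project.inner.updated with chrono's "%Y%m%d%H%M%S", the compact yyyyMMddHHmmss form used for lastUpdated in maven-metadata.xml. A minimal editor's sketch (not part of this commit, timestamp invented) of what that formatting produces:

use chrono::{TimeZone, Utc};

fn main() {
    // Hypothetical updated-at instant standing in for project.inner.updated.
    let updated = Utc.with_ymd_and_hms(2024, 10, 18, 16, 7, 35).unwrap();
    // "%Y%m%d%H%M%S" yields the compact form emitted into maven-metadata.xml.
    assert_eq!(updated.format("%Y%m%d%H%M%S").to_string(), "20241018160735");
}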

View File

@ -39,11 +39,16 @@ pub fn root_config(cfg: &mut web::ServiceConfig) {
Cors::default()
.allowed_origin_fn(|origin, _req_head| {
let allowed_origins =
parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").unwrap_or_default();
parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS")
.unwrap_or_default();
allowed_origins.contains(&"*".to_string())
|| allowed_origins
.contains(&origin.to_str().unwrap_or_default().to_string())
|| allowed_origins.contains(
&origin
.to_str()
.unwrap_or_default()
.to_string(),
)
})
.allowed_methods(vec!["GET", "POST"])
.allowed_headers(vec![

View File

@ -61,7 +61,9 @@ pub async fn forge_updates(
return Err(ApiError::InvalidInput(ERROR.to_string()));
}
let versions = database::models::Version::get_many(&project.versions, &**pool, &redis).await?;
let versions =
database::models::Version::get_many(&project.versions, &**pool, &redis)
.await?;
let loaders = match &*neo.neoforge {
"only" => |x: &String| *x == "neoforge",
@ -105,7 +107,9 @@ pub async fn forge_updates(
.fields
.iter()
.find(|(key, _)| key.as_str() == MinecraftGameVersion::FIELD_NAME)
.and_then(|(_, value)| serde_json::from_value::<Vec<String>>(value.clone()).ok())
.and_then(|(_, value)| {
serde_json::from_value::<Vec<String>>(value.clone()).ok()
})
.unwrap_or_default();
if version.version_type == VersionType::Release {

View File

@ -43,7 +43,8 @@ pub async fn get_projects(
// Convert to V2 projects
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
}
Err(response) => Ok(response),

View File

@ -65,9 +65,15 @@ pub async fn notification_get(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::notifications::notification_get(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::notifications::notification_get(
req,
info,
pool,
redis,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
match v2_reroute::extract_ok_json::<Notification>(response).await {
Ok(notification) => {
let notification = LegacyNotification::from(notification);
@ -100,9 +106,15 @@ pub async fn notification_delete(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::notifications::notification_delete(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)
v3::notifications::notification_delete(
req,
info,
pool,
redis,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
#[patch("notifications")]

View File

@ -4,7 +4,9 @@ use crate::file_hosting::FileHost;
use crate::models;
use crate::models::ids::ImageId;
use crate::models::projects::{Loader, Project, ProjectStatus};
use crate::models::v2::projects::{DonationLink, LegacyProject, LegacySideType};
use crate::models::v2::projects::{
DonationLink, LegacyProject, LegacySideType,
};
use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::default_project_type;
use crate::routes::v3::project_creation::{CreateError, NewGalleryItem};
@ -158,13 +160,22 @@ pub async fn project_create(
.into_iter()
.map(|v| {
let mut fields = HashMap::new();
fields.extend(v2_reroute::convert_side_types_v3(client_side, server_side));
fields.insert("game_versions".to_string(), json!(v.game_versions));
fields.extend(v2_reroute::convert_side_types_v3(
client_side,
server_side,
));
fields.insert(
"game_versions".to_string(),
json!(v.game_versions),
);
// Modpacks now use the "mrpack" loader, and loaders are converted to loader fields.
// Setting 'project_type' directly is removed; it's loader-based now.
if project_type == "modpack" {
fields.insert("mrpack_loaders".to_string(), json!(v.loaders));
fields.insert(
"mrpack_loaders".to_string(),
json!(v.loaders),
);
}
let loaders = if project_type == "modpack" {
@ -248,7 +259,10 @@ pub async fn project_create(
match v2_reroute::extract_ok_json::<Project>(response).await {
Ok(project) => {
let version_item = match project.versions.first() {
Some(vid) => version_item::Version::get((*vid).into(), &**client, &redis).await?,
Some(vid) => {
version_item::Version::get((*vid).into(), &**client, &redis)
.await?
}
None => None,
};
let project = LegacyProject::from(project, version_item);
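For reference, a hedged sketch (editor's illustration, values invented) of the loader-field map the modpack branch above builds from a v2 payload. Only the "game_versions" and "mrpack_loaders" keys appear in the hunks; the fact that modpacks themselves get the "mrpack" loader comes from the comment in this file.

use serde_json::json;
use std::collections::HashMap;

fn main() {
    // Assumed example: a v2 modpack version declaring fabric on 1.20.1.
    let v2_loaders = vec!["fabric".to_string()];
    let v2_game_versions = vec!["1.20.1".to_string()];

    let mut fields: HashMap<String, serde_json::Value> = HashMap::new();
    fields.insert("game_versions".to_string(), json!(v2_game_versions));
    // For modpacks, the v2 loader list is folded into the "mrpack_loaders"
    // loader field, and the version itself is assigned the "mrpack" loader.
    fields.insert("mrpack_loaders".to_string(), json!(v2_loaders));

    assert_eq!(fields["mrpack_loaders"], json!(["fabric"]));
}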

View File

@ -5,7 +5,9 @@ use crate::file_hosting::FileHost;
use crate::models::projects::{
Link, MonetizationStatus, Project, ProjectStatus, SearchRequest, Version,
};
use crate::models::v2::projects::{DonationLink, LegacyProject, LegacySideType, LegacyVersion};
use crate::models::v2::projects::{
DonationLink, LegacyProject, LegacySideType, LegacyVersion,
};
use crate::models::v2::search::LegacySearchResults;
use crate::queue::moderation::AutomatedModerationQueue;
use crate::queue::session::AuthQueue;
@ -71,7 +73,9 @@ pub async fn project_search(
facet
.into_iter()
.map(|facet| {
if let Some((key, operator, val)) = parse_facet(&facet) {
if let Some((key, operator, val)) =
parse_facet(&facet)
{
format!(
"{}{}{}",
match key.as_str() {
@ -155,15 +159,19 @@ pub async fn random_projects_get(
) -> Result<HttpResponse, ApiError> {
let count = v3::projects::RandomProjects { count: count.count };
let response =
v3::projects::random_projects_get(web::Query(count), pool.clone(), redis.clone())
.await
.or_else(v2_reroute::flatten_404_error)
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::projects::random_projects_get(
web::Query(count),
pool.clone(),
redis.clone(),
)
.await
.or_else(v2_reroute::flatten_404_error)
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
}
Err(response) => Ok(response),
@ -193,7 +201,8 @@ pub async fn projects_get(
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
}
Err(response) => Ok(response),
@ -210,15 +219,24 @@ pub async fn project_get(
) -> Result<HttpResponse, ApiError> {
// Convert V2 data to V3 data
// Call V3 project creation
let response = v3::projects::project_get(req, info, pool.clone(), redis.clone(), session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::projects::project_get(
req,
info,
pool.clone(),
redis.clone(),
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Project>(response).await {
Ok(project) => {
let version_item = match project.versions.first() {
Some(vid) => version_item::Version::get((*vid).into(), &**pool, &redis).await?,
Some(vid) => {
version_item::Version::get((*vid).into(), &**pool, &redis)
.await?
}
None => None,
};
let project = LegacyProject::from(project, version_item);
@ -256,16 +274,28 @@ pub async fn dependency_list(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// TODO: tests, probably
let response =
v3::projects::dependency_list(req, info, pool.clone(), redis.clone(), session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::projects::dependency_list(
req,
info,
pool.clone(),
redis.clone(),
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
match v2_reroute::extract_ok_json::<crate::routes::v3::projects::DependencyInfo>(response).await
match v2_reroute::extract_ok_json::<
crate::routes::v3::projects::DependencyInfo,
>(response)
.await
{
Ok(dependency_info) => {
let converted_projects =
LegacyProject::from_many(dependency_info.projects, &**pool, &redis).await?;
let converted_projects = LegacyProject::from_many(
dependency_info.projects,
&**pool,
&redis,
)
.await?;
let converted_versions = dependency_info
.versions
.into_iter()
@ -443,7 +473,8 @@ pub async fn project_edit(
// (resetting to the new ones)
if let Some(donation_urls) = v2_new_project.donation_urls {
// Fetch current donation links from project so we know what to delete
let fetched_example_project = project_item::Project::get(&info.0, &**pool, &redis).await?;
let fetched_example_project =
project_item::Project::get(&info.0, &**pool, &redis).await?;
let donation_links = fetched_example_project
.map(|x| {
x.urls
@ -504,11 +535,19 @@ pub async fn project_edit(
// If client and server side were set, we will call
// the version setting route for each version to set the side types for each of them.
if response.status().is_success() && (client_side.is_some() || server_side.is_some()) {
let project_item =
project_item::Project::get(&new_slug.unwrap_or(project_id), &**pool, &redis).await?;
if response.status().is_success()
&& (client_side.is_some() || server_side.is_some())
{
let project_item = project_item::Project::get(
&new_slug.unwrap_or(project_id),
&**pool,
&redis,
)
.await?;
let version_ids = project_item.map(|x| x.versions).unwrap_or_default();
let versions = version_item::Version::get_many(&version_ids, &**pool, &redis).await?;
let versions =
version_item::Version::get_many(&version_ids, &**pool, &redis)
.await?;
for version in versions {
let version = Version::from(version);
let mut fields = version.fields;
@ -516,7 +555,10 @@ pub async fn project_edit(
v2_reroute::convert_side_types_v2(&fields, None);
let client_side = client_side.unwrap_or(current_client_side);
let server_side = server_side.unwrap_or(current_server_side);
fields.extend(v2_reroute::convert_side_types_v3(client_side, server_side));
fields.extend(v2_reroute::convert_side_types_v3(
client_side,
server_side,
));
response = v3::versions::version_edit_helper(
req.clone(),
@ -682,8 +724,10 @@ pub async fn projects_edit(
add_categories: bulk_edit_project.add_categories,
remove_categories: bulk_edit_project.remove_categories,
additional_categories: bulk_edit_project.additional_categories,
add_additional_categories: bulk_edit_project.add_additional_categories,
remove_additional_categories: bulk_edit_project.remove_additional_categories,
add_additional_categories: bulk_edit_project
.add_additional_categories,
remove_additional_categories: bulk_edit_project
.remove_additional_categories,
link_urls: Some(link_urls),
}),
redis,
@ -735,9 +779,16 @@ pub async fn delete_project_icon(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::delete_project_icon(req, info, pool, redis, file_host, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)
v3::projects::delete_project_icon(
req,
info,
pool,
redis,
file_host,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
#[derive(Serialize, Deserialize, Validate)]
@ -873,9 +924,16 @@ pub async fn project_delete(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::project_delete(req, info, pool, redis, search_config, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)
v3::projects::project_delete(
req,
info,
pool,
redis,
search_config,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
#[post("{id}/follow")]

View File

@ -25,9 +25,10 @@ pub async fn report_create(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::reports::report_create(req, pool, body, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response =
v3::reports::report_create(req, pool, body, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Report>(response).await {
@ -78,7 +79,8 @@ pub async fn reports(
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<Report>>(response).await {
Ok(reports) => {
let reports: Vec<_> = reports.into_iter().map(LegacyReport::from).collect();
let reports: Vec<_> =
reports.into_iter().map(LegacyReport::from).collect();
Ok(HttpResponse::Ok().json(reports))
}
Err(response) => Ok(response),
@ -111,7 +113,8 @@ pub async fn reports_get(
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<Report>>(response).await {
Ok(report_list) => {
let report_list: Vec<_> = report_list.into_iter().map(LegacyReport::from).collect();
let report_list: Vec<_> =
report_list.into_iter().map(LegacyReport::from).collect();
Ok(HttpResponse::Ok().json(report_list))
}
Err(response) => Ok(response),
@ -126,9 +129,10 @@ pub async fn report_get(
info: web::Path<(crate::models::reports::ReportId,)>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::reports::report_get(req, pool, redis, info, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response =
v3::reports::report_get(req, pool, redis, info, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Report>(response).await {

View File

@ -19,7 +19,9 @@ pub struct V2Stats {
}
#[get("statistics")]
pub async fn get_stats(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
pub async fn get_stats(
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
let response = v3::statistics::get_stats(pool)
.await
.or_else(v2_reroute::flatten_404_error)?;

View File

@ -43,7 +43,9 @@ pub async fn category_list(
let response = v3::tags::category_list(pool, redis).await?;
// Convert to V2 format
match v2_reroute::extract_ok_json::<Vec<v3::tags::CategoryData>>(response).await {
match v2_reroute::extract_ok_json::<Vec<v3::tags::CategoryData>>(response)
.await
{
Ok(categories) => {
let categories = categories
.into_iter()
@ -75,7 +77,9 @@ pub async fn loader_list(
let response = v3::tags::loader_list(pool, redis).await?;
// Convert to V2 format
match v2_reroute::extract_ok_json::<Vec<v3::tags::LoaderData>>(response).await {
match v2_reroute::extract_ok_json::<Vec<v3::tags::LoaderData>>(response)
.await
{
Ok(loaders) => {
let loaders = loaders
.into_iter()
@ -86,12 +90,15 @@ pub async fn loader_list(
// a project type before any versions are set.
supported_project_types.push("project".to_string());
if ["forge", "fabric", "quilt", "neoforge"].contains(&&*l.name) {
if ["forge", "fabric", "quilt", "neoforge"]
.contains(&&*l.name)
{
supported_project_types.push("modpack".to_string());
}
if supported_project_types.contains(&"datapack".to_string())
|| supported_project_types.contains(&"plugin".to_string())
|| supported_project_types
.contains(&"plugin".to_string())
{
supported_project_types.push("mod".to_string());
}
@ -149,7 +156,9 @@ pub async fn game_version_list(
// Convert to V2 format
Ok(
match v2_reroute::extract_ok_json::<Vec<LoaderFieldEnumValue>>(response).await {
match v2_reroute::extract_ok_json::<Vec<LoaderFieldEnumValue>>(response)
.await
{
Ok(fields) => {
let fields = fields
.into_iter()
@ -187,7 +196,8 @@ pub async fn license_list() -> HttpResponse {
let response = v3::tags::license_list().await;
// Convert to V2 format
match v2_reroute::extract_ok_json::<Vec<v3::tags::License>>(response).await {
match v2_reroute::extract_ok_json::<Vec<v3::tags::License>>(response).await
{
Ok(licenses) => {
let licenses = licenses
.into_iter()
@ -209,14 +219,18 @@ pub struct LicenseText {
}
#[get("license/{id}")]
pub async fn license_text(params: web::Path<(String,)>) -> Result<HttpResponse, ApiError> {
pub async fn license_text(
params: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
let license = v3::tags::license_text(params)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert to V2 format
Ok(
match v2_reroute::extract_ok_json::<v3::tags::LicenseText>(license).await {
match v2_reroute::extract_ok_json::<v3::tags::LicenseText>(license)
.await
{
Ok(license) => HttpResponse::Ok().json(LicenseText {
title: license.title,
body: license.body,
@ -244,7 +258,11 @@ pub async fn donation_platform_list(
// Convert to V2 format
Ok(
match v2_reroute::extract_ok_json::<Vec<LinkPlatformQueryData>>(response).await {
match v2_reroute::extract_ok_json::<Vec<LinkPlatformQueryData>>(
response,
)
.await
{
Ok(platforms) => {
let platforms = platforms
.into_iter()

View File

@ -1,5 +1,7 @@
use crate::database::redis::RedisPool;
use crate::models::teams::{OrganizationPermissions, ProjectPermissions, TeamId, TeamMember};
use crate::models::teams::{
OrganizationPermissions, ProjectPermissions, TeamId, TeamMember,
};
use crate::models::users::UserId;
use crate::models::v2::teams::LegacyTeamMember;
use crate::queue::session::AuthQueue;
@ -36,9 +38,15 @@ pub async fn team_members_get_project(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::teams::team_members_get_project(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::teams::team_members_get_project(
req,
info,
pool,
redis,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<TeamMember>>(response).await {
Ok(members) => {
@ -61,9 +69,10 @@ pub async fn team_members_get(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::teams::team_members_get(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response =
v3::teams::team_members_get(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<TeamMember>>(response).await {
Ok(members) => {

View File

@ -110,7 +110,14 @@ pub async fn message_delete(
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::threads::message_delete(req, info, pool, redis, session_queue, file_host)
.await
.or_else(v2_reroute::flatten_404_error)
v3::threads::message_delete(
req,
info,
pool,
redis,
session_queue,
file_host,
)
.await
.or_else(v2_reroute::flatten_404_error)
}

View File

@ -64,15 +64,19 @@ pub async fn users_get(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let response =
v3::users::users_get(web::Query(v3::users::UserIds { ids: ids.ids }), pool, redis)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::users::users_get(
web::Query(v3::users::UserIds { ids: ids.ids }),
pool,
redis,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<User>>(response).await {
Ok(users) => {
let legacy_users: Vec<LegacyUser> = users.into_iter().map(LegacyUser::from).collect();
let legacy_users: Vec<LegacyUser> =
users.into_iter().map(LegacyUser::from).collect();
Ok(HttpResponse::Ok().json(legacy_users))
}
Err(response) => Ok(response),
@ -107,14 +111,21 @@ pub async fn projects_list(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::users::projects_list(req, info, pool.clone(), redis.clone(), session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::users::projects_list(
req,
info,
pool.clone(),
redis.clone(),
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert to V2 projects
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
}
Err(response) => Ok(response),
@ -230,14 +241,21 @@ pub async fn user_follows(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::users::user_follows(req, info, pool.clone(), redis.clone(), session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::users::user_follows(
req,
info,
pool.clone(),
redis.clone(),
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert to V2 projects
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
}
Err(response) => Ok(response),
@ -252,9 +270,10 @@ pub async fn user_notifications(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::users::user_notifications(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response =
v3::users::user_notifications(req, info, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<Notification>>(response).await {
Ok(notifications) => {

View File

@ -4,7 +4,8 @@ use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::ImageId;
use crate::models::projects::{
Dependency, FileType, Loader, ProjectId, Version, VersionId, VersionStatus, VersionType,
Dependency, FileType, Loader, ProjectId, Version, VersionId, VersionStatus,
VersionType,
};
use crate::models::v2::projects::LegacyVersion;
use crate::queue::moderation::AutomatedModerationQueue;
@ -93,7 +94,8 @@ pub async fn version_create(
let payload = v2_reroute::alter_actix_multipart(
payload,
req.headers().clone(),
|legacy_create: InitialVersionData, content_dispositions: Vec<ContentDisposition>| {
|legacy_create: InitialVersionData,
content_dispositions: Vec<ContentDisposition>| {
let client = client.clone();
let redis = redis.clone();
async move {
@ -105,19 +107,27 @@ pub async fn version_create(
);
// Get all possible side-types for the given loaders; we will use these to check if we need to convert/apply singleplayer, etc.
let loaders = match v3::tags::loader_list(client.clone(), redis.clone()).await {
Ok(loader_response) => {
(v2_reroute::extract_ok_json::<Vec<v3::tags::LoaderData>>(loader_response)
let loaders =
match v3::tags::loader_list(client.clone(), redis.clone())
.await
{
Ok(loader_response) => {
(v2_reroute::extract_ok_json::<
Vec<v3::tags::LoaderData>,
>(loader_response)
.await)
.unwrap_or_default()
}
Err(_) => vec![],
};
.unwrap_or_default()
}
Err(_) => vec![],
};
let loader_fields_aggregate = loaders
.into_iter()
.filter_map(|loader| {
if legacy_create.loaders.contains(&Loader(loader.name.clone())) {
if legacy_create
.loaders
.contains(&Loader(loader.name.clone()))
{
Some(loader.supported_fields)
} else {
None
@ -150,15 +160,29 @@ pub async fn version_create(
.map(|f| (f.to_string(), json!(false))),
);
if let Some(example_version_fields) =
get_example_version_fields(legacy_create.project_id, client, &redis).await?
get_example_version_fields(
legacy_create.project_id,
client,
&redis,
)
.await?
{
fields.extend(example_version_fields.into_iter().filter_map(|f| {
if side_type_loader_field_names.contains(&f.field_name.as_str()) {
Some((f.field_name, f.value.serialize_internal()))
} else {
None
}
}));
fields.extend(
example_version_fields.into_iter().filter_map(
|f| {
if side_type_loader_field_names
.contains(&f.field_name.as_str())
{
Some((
f.field_name,
f.value.serialize_internal(),
))
} else {
None
}
},
),
);
}
}
// Handle project type via file extension prediction
@ -180,9 +204,14 @@ pub async fn version_create(
// Similarly, check actual content disposition for mrpacks, in case file_parts is wrong
for content_disposition in content_dispositions {
// Uses version_create functions to get the file name and extension
let (_, file_extension) = version_creation::get_name_ext(&content_disposition)?;
let (_, file_extension) =
version_creation::get_name_ext(&content_disposition)?;
crate::util::ext::project_file_type(file_extension)
.ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?;
.ok_or_else(|| {
CreateError::InvalidFileType(
file_extension.to_string(),
)
})?;
if file_extension == "mrpack" {
project_type = Some("modpack");
@ -193,7 +222,10 @@ pub async fn version_create(
// Modpacks now use the "mrpack" loader, and loaders are converted to loader fields.
// Setting 'project_type' directly is removed; it's loader-based now.
if project_type == Some("modpack") {
fields.insert("mrpack_loaders".to_string(), json!(legacy_create.loaders));
fields.insert(
"mrpack_loaders".to_string(),
json!(legacy_create.loaders),
);
}
let loaders = if project_type == Some("modpack") {
@ -257,18 +289,20 @@ async fn get_example_version_fields(
None => return Ok(None),
};
let vid = match project_item::Project::get_id(project_id.into(), &**pool, redis)
.await?
.and_then(|p| p.versions.first().cloned())
{
Some(vid) => vid,
None => return Ok(None),
};
let vid =
match project_item::Project::get_id(project_id.into(), &**pool, redis)
.await?
.and_then(|p| p.versions.first().cloned())
{
Some(vid) => vid,
None => return Ok(None),
};
let example_version = match version_item::Version::get(vid, &**pool, redis).await? {
Some(version) => version,
None => return Ok(None),
};
let example_version =
match version_item::Version::get(vid, &**pool, redis).await? {
Some(version) => version,
None => return Ok(None),
};
Ok(Some(example_version.version_fields))
}

View File

@ -38,10 +38,16 @@ pub async fn get_version_from_hash(
hash_query: web::Query<HashQuery>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response =
v3::version_file::get_version_from_hash(req, info, pool, redis, hash_query, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::version_file::get_version_from_hash(
req,
info,
pool,
redis,
hash_query,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Version>(response).await {
@ -64,9 +70,16 @@ pub async fn download_version(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns TemporaryRedirect, so no need to convert to V2
v3::version_file::download_version(req, info, pool, redis, hash_query, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)
v3::version_file::download_version(
req,
info,
pool,
redis,
hash_query,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
// under /api/v1/version_file/{hash}
@ -80,9 +93,16 @@ pub async fn delete_file(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert to V2
v3::version_file::delete_file(req, info, pool, redis, hash_query, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)
v3::version_file::delete_file(
req,
info,
pool,
redis,
hash_query,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
#[derive(Serialize, Deserialize)]
@ -171,7 +191,9 @@ pub async fn get_versions_from_hashes(
.or_else(v2_reroute::flatten_404_error)?;
// Convert to V2
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response)
.await
{
Ok(versions) => {
let v2_versions = versions
.into_iter()
@ -210,7 +232,9 @@ pub async fn get_projects_from_hashes(
.or_else(v2_reroute::flatten_404_error)?;
// Convert to V2
match v2_reroute::extract_ok_json::<HashMap<String, Project>>(response).await {
match v2_reroute::extract_ok_json::<HashMap<String, Project>>(response)
.await
{
Ok(projects_hashes) => {
let hash_to_project_id = projects_hashes
.iter()
@ -219,14 +243,19 @@ pub async fn get_projects_from_hashes(
(hash.clone(), project_id)
})
.collect::<HashMap<_, _>>();
let legacy_projects =
LegacyProject::from_many(projects_hashes.into_values().collect(), &**pool, &redis)
.await?;
let legacy_projects = LegacyProject::from_many(
projects_hashes.into_values().collect(),
&**pool,
&redis,
)
.await?;
let legacy_projects_hashes = hash_to_project_id
.into_iter()
.filter_map(|(hash, project_id)| {
let legacy_project =
legacy_projects.iter().find(|x| x.id == project_id)?.clone();
let legacy_project = legacy_projects
.iter()
.find(|x| x.id == project_id)?
.clone();
Some((hash, legacy_project))
})
.collect::<HashMap<_, _>>();
@ -261,12 +290,15 @@ pub async fn update_files(
hashes: update_data.hashes,
};
let response = v3::version_file::update_files(pool, redis, web::Json(update_data))
.await
.or_else(v2_reroute::flatten_404_error)?;
let response =
v3::version_file::update_files(pool, redis, web::Json(update_data))
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response)
.await
{
Ok(returned_versions) => {
let v3_versions = returned_versions
.into_iter()
@ -316,7 +348,8 @@ pub async fn update_individual_files(
game_versions.push(serde_json::json!(gv.clone()));
}
if !game_versions.is_empty() {
loader_fields.insert("game_versions".to_string(), game_versions);
loader_fields
.insert("game_versions".to_string(), game_versions);
}
v3::version_file::FileUpdateData {
hash: x.hash.clone(),
@ -339,7 +372,9 @@ pub async fn update_individual_files(
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response)
.await
{
Ok(returned_versions) => {
let v3_versions = returned_versions
.into_iter()

View File

@ -4,7 +4,9 @@ use super::ApiError;
use crate::database::redis::RedisPool;
use crate::models;
use crate::models::ids::VersionId;
use crate::models::projects::{Dependency, FileType, Version, VersionStatus, VersionType};
use crate::models::projects::{
Dependency, FileType, Version, VersionStatus, VersionType,
};
use crate::models::v2::projects::LegacyVersion;
use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3};
@ -67,7 +69,8 @@ pub async fn version_list(
for gv in versions {
game_versions.push(serde_json::json!(gv.clone()));
}
loader_fields.insert("game_versions".to_string(), game_versions);
loader_fields
.insert("game_versions".to_string(), game_versions);
if let Some(ref loaders) = loaders {
loader_fields.insert(
@ -94,10 +97,16 @@ pub async fn version_list(
offset: filters.offset,
};
let response =
v3::versions::version_list(req, info, web::Query(filters), pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::versions::version_list(
req,
info,
web::Query(filters),
pool,
redis,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<Version>>(response).await {
@ -122,9 +131,15 @@ pub async fn version_project_get(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let id = info.into_inner();
let response = v3::versions::version_project_get_helper(req, id, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::versions::version_project_get_helper(
req,
id,
pool,
redis,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Version>(response).await {
Ok(version) => {
@ -149,9 +164,15 @@ pub async fn versions_get(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let ids = v3::versions::VersionIds { ids: ids.ids };
let response = v3::versions::versions_get(req, web::Query(ids), pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response = v3::versions::versions_get(
req,
web::Query(ids),
pool,
redis,
session_queue,
)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Vec<Version>>(response).await {
@ -175,9 +196,10 @@ pub async fn version_get(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let id = info.into_inner().0;
let response = v3::versions::version_get_helper(req, id, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
let response =
v3::versions::version_get_helper(req, id, pool, redis, session_queue)
.await
.or_else(v2_reroute::flatten_404_error)?;
// Convert response to V2 format
match v2_reroute::extract_ok_json::<Version>(response).await {
Ok(version) => {
@ -252,16 +274,19 @@ pub async fn version_edit(
)
.await
.or_else(v2_reroute::flatten_404_error)?;
let old_version = match v2_reroute::extract_ok_json::<Version>(old_version).await {
Ok(version) => version,
Err(response) => return Ok(response),
};
let old_version =
match v2_reroute::extract_ok_json::<Version>(old_version).await {
Ok(version) => version,
Err(response) => return Ok(response),
};
// If this has 'mrpack_loaders' as a loader field previously, this is a modpack.
// Therefore, if we are modifying the 'loader' field in this case,
// we are actually modifying the 'mrpack_loaders' loader field
let mut loaders = new_version.loaders.clone();
if old_version.fields.contains_key("mrpack_loaders") && new_version.loaders.is_some() {
if old_version.fields.contains_key("mrpack_loaders")
&& new_version.loaders.is_some()
{
fields.insert(
"mrpack_loaders".to_string(),
serde_json::json!(new_version.loaders),
@ -315,7 +340,14 @@ pub async fn version_delete(
search_config: web::Data<SearchConfig>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::versions::version_delete(req, info, pool, redis, session_queue, search_config)
.await
.or_else(v2_reroute::flatten_404_error)
v3::versions::version_delete(
req,
info,
pool,
redis,
session_queue,
search_config,
)
.await
.or_else(v2_reroute::flatten_404_error)
}

View File

@ -3,14 +3,20 @@ use std::collections::HashMap;
use super::v3::project_creation::CreateError;
use super::ApiError;
use crate::models::v2::projects::LegacySideType;
use crate::util::actix::{generate_multipart, MultipartSegment, MultipartSegmentData};
use crate::util::actix::{
generate_multipart, MultipartSegment, MultipartSegmentData,
};
use actix_multipart::Multipart;
use actix_web::http::header::{ContentDisposition, HeaderMap, TryIntoHeaderPair};
use actix_web::http::header::{
ContentDisposition, HeaderMap, TryIntoHeaderPair,
};
use actix_web::HttpResponse;
use futures::{stream, Future, StreamExt};
use serde_json::{json, Value};
pub async fn extract_ok_json<T>(response: HttpResponse) -> Result<T, HttpResponse>
pub async fn extract_ok_json<T>(
response: HttpResponse,
) -> Result<T, HttpResponse>
where
T: serde::de::DeserializeOwned,
{
@ -27,7 +33,8 @@ where
let bytes = actix_web::body::to_bytes(body)
.await
.map_err(|_| failure_http_response())?;
let json_value: T = serde_json::from_slice(&bytes).map_err(|_| failure_http_response())?;
let json_value: T = serde_json::from_slice(&bytes)
.map_err(|_| failure_http_response())?;
Ok(json_value)
} else {
Err(response)
@ -119,9 +126,10 @@ where
let json_value = json.ok_or(CreateError::InvalidInput(
"No json segment found in multipart.".to_string(),
))?;
let mut json_segment = json_segment.ok_or(CreateError::InvalidInput(
"No json segment found in multipart.".to_string(),
))?;
let mut json_segment =
json_segment.ok_or(CreateError::InvalidInput(
"No json segment found in multipart.".to_string(),
))?;
// Call closure, with the json value and names of the other segments
let json_value: U = closure(json_value, content_dispositions).await?;
@ -144,11 +152,15 @@ where
headers.insert(key, value);
}
Err(err) => {
CreateError::InvalidInput(format!("Error inserting test header: {:?}.", err));
CreateError::InvalidInput(format!(
"Error inserting test header: {:?}.",
err
));
}
};
let new_multipart = Multipart::new(&headers, stream::once(async { Ok(payload) }));
let new_multipart =
Multipart::new(&headers, stream::once(async { Ok(payload) }));
Ok(new_multipart)
}
@ -165,10 +177,10 @@ pub fn convert_side_types_v3(
|| server_side == Required
|| server_side == Optional;
let client_and_server = singleplayer;
let client_only =
(client_side == Required || client_side == Optional) && server_side != Required;
let server_only =
(server_side == Required || server_side == Optional) && client_side != Required;
let client_only = (client_side == Required || client_side == Optional)
&& server_side != Required;
let server_only = (server_side == Required || server_side == Optional)
&& client_side != Required;
let mut fields = HashMap::new();
fields.insert("singleplayer".to_string(), json!(singleplayer));
@ -181,7 +193,9 @@ pub fn convert_side_types_v3(
// Converts plugin loaders from v2 to v3, for search facets
// Within every 1st and 2nd level (the ones allowed in v2), we convert every instance of:
// "project_type:mod" to "project_type:plugin" OR "project_type:mod"
pub fn convert_plugin_loader_facets_v3(facets: Vec<Vec<String>>) -> Vec<Vec<String>> {
pub fn convert_plugin_loader_facets_v3(
facets: Vec<Vec<String>>,
) -> Vec<Vec<String>> {
facets
.into_iter()
.map(|inner_facets| {
@ -246,7 +260,8 @@ pub fn convert_side_types_v2_bools(
Some("shader") => (Required, Unsupported),
Some("resourcepack") => (Required, Unsupported),
_ => {
let singleplayer = singleplayer.or(client_and_server).unwrap_or(false);
let singleplayer =
singleplayer.or(client_and_server).unwrap_or(false);
match (singleplayer, client_only, server_only) {
// Only singleplayer
@ -282,7 +297,9 @@ pub fn capitalize_first(input: &str) -> String {
#[cfg(test)]
mod tests {
use super::*;
use crate::models::v2::projects::LegacySideType::{Optional, Required, Unsupported};
use crate::models::v2::projects::LegacySideType::{
Optional, Required, Unsupported,
};
#[test]
fn convert_types() {
@ -300,8 +317,10 @@ mod tests {
if lossy_pairs.contains(&(client_side, server_side)) {
continue;
}
let side_types = convert_side_types_v3(client_side, server_side);
let (client_side2, server_side2) = convert_side_types_v2(&side_types, None);
let side_types =
convert_side_types_v3(client_side, server_side);
let (client_side2, server_side2) =
convert_side_types_v2(&side_types, None);
assert_eq!(client_side, client_side2);
assert_eq!(server_side, server_side2);
}
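Since the convert_side_types_v3 hunk above cuts off the head of the singleplayer expression and the inserts that follow the "singleplayer" one, here is a self-contained sketch of the visible mapping. The elided pieces (the client_side checks in singleplayer and the last three inserts) are assumptions inferred from the variable names, not code from this commit.

use serde_json::json;
use std::collections::HashMap;

#[derive(PartialEq, Clone, Copy)]
enum LegacySideType { Required, Optional, Unsupported }
use LegacySideType::*;

fn convert_side_types_v3_sketch(
    client_side: LegacySideType,
    server_side: LegacySideType,
) -> HashMap<String, serde_json::Value> {
    // Assumed head of the expression; only the server_side half is in the hunk.
    let singleplayer = client_side == Required
        || client_side == Optional
        || server_side == Required
        || server_side == Optional;
    let client_and_server = singleplayer;
    let client_only = (client_side == Required || client_side == Optional)
        && server_side != Required;
    let server_only = (server_side == Required || server_side == Optional)
        && client_side != Required;

    let mut fields = HashMap::new();
    fields.insert("singleplayer".to_string(), json!(singleplayer));
    // Assumed: the remaining fields are inserted the same way below the hunk.
    fields.insert("client_and_server".to_string(), json!(client_and_server));
    fields.insert("client_only".to_string(), json!(client_only));
    fields.insert("server_only".to_string(), json!(server_only));
    fields
}

fn main() {
    // A client-side-only mod: required on the client, unsupported on the server.
    let fields = convert_side_types_v3_sketch(Required, Unsupported);
    assert_eq!(fields["client_only"], json!(true));
    assert_eq!(fields["server_only"], json!(false));
}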

View File

@ -98,7 +98,8 @@ pub async fn playtimes_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
let project_ids =
filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
// Get the views
let playtimes = crate::clickhouse::fetch_playtimes(
@ -164,7 +165,8 @@ pub async fn views_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
let project_ids =
filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
// Get the views
let views = crate::clickhouse::fetch_views(
@ -230,7 +232,9 @@ pub async fn downloads_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let project_ids = filter_allowed_ids(project_ids, user_option, &pool, &redis, None).await?;
let project_ids =
filter_allowed_ids(project_ids, user_option, &pool, &redis, None)
.await?;
// Get the downloads
let downloads = crate::clickhouse::fetch_downloads(
@ -299,17 +303,26 @@ pub async fn revenue_get(
// Round end_date up to nearest resolution
let diff = end_date.timestamp() % (resolution_minutes as i64 * 60);
let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff);
let end_date =
end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff);
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let project_ids =
filter_allowed_ids(project_ids, user.clone(), &pool, &redis, Some(true)).await?;
let project_ids = filter_allowed_ids(
project_ids,
user.clone(),
&pool,
&redis,
Some(true),
)
.await?;
let duration: PgInterval = Duration::minutes(resolution_minutes as i64)
.try_into()
.map_err(|_| ApiError::InvalidInput("Invalid resolution_minutes".to_string()))?;
.map_err(|_| {
ApiError::InvalidInput("Invalid resolution_minutes".to_string())
})?;
// Get the revenue data
let project_ids = project_ids.unwrap_or_default();
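The end-date rounding a few lines up is plain modular arithmetic; a small editor's sketch with concrete numbers (plain i64 seconds standing in for the chrono types):

// Mirrors the round-up step shown in this hunk.
fn round_up_to_resolution(end_ts_secs: i64, resolution_minutes: i64) -> i64 {
    let step = resolution_minutes * 60;
    let diff = end_ts_secs % step;
    // As written, an already-aligned end date still moves forward by a full
    // step, since diff == 0 adds `step` seconds.
    end_ts_secs + (step - diff)
}

fn main() {
    // 10:20:00 with a 60-minute resolution rounds up to 11:00:00.
    assert_eq!(round_up_to_resolution(10 * 3600 + 20 * 60, 60), 11 * 3600);
}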
@ -424,7 +437,8 @@ pub async fn countries_downloads_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
let project_ids =
filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
// Get the countries
let countries = crate::clickhouse::fetch_countries_downloads(
@ -496,7 +510,8 @@ pub async fn countries_views_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
let project_ids =
filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
// Get the countries
let countries = crate::clickhouse::fetch_countries_views(
@ -564,55 +579,68 @@ async fn filter_allowed_ids(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
let project_ids = if let Some(project_strings) = project_ids {
let projects_data =
database::models::Project::get_many(&project_strings, &***pool, redis).await?;
let projects_data = database::models::Project::get_many(
&project_strings,
&***pool,
redis,
)
.await?;
let team_ids = projects_data
.iter()
.map(|x| x.inner.team_id)
.collect::<Vec<database::models::TeamId>>();
let team_members =
database::models::TeamMember::get_from_team_full_many(&team_ids, &***pool, redis)
.await?;
database::models::TeamMember::get_from_team_full_many(
&team_ids, &***pool, redis,
)
.await?;
let organization_ids = projects_data
.iter()
.filter_map(|x| x.inner.organization_id)
.collect::<Vec<database::models::OrganizationId>>();
let organizations =
database::models::Organization::get_many_ids(&organization_ids, &***pool, redis)
.await?;
let organization_team_ids = organizations
.iter()
.map(|x| x.team_id)
.collect::<Vec<database::models::TeamId>>();
let organization_team_members = database::models::TeamMember::get_from_team_full_many(
&organization_team_ids,
let organizations = database::models::Organization::get_many_ids(
&organization_ids,
&***pool,
redis,
)
.await?;
let organization_team_ids = organizations
.iter()
.map(|x| x.team_id)
.collect::<Vec<database::models::TeamId>>();
let organization_team_members =
database::models::TeamMember::get_from_team_full_many(
&organization_team_ids,
&***pool,
redis,
)
.await?;
let ids = projects_data
.into_iter()
.filter(|project| {
let team_member = team_members
.iter()
.find(|x| x.team_id == project.inner.team_id && x.user_id == user.id.into());
let team_member = team_members.iter().find(|x| {
x.team_id == project.inner.team_id
&& x.user_id == user.id.into()
});
let organization = project
.inner
.organization_id
.and_then(|oid| organizations.iter().find(|x| x.id == oid));
let organization_team_member = if let Some(organization) = organization {
organization_team_members
.iter()
.find(|x| x.team_id == organization.team_id && x.user_id == user.id.into())
} else {
None
};
let organization_team_member =
if let Some(organization) = organization {
organization_team_members.iter().find(|x| {
x.team_id == organization.team_id
&& x.user_id == user.id.into()
})
} else {
None
};
let permissions = ProjectPermissions::get_permissions_by_role(
&user.role,

View File

@ -1,6 +1,8 @@
use crate::auth::checks::is_visible_collection;
use crate::auth::{filter_visible_collections, get_user_from_headers};
use crate::database::models::{collection_item, generate_collection_id, project_item};
use crate::database::models::{
collection_item, generate_collection_id, project_item,
};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::collections::{Collection, CollectionStatus};
@ -74,13 +76,14 @@ pub async fn collection_create(
.await?
.1;
collection_create_data
.validate()
.map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
collection_create_data.validate().map_err(|err| {
CreateError::InvalidInput(validation_errors_to_string(err, None))
})?;
let mut transaction = client.begin().await?;
let collection_id: CollectionId = generate_collection_id(&mut transaction).await?.into();
let collection_id: CollectionId =
generate_collection_id(&mut transaction).await?.into();
let initial_project_ids = project_item::Project::get_many(
&collection_create_data.projects,
@ -140,10 +143,13 @@ pub async fn collections_get(
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
let ids = ids
.into_iter()
.map(|x| parse_base62(x).map(|x| database::models::CollectionId(x as i64)))
.map(|x| {
parse_base62(x).map(|x| database::models::CollectionId(x as i64))
})
.collect::<Result<Vec<_>, _>>()?;
let collections_data = database::models::Collection::get_many(&ids, &**pool, &redis).await?;
let collections_data =
database::models::Collection::get_many(&ids, &**pool, &redis).await?;
let user_option = get_user_from_headers(
&req,
@ -156,7 +162,8 @@ pub async fn collections_get(
.map(|x| x.1)
.ok();
let collections = filter_visible_collections(collections_data, &user_option).await?;
let collections =
filter_visible_collections(collections_data, &user_option).await?;
Ok(HttpResponse::Ok().json(collections))
}
@ -171,7 +178,8 @@ pub async fn collection_get(
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_data = database::models::Collection::get(id, &**pool, &redis).await?;
let collection_data =
database::models::Collection::get(id, &**pool, &redis).await?;
let user_option = get_user_from_headers(
&req,
&**pool,
@ -228,9 +236,9 @@ pub async fn collection_edit(
.await?
.1;
new_collection
.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
new_collection.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
@ -275,7 +283,8 @@ pub async fn collection_edit(
if let Some(status) = &new_collection.status {
if !(user.role.is_mod()
|| collection_item.status.is_approved() && status.can_be_requested())
|| collection_item.status.is_approved()
&& status.can_be_requested())
{
return Err(ApiError::CustomAuthentication(
"You don't have permission to set this status!".to_string(),
@ -313,13 +322,14 @@ pub async fn collection_edit(
.collect_vec();
let mut validated_project_ids = Vec::new();
for project_id in new_project_ids {
let project = database::models::Project::get(project_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
let project =
database::models::Project::get(project_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"The specified project {project_id} does not exist!"
))
})?;
})?;
validated_project_ids.push(project.inner.id.0);
}
// Insert; don't throw an error if it already exists
@ -348,7 +358,8 @@ pub async fn collection_edit(
}
transaction.commit().await?;
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
database::models::Collection::clear_cache(collection_item.id, &redis)
.await?;
Ok(HttpResponse::NoContent().body(""))
} else {
@ -384,11 +395,14 @@ pub async fn collection_icon_edit(
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_item = database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified collection does not exist!".to_string())
})?;
let collection_item =
database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified collection does not exist!".to_string(),
)
})?;
if !can_modify_collection(&collection_item, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
@ -401,8 +415,12 @@ pub async fn collection_icon_edit(
)
.await?;
let bytes =
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
let bytes = read_from_payload(
&mut payload,
262144,
"Icons must be smaller than 256KiB",
)
.await?;
let collection_id: CollectionId = collection_item.id.into();
let upload_result = crate::util::img::upload_image_optimized(
@ -432,7 +450,8 @@ pub async fn collection_icon_edit(
.await?;
transaction.commit().await?;
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
database::models::Collection::clear_cache(collection_item.id, &redis)
.await?;
Ok(HttpResponse::NoContent().body(""))
}
@ -457,11 +476,14 @@ pub async fn delete_collection_icon(
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_item = database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified collection does not exist!".to_string())
})?;
let collection_item =
database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified collection does not exist!".to_string(),
)
})?;
if !can_modify_collection(&collection_item, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
}
@ -486,7 +508,8 @@ pub async fn delete_collection_icon(
.await?;
transaction.commit().await?;
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
database::models::Collection::clear_cache(collection_item.id, &redis)
.await?;
Ok(HttpResponse::NoContent().body(""))
}
@ -513,15 +536,21 @@ pub async fn collection_delete(
let collection = database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified collection does not exist!".to_string())
ApiError::InvalidInput(
"The specified collection does not exist!".to_string(),
)
})?;
if !can_modify_collection(&collection, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
}
let mut transaction = pool.begin().await?;
let result =
database::models::Collection::remove(collection.id, &mut transaction, &redis).await?;
let result = database::models::Collection::remove(
collection.id,
&mut transaction,
&redis,
)
.await?;
transaction.commit().await?;
database::models::Collection::clear_cache(collection.id, &redis).await?;

View File

@ -4,7 +4,9 @@ use super::threads::is_authorized_thread;
use crate::auth::checks::{is_team_member_project, is_team_member_version};
use crate::auth::get_user_from_headers;
use crate::database;
use crate::database::models::{project_item, report_item, thread_item, version_item};
use crate::database::models::{
project_item, report_item, thread_item, version_item,
};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::{ThreadMessageId, VersionId};
@ -50,18 +52,31 @@ pub async fn images_add(
let scopes = vec![context.relevant_scope()];
let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, Some(&scopes))
.await?
.1;
let user = get_user_from_headers(
&req,
&**pool,
&redis,
&session_queue,
Some(&scopes),
)
.await?
.1;
// Attempt to associate a supplied id with the context
// If the context cannot be found, or the user is not authorized to upload images for the context, return an error
match &mut context {
ImageContext::Project { project_id } => {
if let Some(id) = data.project_id {
let project = project_item::Project::get(&id, &**pool, &redis).await?;
let project =
project_item::Project::get(&id, &**pool, &redis).await?;
if let Some(project) = project {
if is_team_member_project(&project.inner, &Some(user.clone()), &pool).await? {
if is_team_member_project(
&project.inner,
&Some(user.clone()),
&pool,
)
.await?
{
*project_id = Some(project.inner.id.into());
} else {
return Err(ApiError::CustomAuthentication(
@ -77,10 +92,17 @@ pub async fn images_add(
}
ImageContext::Version { version_id } => {
if let Some(id) = data.version_id {
let version = version_item::Version::get(id.into(), &**pool, &redis).await?;
let version =
version_item::Version::get(id.into(), &**pool, &redis)
.await?;
if let Some(version) = version {
if is_team_member_version(&version.inner, &Some(user.clone()), &pool, &redis)
.await?
if is_team_member_version(
&version.inner,
&Some(user.clone()),
&pool,
&redis,
)
.await?
{
*version_id = Some(version.inner.id.into());
} else {
@ -97,11 +119,15 @@ pub async fn images_add(
}
ImageContext::ThreadMessage { thread_message_id } => {
if let Some(id) = data.thread_message_id {
let thread_message = thread_item::ThreadMessage::get(id.into(), &**pool)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The thread message could not found.".to_string())
})?;
let thread_message =
thread_item::ThreadMessage::get(id.into(), &**pool)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The thread message could not found."
.to_string(),
)
})?;
let thread = thread_item::Thread::get(thread_message.thread_id, &**pool)
.await?
.ok_or_else(|| {
@ -125,7 +151,9 @@ pub async fn images_add(
let report = report_item::Report::get(id.into(), &**pool)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The report could not be found.".to_string())
ApiError::InvalidInput(
"The report could not be found.".to_string(),
)
})?;
let thread = thread_item::Thread::get(report.thread_id, &**pool)
.await?
@ -151,8 +179,12 @@ pub async fn images_add(
}
// Upload the image to the file host
let bytes =
read_from_payload(&mut payload, 1_048_576, "Icons must be smaller than 1MiB").await?;
let bytes = read_from_payload(
&mut payload,
1_048_576,
"Icons must be smaller than 1MiB",
)
.await?;
let content_length = bytes.len();
let upload_result = upload_image_optimized(


@ -55,8 +55,11 @@ pub async fn notifications_get(
.collect();
let notifications_data: Vec<DBNotification> =
database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
.await?;
database::models::notification_item::Notification::get_many(
&notification_ids,
&**pool,
)
.await?;
let notifications: Vec<Notification> = notifications_data
.into_iter()
@ -87,7 +90,11 @@ pub async fn notification_get(
let id = info.into_inner().0;
let notification_data =
database::models::notification_item::Notification::get(id.into(), &**pool).await?;
database::models::notification_item::Notification::get(
id.into(),
&**pool,
)
.await?;
if let Some(data) = notification_data {
if user.id == data.user_id.into() || user.role.is_admin() {
@ -120,7 +127,11 @@ pub async fn notification_read(
let id = info.into_inner().0;
let notification_data =
database::models::notification_item::Notification::get(id.into(), &**pool).await?;
database::models::notification_item::Notification::get(
id.into(),
&**pool,
)
.await?;
if let Some(data) = notification_data {
if data.user_id == user.id.into() || user.role.is_admin() {
@ -166,7 +177,11 @@ pub async fn notification_delete(
let id = info.into_inner().0;
let notification_data =
database::models::notification_item::Notification::get(id.into(), &**pool).await?;
database::models::notification_item::Notification::get(
id.into(),
&**pool,
)
.await?;
if let Some(data) = notification_data {
if data.user_id == user.id.into() || user.role.is_admin() {
@ -184,7 +199,8 @@ pub async fn notification_delete(
Ok(HttpResponse::NoContent().body(""))
} else {
Err(ApiError::CustomAuthentication(
"You are not authorized to delete this notification!".to_string(),
"You are not authorized to delete this notification!"
.to_string(),
))
}
} else {
@ -209,18 +225,23 @@ pub async fn notifications_read(
.await?
.1;
let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect::<Vec<_>>();
let notification_ids =
serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect::<Vec<_>>();
let mut transaction = pool.begin().await?;
let notifications_data =
database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
.await?;
database::models::notification_item::Notification::get_many(
&notification_ids,
&**pool,
)
.await?;
let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();
let mut notifications: Vec<database::models::ids::NotificationId> =
Vec::new();
for notification in notifications_data {
if notification.user_id == user.id.into() || user.role.is_admin() {
@ -257,18 +278,23 @@ pub async fn notifications_delete(
.await?
.1;
let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect::<Vec<_>>();
let notification_ids =
serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect::<Vec<_>>();
let mut transaction = pool.begin().await?;
let notifications_data =
database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
.await?;
database::models::notification_item::Notification::get_many(
&notification_ids,
&**pool,
)
.await?;
let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();
let mut notifications: Vec<database::models::ids::NotificationId> =
Vec::new();
for notification in notifications_data {
if notification.user_id == user.id.into() || user.role.is_admin() {


@ -36,7 +36,10 @@ use crate::{
};
use crate::{
file_hosting::FileHost,
models::{ids::base62_impl::parse_base62, oauth_clients::DeleteOAuthClientQueryParam},
models::{
ids::base62_impl::parse_base62,
oauth_clients::DeleteOAuthClientQueryParam,
},
util::routes::read_from_payload,
};
@ -80,13 +83,16 @@ pub async fn get_user_clients(
let target_user = User::get(&info.into_inner(), &**pool, &redis).await?;
if let Some(target_user) = target_user {
if target_user.id != current_user.id.into() && !current_user.role.is_admin() {
if target_user.id != current_user.id.into()
&& !current_user.role.is_admin()
{
return Err(ApiError::CustomAuthentication(
"You do not have permission to see the OAuth clients of this user!".to_string(),
));
}
let clients = OAuthClient::get_all_user_clients(target_user.id, &**pool).await?;
let clients =
OAuthClient::get_all_user_clients(target_user.id, &**pool).await?;
let response = clients
.into_iter()
@ -136,7 +142,9 @@ pub struct NewOAuthApp {
)]
pub name: String,
#[validate(custom(function = "crate::util::validate::validate_no_restricted_scopes"))]
#[validate(custom(
function = "crate::util::validate::validate_no_restricted_scopes"
))]
pub max_scopes: Scopes,
pub redirect_uris: Vec<String>,
@ -169,9 +177,9 @@ pub async fn oauth_client_create<'a>(
.await?
.1;
new_oauth_app
.validate()
.map_err(|e| CreateError::ValidationError(validation_errors_to_string(e, None)))?;
new_oauth_app.validate().map_err(|e| {
CreateError::ValidationError(validation_errors_to_string(e, None))
})?;
let mut transaction = pool.begin().await?;
@ -180,8 +188,12 @@ pub async fn oauth_client_create<'a>(
let client_secret = generate_oauth_client_secret();
let client_secret_hash = DBOAuthClient::hash_secret(&client_secret);
let redirect_uris =
create_redirect_uris(&new_oauth_app.redirect_uris, client_id, &mut transaction).await?;
let redirect_uris = create_redirect_uris(
&new_oauth_app.redirect_uris,
client_id,
&mut transaction,
)
.await?;
let client = OAuthClient {
id: client_id,
@ -226,7 +238,8 @@ pub async fn oauth_client_delete<'a>(
.await?
.1;
let client = OAuthClient::get(client_id.into_inner().into(), &**pool).await?;
let client =
OAuthClient::get(client_id.into_inner().into(), &**pool).await?;
if let Some(client) = client {
client.validate_authorized(Some(&current_user))?;
OAuthClient::remove(client.id, &**pool).await?;
@ -245,7 +258,9 @@ pub struct OAuthClientEdit {
)]
pub name: Option<String>,
#[validate(custom(function = "crate::util::validate::validate_no_restricted_scopes"))]
#[validate(custom(
function = "crate::util::validate::validate_no_restricted_scopes"
))]
pub max_scopes: Option<Scopes>,
#[validate(length(min = 1))]
@ -280,11 +295,13 @@ pub async fn oauth_client_edit(
.await?
.1;
client_updates
.validate()
.map_err(|e| ApiError::Validation(validation_errors_to_string(e, None)))?;
client_updates.validate().map_err(|e| {
ApiError::Validation(validation_errors_to_string(e, None))
})?;
if let Some(existing_client) = OAuthClient::get(client_id.into_inner().into(), &**pool).await? {
if let Some(existing_client) =
OAuthClient::get(client_id.into_inner().into(), &**pool).await?
{
existing_client.validate_authorized(Some(&current_user))?;
let mut updated_client = existing_client.clone();
@ -317,7 +334,8 @@ pub async fn oauth_client_edit(
.await?;
if let Some(redirects) = redirect_uris {
edit_redirects(redirects, &existing_client, &mut transaction).await?;
edit_redirects(redirects, &existing_client, &mut transaction)
.await?;
}
transaction.commit().await?;
@ -358,7 +376,9 @@ pub async fn oauth_client_icon_edit(
let client = OAuthClient::get((*client_id).into(), &**pool)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified client does not exist!".to_string())
ApiError::InvalidInput(
"The specified client does not exist!".to_string(),
)
})?;
client.validate_authorized(Some(&user))?;
@ -370,8 +390,12 @@ pub async fn oauth_client_icon_edit(
)
.await?;
let bytes =
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
let bytes = read_from_payload(
&mut payload,
262144,
"Icons must be smaller than 256KiB",
)
.await?;
let upload_result = upload_image_optimized(
&format!("data/{}", client_id),
bytes.freeze(),
@ -419,7 +443,9 @@ pub async fn oauth_client_icon_delete(
let client = OAuthClient::get((*client_id).into(), &**pool)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified client does not exist!".to_string())
ApiError::InvalidInput(
"The specified client does not exist!".to_string(),
)
})?;
client.validate_authorized(Some(&user))?;
@ -461,8 +487,11 @@ pub async fn get_user_oauth_authorizations(
.await?
.1;
let authorizations =
OAuthClientAuthorization::get_all_for_user(current_user.id.into(), &**pool).await?;
let authorizations = OAuthClientAuthorization::get_all_for_user(
current_user.id.into(),
&**pool,
)
.await?;
let mapped: Vec<models::oauth_clients::OAuthClientAuthorization> =
authorizations.into_iter().map(|a| a.into()).collect_vec();
@ -488,8 +517,12 @@ pub async fn revoke_oauth_authorization(
.await?
.1;
OAuthClientAuthorization::remove(info.client_id.into(), current_user.id.into(), &**pool)
.await?;
OAuthClientAuthorization::remove(
info.client_id.into(),
current_user.id.into(),
&**pool,
)
.await?;
Ok(HttpResponse::Ok().body(""))
}
@ -538,12 +571,16 @@ async fn edit_redirects(
&mut *transaction,
)
.await?;
OAuthClient::insert_redirect_uris(&redirects_to_add, &mut **transaction).await?;
OAuthClient::insert_redirect_uris(&redirects_to_add, &mut **transaction)
.await?;
let mut redirects_to_remove = existing_client.redirect_uris.clone();
redirects_to_remove.retain(|r| !updated_redirects.contains(&r.uri));
OAuthClient::remove_redirect_uris(redirects_to_remove.iter().map(|r| r.id), &mut **transaction)
.await?;
OAuthClient::remove_redirect_uris(
redirects_to_remove.iter().map(|r| r.id),
&mut **transaction,
)
.await?;
Ok(())
}


@ -4,7 +4,9 @@ use std::sync::Arc;
use super::ApiError;
use crate::auth::{filter_visible_projects, get_user_from_headers};
use crate::database::models::team_item::TeamMember;
use crate::database::models::{generate_organization_id, team_item, Organization};
use crate::database::models::{
generate_organization_id, team_item, Organization,
};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::base62_impl::parse_base62;
@ -83,10 +85,16 @@ pub async fn organization_projects_get(
.try_collect::<Vec<database::models::ProjectId>>()
.await?;
let projects_data =
crate::database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?;
let projects_data = crate::database::models::Project::get_many_ids(
&project_ids,
&**pool,
&redis,
)
.await?;
let projects = filter_visible_projects(projects_data, &current_user, &pool, true).await?;
let projects =
filter_visible_projects(projects_data, &current_user, &pool, true)
.await?;
Ok(HttpResponse::Ok().json(projects))
}
@ -121,9 +129,9 @@ pub async fn organization_create(
.await?
.1;
new_organization
.validate()
.map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;
new_organization.validate().map_err(|err| {
CreateError::ValidationError(validation_errors_to_string(err, None))
})?;
let mut transaction = pool.begin().await?;
@ -135,7 +143,12 @@ pub async fn organization_create(
organization_strings.push(name_organization_id.to_string());
}
organization_strings.push(new_organization.slug.clone());
let results = Organization::get_many(&organization_strings, &mut *transaction, &redis).await?;
let results = Organization::get_many(
&organization_strings,
&mut *transaction,
&redis,
)
.await?;
if !results.is_empty() {
return Err(CreateError::SlugCollision);
}
@ -188,7 +201,8 @@ pub async fn organization_create(
));
};
let organization = models::organizations::Organization::from(organization, members_data);
let organization =
models::organizations::Organization::from(organization, members_data);
Ok(HttpResponse::Ok().json(organization))
}
@ -215,7 +229,9 @@ pub async fn organization_get(
let organization_data = Organization::get(&id, &**pool, &redis).await?;
if let Some(data) = organization_data {
let members_data = TeamMember::get_from_team_full(data.team_id, &**pool, &redis).await?;
let members_data =
TeamMember::get_from_team_full(data.team_id, &**pool, &redis)
.await?;
let users = crate::database::models::User::get_many_ids(
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
@ -237,17 +253,24 @@ pub async fn organization_get(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| y == x.user_id)
.map(|y: crate::database::models::UserId| {
y == x.user_id
})
.unwrap_or(false)
})
.flat_map(|data| {
users.iter().find(|x| x.id == data.user_id).map(|user| {
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
crate::models::teams::TeamMember::from(
data,
user.clone(),
!logged_in,
)
})
})
.collect();
let organization = models::organizations::Organization::from(data, team_members);
let organization =
models::organizations::Organization::from(data, team_members);
return Ok(HttpResponse::Ok().json(organization));
}
Err(ApiError::NotFound)
@ -266,13 +289,15 @@ pub async fn organizations_get(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
let organizations_data = Organization::get_many(&ids, &**pool, &redis).await?;
let organizations_data =
Organization::get_many(&ids, &**pool, &redis).await?;
let team_ids = organizations_data
.iter()
.map(|x| x.team_id)
.collect::<Vec<_>>();
let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
let teams_data =
TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
let users = crate::database::models::User::get_many_ids(
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
&**pool,
@ -316,17 +341,24 @@ pub async fn organizations_get(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| y == x.user_id)
.map(|y: crate::database::models::UserId| {
y == x.user_id
})
.unwrap_or(false)
})
.flat_map(|data| {
users.iter().find(|x| x.id == data.user_id).map(|user| {
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
crate::models::teams::TeamMember::from(
data,
user.clone(),
!logged_in,
)
})
})
.collect();
let organization = models::organizations::Organization::from(data, team_members);
let organization =
models::organizations::Organization::from(data, team_members);
organizations.push(organization);
}
@ -364,12 +396,13 @@ pub async fn organizations_edit(
.await?
.1;
new_organization
.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
new_organization.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let string = info.into_inner().0;
let result = database::models::Organization::get(&string, &**pool, &redis).await?;
let result =
database::models::Organization::get(&string, &**pool, &redis).await?;
if let Some(organization_item) = result {
let id = organization_item.id;
@ -380,8 +413,10 @@ pub async fn organizations_edit(
)
.await?;
let permissions =
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member);
let permissions = OrganizationPermissions::get_permissions_by_role(
&user.role,
&team_member,
);
if let Some(perms) = permissions {
let mut transaction = pool.begin().await?;
@ -433,8 +468,10 @@ pub async fn organizations_edit(
));
}
let name_organization_id_option: Option<u64> = parse_base62(slug).ok();
if let Some(name_organization_id) = name_organization_id_option {
let name_organization_id_option: Option<u64> =
parse_base62(slug).ok();
if let Some(name_organization_id) = name_organization_id_option
{
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1)
@ -446,7 +483,8 @@ pub async fn organizations_edit(
if results.exists.unwrap_or(true) {
return Err(ApiError::InvalidInput(
"slug collides with other organization's id!".to_string(),
"slug collides with other organization's id!"
.to_string(),
));
}
}
@ -465,7 +503,8 @@ pub async fn organizations_edit(
if results.exists.unwrap_or(true) {
return Err(ApiError::InvalidInput(
"slug collides with other organization's id!".to_string(),
"slug collides with other organization's id!"
.to_string(),
));
}
}
@ -494,7 +533,8 @@ pub async fn organizations_edit(
Ok(HttpResponse::NoContent().body(""))
} else {
Err(ApiError::CustomAuthentication(
"You do not have permission to edit this organization!".to_string(),
"You do not have permission to edit this organization!"
.to_string(),
))
}
} else {
@ -520,32 +560,41 @@ pub async fn organization_delete(
.1;
let string = info.into_inner().0;
let organization = database::models::Organization::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified organization does not exist!".to_string())
})?;
let organization =
database::models::Organization::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified organization does not exist!".to_string(),
)
})?;
if !user.role.is_admin() {
let team_member = database::models::TeamMember::get_from_user_id_organization(
organization.id,
user.id.into(),
false,
&**pool,
)
.await
.map_err(ApiError::Database)?
.ok_or_else(|| {
ApiError::InvalidInput("The specified organization does not exist!".to_string())
})?;
let team_member =
database::models::TeamMember::get_from_user_id_organization(
organization.id,
user.id.into(),
false,
&**pool,
)
.await
.map_err(ApiError::Database)?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified organization does not exist!".to_string(),
)
})?;
let permissions =
OrganizationPermissions::get_permissions_by_role(&user.role, &Some(team_member))
.unwrap_or_default();
let permissions = OrganizationPermissions::get_permissions_by_role(
&user.role,
&Some(team_member),
)
.unwrap_or_default();
if !permissions.contains(OrganizationPermissions::DELETE_ORGANIZATION) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to delete this organization!".to_string(),
"You don't have permission to delete this organization!"
.to_string(),
));
}
}
@ -582,8 +631,10 @@ pub async fn organization_delete(
.await?;
for organization_project_team in organization_project_teams.iter() {
let new_id =
crate::database::models::ids::generate_team_member_id(&mut transaction).await?;
let new_id = crate::database::models::ids::generate_team_member_id(
&mut transaction,
)
.await?;
let member = TeamMember {
id: new_id,
team_id: *organization_project_team,
@ -599,13 +650,21 @@ pub async fn organization_delete(
member.insert(&mut transaction).await?;
}
// Safely remove the organization
let result =
database::models::Organization::remove(organization.id, &mut transaction, &redis).await?;
let result = database::models::Organization::remove(
organization.id,
&mut transaction,
&redis,
)
.await?;
transaction.commit().await?;
database::models::Organization::clear_cache(organization.id, Some(organization.slug), &redis)
.await?;
database::models::Organization::clear_cache(
organization.id,
Some(organization.slug),
&redis,
)
.await?;
for team_id in organization_project_teams {
database::models::TeamMember::clear_cache(team_id, &redis).await?;
@ -641,41 +700,59 @@ pub async fn organization_projects_add(
.await?
.1;
let organization = database::models::Organization::get(&info, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified organization does not exist!".to_string())
})?;
let organization =
database::models::Organization::get(&info, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified organization does not exist!".to_string(),
)
})?;
let project_item = database::models::Project::get(&project_info.project_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
})?;
if project_item.inner.organization_id.is_some() {
return Err(ApiError::InvalidInput(
"The specified project is already owned by an organization!".to_string(),
));
}
let project_team_member = database::models::TeamMember::get_from_user_id_project(
project_item.inner.id,
current_user.id.into(),
false,
&**pool,
)
.await?
.ok_or_else(|| ApiError::InvalidInput("You are not a member of this project!".to_string()))?;
let organization_team_member = database::models::TeamMember::get_from_user_id_organization(
organization.id,
current_user.id.into(),
false,
let project_item = database::models::Project::get(
&project_info.project_id,
&**pool,
&redis,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("You are not a member of this organization!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if project_item.inner.organization_id.is_some() {
return Err(ApiError::InvalidInput(
"The specified project is already owned by an organization!"
.to_string(),
));
}
let project_team_member =
database::models::TeamMember::get_from_user_id_project(
project_item.inner.id,
current_user.id.into(),
false,
&**pool,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"You are not a member of this project!".to_string(),
)
})?;
let organization_team_member =
database::models::TeamMember::get_from_user_id_organization(
organization.id,
current_user.id.into(),
false,
&**pool,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"You are not a member of this organization!".to_string(),
)
})?;
// Require ownership of a project to add it to an organization
if !current_user.role.is_admin() && !project_team_member.is_owner {
@ -734,8 +811,16 @@ pub async fn organization_projects_add(
transaction.commit().await?;
database::models::User::clear_project_cache(&[current_user.id.into()], &redis).await?;
database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?;
database::models::User::clear_project_cache(
&[current_user.id.into()],
&redis,
)
.await?;
database::models::TeamMember::clear_cache(
project_item.inner.team_id,
&redis,
)
.await?;
database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
@ -745,7 +830,8 @@ pub async fn organization_projects_add(
.await?;
} else {
return Err(ApiError::CustomAuthentication(
"You do not have permission to add projects to this organization!".to_string(),
"You do not have permission to add projects to this organization!"
.to_string(),
));
}
Ok(HttpResponse::Ok().finish())
@ -777,17 +863,23 @@ pub async fn organization_projects_remove(
.await?
.1;
let organization = database::models::Organization::get(&organization_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified organization does not exist!".to_string())
})?;
let organization =
database::models::Organization::get(&organization_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified organization does not exist!".to_string(),
)
})?;
let project_item = database::models::Project::get(&project_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
})?;
let project_item =
database::models::Project::get(&project_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if !project_item
.inner
@ -795,20 +887,24 @@ pub async fn organization_projects_remove(
.eq(&Some(organization.id))
{
return Err(ApiError::InvalidInput(
"The specified project is not owned by this organization!".to_string(),
"The specified project is not owned by this organization!"
.to_string(),
));
}
let organization_team_member = database::models::TeamMember::get_from_user_id_organization(
organization.id,
current_user.id.into(),
false,
&**pool,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("You are not a member of this organization!".to_string())
})?;
let organization_team_member =
database::models::TeamMember::get_from_user_id_organization(
organization.id,
current_user.id.into(),
false,
&**pool,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"You are not a member of this organization!".to_string(),
)
})?;
let permissions = OrganizationPermissions::get_permissions_by_role(
&current_user.role,
@ -826,7 +922,8 @@ pub async fn organization_projects_remove(
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified user is not a member of this organization!".to_string(),
"The specified user is not a member of this organization!"
.to_string(),
)
})?;
@ -847,7 +944,10 @@ pub async fn organization_projects_remove(
Some(new_owner) => new_owner,
None => {
let new_id =
crate::database::models::ids::generate_team_member_id(&mut transaction).await?;
crate::database::models::ids::generate_team_member_id(
&mut transaction,
)
.await?;
let member = TeamMember {
id: new_id,
team_id: project_item.inner.team_id,
@ -895,8 +995,16 @@ pub async fn organization_projects_remove(
.await?;
transaction.commit().await?;
database::models::User::clear_project_cache(&[current_user.id.into()], &redis).await?;
database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?;
database::models::User::clear_project_cache(
&[current_user.id.into()],
&redis,
)
.await?;
database::models::TeamMember::clear_cache(
project_item.inner.team_id,
&redis,
)
.await?;
database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
@ -906,7 +1014,8 @@ pub async fn organization_projects_remove(
.await?;
} else {
return Err(ApiError::CustomAuthentication(
"You do not have permission to add projects to this organization!".to_string(),
"You do not have permission to add projects to this organization!"
.to_string(),
));
}
Ok(HttpResponse::Ok().finish())
@ -939,11 +1048,14 @@ pub async fn organization_icon_edit(
.1;
let string = info.into_inner().0;
let organization_item = database::models::Organization::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified organization does not exist!".to_string())
})?;
let organization_item =
database::models::Organization::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified organization does not exist!".to_string(),
)
})?;
if !user.role.is_mod() {
let team_member = database::models::TeamMember::get_from_user_id(
@ -954,13 +1066,16 @@ pub async fn organization_icon_edit(
.await
.map_err(ApiError::Database)?;
let permissions =
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member)
.unwrap_or_default();
let permissions = OrganizationPermissions::get_permissions_by_role(
&user.role,
&team_member,
)
.unwrap_or_default();
if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit this organization's icon.".to_string(),
"You don't have permission to edit this organization's icon."
.to_string(),
));
}
}
@ -972,8 +1087,12 @@ pub async fn organization_icon_edit(
)
.await?;
let bytes =
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
let bytes = read_from_payload(
&mut payload,
262144,
"Icons must be smaller than 256KiB",
)
.await?;
let organization_id: OrganizationId = organization_item.id.into();
let upload_result = crate::util::img::upload_image_optimized(
@ -1032,11 +1151,14 @@ pub async fn delete_organization_icon(
.1;
let string = info.into_inner().0;
let organization_item = database::models::Organization::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified organization does not exist!".to_string())
})?;
let organization_item =
database::models::Organization::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified organization does not exist!".to_string(),
)
})?;
if !user.role.is_mod() {
let team_member = database::models::TeamMember::get_from_user_id(
@ -1047,13 +1169,16 @@ pub async fn delete_organization_icon(
.await
.map_err(ApiError::Database)?;
let permissions =
OrganizationPermissions::get_permissions_by_role(&user.role, &team_member)
.unwrap_or_default();
let permissions = OrganizationPermissions::get_permissions_by_role(
&user.role,
&team_member,
)
.unwrap_or_default();
if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit this organization's icon.".to_string(),
"You don't have permission to edit this organization's icon."
.to_string(),
));
}
}


@ -46,27 +46,37 @@ pub async fn paypal_webhook(
.headers()
.get("PAYPAL-AUTH-ALGO")
.and_then(|x| x.to_str().ok())
.ok_or_else(|| ApiError::InvalidInput("missing auth algo".to_string()))?;
.ok_or_else(|| {
ApiError::InvalidInput("missing auth algo".to_string())
})?;
let cert_url = req
.headers()
.get("PAYPAL-CERT-URL")
.and_then(|x| x.to_str().ok())
.ok_or_else(|| ApiError::InvalidInput("missing cert url".to_string()))?;
.ok_or_else(|| {
ApiError::InvalidInput("missing cert url".to_string())
})?;
let transmission_id = req
.headers()
.get("PAYPAL-TRANSMISSION-ID")
.and_then(|x| x.to_str().ok())
.ok_or_else(|| ApiError::InvalidInput("missing transmission ID".to_string()))?;
.ok_or_else(|| {
ApiError::InvalidInput("missing transmission ID".to_string())
})?;
let transmission_sig = req
.headers()
.get("PAYPAL-TRANSMISSION-SIG")
.and_then(|x| x.to_str().ok())
.ok_or_else(|| ApiError::InvalidInput("missing transmission sig".to_string()))?;
.ok_or_else(|| {
ApiError::InvalidInput("missing transmission sig".to_string())
})?;
let transmission_time = req
.headers()
.get("PAYPAL-TRANSMISSION-TIME")
.and_then(|x| x.to_str().ok())
.ok_or_else(|| ApiError::InvalidInput("missing transmission time".to_string()))?;
.ok_or_else(|| {
ApiError::InvalidInput("missing transmission time".to_string())
})?;
#[derive(Deserialize)]
struct WebHookResponse {
@ -190,11 +200,14 @@ pub async fn tremendous_webhook(
.get("Tremendous-Webhook-Signature")
.and_then(|x| x.to_str().ok())
.and_then(|x| x.split('=').next_back())
.ok_or_else(|| ApiError::InvalidInput("missing webhook signature".to_string()))?;
.ok_or_else(|| {
ApiError::InvalidInput("missing webhook signature".to_string())
})?;
let mut mac: Hmac<Sha256> =
Hmac::new_from_slice(dotenvy::var("TREMENDOUS_PRIVATE_KEY")?.as_bytes())
.map_err(|_| ApiError::Payments("error initializing HMAC".to_string()))?;
let mut mac: Hmac<Sha256> = Hmac::new_from_slice(
dotenvy::var("TREMENDOUS_PRIVATE_KEY")?.as_bytes(),
)
.map_err(|_| ApiError::Payments("error initializing HMAC".to_string()))?;
mac.update(body.as_bytes());
let request_signature = mac.finalize().into_bytes().encode_hex::<String>();
@ -300,10 +313,16 @@ pub async fn user_payouts(
.1;
let payout_ids =
crate::database::models::payout_item::Payout::get_all_for_user(user.id.into(), &**pool)
.await?;
let payouts =
crate::database::models::payout_item::Payout::get_many(&payout_ids, &**pool).await?;
crate::database::models::payout_item::Payout::get_all_for_user(
user.id.into(),
&**pool,
)
.await?;
let payouts = crate::database::models::payout_item::Payout::get_many(
&payout_ids,
&**pool,
)
.await?;
Ok(HttpResponse::Ok().json(
payouts
@ -330,10 +349,17 @@ pub async fn create_payout(
session_queue: web::Data<AuthQueue>,
payouts_queue: web::Data<PayoutsQueue>,
) -> Result<HttpResponse, ApiError> {
let (scopes, user) =
get_user_record_from_bearer_token(&req, None, &**pool, &redis, &session_queue)
.await?
.ok_or_else(|| ApiError::Authentication(AuthenticationError::InvalidCredentials))?;
let (scopes, user) = get_user_record_from_bearer_token(
&req,
None,
&**pool,
&redis,
&session_queue,
)
.await?
.ok_or_else(|| {
ApiError::Authentication(AuthenticationError::InvalidCredentials)
})?;
if !scopes.contains(Scopes::PAYOUTS_WRITE) {
return Err(ApiError::Authentication(
@ -364,7 +390,11 @@ pub async fn create_payout(
.await?
.into_iter()
.find(|x| x.id == body.method_id)
.ok_or_else(|| ApiError::InvalidInput("Invalid payment method specified!".to_string()))?;
.ok_or_else(|| {
ApiError::InvalidInput(
"Invalid payment method specified!".to_string(),
)
})?;
let fee = std::cmp::min(
std::cmp::max(
@ -385,43 +415,50 @@ pub async fn create_payout(
let payout_item = match body.method {
PayoutMethodType::Venmo | PayoutMethodType::PayPal => {
let (wallet, wallet_type, address, display_address) =
if body.method == PayoutMethodType::Venmo {
if let Some(venmo) = user.venmo_handle {
("Venmo", "user_handle", venmo.clone(), venmo)
} else {
return Err(ApiError::InvalidInput(
"Venmo address has not been set for account!".to_string(),
));
}
} else if let Some(paypal_id) = user.paypal_id {
if let Some(paypal_country) = user.paypal_country {
if &*paypal_country == "US" && &*body.method_id != "paypal_us" {
return Err(ApiError::InvalidInput(
"Please use the US PayPal transfer option!".to_string(),
));
} else if &*paypal_country != "US" && &*body.method_id == "paypal_us" {
return Err(ApiError::InvalidInput(
"Please use the International PayPal transfer option!".to_string(),
));
}
(
"PayPal",
"paypal_id",
paypal_id.clone(),
user.paypal_email.unwrap_or(paypal_id),
)
} else {
return Err(ApiError::InvalidInput(
"Please re-link your PayPal account!".to_string(),
));
}
let (wallet, wallet_type, address, display_address) = if body.method
== PayoutMethodType::Venmo
{
if let Some(venmo) = user.venmo_handle {
("Venmo", "user_handle", venmo.clone(), venmo)
} else {
return Err(ApiError::InvalidInput(
"You have not linked a PayPal account!".to_string(),
"Venmo address has not been set for account!"
.to_string(),
));
};
}
} else if let Some(paypal_id) = user.paypal_id {
if let Some(paypal_country) = user.paypal_country {
if &*paypal_country == "US"
&& &*body.method_id != "paypal_us"
{
return Err(ApiError::InvalidInput(
"Please use the US PayPal transfer option!"
.to_string(),
));
} else if &*paypal_country != "US"
&& &*body.method_id == "paypal_us"
{
return Err(ApiError::InvalidInput(
"Please use the International PayPal transfer option!".to_string(),
));
}
(
"PayPal",
"paypal_id",
paypal_id.clone(),
user.paypal_email.unwrap_or(paypal_id),
)
} else {
return Err(ApiError::InvalidInput(
"Please re-link your PayPal account!".to_string(),
));
}
} else {
return Err(ApiError::InvalidInput(
"You have not linked a PayPal account!".to_string(),
));
};
#[derive(Deserialize)]
struct PayPalLink {
@ -433,17 +470,18 @@ pub async fn create_payout(
pub links: Vec<PayPalLink>,
}
let mut payout_item = crate::database::models::payout_item::Payout {
id: payout_id,
user_id: user.id,
created: Utc::now(),
status: PayoutStatus::InTransit,
amount: transfer,
fee: Some(fee),
method: Some(body.method),
method_address: Some(display_address),
platform_id: None,
};
let mut payout_item =
crate::database::models::payout_item::Payout {
id: payout_id,
user_id: user.id,
created: Utc::now(),
status: PayoutStatus::InTransit,
amount: transfer,
fee: Some(fee),
method: Some(body.method),
method_address: Some(display_address),
platform_id: None,
};
let res: PayoutsResponse = payouts_queue.make_paypal_request(
Method::POST,
@ -494,7 +532,8 @@ pub async fn create_payout(
.await
{
if let Some(data) = res.items.first() {
payout_item.platform_id = Some(data.payout_item_id.clone());
payout_item.platform_id =
Some(data.payout_item_id.clone());
}
}
}
@ -504,17 +543,18 @@ pub async fn create_payout(
PayoutMethodType::Tremendous => {
if let Some(email) = user.email {
if user.email_verified {
let mut payout_item = crate::database::models::payout_item::Payout {
id: payout_id,
user_id: user.id,
created: Utc::now(),
status: PayoutStatus::InTransit,
amount: transfer,
fee: Some(fee),
method: Some(PayoutMethodType::Tremendous),
method_address: Some(email.clone()),
platform_id: None,
};
let mut payout_item =
crate::database::models::payout_item::Payout {
id: payout_id,
user_id: user.id,
created: Utc::now(),
status: PayoutStatus::InTransit,
amount: transfer,
fee: Some(fee),
method: Some(PayoutMethodType::Tremendous),
method_address: Some(email.clone()),
platform_id: None,
};
#[derive(Deserialize)]
struct Reward {
@ -566,12 +606,14 @@ pub async fn create_payout(
payout_item
} else {
return Err(ApiError::InvalidInput(
"You must verify your account email to proceed!".to_string(),
"You must verify your account email to proceed!"
.to_string(),
));
}
} else {
return Err(ApiError::InvalidInput(
"You must add an email to your account to proceed!".to_string(),
"You must add an email to your account to proceed!"
.to_string(),
));
}
}
@ -585,7 +627,8 @@ pub async fn create_payout(
payout_item.insert(&mut transaction).await?;
transaction.commit().await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis).await?;
crate::database::models::User::clear_caches(&[(user.id, None)], &redis)
.await?;
Ok(HttpResponse::NoContent().finish())
}
@ -610,7 +653,9 @@ pub async fn cancel_payout(
.1;
let id = info.into_inner().0;
let payout = crate::database::models::payout_item::Payout::get(id.into(), &**pool).await?;
let payout =
crate::database::models::payout_item::Payout::get(id.into(), &**pool)
.await?;
if let Some(payout) = payout {
if payout.user_id != user.id.into() && !user.role.is_admin() {
@ -630,7 +675,10 @@ pub async fn cancel_payout(
payouts
.make_paypal_request::<(), ()>(
Method::POST,
&format!("payments/payouts-item/{}/cancel", platform_id),
&format!(
"payments/payouts-item/{}/cancel",
platform_id
),
None,
None,
None,
@ -792,7 +840,9 @@ async fn get_user_balance(
.unwrap_or((Decimal::ZERO, Decimal::ZERO));
Ok(UserBalance {
available: available.round_dp(16) - withdrawn.round_dp(16) - fees.round_dp(16),
available: available.round_dp(16)
- withdrawn.round_dp(16)
- fees.round_dp(16),
pending,
})
}
@ -837,14 +887,19 @@ pub async fn platform_revenue(
.and_then(|x| x.sum)
.unwrap_or(Decimal::ZERO);
let points =
make_aditude_request(&["METRIC_REVENUE", "METRIC_IMPRESSIONS"], "30d", "1d").await?;
let points = make_aditude_request(
&["METRIC_REVENUE", "METRIC_IMPRESSIONS"],
"30d",
"1d",
)
.await?;
let mut points_map = HashMap::new();
for point in points {
for point in point.points_list {
let entry = points_map.entry(point.time.seconds).or_insert((None, None));
let entry =
points_map.entry(point.time.seconds).or_insert((None, None));
if let Some(revenue) = point.metric.revenue {
entry.0 = Some(revenue);
@ -868,7 +923,8 @@ pub async fn platform_revenue(
.and_utc()
.timestamp();
if let Some((revenue, impressions)) = points_map.remove(&(start as u64)) {
if let Some((revenue, impressions)) = points_map.remove(&(start as u64))
{
// Before 9/5/24, when legacy payouts were in effect.
if start >= 1725494400 {
let revenue = revenue.unwrap_or(Decimal::ZERO);
@ -879,8 +935,9 @@ pub async fn platform_revenue(
// Clean.io fee (ad antimalware). Per 1000 impressions.
let clean_io_fee = Decimal::from(8) / Decimal::from(1000);
let net_revenue =
revenue - (clean_io_fee * Decimal::from(impressions) / Decimal::from(1000));
let net_revenue = revenue
- (clean_io_fee * Decimal::from(impressions)
/ Decimal::from(1000));
let payout = net_revenue * (Decimal::from(1) - modrinth_cut);
@ -903,7 +960,12 @@ pub async fn platform_revenue(
};
redis
.set_serialized_to_json(PLATFORM_REVENUE_NAMESPACE, 0, &res, Some(60 * 60))
.set_serialized_to_json(
PLATFORM_REVENUE_NAMESPACE,
0,
&res,
Some(60 * 60),
)
.await?;
Ok(HttpResponse::Ok().json(res))
@ -918,7 +980,8 @@ fn get_legacy_data_point(timestamp: u64) -> RevenueData {
let weekdays = Decimal::from(20);
let weekend_bonus = Decimal::from(5) / Decimal::from(4);
let weekday_amount = old_payouts_budget / (weekdays + (weekend_bonus) * (days - weekdays));
let weekday_amount =
old_payouts_budget / (weekdays + (weekend_bonus) * (days - weekdays));
let weekend_amount = weekday_amount * weekend_bonus;
let payout = match start.weekday() {


@ -1,6 +1,8 @@
use super::version_creation::{try_create_version_fields, InitialVersionData};
use crate::auth::{get_user_from_headers, AuthenticationError};
use crate::database::models::loader_fields::{Loader, LoaderField, LoaderFieldEnumValue};
use crate::database::models::loader_fields::{
Loader, LoaderField, LoaderFieldEnumValue,
};
use crate::database::models::thread_item::ThreadBuilder;
use crate::database::models::{self, image_item, User};
use crate::database::redis::RedisPool;
@ -11,7 +13,8 @@ use crate::models::ids::{ImageId, OrganizationId};
use crate::models::images::{Image, ImageContext};
use crate::models::pats::Scopes;
use crate::models::projects::{
License, Link, MonetizationStatus, ProjectId, ProjectStatus, VersionId, VersionStatus,
License, Link, MonetizationStatus, ProjectId, ProjectStatus, VersionId,
VersionStatus,
};
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
use crate::models::threads::ThreadType;
@ -91,10 +94,14 @@ impl actix_web::ResponseError for CreateError {
fn status_code(&self) -> StatusCode {
match self {
CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::SqlxDatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::SqlxDatabaseError(..) => {
StatusCode::INTERNAL_SERVER_ERROR
}
CreateError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::IndexingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::FileHostingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::FileHostingError(..) => {
StatusCode::INTERNAL_SERVER_ERROR
}
CreateError::SerDeError(..) => StatusCode::BAD_REQUEST,
CreateError::MultipartError(..) => StatusCode::BAD_REQUEST,
CreateError::MissingValueError(..) => StatusCode::BAD_REQUEST,
@ -105,7 +112,9 @@ impl actix_web::ResponseError for CreateError {
CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST,
CreateError::InvalidFileType(..) => StatusCode::BAD_REQUEST,
CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED,
CreateError::CustomAuthenticationError(..) => StatusCode::UNAUTHORIZED,
CreateError::CustomAuthenticationError(..) => {
StatusCode::UNAUTHORIZED
}
CreateError::SlugCollision => StatusCode::BAD_REQUEST,
CreateError::ValidationError(..) => StatusCode::BAD_REQUEST,
CreateError::FileValidationError(..) => StatusCode::BAD_REQUEST,
@ -192,7 +201,9 @@ pub struct ProjectCreateData {
/// An optional link to the project's license page
pub license_url: Option<String>,
/// An optional list of all donation links the project has
#[validate(custom(function = "crate::util::validate::validate_url_hashmap_values"))]
#[validate(custom(
function = "crate::util::validate::validate_url_hashmap_values"
))]
#[serde(default)]
pub link_urls: HashMap<String, String>,
@ -343,8 +354,10 @@ async fn project_create_inner(
.await?
.1;
let project_id: ProjectId = models::generate_project_id(transaction).await?.into();
let all_loaders = models::loader_fields::Loader::list(&mut **transaction, redis).await?;
let project_id: ProjectId =
models::generate_project_id(transaction).await?.into();
let all_loaders =
models::loader_fields::Loader::list(&mut **transaction, redis).await?;
let project_create_data: ProjectCreateData;
let mut versions;
@ -365,9 +378,9 @@ async fn project_create_inner(
})?;
let content_disposition = field.content_disposition();
let name = content_disposition
.get_name()
.ok_or_else(|| CreateError::MissingValueError(String::from("Missing content name")))?;
let name = content_disposition.get_name().ok_or_else(|| {
CreateError::MissingValueError(String::from("Missing content name"))
})?;
if name != "data" {
return Err(CreateError::InvalidInput(String::from(
@ -377,19 +390,22 @@ async fn project_create_inner(
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
data.extend_from_slice(
&chunk.map_err(CreateError::MultipartError)?,
);
}
let create_data: ProjectCreateData = serde_json::from_slice(&data)?;
create_data
.validate()
.map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
create_data.validate().map_err(|err| {
CreateError::InvalidInput(validation_errors_to_string(err, None))
})?;
let slug_project_id_option: Option<ProjectId> =
serde_json::from_str(&format!("\"{}\"", create_data.slug)).ok();
if let Some(slug_project_id) = slug_project_id_option {
let slug_project_id: models::ids::ProjectId = slug_project_id.into();
let slug_project_id: models::ids::ProjectId =
slug_project_id.into();
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)
@ -602,9 +618,14 @@ async fn project_create_inner(
}
// Convert the list of category names to actual categories
let mut categories = Vec::with_capacity(project_create_data.categories.len());
let mut categories =
Vec::with_capacity(project_create_data.categories.len());
for category in &project_create_data.categories {
let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
let ids = models::categories::Category::get_ids(
category,
&mut **transaction,
)
.await?;
if ids.is_empty() {
return Err(CreateError::InvalidCategory(category.clone()));
}
@ -617,7 +638,11 @@ async fn project_create_inner(
let mut additional_categories =
Vec::with_capacity(project_create_data.additional_categories.len());
for category in &project_create_data.additional_categories {
let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
let ids = models::categories::Category::get_ids(
category,
&mut **transaction,
)
.await?;
if ids.is_empty() {
return Err(CreateError::InvalidCategory(category.clone()));
}
@ -629,18 +654,29 @@ async fn project_create_inner(
let mut members = vec![];
if let Some(organization_id) = project_create_data.organization_id {
let org = models::Organization::get_id(organization_id.into(), pool, redis)
.await?
.ok_or_else(|| {
CreateError::InvalidInput("Invalid organization ID specified!".to_string())
})?;
let org = models::Organization::get_id(
organization_id.into(),
pool,
redis,
)
.await?
.ok_or_else(|| {
CreateError::InvalidInput(
"Invalid organization ID specified!".to_string(),
)
})?;
let team_member =
models::TeamMember::get_from_user_id(org.team_id, current_user.id.into(), pool)
.await?;
let team_member = models::TeamMember::get_from_user_id(
org.team_id,
current_user.id.into(),
pool,
)
.await?;
let perms =
OrganizationPermissions::get_permissions_by_role(&current_user.role, &team_member);
let perms = OrganizationPermissions::get_permissions_by_role(
&current_user.role,
&team_member,
);
if !perms
.map(|x| x.contains(OrganizationPermissions::ADD_PROJECT))
@ -679,25 +715,32 @@ async fn project_create_inner(
}
}
let license_id =
spdx::Expression::parse(&project_create_data.license_id).map_err(|err| {
CreateError::InvalidInput(format!("Invalid SPDX license identifier: {err}"))
})?;
let license_id = spdx::Expression::parse(
&project_create_data.license_id,
)
.map_err(|err| {
CreateError::InvalidInput(format!(
"Invalid SPDX license identifier: {err}"
))
})?;
let mut link_urls = vec![];
let link_platforms =
models::categories::LinkPlatform::list(&mut **transaction, redis).await?;
models::categories::LinkPlatform::list(&mut **transaction, redis)
.await?;
for (platform, url) in &project_create_data.link_urls {
let platform_id =
models::categories::LinkPlatform::get_id(platform, &mut **transaction)
.await?
.ok_or_else(|| {
CreateError::InvalidInput(format!(
"Link platform {} does not exist.",
platform.clone()
))
})?;
let platform_id = models::categories::LinkPlatform::get_id(
platform,
&mut **transaction,
)
.await?
.ok_or_else(|| {
CreateError::InvalidInput(format!(
"Link platform {} does not exist.",
platform.clone()
))
})?;
let link_platform = link_platforms
.iter()
.find(|x| x.id == platform_id)
@ -718,7 +761,9 @@ async fn project_create_inner(
let project_builder_actual = models::project_item::ProjectBuilder {
project_id: project_id.into(),
team_id,
organization_id: project_create_data.organization_id.map(|x| x.into()),
organization_id: project_create_data
.organization_id
.map(|x| x.into()),
name: project_create_data.name,
summary: project_create_data.summary,
description: project_create_data.description,
@ -757,8 +802,12 @@ async fn project_create_inner(
User::clear_project_cache(&[current_user.id.into()], redis).await?;
for image_id in project_create_data.uploaded_images {
if let Some(db_image) =
image_item::Image::get(image_id.into(), &mut **transaction, redis).await?
if let Some(db_image) = image_item::Image::get(
image_id.into(),
&mut **transaction,
redis,
)
.await?
{
let image: Image = db_image.into();
if !matches!(image.context, ImageContext::Project { .. })
@ -884,12 +933,13 @@ async fn create_initial_version(
)));
}
version_data
.validate()
.map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;
version_data.validate().map_err(|err| {
CreateError::ValidationError(validation_errors_to_string(err, None))
})?;
// Randomly generate a new id to be used for the version
let version_id: VersionId = models::generate_version_id(transaction).await?.into();
let version_id: VersionId =
models::generate_version_id(transaction).await?.into();
let loaders = version_data
.loaders
@ -903,10 +953,15 @@ async fn create_initial_version(
})
.collect::<Result<Vec<models::LoaderId>, CreateError>>()?;
let loader_fields = LoaderField::get_fields(&loaders, &mut **transaction, redis).await?;
let loader_fields =
LoaderField::get_fields(&loaders, &mut **transaction, redis).await?;
let mut loader_field_enum_values =
LoaderFieldEnumValue::list_many_loader_fields(&loader_fields, &mut **transaction, redis)
.await?;
LoaderFieldEnumValue::list_many_loader_fields(
&loader_fields,
&mut **transaction,
redis,
)
.await?;
let version_fields = try_create_version_fields(
version_id,
@ -954,7 +1009,12 @@ async fn process_icon_upload(
file_host: &dyn FileHost,
mut field: Field,
) -> Result<(String, String, Option<u32>), CreateError> {
let data = read_from_field(&mut field, 262144, "Icons must be smaller than 256KiB").await?;
let data = read_from_field(
&mut field,
262144,
"Icons must be smaller than 256KiB",
)
.await?;
let upload_result = crate::util::img::upload_image_optimized(
&format!("data/{}", to_base62(id)),
data.freeze(),


@ -64,7 +64,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
"members",
web::get().to(super::teams::team_members_get_project),
)
.route("version", web::get().to(super::versions::version_list))
.route(
"version",
web::get().to(super::versions::version_list),
)
.route(
"version/{slug}",
web::get().to(super::versions::version_project_get),
@ -85,9 +88,9 @@ pub async fn random_projects_get(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
count
.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
count.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let project_ids = sqlx::query!(
"
@ -104,11 +107,12 @@ pub async fn random_projects_get(
.try_collect::<Vec<_>>()
.await?;
let projects_data = db_models::Project::get_many_ids(&project_ids, &**pool, &redis)
.await?
.into_iter()
.map(Project::from)
.collect::<Vec<_>>();
let projects_data =
db_models::Project::get_many_ids(&project_ids, &**pool, &redis)
.await?
.into_iter()
.map(Project::from)
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(projects_data))
}
@ -126,7 +130,8 @@ pub async fn projects_get(
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
let projects_data = db_models::Project::get_many(&ids, &**pool, &redis).await?;
let projects_data =
db_models::Project::get_many(&ids, &**pool, &redis).await?;
let user_option = get_user_from_headers(
&req,
@ -139,7 +144,9 @@ pub async fn projects_get(
.map(|x| x.1)
.ok();
let projects = filter_visible_projects(projects_data, &user_option, &pool, false).await?;
let projects =
filter_visible_projects(projects_data, &user_option, &pool, false)
.await?;
Ok(HttpResponse::Ok().json(projects))
}
@ -153,7 +160,8 @@ pub async fn project_get(
) -> Result<HttpResponse, ApiError> {
let string = info.into_inner().0;
let project_data = db_models::Project::get(&string, &**pool, &redis).await?;
let project_data =
db_models::Project::get(&string, &**pool, &redis).await?;
let user_option = get_user_from_headers(
&req,
&**pool,
@ -198,7 +206,9 @@ pub struct EditProject {
length(max = 2048)
)]
pub license_url: Option<Option<String>>,
#[validate(custom(function = "crate::util::validate::validate_url_hashmap_optional_values"))]
#[validate(custom(
function = "crate::util::validate::validate_url_hashmap_optional_values"
))]
// <name, url> (leave url empty to delete)
pub link_urls: Option<HashMap<String, Option<String>>>,
pub license_id: Option<String>,
@ -252,9 +262,9 @@ pub async fn project_edit(
.await?
.1;
new_project
.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
new_project.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let string = info.into_inner().0;
let result = db_models::Project::get(&string, &**pool, &redis).await?;
@ -331,10 +341,12 @@ pub async fn project_edit(
if !(user.role.is_mod()
|| !project_item.inner.status.is_approved()
&& status == &ProjectStatus::Processing
|| project_item.inner.status.is_approved() && status.can_be_requested())
|| project_item.inner.status.is_approved()
&& status.can_be_requested())
{
return Err(ApiError::CustomAuthentication(
"You don't have permission to set this status!".to_string(),
"You don't have permission to set this status!"
.to_string(),
));
}
@ -361,7 +373,9 @@ pub async fn project_edit(
.insert(project_item.inner.id.into());
}
if status.is_approved() && !project_item.inner.status.is_approved() {
if status.is_approved()
&& !project_item.inner.status.is_approved()
{
sqlx::query!(
"
UPDATE mods
@ -374,7 +388,9 @@ pub async fn project_edit(
.await?;
}
if status.is_searchable() && !project_item.inner.webhook_sent {
if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") {
if let Ok(webhook_url) =
dotenvy::var("PUBLIC_DISCORD_WEBHOOK")
{
crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(),
&pool,
@ -399,7 +415,9 @@ pub async fn project_edit(
}
if user.role.is_mod() {
if let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK") {
if let Ok(webhook_url) =
dotenvy::var("MODERATION_SLACK_WEBHOOK")
{
crate::util::webhook::send_slack_webhook(
project_item.inner.id.into(),
&pool,
@ -471,7 +489,9 @@ pub async fn project_edit(
.execute(&mut *transaction)
.await?;
if project_item.inner.status.is_searchable() && !status.is_searchable() {
if project_item.inner.status.is_searchable()
&& !status.is_searchable()
{
remove_documents(
&project_item
.versions
@ -591,7 +611,8 @@ pub async fn project_edit(
));
}
let slug_project_id_option: Option<u64> = parse_base62(slug).ok();
let slug_project_id_option: Option<u64> =
parse_base62(slug).ok();
if let Some(slug_project_id) = slug_project_id_option {
let results = sqlx::query!(
"
@ -604,14 +625,20 @@ pub async fn project_edit(
if results.exists.unwrap_or(true) {
return Err(ApiError::InvalidInput(
"Slug collides with other project's id!".to_string(),
"Slug collides with other project's id!"
.to_string(),
));
}
}
// Make sure the new slug is different from the old one
// We are able to unwrap here because the slug is always set
if !slug.eq(&project_item.inner.slug.clone().unwrap_or_default()) {
if !slug.eq(&project_item
.inner
.slug
.clone()
.unwrap_or_default())
{
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1))
@ -623,7 +650,8 @@ pub async fn project_edit(
if results.exists.unwrap_or(true) {
return Err(ApiError::InvalidInput(
"Slug collides with other project's id!".to_string(),
"Slug collides with other project's id!"
.to_string(),
));
}
}
@ -656,7 +684,9 @@ pub async fn project_edit(
}
spdx::Expression::parse(&license).map_err(|err| {
ApiError::InvalidInput(format!("Invalid SPDX license identifier: {err}"))
ApiError::InvalidInput(format!(
"Invalid SPDX license identifier: {err}"
))
})?;
sqlx::query!(
@ -700,17 +730,20 @@ pub async fn project_edit(
for (platform, url) in links {
if let Some(url) = url {
let platform_id = db_models::categories::LinkPlatform::get_id(
platform,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"Platform {} does not exist.",
platform.clone()
))
})?;
let platform_id =
db_models::categories::LinkPlatform::get_id(
platform,
&mut *transaction,
)
.await?
.ok_or_else(
|| {
ApiError::InvalidInput(format!(
"Platform {} does not exist.",
platform.clone()
))
},
)?;
sqlx::query!(
"
INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
@ -728,7 +761,8 @@ pub async fn project_edit(
}
if let Some(moderation_message) = &new_project.moderation_message {
if !user.role.is_mod()
&& (!project_item.inner.status.is_approved() || moderation_message.is_some())
&& (!project_item.inner.status.is_approved()
|| moderation_message.is_some())
{
return Err(ApiError::CustomAuthentication(
"You do not have the permissions to edit the moderation message of this project!"
@ -749,7 +783,9 @@ pub async fn project_edit(
.await?;
}
if let Some(moderation_message_body) = &new_project.moderation_message_body {
if let Some(moderation_message_body) =
&new_project.moderation_message_body
{
if !user.role.is_mod()
&& (!project_item.inner.status.is_approved()
|| moderation_message_body.is_some())
@ -794,7 +830,8 @@ pub async fn project_edit(
.await?;
}
if let Some(monetization_status) = &new_project.monetization_status {
if let Some(monetization_status) = &new_project.monetization_status
{
if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You do not have the permissions to edit the monetization status of this project!"
@ -802,7 +839,8 @@ pub async fn project_edit(
));
}
if (*monetization_status == MonetizationStatus::ForceDemonetized
if (*monetization_status
== MonetizationStatus::ForceDemonetized
|| project_item.inner.monetization_status
== MonetizationStatus::ForceDemonetized)
&& !user.role.is_mod()
@ -828,16 +866,23 @@ pub async fn project_edit(
// check new description and body for links to associated images
// if they no longer exist in the description or body, delete them
let checkable_strings: Vec<&str> = vec![&new_project.description, &new_project.summary]
.into_iter()
.filter_map(|x| x.as_ref().map(|y| y.as_str()))
.collect();
let checkable_strings: Vec<&str> =
vec![&new_project.description, &new_project.summary]
.into_iter()
.filter_map(|x| x.as_ref().map(|y| y.as_str()))
.collect();
let context = ImageContext::Project {
project_id: Some(id.into()),
};
img::delete_unused_images(context, checkable_strings, &mut transaction, &redis).await?;
img::delete_unused_images(
context,
checkable_strings,
&mut transaction,
&redis,
)
.await?;
transaction.commit().await?;
db_models::Project::clear_cache(
@ -875,8 +920,11 @@ pub async fn edit_project_categories(
let mut mod_categories = Vec::new();
for category in categories {
let category_ids =
db_models::categories::Category::get_ids(category, &mut **transaction).await?;
let category_ids = db_models::categories::Category::get_ids(
category,
&mut **transaction,
)
.await?;
// TODO: We should filter out categories that don't match the project type of any of the versions
// ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
@ -969,12 +1017,18 @@ pub async fn dependency_list(
.ok();
if let Some(project) = result {
if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
if !is_visible_project(&project.inner, &user_option, &pool, false)
.await?
{
return Err(ApiError::NotFound);
}
let dependencies =
database::Project::get_dependencies(project.inner.id, &**pool, &redis).await?;
let dependencies = database::Project::get_dependencies(
project.inner.id,
&**pool,
&redis,
)
.await?;
let project_ids = dependencies
.iter()
.filter_map(|x| {
@ -1002,10 +1056,20 @@ pub async fn dependency_list(
)
.await?;
let mut projects =
filter_visible_projects(projects_result, &user_option, &pool, false).await?;
let mut versions =
filter_visible_versions(versions_result, &user_option, &pool, &redis).await?;
let mut projects = filter_visible_projects(
projects_result,
&user_option,
&pool,
false,
)
.await?;
let mut versions = filter_visible_versions(
versions_result,
&user_option,
&pool,
&redis,
)
.await?;
projects.sort_by(|a, b| b.published.cmp(&a.published));
projects.dedup_by(|a, b| a.id == b.id);
@ -1040,7 +1104,9 @@ pub struct BulkEditProject {
pub add_additional_categories: Option<Vec<String>>,
pub remove_additional_categories: Option<Vec<String>>,
#[validate(custom(function = " crate::util::validate::validate_url_hashmap_optional_values"))]
#[validate(custom(
function = " crate::util::validate::validate_url_hashmap_optional_values"
))]
pub link_urls: Option<HashMap<String, Option<String>>>,
}
@ -1062,16 +1128,18 @@ pub async fn projects_edit(
.await?
.1;
bulk_edit_project
.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
bulk_edit_project.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let project_ids: Vec<db_ids::ProjectId> = serde_json::from_str::<Vec<ProjectId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect();
let project_ids: Vec<db_ids::ProjectId> =
serde_json::from_str::<Vec<ProjectId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect();
let projects_data = db_models::Project::get_many_ids(&project_ids, &**pool, &redis).await?;
let projects_data =
db_models::Project::get_many_ids(&project_ids, &**pool, &redis).await?;
if let Some(id) = project_ids
.iter()
@ -1087,47 +1155,62 @@ pub async fn projects_edit(
.iter()
.map(|x| x.inner.team_id)
.collect::<Vec<db_models::TeamId>>();
let team_members =
db_models::TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
let team_members = db_models::TeamMember::get_from_team_full_many(
&team_ids, &**pool, &redis,
)
.await?;
let organization_ids = projects_data
.iter()
.filter_map(|x| x.inner.organization_id)
.collect::<Vec<db_models::OrganizationId>>();
let organizations =
db_models::Organization::get_many_ids(&organization_ids, &**pool, &redis).await?;
let organizations = db_models::Organization::get_many_ids(
&organization_ids,
&**pool,
&redis,
)
.await?;
let organization_team_ids = organizations
.iter()
.map(|x| x.team_id)
.collect::<Vec<db_models::TeamId>>();
let organization_team_members =
db_models::TeamMember::get_from_team_full_many(&organization_team_ids, &**pool, &redis)
.await?;
db_models::TeamMember::get_from_team_full_many(
&organization_team_ids,
&**pool,
&redis,
)
.await?;
let categories = db_models::categories::Category::list(&**pool, &redis).await?;
let link_platforms = db_models::categories::LinkPlatform::list(&**pool, &redis).await?;
let categories =
db_models::categories::Category::list(&**pool, &redis).await?;
let link_platforms =
db_models::categories::LinkPlatform::list(&**pool, &redis).await?;
let mut transaction = pool.begin().await?;
for project in projects_data {
if !user.role.is_mod() {
let team_member = team_members
.iter()
.find(|x| x.team_id == project.inner.team_id && x.user_id == user.id.into());
let team_member = team_members.iter().find(|x| {
x.team_id == project.inner.team_id
&& x.user_id == user.id.into()
});
let organization = project
.inner
.organization_id
.and_then(|oid| organizations.iter().find(|x| x.id == oid));
let organization_team_member = if let Some(organization) = organization {
organization_team_members
.iter()
.find(|x| x.team_id == organization.team_id && x.user_id == user.id.into())
} else {
None
};
let organization_team_member =
if let Some(organization) = organization {
organization_team_members.iter().find(|x| {
x.team_id == organization.team_id
&& x.user_id == user.id.into()
})
} else {
None
};
let permissions = ProjectPermissions::get_permissions_by_role(
&user.role,
@ -1232,7 +1315,13 @@ pub async fn projects_edit(
}
}
db_models::Project::clear_cache(project.inner.id, project.inner.slug, None, &redis).await?;
db_models::Project::clear_cache(
project.inner.id,
project.inner.slug,
None,
&redis,
)
.await?;
}
transaction.commit().await?;
@ -1249,15 +1338,17 @@ pub async fn bulk_edit_project_categories(
is_additional: bool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), ApiError> {
let mut set_categories = if let Some(categories) = bulk_changes.categories.clone() {
categories
} else {
project_categories.clone()
};
let mut set_categories =
if let Some(categories) = bulk_changes.categories.clone() {
categories
} else {
project_categories.clone()
};
if let Some(delete_categories) = &bulk_changes.remove_categories {
for category in delete_categories {
if let Some(pos) = set_categories.iter().position(|x| x == category) {
if let Some(pos) = set_categories.iter().position(|x| x == category)
{
set_categories.remove(pos);
}
}
@ -1291,10 +1382,17 @@ pub async fn bulk_edit_project_categories(
.iter()
.find(|x| x.category == category)
.ok_or_else(|| {
ApiError::InvalidInput(format!("Category {} does not exist.", category.clone()))
ApiError::InvalidInput(format!(
"Category {} does not exist.",
category.clone()
))
})?
.id;
mod_categories.push(ModCategory::new(project_id, category_id, is_additional));
mod_categories.push(ModCategory::new(
project_id,
category_id,
is_additional,
));
}
ModCategory::insert_many(mod_categories, &mut *transaction).await?;
}
@ -1332,7 +1430,9 @@ pub async fn project_icon_edit(
let project_item = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if !user.role.is_mod() {
@ -1360,7 +1460,8 @@ pub async fn project_icon_edit(
if !permissions.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit this project's icon.".to_string(),
"You don't have permission to edit this project's icon."
.to_string(),
));
}
}
@ -1372,8 +1473,12 @@ pub async fn project_icon_edit(
)
.await?;
let bytes =
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
let bytes = read_from_payload(
&mut payload,
262144,
"Icons must be smaller than 256KiB",
)
.await?;
let project_id: ProjectId = project_item.inner.id.into();
let upload_result = upload_image_optimized(
@ -1403,8 +1508,13 @@ pub async fn project_icon_edit(
.await?;
transaction.commit().await?;
db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis)
.await?;
db_models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;
Ok(HttpResponse::NoContent().body(""))
}
@ -1431,7 +1541,9 @@ pub async fn delete_project_icon(
let project_item = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if !user.role.is_mod() {
@ -1458,7 +1570,8 @@ pub async fn delete_project_icon(
if !permissions.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit this project's icon.".to_string(),
"You don't have permission to edit this project's icon."
.to_string(),
));
}
}
@ -1484,8 +1597,13 @@ pub async fn delete_project_icon(
.await?;
transaction.commit().await?;
db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis)
.await?;
db_models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;
Ok(HttpResponse::NoContent().body(""))
}
@ -1512,8 +1630,9 @@ pub async fn add_gallery_item(
mut payload: web::Payload,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
item.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
item.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let user = get_user_from_headers(
&req,
@ -1529,12 +1648,15 @@ pub async fn add_gallery_item(
let project_item = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if project_item.gallery_items.len() > 64 {
return Err(ApiError::CustomAuthentication(
"You have reached the maximum of gallery images to upload.".to_string(),
"You have reached the maximum of gallery images to upload."
.to_string(),
));
}
@ -1563,7 +1685,8 @@ pub async fn add_gallery_item(
if !permissions.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit this project's gallery.".to_string(),
"You don't have permission to edit this project's gallery."
.to_string(),
));
}
}
@ -1621,11 +1744,21 @@ pub async fn add_gallery_item(
created: Utc::now(),
ordering: item.ordering.unwrap_or(0),
}];
GalleryItem::insert_many(gallery_item, project_item.inner.id, &mut transaction).await?;
GalleryItem::insert_many(
gallery_item,
project_item.inner.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis)
.await?;
db_models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;
Ok(HttpResponse::NoContent().body(""))
}
@ -1671,13 +1804,16 @@ pub async fn edit_gallery_item(
.1;
let string = info.into_inner().0;
item.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
item.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let project_item = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if !user.role.is_mod() {
@ -1704,7 +1840,8 @@ pub async fn edit_gallery_item(
if !permissions.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit this project's gallery.".to_string(),
"You don't have permission to edit this project's gallery."
.to_string(),
));
}
}
@ -1798,8 +1935,13 @@ pub async fn edit_gallery_item(
transaction.commit().await?;
db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis)
.await?;
db_models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;
Ok(HttpResponse::NoContent().body(""))
}
@ -1832,7 +1974,9 @@ pub async fn delete_gallery_item(
let project_item = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if !user.role.is_mod() {
@ -1860,7 +2004,8 @@ pub async fn delete_gallery_item(
if !permissions.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit this project's gallery.".to_string(),
"You don't have permission to edit this project's gallery."
.to_string(),
));
}
}
@ -1903,8 +2048,13 @@ pub async fn delete_gallery_item(
transaction.commit().await?;
db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis)
.await?;
db_models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;
Ok(HttpResponse::NoContent().body(""))
}
@ -1931,7 +2081,9 @@ pub async fn project_delete(
let project = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if !user.role.is_admin() {
@ -1968,7 +2120,8 @@ pub async fn project_delete(
let context = ImageContext::Project {
project_id: Some(project.inner.id.into()),
};
let uploaded_images = db_models::Image::get_many_contexted(context, &mut transaction).await?;
let uploaded_images =
db_models::Image::get_many_contexted(context, &mut transaction).await?;
for image in uploaded_images {
image_item::Image::remove(image.id, &mut transaction, &redis).await?;
}
@ -1983,7 +2136,9 @@ pub async fn project_delete(
.execute(&mut *transaction)
.await?;
let result = db_models::Project::remove(project.inner.id, &mut transaction, &redis).await?;
let result =
db_models::Project::remove(project.inner.id, &mut transaction, &redis)
.await?;
transaction.commit().await?;
@ -2025,7 +2180,9 @@ pub async fn project_follow(
let result = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
let user_id: db_ids::UserId = user.id.into();
@ -2103,7 +2260,9 @@ pub async fn project_unfollow(
let result = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
let user_id: db_ids::UserId = user.id.into();
@ -2179,7 +2338,9 @@ pub async fn project_get_organization(
let result = db_models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
ApiError::InvalidInput(
"The specified project does not exist!".to_string(),
)
})?;
if !is_visible_project(&result.inner, &current_user, &pool, false).await? {
@ -2187,14 +2348,21 @@ pub async fn project_get_organization(
"The specified project does not exist!".to_string(),
))
} else if let Some(organization_id) = result.inner.organization_id {
let organization = db_models::Organization::get_id(organization_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The attached organization does not exist!".to_string())
})?;
let organization =
db_models::Organization::get_id(organization_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The attached organization does not exist!".to_string(),
)
})?;
let members_data =
TeamMember::get_from_team_full(organization.team_id, &**pool, &redis).await?;
let members_data = TeamMember::get_from_team_full(
organization.team_id,
&**pool,
&redis,
)
.await?;
let users = crate::database::models::User::get_many_ids(
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
@ -2216,17 +2384,26 @@ pub async fn project_get_organization(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| y == x.user_id)
.map(|y: crate::database::models::UserId| {
y == x.user_id
})
.unwrap_or(false)
})
.flat_map(|data| {
users.iter().find(|x| x.id == data.user_id).map(|user| {
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
crate::models::teams::TeamMember::from(
data,
user.clone(),
!logged_in,
)
})
})
.collect();
let organization = models::organizations::Organization::from(organization, team_members);
let organization = models::organizations::Organization::from(
organization,
team_members,
);
return Ok(HttpResponse::Ok().json(organization));
} else {
Err(ApiError::NotFound)

View File

@ -1,10 +1,14 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::database;
use crate::database::models::image_item;
use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder};
use crate::database::models::thread_item::{
ThreadBuilder, ThreadMessageBuilder,
};
use crate::database::redis::RedisPool;
use crate::models::ids::ImageId;
use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId};
use crate::models::ids::{
base62_impl::parse_base62, ProjectId, UserId, VersionId,
};
use crate::models::images::{Image, ImageContext};
use crate::models::pats::Scopes;
use crate::models::reports::{ItemType, Report};
@ -62,19 +66,25 @@ pub async fn report_create(
let mut bytes = web::BytesMut::new();
while let Some(item) = body.next().await {
bytes.extend_from_slice(&item.map_err(|_| {
ApiError::InvalidInput("Error while parsing request payload!".to_string())
ApiError::InvalidInput(
"Error while parsing request payload!".to_string(),
)
})?);
}
let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?;
let id = crate::database::models::generate_report_id(&mut transaction).await?;
let id =
crate::database::models::generate_report_id(&mut transaction).await?;
let report_type = crate::database::models::categories::ReportType::get_id(
&new_report.report_type,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!("Invalid report type: {}", new_report.report_type))
ApiError::InvalidInput(format!(
"Invalid report type: {}",
new_report.report_type
))
})?;
let mut report = crate::database::models::report_item::Report {
@ -91,7 +101,8 @@ pub async fn report_create(
match new_report.item_type {
ItemType::Project => {
let project_id = ProjectId(parse_base62(new_report.item_id.as_str())?);
let project_id =
ProjectId(parse_base62(new_report.item_id.as_str())?);
let result = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM mods WHERE id = $1)",
@ -110,7 +121,8 @@ pub async fn report_create(
report.project_id = Some(project_id.into())
}
ItemType::Version => {
let version_id = VersionId(parse_base62(new_report.item_id.as_str())?);
let version_id =
VersionId(parse_base62(new_report.item_id.as_str())?);
let result = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)",
@ -159,7 +171,8 @@ pub async fn report_create(
for image_id in new_report.uploaded_images {
if let Some(db_image) =
image_item::Image::get(image_id.into(), &mut *transaction, &redis).await?
image_item::Image::get(image_id.into(), &mut *transaction, &redis)
.await?
{
let image: Image = db_image.into();
if !matches!(image.context, ImageContext::Report { .. })
@ -281,8 +294,11 @@ pub async fn reports(
.await?
};
let query_reports =
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
let query_reports = crate::database::models::report_item::Report::get_many(
&report_ids,
&**pool,
)
.await?;
let mut reports: Vec<Report> = Vec::new();
@ -311,8 +327,11 @@ pub async fn reports_get(
.map(|x| x.into())
.collect();
let reports_data =
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
let reports_data = crate::database::models::report_item::Report::get_many(
&report_ids,
&**pool,
)
.await?;
let user = get_user_from_headers(
&req,
@ -351,7 +370,8 @@ pub async fn report_get(
.1;
let id = info.into_inner().0.into();
let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
let report =
crate::database::models::report_item::Report::get(id, &**pool).await?;
if let Some(report) = report {
if !user.role.is_mod() && report.reporter != user.id.into() {
@ -391,7 +411,8 @@ pub async fn report_edit(
.1;
let id = info.into_inner().0.into();
let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
let report =
crate::database::models::report_item::Report::get(id, &**pool).await?;
if let Some(report) = report {
if !user.role.is_mod() && report.reporter != user.id.into() {
@ -455,8 +476,13 @@ pub async fn report_edit(
let image_context = ImageContext::Report {
report_id: Some(id.into()),
};
img::delete_unused_images(image_context, checkable_strings, &mut transaction, &redis)
.await?;
img::delete_unused_images(
image_context,
checkable_strings,
&mut transaction,
&redis,
)
.await?;
transaction.commit().await?;
@ -489,14 +515,17 @@ pub async fn report_delete(
report_id: Some(id),
};
let uploaded_images =
database::models::Image::get_many_contexted(context, &mut transaction).await?;
database::models::Image::get_many_contexted(context, &mut transaction)
.await?;
for image in uploaded_images {
image_item::Image::remove(image.id, &mut transaction, &redis).await?;
}
let result =
crate::database::models::report_item::Report::remove_full(id.into(), &mut transaction)
.await?;
let result = crate::database::models::report_item::Report::remove_full(
id.into(),
&mut transaction,
)
.await?;
transaction.commit().await?;
if result.is_some() {

Some files were not shown because too many files have changed in this diff.