Compare commits

1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | 072fa47129 |  |

Cargo.lock (generated, 884 lines changed): file diff suppressed because it is too large.
@@ -11,14 +11,14 @@ name = "labrinth"
path = "src/main.rs"

[dependencies]
-actix-web = "4.4.1"
-actix-rt = "2.9.0"
-actix-multipart = "0.6.1"
-actix-cors = "0.7.0"
-actix-ws = "0.3.0"
-actix-files = "0.6.5"
-actix-web-prom = { version = "0.8.0", features = ["process"] }
-governor = "0.6.3"
+ntex = { version = "2.0", features = ["tokio", "compress"] }
+# actix-rt = "2.9.0"
+ntex-multipart = "2.0.0"
+ntex-cors = "2.0.0"
+# actix-ws = "0.3.0"
+ntex-files = "2.0.0"
+# actix-web-prom = { version = "0.8.0", features = ["process"] }
+governor = "0.8.0"

tokio = { version = "1.35.1", features = ["sync"] }
tokio-stream = "0.1.14"
@@ -97,15 +97,15 @@ maxminddb = "0.24.0"
flate2 = "1.0.25"
tar = "0.4.38"

-sentry = { version = "0.34.0", default-features = false, features = [
-"backtrace",
-"contexts",
-"debug-images",
-"panic",
-"rustls",
-"reqwest",
-] }
-sentry-actix = "0.34.0"
+#sentry = { version = "0.34.0", default-features = false, features = [
+# "backtrace",
+# "contexts",
+# "debug-images",
+# "panic",
+# "rustls",
+# "reqwest",
+#] }
+#sentry-actix = "0.34.0"

image = "0.24.6"
color-thief = "0.2.2"
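The manifest changes above swap the actix-web stack for ntex. As a rough illustration that is not part of this diff, a minimal ntex service looks much like its actix counterpart; the handler, route, and address below are made up, and this assumes ntex's actix-like `App`/`HttpServer` API:

```rust
use ntex::web::{self, App, HttpResponse, HttpServer};

// Illustrative handler, not from the PR.
async fn index() -> HttpResponse {
    HttpResponse::Ok().body("labrinth")
}

#[ntex::main]
async fn main() -> std::io::Result<()> {
    // Mirrors the familiar actix-web bootstrap, but through ntex's types.
    HttpServer::new(|| App::new().route("/", web::get().to(index)))
        .bind("127.0.0.1:8080")?
        .run()
        .await
}
```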
@@ -15,8 +15,8 @@ pub use validate::{check_is_moderator_from_headers, get_user_from_headers};

use crate::file_hosting::FileHostingError;
use crate::models::error::ApiError;
-use actix_web::http::StatusCode;
-use actix_web::HttpResponse;
+use ntex::http::StatusCode;
+use ntex::web::{HttpRequest, HttpResponse};
use thiserror::Error;

#[derive(Error, Debug)]
@@ -51,7 +51,7 @@ pub enum AuthenticationError {
Url,
}

-impl actix_web::ResponseError for AuthenticationError {
+impl ntex::web::WebResponseError for AuthenticationError {
fn status_code(&self) -> StatusCode {
match self {
AuthenticationError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
@@ -77,8 +77,8 @@ impl actix_web::ResponseError for AuthenticationError {
}
}

-fn error_response(&self) -> HttpResponse {
-HttpResponse::build(self.status_code()).json(ApiError {
+fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
+HttpResponse::build(self.status_code()).json(&ApiError {
error: self.error_name(),
description: self.to_string(),
})
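The hunks above capture the error-handling half of the migration: `actix_web::ResponseError` becomes `ntex::web::WebResponseError`, whose `error_response` also receives the request, and ntex's `.json()` takes a reference. A stripped-down sketch of the same shape, with placeholder `MyError`/`ApiError` types rather than the crate's own:

```rust
use ntex::http::StatusCode;
use ntex::web::{HttpRequest, HttpResponse, WebResponseError};
use serde::Serialize;
use thiserror::Error;

#[derive(Error, Debug)]
#[error("something went wrong: {0}")]
struct MyError(String);

// Stand-in for the crate's ApiError payload.
#[derive(Serialize)]
struct ApiError<'a> {
    error: &'a str,
    description: String,
}

impl WebResponseError for MyError {
    fn status_code(&self) -> StatusCode {
        StatusCode::INTERNAL_SERVER_ERROR
    }

    // ntex passes the request in, and the builder's .json takes a reference.
    fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
        HttpResponse::build(self.status_code()).json(&ApiError {
            error: "internal_error",
            description: self.to_string(),
        })
    }
}
```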
@@ -2,8 +2,8 @@ use super::ValidatedRedirectUri;
use crate::auth::AuthenticationError;
use crate::models::error::ApiError;
use crate::models::ids::DecodingError;
-use actix_web::http::{header::LOCATION, StatusCode};
-use actix_web::HttpResponse;
+use ntex::http::{header::LOCATION, StatusCode};
+use ntex::web::{HttpRequest, HttpResponse};

#[derive(thiserror::Error, Debug)]
#[error("{}", .error_type)]
@@ -55,7 +55,7 @@ impl OAuthError {
}
}

-impl actix_web::ResponseError for OAuthError {
+impl ntex::web::WebResponseError for OAuthError {
fn status_code(&self) -> StatusCode {
match self.error_type {
OAuthErrorType::AuthenticationError(_)
@@ -83,7 +83,7 @@ impl actix_web::ResponseError for OAuthError {
}
}

-fn error_response(&self) -> HttpResponse {
+fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
if let Some(ValidatedRedirectUri(mut redirect_uri)) =
self.valid_redirect_uri.clone()
{
@@ -99,10 +99,10 @@ impl actix_web::ResponseError for OAuthError {
}

HttpResponse::Ok()
-.append_header((LOCATION, redirect_uri.clone()))
+.header(LOCATION, redirect_uri.clone())
.body(redirect_uri)
} else {
-HttpResponse::build(self.status_code()).json(ApiError {
+HttpResponse::build(self.status_code()).json(&ApiError {
error: &self.error_type.error_name(),
description: self.error_type.to_string(),
})
@@ -14,14 +14,14 @@ use crate::models;
use crate::models::ids::OAuthClientId;
use crate::models::pats::Scopes;
use crate::queue::session::AuthQueue;
-use actix_web::http::header::LOCATION;
-use actix_web::web::{Data, Query, ServiceConfig};
-use actix_web::{get, post, web, HttpRequest, HttpResponse};
use chrono::Duration;
+use ntex::http::header::LOCATION;
+use ntex::http::header::{CACHE_CONTROL, PRAGMA};
+use ntex::web::ServiceConfig;
+use ntex::web::{self, get, post, HttpRequest, HttpResponse};
use rand::distributions::Alphanumeric;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
-use reqwest::header::{CACHE_CONTROL, PRAGMA};
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
@@ -59,14 +59,14 @@ pub struct OAuthClientAccessRequest {
#[get("authorize")]
pub async fn init_oauth(
req: HttpRequest,
-Query(oauth_info): Query<OAuthInit>,
-pool: Data<PgPool>,
-redis: Data<RedisPool>,
-session_queue: Data<AuthQueue>,
+web::types::Query(oauth_info): web::types::Query<OAuthInit>,
+pool: web::types::State<PgPool>,
+redis: web::types::State<RedisPool>,
+session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, OAuthError> {
let user = get_user_from_headers(
&req,
-&**pool,
+&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_AUTH_WRITE]),
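The handler signature above follows a mechanical rule repeated throughout this file: actix's `Data<T>`, `Query<T>`, `Json<T>`, and `Form<T>` extractors become `web::types::State<T>`, `web::types::Query<T>`, and so on. A hypothetical ntex-side handler showing the shape (the route, `AppState`, and `Params` are illustrative; the state would be registered on the `App` at startup):

```rust
use ntex::web::{self, get, HttpRequest, HttpResponse};
use serde::Deserialize;

// Illustrative application state and query parameters, not from the PR.
struct AppState {
    greeting: &'static str,
}

#[derive(Deserialize)]
struct Params {
    name: String,
}

#[get("/hello")]
async fn hello(
    _req: HttpRequest,
    web::types::Query(params): web::types::Query<Params>,
    state: web::types::State<AppState>,
) -> HttpResponse {
    HttpResponse::Ok().body(format!("{} {}", state.greeting, params.name))
}
```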
@@ -75,7 +75,7 @@ pub async fn init_oauth(
.1;

let client_id = oauth_info.client_id.into();
-let client = DBOAuthClient::get(client_id, &**pool).await?;
+let client = DBOAuthClient::get(client_id, &*pool).await?;

if let Some(client) = client {
let redirect_uri = ValidatedRedirectUri::validate(
@@ -107,7 +107,7 @@ pub async fn init_oauth(
}

let existing_authorization =
-OAuthClientAuthorization::get(client.id, user.id.into(), &**pool)
+OAuthClientAuthorization::get(client.id, user.id.into(), &*pool)
.await
.map_err(|e| {
OAuthError::redirect(e, &oauth_info.state, &redirect_uri)
@@ -154,7 +154,7 @@ pub async fn init_oauth(
flow_id,
requested_scopes,
};
-Ok(HttpResponse::Ok().json(access_request))
+Ok(HttpResponse::Ok().json(&access_request))
}
}
} else {
@@ -172,10 +172,10 @@ pub struct RespondToOAuthClientScopes {
#[post("accept")]
pub async fn accept_client_scopes(
req: HttpRequest,
-accept_body: web::Json<RespondToOAuthClientScopes>,
-pool: Data<PgPool>,
-redis: Data<RedisPool>,
-session_queue: Data<AuthQueue>,
+accept_body: web::types::Json<RespondToOAuthClientScopes>,
+pool: web::types::State<PgPool>,
+redis: web::types::State<RedisPool>,
+session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, OAuthError> {
accept_or_reject_client_scopes(
true,
@@ -191,10 +191,10 @@ pub async fn accept_client_scopes(
#[post("reject")]
pub async fn reject_client_scopes(
req: HttpRequest,
-body: web::Json<RespondToOAuthClientScopes>,
-pool: Data<PgPool>,
-redis: Data<RedisPool>,
-session_queue: Data<AuthQueue>,
+body: web::types::Json<RespondToOAuthClientScopes>,
+pool: web::types::State<PgPool>,
+redis: web::types::State<RedisPool>,
+session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, OAuthError> {
accept_or_reject_client_scopes(false, req, body, pool, redis, session_queue)
.await
@@ -221,12 +221,12 @@ pub struct TokenResponse {
/// Per IETF RFC6749 Section 4.1.3 (https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3)
pub async fn request_token(
req: HttpRequest,
-req_params: web::Form<TokenRequest>,
-pool: Data<PgPool>,
-redis: Data<RedisPool>,
+req_params: web::types::Form<TokenRequest>,
+pool: web::types::State<PgPool>,
+redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, OAuthError> {
let req_client_id = req_params.client_id;
-let client = DBOAuthClient::get(req_client_id.into(), &**pool).await?;
+let client = DBOAuthClient::get(req_client_id.into(), &*pool).await?;
if let Some(client) = client {
authenticate_client_token_request(&req, &client)?;
@@ -294,9 +294,9 @@ pub async fn request_token(

// IETF RFC6749 Section 5.1 (https://datatracker.ietf.org/doc/html/rfc6749#section-5.1)
Ok(HttpResponse::Ok()
-.append_header((CACHE_CONTROL, "no-store"))
-.append_header((PRAGMA, "no-cache"))
-.json(TokenResponse {
+.header(CACHE_CONTROL, "no-store")
+.header(PRAGMA, "no-cache")
+.json(&TokenResponse {
access_token: token,
token_type: "Bearer".to_string(),
expires_in: time_until_expiration.num_seconds(),
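The response-building change above is another recurring substitution: actix's `.append_header((NAME, value))` tuple form becomes ntex's `.header(NAME, value)`, and `.json(...)` is handed a reference. A small sketch, with a made-up helper name and body type (assumes serde_json is available):

```rust
use ntex::http::header::{CACHE_CONTROL, PRAGMA};
use ntex::web::HttpResponse;

// Hypothetical helper mirroring the token response above.
fn no_store_json(body: &serde_json::Value) -> HttpResponse {
    HttpResponse::Ok()
        .header(CACHE_CONTROL, "no-store")
        .header(PRAGMA, "no-cache")
        .json(body)
}
```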
@@ -314,14 +314,14 @@
pub async fn accept_or_reject_client_scopes(
accept: bool,
req: HttpRequest,
-body: web::Json<RespondToOAuthClientScopes>,
-pool: Data<PgPool>,
-redis: Data<RedisPool>,
-session_queue: Data<AuthQueue>,
+body: web::types::Json<RespondToOAuthClientScopes>,
+pool: web::types::State<PgPool>,
+redis: web::types::State<RedisPool>,
+session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, OAuthError> {
let current_user = get_user_from_headers(
&req,
-&**pool,
+&*pool,
&redis,
&session_queue,
Some(&[Scopes::SESSION_ACCESS]),
@@ -449,7 +449,7 @@ async fn init_oauth_code_flow(

// IETF RFC 6749 Section 4.1.2 (https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.2)
Ok(HttpResponse::Ok()
-.append_header((LOCATION, redirect_uri.clone()))
+.header(LOCATION, redirect_uri.clone())
.body(redirect_uri))
}
@@ -1,6 +1,6 @@
use crate::auth::AuthenticationError;
-use actix_web::http::StatusCode;
-use actix_web::{HttpResponse, ResponseError};
+use ntex::http::StatusCode;
+use ntex::web::{HttpRequest, HttpResponse, WebResponseError};
use std::fmt::{Debug, Display, Formatter};

pub struct Success<'a> {
@@ -13,7 +13,7 @@ impl Success<'_> {
let html = include_str!("success.html");

HttpResponse::Ok()
-.append_header(("Content-Type", "text/html; charset=utf-8"))
+.header("Content-Type", "text/html; charset=utf-8")
.body(
html.replace("{{ icon }}", self.icon)
.replace("{{ name }}", self.name),
@@ -41,17 +41,17 @@ impl Display for ErrorPage {
impl ErrorPage {
pub fn render(&self) -> HttpResponse {
HttpResponse::Ok()
-.append_header(("Content-Type", "text/html; charset=utf-8"))
+.header("Content-Type", "text/html; charset=utf-8")
.body(self.to_string())
}
}

-impl actix_web::ResponseError for ErrorPage {
+impl WebResponseError for ErrorPage {
fn status_code(&self) -> StatusCode {
self.code
}

-fn error_response(&self) -> HttpResponse {
+fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
self.render()
}
}
@@ -6,9 +6,9 @@ use crate::models::pats::Scopes;
use crate::models::users::User;
use crate::queue::session::AuthQueue;
use crate::routes::internal::session::get_session_metadata;
-use actix_web::http::header::{HeaderValue, AUTHORIZATION};
-use actix_web::HttpRequest;
use chrono::Utc;
+use ntex::http::header::{HeaderValue, AUTHORIZATION};
+use ntex::web::HttpRequest;

pub async fn get_user_from_headers<'a, E>(
req: &HttpRequest,
@@ -18,7 +18,7 @@ pub async fn get_user_from_headers<'a, E>(
required_scopes: Option<&[Scopes]>,
) -> Result<(Scopes, User), AuthenticationError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync,
{
// Fetch DB user record and minos user from headers
let (scopes, db_user) = get_user_record_from_bearer_token(
@@ -52,7 +52,7 @@ pub async fn get_user_record_from_bearer_token<'a, 'b, E>(
session_queue: &AuthQueue,
) -> Result<Option<(Scopes, user_item::User)>, AuthenticationError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync,
{
let token = if let Some(token) = token {
token
@@ -174,7 +174,7 @@ pub async fn check_is_moderator_from_headers<'a, 'b, E>(
required_scopes: Option<&[Scopes]>,
) -> Result<User, AuthenticationError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync,
{
let user = get_user_from_headers(
req,
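The `+ Send + Sync` additions to the generic `sqlx::Executor` bounds above recur through the rest of the diff: since the reworked functions now advertise `Send` futures, any executor they hold across an `.await` has to be `Send + Sync` as well. A minimal sketch with a made-up helper; a `&PgPool` still satisfies the bound, so call sites are unchanged:

```rust
use sqlx::PgPool;

// Hypothetical helper, not from the PR; the `users` table is illustrative.
async fn count_users<'a, E>(exec: E) -> Result<i64, sqlx::Error>
where
    E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
    let row: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users")
        .fetch_one(exec)
        .await?;
    Ok(row.0)
}

async fn demo(pool: &PgPool) -> Result<i64, sqlx::Error> {
    // A shared reference to the pool is Send + Sync, so it still works here.
    count_users(pool).await
}
```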
@@ -1,5 +1,3 @@
-use std::sync::Arc;
-
use crate::{models::ids::ProjectId, routes::ApiError};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@@ -25,7 +23,7 @@ pub async fn fetch_playtimes(
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
resolution_minute: u32,
-client: Arc<clickhouse::Client>,
+client: &clickhouse::Client,
) -> Result<Vec<ReturnIntervals>, ApiError> {
let query = client
.query(
@@ -56,7 +54,7 @@ pub async fn fetch_views(
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
resolution_minutes: u32,
-client: Arc<clickhouse::Client>,
+client: &clickhouse::Client,
) -> Result<Vec<ReturnIntervals>, ApiError> {
let query = client
.query(
@@ -86,7 +84,7 @@ pub async fn fetch_downloads(
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
resolution_minutes: u32,
-client: Arc<clickhouse::Client>,
+client: &clickhouse::Client,
) -> Result<Vec<ReturnIntervals>, ApiError> {
let query = client
.query(
@@ -113,7 +111,7 @@ pub async fn fetch_countries_downloads(
projects: Vec<ProjectId>,
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
-client: Arc<clickhouse::Client>,
+client: &clickhouse::Client,
) -> Result<Vec<ReturnCountry>, ApiError> {
let query = client
.query(
@@ -140,7 +138,7 @@ pub async fn fetch_countries_views(
projects: Vec<ProjectId>,
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
-client: Arc<clickhouse::Client>,
+client: &clickhouse::Client,
) -> Result<Vec<ReturnCountry>, ApiError> {
let query = client
.query(
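The analytics helpers above switch from taking an owned `Arc<clickhouse::Client>` to borrowing `&clickhouse::Client`: callers keep one shared client and pass a reference instead of cloning the `Arc` on every call. A generic sketch of the two shapes, with a stand-in `Client` type:

```rust
use std::sync::Arc;

// Stand-in for clickhouse::Client.
struct Client;

fn fetch_old(_client: Arc<Client>) {} // old shape: every call consumes an Arc clone
fn fetch_new(_client: &Client) {}     // new shape: a plain borrow is enough

fn main() {
    let shared = Arc::new(Client);
    fetch_old(Arc::clone(&shared));
    fetch_new(&shared); // Arc<Client> derefs to &Client at the call site
}
```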
@@ -49,7 +49,7 @@ macro_rules! generate_bulk_ids {
count: usize,
con: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Vec<$return_type>, DatabaseError> {
-let mut rng = rand::thread_rng();
+let mut rng = ChaCha20Rng::from_entropy();
let mut retry_count = 0;

// Check if ID is unique
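The RNG change above replaces `rand::thread_rng()` with a seeded `ChaCha20Rng`. One plausible motivation, stated here as an assumption rather than something the diff spells out, is that `ThreadRng` is not `Send`, so holding it across an `.await` would keep the surrounding future from being `Send`; a `ChaCha20Rng` value has no such restriction. Illustrative usage:

```rust
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;

// Hypothetical ID helper, not the crate's generate_bulk_ids macro.
fn random_suffix(len: usize) -> String {
    const CHARS: &[u8] = b"0123456789abcdefghijklmnopqrstuvwxyz";
    let mut rng = ChaCha20Rng::from_entropy();
    (0..len)
        .map(|_| CHARS[rng.gen_range(0..CHARS.len())] as char)
        .collect()
}
```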
@@ -67,7 +67,7 @@ impl Organization {
redis: &RedisPool,
) -> Result<Option<Self>, super::DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
Self::get_many(&[string], exec, redis)
.await
@@ -80,7 +80,7 @@ impl Organization {
redis: &RedisPool,
) -> Result<Option<Self>, super::DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
Self::get_many_ids(&[id], exec, redis)
.await
@@ -93,7 +93,7 @@ impl Organization {
redis: &RedisPool,
) -> Result<Vec<Self>, super::DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
let ids = organization_ids
.iter()
@@ -105,14 +105,14 @@ impl Organization {
pub async fn get_many<
'a,
E,
-T: Display + Hash + Eq + PartialEq + Clone + Debug,
+T: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
>(
organization_strings: &[T],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Self>, super::DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
let val = redis
.get_cached_keys_with_slug(
@@ -58,14 +58,14 @@ impl PersonalAccessToken {
pub async fn get<
'a,
E,
-T: Display + Hash + Eq + PartialEq + Clone + Debug,
+T: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
>(
id: T,
exec: E,
redis: &RedisPool,
) -> Result<Option<PersonalAccessToken>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
Self::get_many(&[id], exec, redis)
.await
@@ -78,7 +78,7 @@ impl PersonalAccessToken {
redis: &RedisPool,
) -> Result<Vec<PersonalAccessToken>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
let ids = pat_ids
.iter()
@@ -90,14 +90,14 @@ impl PersonalAccessToken {
pub async fn get_many<
'a,
E,
-T: Display + Hash + Eq + PartialEq + Clone + Debug,
+T: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
>(
pat_strings: &[T],
exec: E,
redis: &RedisPool,
) -> Result<Vec<PersonalAccessToken>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
let val = redis
.get_cached_keys_with_slug(
@@ -14,6 +14,7 @@ use futures::TryStreamExt;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use std::fmt::{Debug, Display};
+use std::future::Future;
use std::hash::Hash;

pub const PROJECTS_NAMESPACE: &str = "projects";
@@ -473,82 +474,102 @@ impl Project {
}
}

-pub async fn get<'a, 'b, E>(
-string: &str,
+/// What's going on here? See: https://github.com/launchbadge/sqlx/issues/1015#issuecomment-767787777
+#[allow(clippy::manual_async_fn)]
+pub fn get<'a, 'c, E>(
+string: &'a str,
executor: E,
-redis: &RedisPool,
-) -> Result<Option<QueryProject>, DatabaseError>
+redis: &'a RedisPool,
+) -> impl Future<Output = Result<Option<QueryProject>, DatabaseError>> + Send + 'a
where
-E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
+E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a,
{
+async move {
Project::get_many(&[string], executor, redis)
.await
.map(|x| x.into_iter().next())
+}
}

-pub async fn get_id<'a, 'b, E>(
+#[allow(clippy::manual_async_fn)]
+pub fn get_id<'a, 'c, E>(
id: ProjectId,
executor: E,
-redis: &RedisPool,
-) -> Result<Option<QueryProject>, DatabaseError>
+redis: &'a RedisPool,
+) -> impl Future<Output = Result<Option<QueryProject>, DatabaseError>> + Send + 'a
where
-E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
+E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a,
{
+async move {
Project::get_many(
&[crate::models::ids::ProjectId::from(id)],
executor,
redis,
)
.await
.map(|x| x.into_iter().next())
+}
}

-pub async fn get_many_ids<'a, E>(
-project_ids: &[ProjectId],
+#[allow(clippy::manual_async_fn)]
+pub fn get_many_ids<'a, 'c, E>(
+project_ids: &'a [ProjectId],
exec: E,
-redis: &RedisPool,
-) -> Result<Vec<QueryProject>, DatabaseError>
+redis: &'a RedisPool,
+) -> impl Future<Output = Result<Vec<QueryProject>, DatabaseError>> + Send + 'a
where
-E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
+E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a,
{
+async move {
let ids = project_ids
.iter()
.map(|x| crate::models::ids::ProjectId::from(*x))
.collect::<Vec<_>>();
Project::get_many(&ids, exec, redis).await
+}
}

-pub async fn get_many<
+#[allow(clippy::manual_async_fn)]
+pub fn get_many<
'a,
+'c,
E,
-T: Display + Hash + Eq + PartialEq + Clone + Debug,
+T: Display
+    + Hash
+    + Eq
+    + PartialEq
+    + Clone
+    + Debug
+    + std::marker::Sync
+    + std::marker::Send,
>(
-project_strings: &[T],
+project_strings: &'a [T],
exec: E,
-redis: &RedisPool,
-) -> Result<Vec<QueryProject>, DatabaseError>
+redis: &'a RedisPool,
+) -> impl Future<Output = Result<Vec<QueryProject>, DatabaseError>> + Send + 'a
where
-E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
+E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a,
{
+async move {
let val = redis.get_cached_keys_with_slug(
PROJECTS_NAMESPACE,
PROJECTS_SLUGS_NAMESPACE,
false,
project_strings,
|ids| async move {
let mut exec = exec.acquire().await?;
let project_ids_parsed: Vec<i64> = ids
.iter()
.flat_map(|x| parse_base62(&x.to_string()).ok())
.map(|x| x as i64)
.collect();
let slugs = ids
.into_iter()
.map(|x| x.to_string().to_lowercase())
.collect::<Vec<_>>();

let all_version_ids = DashSet::new();
let versions: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>> = sqlx::query!(
"
SELECT DISTINCT mod_id, v.id as id, date_published
FROM mods m
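The hunk above (and the matching one in the version model further down) rewrites `async fn` into a plain `fn` returning `impl Future + Send + 'a` with the body wrapped in `async move`, which is why `#[allow(clippy::manual_async_fn)]` appears; the linked sqlx issue comment explains the motivation. Reduced to its skeleton with throwaway types, the pattern looks like this:

```rust
use std::future::Future;

// Stand-in for a borrowed resource such as &RedisPool above.
struct Cache;

impl Cache {
    async fn lookup(&self, key: &str) -> Option<String> {
        let _ = key;
        None
    }
}

// Spelling out the return type lets the signature promise Send + 'a,
// which a plain `async fn` cannot express directly.
#[allow(clippy::manual_async_fn)]
fn get<'a>(
    key: &'a str,
    cache: &'a Cache,
) -> impl Future<Output = Option<String>> + Send + 'a {
    async move { cache.lookup(key).await }
}
```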
@@ -562,23 +583,23 @@ impl Project {
.map(|x| x.to_string())
.collect::<Vec<String>>()
)
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
let version_id = VersionId(m.id);
let date_published = m.date_published;
all_version_ids.insert(version_id);
acc.entry(ProjectId(m.mod_id))
.or_default()
.push((version_id, date_published));
async move { Ok(acc) }
},
)
.await?;

let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<ProjectId, Vec<QueryVersionField>> = sqlx::query!(
"
SELECT DISTINCT mod_id, version_id, field_id, int_value, enum_value, string_value
FROM versions v
@@ -587,29 +608,29 @@ impl Project {
",
&all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
string_value: m.string_value,
};

if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}

acc.entry(ProjectId(m.mod_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
.await?;

let loader_field_enum_values: Vec<QueryLoaderFieldEnumValue> = sqlx::query!(
"
SELECT DISTINCT id, enum_id, value, ordering, created, metadata
FROM loader_field_enum_values lfev
@@ -621,19 +642,19 @@ impl Project {
.map(|x| x.0)
.collect::<Vec<_>>()
)
.fetch(&mut *exec)
.map_ok(|m| QueryLoaderFieldEnumValue {
id: LoaderFieldEnumValueId(m.id),
enum_id: LoaderFieldEnumId(m.enum_id),
value: m.value,
ordering: m.ordering,
created: m.created,
metadata: m.metadata,
})
.try_collect()
.await?;

let mods_gallery: DashMap<ProjectId, Vec<GalleryItem>> = sqlx::query!(
"
SELECT DISTINCT mod_id, mg.image_url, mg.raw_image_url, mg.featured, mg.name, mg.description, mg.created, mg.ordering
FROM mods_gallery mg
@@ -643,23 +664,23 @@ impl Project {
&project_ids_parsed,
&slugs
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<GalleryItem>>, m| {
acc.entry(ProjectId(m.mod_id))
.or_default()
.push(GalleryItem {
image_url: m.image_url,
raw_image_url: m.raw_image_url,
featured: m.featured.unwrap_or(false),
name: m.name,
description: m.description,
created: m.created,
ordering: m.ordering,
});
async move { Ok(acc) }
}
).await?;

let links: DashMap<ProjectId, Vec<LinkUrl>> = sqlx::query!(
"
SELECT DISTINCT joining_mod_id as mod_id, joining_platform_id as platform_id, lp.name as platform_name, url, lp.donation as donation
FROM mods_links ml
@@ -670,29 +691,29 @@ impl Project {
&project_ids_parsed,
&slugs
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<LinkUrl>>, m| {
acc.entry(ProjectId(m.mod_id))
.or_default()
.push(LinkUrl {
platform_id: LinkPlatformId(m.platform_id),
platform_name: m.platform_name,
url: m.url,
donation: m.donation,
});
async move { Ok(acc) }
}
).await?;

#[derive(Default)]
struct VersionLoaderData {
loaders: Vec<String>,
project_types: Vec<String>,
games: Vec<String>,
loader_loader_field_ids: Vec<LoaderFieldId>,
}

let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<ProjectId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT mod_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
@@ -712,29 +733,29 @@ impl Project {
",
&all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
).fetch(&mut *exec)
.map_ok(|m| {
let project_id = ProjectId(m.mod_id);

// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
for loader_field_id in loader_loader_field_ids.iter() {
loader_field_ids.insert(*loader_field_id);
}

// Add loader + loader associated data to the map
let version_loader_data = VersionLoaderData {
loaders: m.loaders.unwrap_or_default(),
project_types: m.project_types.unwrap_or_default(),
games: m.games.unwrap_or_default(),
loader_loader_field_ids,
};

(project_id, version_loader_data)

}
).try_collect().await?;

let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
"
SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional
FROM loader_fields lf
@@ -742,20 +763,20 @@ impl Project {
",
&loader_field_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.map_ok(|m| QueryLoaderField {
id: LoaderFieldId(m.id),
field: m.field,
field_type: m.field_type,
enum_type: m.enum_type.map(LoaderFieldEnumId),
min_val: m.min_val,
max_val: m.max_val,
optional: m.optional,
})
.try_collect()
.await?;

let projects = sqlx::query!(
"
SELECT m.id id, m.name name, m.summary summary, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.raw_icon_url raw_icon_url, m.description description, m.published published,
@@ -776,87 +797,88 @@ impl Project {
&project_ids_parsed,
&slugs,
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc, m| {
let id = m.id;
let project_id = ProjectId(id);
let VersionLoaderData {
loaders,
project_types,
games,
loader_loader_field_ids,
} = loaders_ptypes_games.remove(&project_id).map(|x|x.1).unwrap_or_default();
let mut versions = versions.remove(&project_id).map(|x| x.1).unwrap_or_default();
let mut gallery = mods_gallery.remove(&project_id).map(|x| x.1).unwrap_or_default();
let urls = links.remove(&project_id).map(|x| x.1).unwrap_or_default();
let version_fields = version_fields.remove(&project_id).map(|x| x.1).unwrap_or_default();

let loader_fields = loader_fields.iter()
.filter(|x| loader_loader_field_ids.contains(&x.id))
.collect::<Vec<_>>();

let project = QueryProject {
inner: Project {
id: ProjectId(id),
team_id: TeamId(m.team_id),
organization_id: m.organization_id.map(OrganizationId),
name: m.name.clone(),
summary: m.summary.clone(),
downloads: m.downloads,
icon_url: m.icon_url.clone(),
raw_icon_url: m.raw_icon_url.clone(),
published: m.published,
updated: m.updated,
license_url: m.license_url.clone(),
status: ProjectStatus::from_string(
&m.status,
),
requested_status: m.requested_status.map(|x| ProjectStatus::from_string(
&x,
)),
license: m.license.clone(),
slug: m.slug.clone(),
description: m.description.clone(),
follows: m.follows,
moderation_message: m.moderation_message,
moderation_message_body: m.moderation_message_body,
approved: m.approved,
webhook_sent: m.webhook_sent,
color: m.color.map(|x| x as u32),
queued: m.queued,
monetization_status: MonetizationStatus::from_string(
&m.monetization_status,
),
loaders,
},
categories: m.categories.unwrap_or_default(),
additional_categories: m.additional_categories.unwrap_or_default(),
project_types,
games,
versions: {
// Each version is a tuple of (VersionId, DateTime<Utc>)
versions.sort_by(|a, b| a.1.cmp(&b.1));
versions.into_iter().map(|x| x.0).collect()
},
gallery_items: {
gallery.sort_by(|a, b| a.ordering.cmp(&b.ordering));
gallery
},
urls,
aggregate_version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, true),
thread_id: ThreadId(m.thread_id),
};

acc.insert(m.id, (m.slug, project));
async move { Ok(acc) }
})
.await?;

Ok(projects)
},
).await?;

Ok(val)
+}
}

pub async fn get_dependencies<'a, E>(
@@ -85,14 +85,14 @@ impl Session {
pub async fn get<
'a,
E,
-T: Display + Hash + Eq + PartialEq + Clone + Debug,
+T: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
>(
id: T,
exec: E,
redis: &RedisPool,
) -> Result<Option<Session>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
Self::get_many(&[id], exec, redis)
.await
@@ -105,7 +105,7 @@ impl Session {
redis: &RedisPool,
) -> Result<Option<Session>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
Session::get_many(
&[crate::models::ids::SessionId::from(id)],
@@ -122,7 +122,7 @@ impl Session {
redis: &RedisPool,
) -> Result<Vec<Session>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
let ids = session_ids
.iter()
@@ -134,14 +134,14 @@ impl Session {
pub async fn get_many<
'a,
E,
-T: Display + Hash + Eq + PartialEq + Clone + Debug,
+T: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
>(
session_strings: &[T],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Session>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
use futures::TryStreamExt;
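The `T: ... + Send` additions in these model hunks follow from the same requirement: a key type captured by a future that must be `Send` has to be `Send` itself. A minimal illustration with made-up types (uses the futures crate, which is already a dependency here):

```rust
use std::fmt::Display;

// Pretend cache lookup that holds the key across an .await.
async fn cache_lookup<T: Display + Send>(key: T) -> String {
    futures::future::ready(()).await;
    key.to_string()
}

fn assert_send<F: Send>(_f: F) {}

fn demo() {
    // The returned future is Send because the captured key type is Send.
    assert_send(cache_lookup(42_u64));
}
```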
@@ -194,7 +194,7 @@ impl TeamMember {
redis: &RedisPool,
) -> Result<Vec<TeamMember>, super::DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync,
{
Self::get_from_team_full_many(&[id], executor, redis).await
}
@@ -205,7 +205,7 @@ impl TeamMember {
redis: &RedisPool,
) -> Result<Vec<TeamMember>, super::DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync,
{
if team_ids.is_empty() {
return Ok(Vec::new());
@@ -103,7 +103,7 @@ impl User {
redis: &RedisPool,
) -> Result<Option<User>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
User::get_many(&[string], executor, redis)
.await
@@ -116,7 +116,7 @@ impl User {
redis: &RedisPool,
) -> Result<Option<User>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
User::get_many(&[crate::models::ids::UserId::from(id)], executor, redis)
.await
@@ -129,7 +129,7 @@ impl User {
redis: &RedisPool,
) -> Result<Vec<User>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
let ids = user_ids
.iter()
@@ -141,14 +141,14 @@ impl User {
pub async fn get_many<
'a,
E,
-T: Display + Hash + Eq + PartialEq + Clone + Debug,
+T: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
>(
users_strings: &[T],
exec: E,
redis: &RedisPool,
) -> Result<Vec<User>, DatabaseError>
where
-E: sqlx::Executor<'a, Database = sqlx::Postgres>,
+E: sqlx::Executor<'a, Database = sqlx::Postgres> + Send + Sync,
{
use futures::TryStreamExt;
@@ -13,6 +13,7 @@ use itertools::Itertools;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::collections::HashMap;
+use std::future::Future;
use std::iter;

pub const VERSIONS_NAMESPACE: &str = "versions";
@@ -455,35 +456,40 @@ impl Version {
Ok(Some(()))
}

-pub async fn get<'a, 'b, E>(
+#[allow(clippy::manual_async_fn)]
+pub fn get<'a, 'c, E>(
id: VersionId,
executor: E,
-redis: &RedisPool,
-) -> Result<Option<QueryVersion>, DatabaseError>
+redis: &'a RedisPool,
+) -> impl Future<Output = Result<Option<QueryVersion>, DatabaseError>> + Send + 'a
where
-E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
+E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a,
{
+async move {
Self::get_many(&[id], executor, redis)
.await
.map(|x| x.into_iter().next())
+}
}

-pub async fn get_many<'a, E>(
-version_ids: &[VersionId],
+#[allow(clippy::manual_async_fn)]
+pub fn get_many<'a, 'c, E>(
+version_ids: &'a [VersionId],
exec: E,
-redis: &RedisPool,
-) -> Result<Vec<QueryVersion>, DatabaseError>
+redis: &'a RedisPool,
+) -> impl Future<Output = Result<Vec<QueryVersion>, DatabaseError>> + Send + 'a
where
-E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
+E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a,
{
+async move {
let mut val = redis.get_cached_keys(
VERSIONS_NAMESPACE,
&version_ids.iter().map(|x| x.0).collect::<Vec<_>>(),
|version_ids| async move {
let mut exec = exec.acquire().await?;

let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<VersionId, Vec<QueryVersionField>> = sqlx::query!(
"
SELECT version_id, field_id, int_value, enum_value, string_value
FROM version_fields
@@ -491,38 +497,38 @@ impl Version {
",
&version_ids
)
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<VersionId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
string_value: m.string_value,
};

if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}

acc.entry(VersionId(m.version_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
.await?;

#[derive(Default)]
struct VersionLoaderData {
loaders: Vec<String>,
project_types: Vec<String>,
games: Vec<String>,
loader_loader_field_ids: Vec<LoaderFieldId>,
}

let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<VersionId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT version_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
@@ -542,29 +548,29 @@ impl Version {
",
&version_ids
).fetch(&mut *exec)
.map_ok(|m| {
let version_id = VersionId(m.version_id);

// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
for loader_field_id in loader_loader_field_ids.iter() {
loader_field_ids.insert(*loader_field_id);
}

// Add loader + loader associated data to the map
let version_loader_data = VersionLoaderData {
loaders: m.loaders.unwrap_or_default(),
project_types: m.project_types.unwrap_or_default(),
games: m.games.unwrap_or_default(),
loader_loader_field_ids,
};
(version_id,version_loader_data)

}
).try_collect().await?;

// Fetch all loader fields from any version
let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
"
SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional
FROM loader_fields lf
@@ -572,20 +578,20 @@ impl Version {
",
&loader_field_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.map_ok(|m| QueryLoaderField {
id: LoaderFieldId(m.id),
field: m.field,
field_type: m.field_type,
enum_type: m.enum_type.map(LoaderFieldEnumId),
min_val: m.min_val,
max_val: m.max_val,
optional: m.optional,
})
.try_collect()
.await?;

let loader_field_enum_values: Vec<QueryLoaderFieldEnumValue> = sqlx::query!(
"
SELECT DISTINCT id, enum_id, value, ordering, created, metadata
FROM loader_field_enum_values lfev
@@ -597,38 +603,38 @@ impl Version {
.map(|x| x.0)
.collect::<Vec<_>>()
)
.fetch(&mut *exec)
.map_ok(|m| QueryLoaderFieldEnumValue {
id: LoaderFieldEnumValueId(m.id),
enum_id: LoaderFieldEnumId(m.enum_id),
value: m.value,
ordering: m.ordering,
created: m.created,
metadata: m.metadata,
})
.try_collect()
.await?;

#[derive(Deserialize)]
struct Hash {
pub file_id: FileId,
pub algorithm: String,
pub hash: String,
}

#[derive(Deserialize)]
struct File {
pub id: FileId,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}

let file_ids = DashSet::new();
let reverse_file_map = DashMap::new();
let files : DashMap<VersionId, Vec<File>> = sqlx::query!(
"
SELECT DISTINCT version_id, f.id, f.url, f.filename, f.is_primary, f.size, f.file_type
FROM files f
@@ -636,27 +642,27 @@ impl Version {
",
&version_ids
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<File>>, m| {
let file = File {
id: FileId(m.id),
url: m.url,
filename: m.filename,
primary: m.is_primary,
size: m.size as u32,
file_type: m.file_type.map(|x| FileType::from_string(&x)),
};

file_ids.insert(FileId(m.id));
reverse_file_map.insert(FileId(m.id), VersionId(m.version_id));

acc.entry(VersionId(m.version_id))
.or_default()
.push(file);
async move { Ok(acc) }
}
).await?;

let hashes: DashMap<VersionId, Vec<Hash>> = sqlx::query!(
"
SELECT DISTINCT file_id, algorithm, encode(hash, 'escape') hash
FROM hashes
@@ -664,24 +670,24 @@ impl Version {
",
&file_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc: DashMap<VersionId, Vec<Hash>>, m| {
if let Some(found_hash) = m.hash {
let hash = Hash {
file_id: FileId(m.file_id),
algorithm: m.algorithm,
hash: found_hash,
};

if let Some(version_id) = reverse_file_map.get(&FileId(m.file_id)) {
acc.entry(*version_id).or_default().push(hash);
}
}
async move { Ok(acc) }
})
.await?;

let dependencies : DashMap<VersionId, Vec<QueryDependency>> = sqlx::query!(
"
SELECT DISTINCT dependent_id as version_id, d.mod_dependency_id as dependency_project_id, d.dependency_id as dependency_version_id, d.dependency_file_name as file_name, d.dependency_type as dependency_type
FROM dependencies d
@ -689,22 +695,22 @@ impl Version {
|
||||
",
|
||||
&version_ids
|
||||
).fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<QueryDependency>>, m| {
|
||||
let dependency = QueryDependency {
|
||||
project_id: m.dependency_project_id.map(ProjectId),
|
||||
version_id: m.dependency_version_id.map(VersionId),
|
||||
file_name: m.file_name,
|
||||
dependency_type: m.dependency_type,
|
||||
};
|
||||
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<QueryDependency>>, m| {
|
||||
let dependency = QueryDependency {
|
||||
project_id: m.dependency_project_id.map(ProjectId),
|
||||
version_id: m.dependency_version_id.map(VersionId),
|
||||
file_name: m.file_name,
|
||||
dependency_type: m.dependency_type,
|
||||
};
|
||||
|
||||
acc.entry(VersionId(m.version_id))
|
||||
.or_default()
|
||||
.push(dependency);
|
||||
async move { Ok(acc) }
|
||||
}
|
||||
).await?;
|
||||
acc.entry(VersionId(m.version_id))
|
||||
.or_default()
|
||||
.push(dependency);
|
||||
async move { Ok(acc) }
|
||||
}
|
||||
).await?;
|
||||
|
||||
let res = sqlx::query!(
|
||||
let res = sqlx::query!(
|
||||
"
|
||||
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
|
||||
v.changelog changelog, v.date_published date_published, v.downloads downloads,
|
||||
@ -714,96 +720,97 @@ impl Version {
|
||||
",
|
||||
&version_ids
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc, v| {
|
||||
let version_id = VersionId(v.id);
|
||||
let VersionLoaderData {
|
||||
loaders,
|
||||
project_types,
|
||||
games,
|
||||
loader_loader_field_ids,
|
||||
} = loaders_ptypes_games.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let files = files.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let hashes = hashes.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let version_fields = version_fields.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let dependencies = dependencies.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
.fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc, v| {
|
||||
let version_id = VersionId(v.id);
|
||||
let VersionLoaderData {
|
||||
loaders,
|
||||
project_types,
|
||||
games,
|
||||
loader_loader_field_ids,
|
||||
} = loaders_ptypes_games.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let files = files.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let hashes = hashes.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let version_fields = version_fields.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let dependencies = dependencies.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
|
||||
let loader_fields = loader_fields.iter()
|
||||
.filter(|x| loader_loader_field_ids.contains(&x.id))
|
||||
.collect::<Vec<_>>();
|
||||
let loader_fields = loader_fields.iter()
|
||||
.filter(|x| loader_loader_field_ids.contains(&x.id))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let query_version = QueryVersion {
|
||||
inner: Version {
|
||||
id: VersionId(v.id),
|
||||
project_id: ProjectId(v.mod_id),
|
||||
author_id: UserId(v.author_id),
|
||||
name: v.version_name,
|
||||
version_number: v.version_number,
|
||||
changelog: v.changelog,
|
||||
date_published: v.date_published,
|
||||
downloads: v.downloads,
|
||||
version_type: v.version_type,
|
||||
featured: v.featured,
|
||||
status: VersionStatus::from_string(&v.status),
|
||||
requested_status: v.requested_status
|
||||
.map(|x| VersionStatus::from_string(&x)),
|
||||
ordering: v.ordering,
|
||||
},
|
||||
files: {
|
||||
let mut files = files.into_iter().map(|x| {
|
||||
let mut file_hashes = HashMap::new();
|
||||
let query_version = QueryVersion {
|
||||
inner: Version {
|
||||
id: VersionId(v.id),
|
||||
project_id: ProjectId(v.mod_id),
|
||||
author_id: UserId(v.author_id),
|
||||
name: v.version_name,
|
||||
version_number: v.version_number,
|
||||
changelog: v.changelog,
|
||||
date_published: v.date_published,
|
||||
downloads: v.downloads,
|
||||
version_type: v.version_type,
|
||||
featured: v.featured,
|
||||
status: VersionStatus::from_string(&v.status),
|
||||
requested_status: v.requested_status
|
||||
.map(|x| VersionStatus::from_string(&x)),
|
||||
ordering: v.ordering,
|
||||
},
|
||||
files: {
|
||||
let mut files = files.into_iter().map(|x| {
|
||||
let mut file_hashes = HashMap::new();
|
||||
|
||||
for hash in hashes.iter() {
|
||||
if hash.file_id == x.id {
|
||||
file_hashes.insert(
|
||||
hash.algorithm.clone(),
|
||||
hash.hash.clone(),
|
||||
);
|
||||
for hash in hashes.iter() {
|
||||
if hash.file_id == x.id {
|
||||
file_hashes.insert(
|
||||
hash.algorithm.clone(),
|
||||
hash.hash.clone(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
QueryFile {
|
||||
id: x.id,
|
||||
url: x.url.clone(),
|
||||
filename: x.filename.clone(),
|
||||
hashes: file_hashes,
|
||||
primary: x.primary,
|
||||
size: x.size,
|
||||
file_type: x.file_type,
|
||||
}
|
||||
}).collect::<Vec<_>>();
|
||||
QueryFile {
|
||||
id: x.id,
|
||||
url: x.url.clone(),
|
||||
filename: x.filename.clone(),
|
||||
hashes: file_hashes,
|
||||
primary: x.primary,
|
||||
size: x.size,
|
||||
file_type: x.file_type,
|
||||
}
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
files.sort_by(|a, b| {
|
||||
if a.primary {
|
||||
Ordering::Less
|
||||
} else if b.primary {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
a.filename.cmp(&b.filename)
|
||||
}
|
||||
});
|
||||
files.sort_by(|a, b| {
|
||||
if a.primary {
|
||||
Ordering::Less
|
||||
} else if b.primary {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
a.filename.cmp(&b.filename)
|
||||
}
|
||||
});
|
||||
|
||||
files
|
||||
},
|
||||
version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, false),
|
||||
loaders,
|
||||
project_types,
|
||||
games,
|
||||
dependencies,
|
||||
};
|
||||
files
|
||||
},
|
||||
version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, false),
|
||||
loaders,
|
||||
project_types,
|
||||
games,
|
||||
dependencies,
|
||||
};
|
||||
|
||||
acc.insert(v.id, query_version);
|
||||
async move { Ok(acc) }
|
||||
})
|
||||
.await?;
|
||||
acc.insert(v.id, query_version);
|
||||
async move { Ok(acc) }
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(res)
|
||||
},
|
||||
).await?;
|
||||
Ok(res)
|
||||
},
|
||||
).await?;
|
||||
|
||||
val.sort();
|
||||
val.sort();
|
||||
|
||||
Ok(val)
|
||||
Ok(val)
|
||||
}
|
||||
}
|
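The hunk above streams query rows and folds them into a `DashMap` keyed by `VersionId`. A minimal sketch of that grouping pattern, with the sqlx row stream replaced by an in-memory `futures` stream (the `RowStub` type, the error type, and the `tokio`/`dashmap`/`futures` setup are illustrative assumptions, not code from this commit):

```rust
// Grouping a fallible row stream into a DashMap with TryStreamExt::try_fold.
use dashmap::DashMap;
use futures::{stream, TryStreamExt};

#[derive(Debug)]
struct RowStub {
    version_id: i64,
    filename: String,
}

#[tokio::main]
async fn main() -> Result<(), std::convert::Infallible> {
    // Stand-in for the rows sqlx would yield from `.fetch(...)`.
    let rows: Vec<Result<RowStub, std::convert::Infallible>> = vec![
        Ok(RowStub { version_id: 1, filename: "a.jar".into() }),
        Ok(RowStub { version_id: 1, filename: "b.jar".into() }),
        Ok(RowStub { version_id: 2, filename: "c.jar".into() }),
    ];

    // Fold the stream into DashMap<version, Vec<filename>>, mirroring how the
    // query results are bucketed per VersionId above.
    let grouped: DashMap<i64, Vec<String>> = stream::iter(rows)
        .try_fold(DashMap::new(), |acc, row| {
            acc.entry(row.version_id).or_default().push(row.filename);
            async move { Ok(acc) }
        })
        .await?;

    assert_eq!(grouped.get(&1).unwrap().len(), 2);
    Ok(())
}
```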

pub async fn get_file_from_hash<'a, 'b, E>(
@ -814,7 +821,7 @@ impl Version {
redis: &RedisPool,
) -> Result<Option<SingleFile>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync,
{
Self::get_files_from_hash(algo, &[hash], executor, redis)
.await
@ -831,7 +838,7 @@ impl Version {
redis: &RedisPool,
) -> Result<Vec<SingleFile>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync,
{
let val = redis.get_cached_keys(
VERSION_FILES_NAMESPACE,
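The extra `Send + Sync` bounds on the executor mirror what a work-stealing runtime demands: anything held across an `.await` inside a spawned task has to be `Send`, so the generic parameters feeding that future need the bound too. A small, self-contained illustration of the constraint, assuming a tokio multi-thread runtime (not code from this repository):

```rust
use std::future::Future;

// Anything handed to tokio::spawn must be Send + 'static, and so must its output;
// generic helpers that end up inside spawned tasks inherit those bounds.
async fn run_on_pool<F>(fut: F) -> F::Output
where
    F: Future + Send + 'static,
    F::Output: Send + 'static,
{
    tokio::spawn(fut).await.expect("task panicked")
}

#[tokio::main]
async fn main() {
    let n = run_on_pool(async { 21 * 2 }).await;
    assert_eq!(n, 42);
}
```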
@ -67,9 +67,9 @@ impl RedisPool {
closure: F,
) -> Result<Vec<T>, DatabaseError>
where
F: FnOnce(Vec<K>) -> Fut,
Fut: Future<Output = Result<DashMap<K, T>, DatabaseError>>,
T: Serialize + DeserializeOwned,
F: FnOnce(Vec<K>) -> Fut + Send,
Fut: Future<Output = Result<DashMap<K, T>, DatabaseError>> + Send,
T: Serialize + DeserializeOwned + Send,
K: Display
+ Hash
+ Eq
@ -77,7 +77,8 @@ impl RedisPool {
+ Clone
+ DeserializeOwned
+ Serialize
+ Debug,
+ Debug
+ Send,
{
Ok(self
.get_cached_keys_raw(namespace, keys, closure)
@ -94,9 +95,9 @@ impl RedisPool {
closure: F,
) -> Result<HashMap<K, T>, DatabaseError>
where
F: FnOnce(Vec<K>) -> Fut,
Fut: Future<Output = Result<DashMap<K, T>, DatabaseError>>,
T: Serialize + DeserializeOwned,
F: FnOnce(Vec<K>) -> Fut + Send,
Fut: Future<Output = Result<DashMap<K, T>, DatabaseError>> + Send,
T: Serialize + DeserializeOwned + Send,
K: Display
+ Hash
+ Eq
@ -104,7 +105,8 @@ impl RedisPool {
+ Clone
+ DeserializeOwned
+ Serialize
+ Debug,
+ Debug
+ Send,
{
self.get_cached_keys_raw_with_slug(
namespace,
@ -131,18 +133,20 @@ impl RedisPool {
closure: F,
) -> Result<Vec<T>, DatabaseError>
where
F: FnOnce(Vec<I>) -> Fut,
Fut: Future<Output = Result<DashMap<K, (Option<S>, T)>, DatabaseError>>,
T: Serialize + DeserializeOwned,
I: Display + Hash + Eq + PartialEq + Clone + Debug,
F: FnOnce(Vec<I>) -> Fut + Send,
Fut: Future<Output = Result<DashMap<K, (Option<S>, T)>, DatabaseError>>
+ Send,
T: Serialize + DeserializeOwned + Send,
I: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
K: Display
+ Hash
+ Eq
+ PartialEq
+ Clone
+ DeserializeOwned
+ Serialize,
S: Display + Clone + DeserializeOwned + Serialize + Debug,
+ Serialize
+ Send,
S: Display + Clone + DeserializeOwned + Serialize + Debug + Send,
{
Ok(self
.get_cached_keys_raw_with_slug(
@ -167,18 +171,20 @@ impl RedisPool {
closure: F,
) -> Result<HashMap<K, T>, DatabaseError>
where
F: FnOnce(Vec<I>) -> Fut,
Fut: Future<Output = Result<DashMap<K, (Option<S>, T)>, DatabaseError>>,
T: Serialize + DeserializeOwned,
I: Display + Hash + Eq + PartialEq + Clone + Debug,
F: FnOnce(Vec<I>) -> Fut + Send,
Fut: Future<Output = Result<DashMap<K, (Option<S>, T)>, DatabaseError>>
+ Send,
T: Serialize + DeserializeOwned + Send,
I: Display + Hash + Eq + PartialEq + Clone + Debug + Send,
K: Display
+ Hash
+ Eq
+ PartialEq
+ Clone
+ DeserializeOwned
+ Serialize,
S: Display + Clone + DeserializeOwned + Serialize + Debug,
+ Serialize
+ Send,
S: Display + Clone + DeserializeOwned + Serialize + Debug + Send,
{
let connection = self.connect().await?.connection;

@ -338,11 +344,11 @@ impl RedisPool {
Pin<
Box<
dyn Future<
Output = Result<
HashMap<K, RedisValue<T, K, S>>,
DatabaseError,
>,
>,
Output = Result<
HashMap<K, RedisValue<T, K, S>>,
DatabaseError,
>,
> + Send,
>,
>,
> = Vec::new();
@ -547,23 +553,6 @@ impl RedisConnection {
Ok(res)
}

pub async fn get_many(
&mut self,
namespace: &str,
ids: &[String],
) -> Result<Vec<Option<String>>, DatabaseError> {
let mut cmd = cmd("MGET");
redis_args(
&mut cmd,
ids.iter()
.map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x))
.collect::<Vec<_>>()
.as_slice(),
);
let res = redis_execute(&mut cmd, &mut self.connection).await?;
Ok(res)
}

pub async fn get_deserialized_from_json<R>(
&mut self,
namespace: &str,
@ -578,22 +567,6 @@ impl RedisConnection {
.and_then(|x| serde_json::from_str(&x).ok()))
}

pub async fn get_many_deserialized_from_json<R>(
&mut self,
namespace: &str,
ids: &[String],
) -> Result<Vec<Option<R>>, DatabaseError>
where
R: for<'a> serde::Deserialize<'a>,
{
Ok(self
.get_many(namespace, ids)
.await?
.into_iter()
.map(|x| x.and_then(|val| serde_json::from_str::<R>(&val).ok()))
.collect::<Vec<_>>())
}

pub async fn delete<T1>(
&mut self,
namespace: &str,

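The removed `get_many`/`get_many_deserialized_from_json` helpers fed the `get_cached_keys` flow: look keys up in the cache, call the supplied closure only for the misses, then backfill. A rough, self-contained sketch of that cache-aside shape, with a plain `HashMap` standing in for Redis; all names and types here are illustrative assumptions, not this crate's API:

```rust
use std::collections::HashMap;
use std::future::Future;

// Look up each key in the cache, batch the misses into a single closure call,
// then write the fetched values back before returning everything.
async fn get_cached_keys<F, Fut>(
    cache: &mut HashMap<u64, String>,
    keys: &[u64],
    fetch_missing: F,
) -> Vec<(u64, String)>
where
    F: FnOnce(Vec<u64>) -> Fut + Send,
    Fut: Future<Output = HashMap<u64, String>> + Send,
{
    let mut found = Vec::new();
    let mut missing = Vec::new();
    for key in keys {
        match cache.get(key) {
            Some(val) => found.push((*key, val.clone())),
            None => missing.push(*key),
        }
    }
    if !missing.is_empty() {
        for (key, val) in fetch_missing(missing).await {
            cache.insert(key, val.clone());
            found.push((key, val));
        }
    }
    found
}

#[tokio::main]
async fn main() {
    let mut cache = HashMap::from([(1, "cached".to_string())]);
    let out = get_cached_keys(&mut cache, &[1, 2], |miss| async move {
        miss.into_iter().map(|k| (k, format!("db-{k}"))).collect()
    })
    .await;
    assert_eq!(out.len(), 2);
}
```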
@ -2,9 +2,9 @@ use std::num::NonZeroU32;
use std::sync::Arc;
use std::time::Duration;

use actix_web::web;
use database::redis::RedisPool;
use log::{info, warn};
use ntex::{web, ServiceFactory};
use queue::{
analytics::AnalyticsQueue, payouts::PayoutsQueue, session::AuthQueue,
socket::ActiveSockets,
@ -15,9 +15,11 @@ extern crate clickhouse as clickhouse_crate;
use clickhouse_crate::Client;
use governor::middleware::StateInformationMiddleware;
use governor::{Quota, RateLimiter};
use ntex::web::{App, DefaultError, ErrorRenderer, WebRequest};
use util::cors::default_cors;

use crate::queue::moderation::AutomatedModerationQueue;
use crate::scheduler::schedule;
use crate::util::ratelimit::KeyedRateLimiter;
use crate::{
queue::payouts::process_payout,
@ -49,14 +51,13 @@ pub struct LabrinthConfig {
pub clickhouse: Client,
pub file_host: Arc<dyn file_hosting::FileHost + Send + Sync>,
pub maxmind: Arc<queue::maxmind::MaxMindIndexer>,
pub scheduler: Arc<scheduler::Scheduler>,
pub ip_salt: Pepper,
pub search_config: search::SearchConfig,
pub session_queue: web::Data<AuthQueue>,
pub payouts_queue: web::Data<PayoutsQueue>,
pub session_queue: Arc<AuthQueue>,
pub payouts_queue: Arc<PayoutsQueue>,
pub analytics_queue: Arc<AnalyticsQueue>,
pub active_sockets: web::Data<ActiveSockets>,
pub automated_moderation_queue: web::Data<AutomatedModerationQueue>,
pub active_sockets: Arc<ActiveSockets>,
pub automated_moderation_queue: Arc<AutomatedModerationQueue>,
pub rate_limiter: KeyedRateLimiter,
pub stripe_client: stripe::Client,
}
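With the queues stored as plain `Arc`s, they can be handed to ntex with `.state(...)` and read back in handlers through `web::types::State<T>`, as the later hunks do. A hedged sketch of that wiring, using a stub `AuthQueue` and a made-up `/health` route rather than anything from this codebase:

```rust
use std::sync::Arc;
use ntex::web;

// Stand-in for the real shared queue type.
#[derive(Default)]
struct AuthQueue;

// The extractor derefs to whatever was registered with `.state(...)`.
#[web::get("/health")]
async fn health(queue: web::types::State<Arc<AuthQueue>>) -> web::HttpResponse {
    let _queue: &Arc<AuthQueue> = &queue; // cheaply cloned shared handle
    web::HttpResponse::Ok().finish()
}

#[ntex::main]
async fn main() -> std::io::Result<()> {
    let session_queue = Arc::new(AuthQueue::default());
    web::HttpServer::new(move || {
        web::App::new()
            .state(session_queue.clone())
            .service(health)
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}
```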
||||
@ -75,27 +76,25 @@ pub fn app_setup(
|
||||
);
|
||||
|
||||
let automated_moderation_queue =
|
||||
web::Data::new(AutomatedModerationQueue::default());
|
||||
Arc::new(AutomatedModerationQueue::default());
|
||||
|
||||
{
|
||||
let automated_moderation_queue_ref = automated_moderation_queue.clone();
|
||||
let pool_ref = pool.clone();
|
||||
let redis_pool_ref = redis_pool.clone();
|
||||
actix_rt::spawn(async move {
|
||||
tokio::task::spawn(async move {
|
||||
automated_moderation_queue_ref
|
||||
.task(pool_ref, redis_pool_ref)
|
||||
.await;
|
||||
});
|
||||
}
|
||||
|
||||
let mut scheduler = scheduler::Scheduler::new();
|
||||
|
||||
let limiter: KeyedRateLimiter = Arc::new(
|
||||
RateLimiter::keyed(Quota::per_minute(NonZeroU32::new(300).unwrap()))
|
||||
.with_middleware::<StateInformationMiddleware>(),
|
||||
);
|
||||
let limiter_clone = Arc::clone(&limiter);
|
||||
scheduler.run(Duration::from_secs(60), move || {
|
||||
schedule(Duration::from_secs(60), move || {
|
||||
info!(
|
||||
"Clearing ratelimiter, storage size: {}",
|
||||
limiter_clone.len()
|
||||
@ -118,18 +117,15 @@ pub fn app_setup(
|
||||
let pool_ref = pool.clone();
|
||||
let search_config_ref = search_config.clone();
|
||||
let redis_pool_ref = redis_pool.clone();
|
||||
scheduler.run(local_index_interval, move || {
|
||||
schedule(local_index_interval, move || {
|
||||
let pool_ref = pool_ref.clone();
|
||||
let redis_pool_ref = redis_pool_ref.clone();
|
||||
let search_config_ref = search_config_ref.clone();
|
||||
async move {
|
||||
info!("Indexing local database");
|
||||
let result = index_projects(
|
||||
pool_ref,
|
||||
redis_pool_ref.clone(),
|
||||
&search_config_ref,
|
||||
)
|
||||
.await;
|
||||
let result =
|
||||
index_projects(&pool_ref, &redis_pool_ref, &search_config_ref)
|
||||
.await;
|
||||
if let Err(e) = result {
|
||||
warn!("Local project indexing failed: {:?}", e);
|
||||
}
|
||||
@ -140,7 +136,7 @@ pub fn app_setup(
|
||||
// Changes statuses of scheduled projects/versions
|
||||
let pool_ref = pool.clone();
|
||||
// TODO: Clear cache when these are run
|
||||
scheduler.run(std::time::Duration::from_secs(60 * 5), move || {
|
||||
schedule(std::time::Duration::from_secs(60 * 5), move || {
|
||||
let pool_ref = pool_ref.clone();
|
||||
info!("Releasing scheduled versions/projects!");
|
||||
|
||||
@ -157,7 +153,10 @@ pub fn app_setup(
|
||||
.await;
|
||||
|
||||
if let Err(e) = projects_results {
|
||||
warn!("Syncing scheduled releases for projects failed: {:?}", e);
|
||||
warn!(
|
||||
"Syncing scheduled releases for projects failed: {:?}",
|
||||
e
|
||||
);
|
||||
}
|
||||
|
||||
let versions_results = sqlx::query!(
|
||||
@ -172,25 +171,24 @@ pub fn app_setup(
|
||||
.await;
|
||||
|
||||
if let Err(e) = versions_results {
|
||||
warn!("Syncing scheduled releases for versions failed: {:?}", e);
|
||||
warn!(
|
||||
"Syncing scheduled releases for versions failed: {:?}",
|
||||
e
|
||||
);
|
||||
}
|
||||
|
||||
info!("Finished releasing scheduled versions/projects");
|
||||
}
|
||||
});
|
||||
|
||||
scheduler::schedule_versions(
|
||||
&mut scheduler,
|
||||
pool.clone(),
|
||||
redis_pool.clone(),
|
||||
);
|
||||
scheduler::schedule_versions(pool.clone(), redis_pool.clone());
|
||||
|
||||
let session_queue = web::Data::new(AuthQueue::new());
|
||||
let session_queue = Arc::new(AuthQueue::new());
|
||||
|
||||
let pool_ref = pool.clone();
|
||||
let redis_ref = redis_pool.clone();
|
||||
let session_queue_ref = session_queue.clone();
|
||||
scheduler.run(std::time::Duration::from_secs(60 * 30), move || {
|
||||
schedule(std::time::Duration::from_secs(60 * 30), move || {
|
||||
let pool_ref = pool_ref.clone();
|
||||
let redis_ref = redis_ref.clone();
|
||||
let session_queue_ref = session_queue_ref.clone();
|
||||
@ -208,7 +206,7 @@ pub fn app_setup(
|
||||
let reader = maxmind.clone();
|
||||
{
|
||||
let reader_ref = reader;
|
||||
scheduler.run(std::time::Duration::from_secs(60 * 60 * 24), move || {
|
||||
schedule(std::time::Duration::from_secs(60 * 60 * 24), move || {
|
||||
let reader_ref = reader_ref.clone();
|
||||
|
||||
async move {
|
||||
@ -232,7 +230,7 @@ pub fn app_setup(
|
||||
let analytics_queue_ref = analytics_queue.clone();
|
||||
let pool_ref = pool.clone();
|
||||
let redis_ref = redis_pool.clone();
|
||||
scheduler.run(std::time::Duration::from_secs(15), move || {
|
||||
schedule(std::time::Duration::from_secs(15), move || {
|
||||
let client_ref = client_ref.clone();
|
||||
let analytics_queue_ref = analytics_queue_ref.clone();
|
||||
let pool_ref = pool_ref.clone();
|
||||
@ -254,7 +252,7 @@ pub fn app_setup(
|
||||
{
|
||||
let pool_ref = pool.clone();
|
||||
let client_ref = clickhouse.clone();
|
||||
scheduler.run(std::time::Duration::from_secs(60 * 60 * 6), move || {
|
||||
schedule(std::time::Duration::from_secs(60 * 60 * 6), move || {
|
||||
let pool_ref = pool_ref.clone();
|
||||
let client_ref = client_ref.clone();
|
||||
|
||||
@ -276,7 +274,7 @@ pub fn app_setup(
|
||||
let redis_ref = redis_pool.clone();
|
||||
let stripe_client_ref = stripe_client.clone();
|
||||
|
||||
actix_rt::spawn(async move {
|
||||
tokio::task::spawn(async move {
|
||||
routes::internal::billing::task(
|
||||
stripe_client_ref,
|
||||
pool_ref,
|
||||
@ -290,7 +288,7 @@ pub fn app_setup(
|
||||
let pool_ref = pool.clone();
|
||||
let redis_ref = redis_pool.clone();
|
||||
|
||||
actix_rt::spawn(async move {
|
||||
tokio::task::spawn(async move {
|
||||
routes::internal::billing::subscription_task(pool_ref, redis_ref)
|
||||
.await;
|
||||
});
|
||||
@ -301,8 +299,8 @@ pub fn app_setup(
|
||||
.to_string(),
|
||||
};
|
||||
|
||||
let payouts_queue = web::Data::new(PayoutsQueue::new());
|
||||
let active_sockets = web::Data::new(ActiveSockets::default());
|
||||
let payouts_queue = Arc::new(PayoutsQueue::new());
|
||||
let active_sockets = Arc::new(ActiveSockets::default());
|
||||
|
||||
LabrinthConfig {
|
||||
pool,
|
||||
@ -310,7 +308,6 @@ pub fn app_setup(
|
||||
clickhouse: clickhouse.clone(),
|
||||
file_host,
|
||||
maxmind,
|
||||
scheduler: Arc::new(scheduler),
|
||||
ip_salt,
|
||||
search_config,
|
||||
session_queue,
|
||||
@ -323,41 +320,25 @@ pub fn app_setup(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn app_config(
|
||||
cfg: &mut web::ServiceConfig,
|
||||
labrinth_config: LabrinthConfig,
|
||||
) {
|
||||
cfg.app_data(web::FormConfig::default().error_handler(|err, _req| {
|
||||
routes::ApiError::Validation(err.to_string()).into()
|
||||
}))
|
||||
.app_data(web::PathConfig::default().error_handler(|err, _req| {
|
||||
routes::ApiError::Validation(err.to_string()).into()
|
||||
}))
|
||||
.app_data(web::QueryConfig::default().error_handler(|err, _req| {
|
||||
routes::ApiError::Validation(err.to_string()).into()
|
||||
}))
|
||||
.app_data(web::JsonConfig::default().error_handler(|err, _req| {
|
||||
routes::ApiError::Validation(err.to_string()).into()
|
||||
}))
|
||||
.app_data(web::Data::new(labrinth_config.redis_pool.clone()))
|
||||
.app_data(web::Data::new(labrinth_config.pool.clone()))
|
||||
.app_data(web::Data::new(labrinth_config.file_host.clone()))
|
||||
.app_data(web::Data::new(labrinth_config.search_config.clone()))
|
||||
.app_data(labrinth_config.session_queue.clone())
|
||||
.app_data(labrinth_config.payouts_queue.clone())
|
||||
.app_data(web::Data::new(labrinth_config.ip_salt.clone()))
|
||||
.app_data(web::Data::new(labrinth_config.analytics_queue.clone()))
|
||||
.app_data(web::Data::new(labrinth_config.clickhouse.clone()))
|
||||
.app_data(web::Data::new(labrinth_config.maxmind.clone()))
|
||||
.app_data(labrinth_config.active_sockets.clone())
|
||||
.app_data(labrinth_config.automated_moderation_queue.clone())
|
||||
.app_data(web::Data::new(labrinth_config.stripe_client.clone()))
|
||||
.configure(routes::v2::config)
|
||||
.configure(routes::v3::config)
|
||||
.configure(routes::internal::config)
|
||||
.configure(routes::root_config)
|
||||
.default_service(web::get().wrap(default_cors()).to(routes::not_found));
|
||||
}
|
||||
// TODO: fix me
|
||||
// pub fn app_config<M, F, Err: ErrorRenderer>(
|
||||
// mut app: App<M, F, Err>,
|
||||
// labrinth_config: LabrinthConfig,
|
||||
// ) -> App<M, F, Err> where F: ServiceFactory<WebRequest<Err>> {
|
||||
// app /*.app_data(web::FormConfig::default().error_handler(|err, _req| {
|
||||
// routes::ApiError::Validation(err.to_string()).into()
|
||||
// }))
|
||||
// .app_data(web::PathConfig::default().error_handler(|err, _req| {
|
||||
// routes::ApiError::Validation(err.to_string()).into()
|
||||
// }))
|
||||
// .app_data(web::QueryConfig::default().error_handler(|err, _req| {
|
||||
// routes::ApiError::Validation(err.to_string()).into()
|
||||
// }))
|
||||
// .app_data(web::JsonConfig::default().error_handler(|err, _req| {
|
||||
// routes::ApiError::Validation(err.to_string()).into()
|
||||
// }))*/
|
||||
//
|
||||
// }
|
||||
|
||||
// This is so that env vars not used immediately don't panic at runtime
|
||||
pub fn check_env_vars() -> bool {
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
use actix_web::{App, HttpServer};
|
||||
use actix_web_prom::PrometheusMetricsBuilder;
|
||||
use ntex::web::{App, HttpServer};
|
||||
// use actix_web_prom::PrometheusMetricsBuilder;
|
||||
use env_logger::Env;
|
||||
use labrinth::database::redis::RedisPool;
|
||||
use labrinth::file_hosting::S3Host;
|
||||
@ -18,7 +18,7 @@ pub struct Pepper {
|
||||
pub pepper: String,
|
||||
}
|
||||
|
||||
#[actix_rt::main]
|
||||
#[ntex::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
dotenvy::dotenv().ok();
|
||||
env_logger::Builder::from_env(Env::default().default_filter_or("info"))
|
||||
@ -90,10 +90,10 @@ async fn main() -> std::io::Result<()> {
|
||||
let maxmind_reader =
|
||||
Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap());
|
||||
|
||||
let prometheus = PrometheusMetricsBuilder::new("labrinth")
|
||||
.endpoint("/metrics")
|
||||
.build()
|
||||
.expect("Failed to create prometheus metrics middleware");
|
||||
// let prometheus = PrometheusMetricsBuilder::new("labrinth")
|
||||
// .endpoint("/metrics")
|
||||
// .build()
|
||||
// .expect("Failed to create prometheus metrics middleware");
|
||||
|
||||
let search_config = search::SearchConfig::new(None);
|
||||
|
||||
@ -111,11 +111,29 @@ async fn main() -> std::io::Result<()> {
|
||||
// Init App
|
||||
HttpServer::new(move || {
|
||||
App::new()
|
||||
.wrap(prometheus.clone())
|
||||
// .wrap(prometheus.clone())
|
||||
.wrap(RateLimit(Arc::clone(&labrinth_config.rate_limiter)))
|
||||
.wrap(actix_web::middleware::Compress::default())
|
||||
.wrap(sentry_actix::Sentry::new())
|
||||
.configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()))
|
||||
.wrap(ntex::web::middleware::Compress::default())
|
||||
// .wrap(sentry_actix::Sentry::new())
|
||||
.state(labrinth_config.redis_pool.clone())
|
||||
.state(labrinth_config.pool.clone())
|
||||
.state(labrinth_config.file_host.clone())
|
||||
.state(labrinth_config.search_config.clone())
|
||||
.state(labrinth_config.session_queue.clone())
|
||||
.state(labrinth_config.payouts_queue.clone())
|
||||
.state(labrinth_config.ip_salt.clone())
|
||||
.state(labrinth_config.analytics_queue.clone())
|
||||
.state(labrinth_config.clickhouse.clone())
|
||||
.state(labrinth_config.maxmind.clone())
|
||||
.state(labrinth_config.active_sockets.clone())
|
||||
.state(labrinth_config.automated_moderation_queue.clone())
|
||||
.state(labrinth_config.stripe_client.clone())
|
||||
.configure(labrinth::routes::v2::config)
|
||||
.configure(labrinth::routes::v3::config)
|
||||
.configure(labrinth::routes::internal::config)
|
||||
.configure(labrinth::routes::root_config)
|
||||
// // TODO: fix me
|
||||
.default_service(ntex::web::get()/*.wrap(default_cors())*/.to(labrinth::routes::not_found))
|
||||
})
|
||||
.bind(dotenvy::var("BIND_ADDR").unwrap())?
|
||||
.run()
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use std::future::Future;
|
||||
use super::super::ids::OrganizationId;
|
||||
use super::super::teams::TeamId;
|
||||
use super::super::users::UserId;
|
||||
@ -226,30 +226,33 @@ impl LegacyProject {
|
||||
}
|
||||
|
||||
// Because from needs a version_item, this is a helper function to get many from one db query.
|
||||
pub async fn from_many<'a, E>(
|
||||
#[allow(clippy::manual_async_fn)]
|
||||
pub fn from_many<'a, 'c, E>(
|
||||
data: Vec<Project>,
|
||||
exec: E,
|
||||
redis: &RedisPool,
|
||||
) -> Result<Vec<Self>, DatabaseError>
|
||||
redis: &'a RedisPool,
|
||||
) -> impl Future<Output = Result<Vec<Self>, DatabaseError>> + Send + 'a
|
||||
where
|
||||
E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
|
||||
E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a,
|
||||
{
|
||||
let version_ids: Vec<_> = data
|
||||
.iter()
|
||||
.filter_map(|p| p.versions.first().map(|i| (*i).into()))
|
||||
.collect();
|
||||
let example_versions =
|
||||
version_item::Version::get_many(&version_ids, exec, redis).await?;
|
||||
let mut legacy_projects = Vec::new();
|
||||
for project in data {
|
||||
let version_item = example_versions
|
||||
async move {
|
||||
let version_ids: Vec<_> = data
|
||||
.iter()
|
||||
.find(|v| v.inner.project_id == project.id.into())
|
||||
.cloned();
|
||||
let project = LegacyProject::from(project, version_item);
|
||||
legacy_projects.push(project);
|
||||
.filter_map(|p| p.versions.first().map(|i| (*i).into()))
|
||||
.collect();
|
||||
let example_versions =
|
||||
version_item::Version::get_many(&version_ids, exec, redis).await?;
|
||||
let mut legacy_projects = Vec::new();
|
||||
for project in data {
|
||||
let version_item = example_versions
|
||||
.iter()
|
||||
.find(|v| v.inner.project_id == project.id.into())
|
||||
.cloned();
|
||||
let project = LegacyProject::from(project, version_item);
|
||||
legacy_projects.push(project);
|
||||
}
|
||||
Ok(legacy_projects)
|
||||
}
|
||||
Ok(legacy_projects)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,10 +1,9 @@
|
||||
//! "Database" for Hydra
|
||||
use crate::models::users::{UserId, UserStatus};
|
||||
use actix_ws::Session;
|
||||
use dashmap::DashMap;
|
||||
|
||||
pub struct ActiveSockets {
|
||||
pub auth_sockets: DashMap<UserId, (UserStatus, Session)>,
|
||||
pub auth_sockets: DashMap<UserId, (UserStatus)>,
|
||||
}
|
||||
|
||||
impl Default for ActiveSockets {
|
||||
|
||||
@ -8,8 +8,8 @@ use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::date::get_current_tenths_of_ms;
|
||||
use crate::util::env::parse_strings_from_var;
|
||||
use actix_web::{post, web};
|
||||
use actix_web::{HttpRequest, HttpResponse};
|
||||
use ntex::web;
|
||||
use ntex::web::{post, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
@ -48,18 +48,18 @@ pub struct UrlInput {
|
||||
#[post("view")]
|
||||
pub async fn page_view_ingest(
|
||||
req: HttpRequest,
|
||||
maxmind: web::Data<Arc<MaxMindIndexer>>,
|
||||
analytics_queue: web::Data<Arc<AnalyticsQueue>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
url_input: web::Json<UrlInput>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
maxmind: web::types::State<Arc<MaxMindIndexer>>,
|
||||
analytics_queue: web::types::State<Arc<AnalyticsQueue>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
url_input: web::types::Json<UrlInput>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user =
|
||||
get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
|
||||
get_user_from_headers(&req, &*pool, &redis, &session_queue, None)
|
||||
.await
|
||||
.ok();
|
||||
let conn_info = req.connection_info().peer_addr().map(|x| x.to_string());
|
||||
let conn_info = req.peer_addr().map(|x| x.to_string());
|
||||
|
||||
let url = Url::parse(&url_input.url).map_err(|_| {
|
||||
ApiError::InvalidInput("invalid page view URL specified!".to_string())
|
||||
@ -132,7 +132,7 @@ pub async fn page_view_ingest(
|
||||
if PROJECT_TYPES.contains(&segments_vec[0]) {
|
||||
let project = crate::database::models::Project::get(
|
||||
segments_vec[1],
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -164,17 +164,17 @@ pub struct PlaytimeInput {
|
||||
#[post("playtime")]
|
||||
pub async fn playtime_ingest(
|
||||
req: HttpRequest,
|
||||
analytics_queue: web::Data<Arc<AnalyticsQueue>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
playtime_input: web::Json<
|
||||
analytics_queue: web::types::State<Arc<AnalyticsQueue>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
playtime_input: web::types::Json<
|
||||
HashMap<crate::models::ids::VersionId, PlaytimeInput>,
|
||||
>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (_, user) = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PERFORM_ANALYTICS]),
|
||||
@ -191,7 +191,7 @@ pub async fn playtime_ingest(
|
||||
|
||||
let versions = crate::database::models::Version::get_many(
|
||||
&playtimes.iter().map(|x| (*x.0).into()).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
use actix_web::{get, HttpResponse};
|
||||
use ntex::web::{get, HttpResponse};
|
||||
use serde_json::json;
|
||||
|
||||
#[get("/")]
|
||||
@ -10,5 +10,5 @@ pub async fn index_get() -> HttpResponse {
|
||||
"about": "Welcome traveler!"
|
||||
});
|
||||
|
||||
HttpResponse::Ok().json(data)
|
||||
HttpResponse::Ok().json(&data)
|
||||
}
|
||||
|
||||
@ -9,8 +9,8 @@ use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::search::SearchConfig;
|
||||
use crate::util::date::get_current_tenths_of_ms;
|
||||
use crate::util::guards::admin_key_guard;
|
||||
use actix_web::{patch, post, web, HttpRequest, HttpResponse};
|
||||
// use crate::util::guards::admin_key_guard;
|
||||
use ntex::web::{self, patch, post, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
@ -36,16 +36,18 @@ pub struct DownloadBody {
|
||||
}
|
||||
|
||||
// This is an internal route, cannot be used without key
|
||||
#[patch("/_count-download", guard = "admin_key_guard")]
|
||||
#[patch("/_count-download")]
|
||||
// TODO: fix me
|
||||
// #[patch("/_count-download", guard = "admin_key_guard")]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn count_download(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
maxmind: web::Data<Arc<MaxMindIndexer>>,
|
||||
analytics_queue: web::Data<Arc<AnalyticsQueue>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
download_body: web::Json<DownloadBody>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
maxmind: web::types::State<Arc<MaxMindIndexer>>,
|
||||
analytics_queue: web::types::State<Arc<AnalyticsQueue>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
download_body: web::types::Json<DownloadBody>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let token = download_body
|
||||
.headers
|
||||
@ -56,7 +58,7 @@ pub async fn count_download(
|
||||
let user = get_user_record_from_bearer_token(
|
||||
&req,
|
||||
token,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
)
|
||||
@ -81,7 +83,7 @@ pub async fn count_download(
|
||||
",
|
||||
download_body.url,
|
||||
)
|
||||
.fetch_optional(pool.as_ref())
|
||||
.fetch_optional(&*pool)
|
||||
.await?
|
||||
{
|
||||
(version.id, version.mod_id)
|
||||
@ -94,7 +96,7 @@ pub async fn count_download(
|
||||
project_id as crate::database::models::ids::ProjectId,
|
||||
id_option
|
||||
)
|
||||
.fetch_optional(pool.as_ref())
|
||||
.fetch_optional(&*pool)
|
||||
.await?
|
||||
{
|
||||
(version.id, version.mod_id)
|
||||
@ -147,14 +149,16 @@ pub async fn count_download(
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
}
|
||||
|
||||
#[post("/_force_reindex", guard = "admin_key_guard")]
|
||||
#[post("/_force_reindex")]
|
||||
// TODO: fix me
|
||||
// #[post("/_force_reindex", guard = "admin_key_guard")]
|
||||
pub async fn force_reindex(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
config: web::Data<SearchConfig>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
config: web::types::State<SearchConfig>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
use crate::search::indexing::index_projects;
|
||||
let redis = redis.get_ref();
|
||||
index_projects(pool.as_ref().clone(), redis.clone(), &config).await?;
|
||||
index_projects(&*pool, &redis, &config).await?;
|
||||
Ok(HttpResponse::NoContent().finish())
|
||||
}
|
||||
|
||||
@ -15,9 +15,9 @@ use crate::models::pats::Scopes;
|
||||
use crate::models::users::Badges;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
use log::{info, warn};
|
||||
use ntex::web::{self, delete, get, patch, post, HttpRequest, HttpResponse};
|
||||
use rust_decimal::prelude::ToPrimitive;
|
||||
use rust_decimal::Decimal;
|
||||
use serde::Serialize;
|
||||
@ -56,10 +56,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
|
||||
#[get("products")]
|
||||
pub async fn products(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let products = product_item::QueryProduct::list(&**pool, &redis).await?;
|
||||
let products = product_item::QueryProduct::list(&*pool, &redis).await?;
|
||||
|
||||
let products = products
|
||||
.into_iter()
|
||||
@ -80,19 +80,19 @@ pub async fn products(
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(products))
|
||||
Ok(HttpResponse::Ok().json(&products))
|
||||
}
|
||||
|
||||
#[get("subscriptions")]
|
||||
pub async fn subscriptions(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -103,14 +103,14 @@ pub async fn subscriptions(
|
||||
let subscriptions =
|
||||
user_subscription_item::UserSubscriptionItem::get_all_user(
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(UserSubscription::from)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(subscriptions))
|
||||
Ok(HttpResponse::Ok().json(&subscriptions))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@ -130,16 +130,16 @@ pub struct ChargeRefund {
|
||||
#[post("charge/{id}/refund")]
|
||||
pub async fn refund_charge(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::Path<(crate::models::ids::ChargeId,)>,
|
||||
body: web::Json<ChargeRefund>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
info: web::types::Path<(crate::models::ids::ChargeId,)>,
|
||||
body: web::types::Json<ChargeRefund>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -155,8 +155,8 @@ pub async fn refund_charge(
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(charge) = ChargeItem::get(id.into(), &**pool).await? {
|
||||
let refunds = ChargeItem::get_children(id.into(), &**pool).await?;
|
||||
if let Some(charge) = ChargeItem::get(id.into(), &*pool).await? {
|
||||
let refunds = ChargeItem::get_children(id.into(), &*pool).await?;
|
||||
let refunds = -refunds
|
||||
.into_iter()
|
||||
.filter_map(|x| match x.status {
|
||||
@ -259,7 +259,7 @@ pub async fn refund_charge(
|
||||
if body.0.unprovision.unwrap_or(false) {
|
||||
if let Some(subscription_id) = charge.subscription_id {
|
||||
let open_charge =
|
||||
ChargeItem::get_open_subscription(subscription_id, &**pool)
|
||||
ChargeItem::get_open_subscription(subscription_id, &*pool)
|
||||
.await?;
|
||||
if let Some(mut open_charge) = open_charge {
|
||||
open_charge.status = ChargeStatus::Cancelled;
|
||||
@ -287,16 +287,16 @@ pub struct SubscriptionEdit {
|
||||
#[patch("subscription/{id}")]
|
||||
pub async fn edit_subscription(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(crate::models::ids::UserSubscriptionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
edit_subscription: web::Json<SubscriptionEdit>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
info: web::types::Path<(crate::models::ids::UserSubscriptionId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
edit_subscription: web::types::Json<SubscriptionEdit>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -307,7 +307,7 @@ pub async fn edit_subscription(
|
||||
let (id,) = info.into_inner();
|
||||
|
||||
if let Some(subscription) =
|
||||
user_subscription_item::UserSubscriptionItem::get(id.into(), &**pool)
|
||||
user_subscription_item::UserSubscriptionItem::get(id.into(), &*pool)
|
||||
.await?
|
||||
{
|
||||
if subscription.user_id != user.id.into() && !user.role.is_admin() {
|
||||
@ -526,7 +526,7 @@ pub async fn edit_subscription(
|
||||
transaction.commit().await?;
|
||||
|
||||
if let Some((amount, tax, payment_intent)) = intent {
|
||||
Ok(HttpResponse::Ok().json(serde_json::json!({
|
||||
Ok(HttpResponse::Ok().json(&serde_json::json!({
|
||||
"payment_intent_id": payment_intent.id,
|
||||
"client_secret": payment_intent.client_secret,
|
||||
"tax": tax,
|
||||
@ -543,14 +543,14 @@ pub async fn edit_subscription(
|
||||
#[get("customer")]
|
||||
pub async fn user_customer(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -570,19 +570,19 @@ pub async fn user_customer(
|
||||
let customer =
|
||||
stripe::Customer::retrieve(&stripe_client, &customer_id, &[]).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(customer))
|
||||
Ok(HttpResponse::Ok().json(&customer))
|
||||
}
|
||||
|
||||
#[get("payments")]
|
||||
pub async fn charges(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -593,12 +593,12 @@ pub async fn charges(
|
||||
let charges =
|
||||
crate::database::models::charge_item::ChargeItem::get_from_user(
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(
|
||||
charges
|
||||
&charges
|
||||
.into_iter()
|
||||
.map(|x| Charge {
|
||||
id: x.id.into(),
|
||||
@ -621,14 +621,14 @@ pub async fn charges(
|
||||
#[post("payment_method")]
|
||||
pub async fn add_payment_method_flow(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -661,7 +661,7 @@ pub async fn add_payment_method_flow(
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(serde_json::json!({
|
||||
Ok(HttpResponse::Ok().json(&serde_json::json!({
|
||||
"client_secret": intent.client_secret
|
||||
})))
|
||||
}
|
||||
@ -674,15 +674,15 @@ pub struct EditPaymentMethod {
|
||||
#[patch("payment_method/{id}")]
|
||||
pub async fn edit_payment_method(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -743,15 +743,15 @@ pub async fn edit_payment_method(
|
||||
#[delete("payment_method/{id}")]
|
||||
pub async fn remove_payment_method(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -787,7 +787,7 @@ pub async fn remove_payment_method(
|
||||
let user_subscriptions =
|
||||
user_subscription_item::UserSubscriptionItem::get_all_user(
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -831,14 +831,14 @@ pub async fn remove_payment_method(
|
||||
#[get("payment_methods")]
|
||||
pub async fn payment_methods(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -861,7 +861,7 @@ pub async fn payment_methods(
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(methods.data))
|
||||
Ok(HttpResponse::Ok().json(&methods.data))
|
||||
} else {
|
||||
Ok(HttpResponse::NoContent().finish())
|
||||
}
|
||||
@ -875,8 +875,8 @@ pub struct ActiveServersQuery {
|
||||
#[get("active_servers")]
|
||||
pub async fn active_servers(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
query: web::Query<ActiveServersQuery>,
|
||||
pool: web::types::State<PgPool>,
|
||||
query: web::types::Query<ActiveServersQuery>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let master_key = dotenvy::var("PYRO_API_KEY")?;
|
||||
|
||||
@ -894,7 +894,7 @@ pub async fn active_servers(
|
||||
let servers =
|
||||
user_subscription_item::UserSubscriptionItem::get_all_servers(
|
||||
query.subscription_status,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -918,7 +918,7 @@ pub async fn active_servers(
|
||||
})
|
||||
.collect::<Vec<ActiveServer>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(server_ids))
|
||||
Ok(HttpResponse::Ok().json(&server_ids))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@ -1021,15 +1021,15 @@ fn infer_currency_code(country: &str) -> String {
|
||||
#[post("payment")]
|
||||
pub async fn initiate_payment(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
payment_request: web::Json<PaymentRequest>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
payment_request: web::types::Json<PaymentRequest>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -1109,7 +1109,7 @@ pub async fn initiate_payment(
|
||||
let charge =
|
||||
crate::database::models::charge_item::ChargeItem::get(
|
||||
id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
@ -1131,7 +1131,7 @@ pub async fn initiate_payment(
|
||||
interval,
|
||||
} => {
|
||||
let product =
|
||||
product_item::ProductItem::get(product_id.into(), &**pool)
|
||||
product_item::ProductItem::get(product_id.into(), &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -1142,7 +1142,7 @@ pub async fn initiate_payment(
|
||||
|
||||
let mut product_prices =
|
||||
product_item::ProductPriceItem::get_all_product_prices(
|
||||
product.id, &**pool,
|
||||
product.id, &*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1184,7 +1184,7 @@ pub async fn initiate_payment(
|
||||
let user_subscriptions =
|
||||
user_subscription_item::UserSubscriptionItem::get_all_user(
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1198,7 +1198,7 @@ pub async fn initiate_payment(
|
||||
})
|
||||
.map(|x| x.price_id)
|
||||
.collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1259,7 +1259,7 @@ pub async fn initiate_payment(
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(serde_json::json!({
|
||||
Ok(HttpResponse::Ok().json(&serde_json::json!({
|
||||
"price_id": to_base62(price_id.0 as u64),
|
||||
"tax": 0,
|
||||
"total": price,
|
||||
@ -1325,7 +1325,7 @@ pub async fn initiate_payment(
|
||||
let payment_intent =
|
||||
stripe::PaymentIntent::create(&stripe_client, intent).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(serde_json::json!({
|
||||
Ok(HttpResponse::Ok().json(&serde_json::json!({
|
||||
"payment_intent_id": payment_intent.id,
|
||||
"client_secret": payment_intent.client_secret,
|
||||
"price_id": to_base62(price_id.0 as u64),
|
||||
@ -1340,9 +1340,9 @@ pub async fn initiate_payment(
|
||||
pub async fn stripe_webhook(
|
||||
req: HttpRequest,
|
||||
payload: String,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
stripe_client: web::Data<stripe::Client>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
stripe_client: web::types::State<stripe::Client>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let stripe_signature = req
|
||||
.headers()
|
||||
@ -1716,7 +1716,7 @@ pub async fn stripe_webhook(
|
||||
let minecraft_versions = crate::database::models::legacy_loader_fields::MinecraftGameVersion::list(
|
||||
Some("release"),
|
||||
None,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
).await?;
|
||||
|
||||
|
||||
@ -16,12 +16,12 @@ use crate::util::env::parse_strings_from_var;
|
||||
use crate::util::ext::get_image_ext;
|
||||
use crate::util::img::upload_image_optimized;
|
||||
use crate::util::validate::{validation_errors_to_string, RE_URL_SAFE};
|
||||
use actix_web::web::{scope, Data, Query, ServiceConfig};
|
||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||
use argon2::password_hash::SaltString;
|
||||
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
|
||||
use base64::Engine;
|
||||
use chrono::{Duration, Utc};
|
||||
use ntex::web::{self, delete, get, patch, post, HttpRequest, HttpResponse};
|
||||
use ntex::web::{scope, ServiceConfig};
|
||||
use rand_chacha::rand_core::SeedableRng;
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use reqwest::header::AUTHORIZATION;
|
||||
@ -1052,10 +1052,10 @@ pub struct Authorization {
|
||||
#[get("init")]
|
||||
pub async fn init(
|
||||
req: HttpRequest,
|
||||
Query(info): Query<AuthorizationInit>, // callback url
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
web::types::Query(info): web::types::Query<AuthorizationInit>, // callback url
|
||||
client: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, AuthenticationError> {
|
||||
let url =
|
||||
url::Url::parse(&info.url).map_err(|_| AuthenticationError::Url)?;
|
||||
@ -1073,7 +1073,7 @@ pub async fn init(
|
||||
let (_, user) = get_user_record_from_bearer_token(
|
||||
&req,
|
||||
Some(&token),
|
||||
&**client,
|
||||
&*client,
|
||||
&redis,
|
||||
&session_queue,
|
||||
)
|
||||
@ -1095,17 +1095,17 @@ pub async fn init(
|
||||
|
||||
let url = info.provider.get_redirect_url(state)?;
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*url))
|
||||
.json(serde_json::json!({ "url": url })))
|
||||
.header("Location", &*url)
|
||||
.json(&serde_json::json!({ "url": url })))
|
||||
}
|
||||
|
||||
#[get("callback")]
|
||||
pub async fn auth_callback(
|
||||
req: HttpRequest,
|
||||
Query(query): Query<HashMap<String, String>>,
|
||||
client: Data<PgPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
redis: Data<RedisPool>,
|
||||
web::types::Query(query): web::types::Query<HashMap<String, String>>,
|
||||
client: web::types::State<PgPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, crate::auth::templates::ErrorPage> {
|
||||
let state_string = query
|
||||
.get("state")
|
||||
@ -1128,7 +1128,7 @@ pub async fn auth_callback(
|
||||
let token = provider.get_token(query).await?;
|
||||
let oauth_user = provider.get_user(&token).await?;
|
||||
|
||||
let user_id_opt = provider.get_user_id(&oauth_user.id, &**client).await?;
|
||||
let user_id_opt = provider.get_user_id(&oauth_user.id, &*client).await?;
|
||||
|
||||
let mut transaction = client.begin().await?;
|
||||
if let Some(id) = user_id {
|
||||
@ -1140,7 +1140,7 @@ pub async fn auth_callback(
|
||||
.update_user_id(id, Some(&oauth_user.id), &mut transaction)
|
||||
.await?;
|
||||
|
||||
let user = crate::database::models::User::get_id(id, &**client, &redis).await?;
|
||||
let user = crate::database::models::User::get_id(id, &*client, &redis).await?;
|
||||
|
||||
if provider == AuthProvider::PayPal {
|
||||
sqlx::query!(
|
||||
@ -1170,11 +1170,11 @@ pub async fn auth_callback(
|
||||
crate::database::models::User::clear_caches(&[(id, None)], &redis).await?;
|
||||
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*url))
|
||||
.json(serde_json::json!({ "url": url })))
|
||||
.header("Location", &*url)
|
||||
.json(&serde_json::json!({ "url": url })))
|
||||
} else {
|
||||
let user_id = if let Some(user_id) = user_id_opt {
|
||||
let user = crate::database::models::User::get_id(user_id, &**client, &redis)
|
||||
let user = crate::database::models::User::get_id(user_id, &*client, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
|
||||
@ -1191,8 +1191,8 @@ pub async fn auth_callback(
|
||||
);
|
||||
|
||||
return Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*redirect_url))
|
||||
.json(serde_json::json!({ "url": redirect_url })));
|
||||
.header("Location", &*redirect_url)
|
||||
.json(&serde_json::json!({ "url": redirect_url })));
|
||||
}
|
||||
|
||||
user_id
|
||||
@ -1216,8 +1216,8 @@ pub async fn auth_callback(
|
||||
);
|
||||
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*redirect_url))
|
||||
.json(serde_json::json!({ "url": redirect_url })))
|
||||
.header("Location", &*redirect_url)
|
||||
.json(&serde_json::json!({ "url": redirect_url })))
|
||||
}
|
||||
} else {
|
||||
Err::<HttpResponse, AuthenticationError>(AuthenticationError::InvalidCredentials)
|
||||
@ -1235,14 +1235,14 @@ pub struct DeleteAuthProvider {
#[delete("provider")]
pub async fn delete_auth_provider(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
delete_provider: web::Json<DeleteAuthProvider>,
session_queue: Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
delete_provider: web::types::Json<DeleteAuthProvider>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_AUTH_WRITE]),
@ -1328,9 +1328,9 @@ pub struct NewAccount {
#[post("create")]
pub async fn create_account_with_password(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
new_account: web::Json<NewAccount>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
new_account: web::types::Json<NewAccount>,
) -> Result<HttpResponse, ApiError> {
new_account.0.validate().map_err(|err| {
ApiError::InvalidInput(validation_errors_to_string(err, None))
@ -1340,13 +1340,9 @@ pub async fn create_account_with_password(
return Err(ApiError::Turnstile);
}

if crate::database::models::User::get(
&new_account.username,
&**pool,
&redis,
)
.await?
.is_some()
if crate::database::models::User::get(&new_account.username, &*pool, &redis)
.await?
.is_some()
{
return Err(ApiError::InvalidInput("Username is taken!".to_string()));
}
@ -1381,7 +1377,7 @@ pub async fn create_account_with_password(
.hash_password(new_account.password.as_bytes(), &salt)?
.to_string();

if crate::database::models::User::get_email(&new_account.email, &**pool)
if crate::database::models::User::get_email(&new_account.email, &*pool)
.await?
.is_some()
{
@ -1441,7 +1437,7 @@ pub async fn create_account_with_password(

transaction.commit().await?;

Ok(HttpResponse::Ok().json(res))
Ok(HttpResponse::Ok().json(&res))
}

#[derive(Deserialize, Validate)]
@ -1454,26 +1450,26 @@ pub struct Login {
#[post("login")]
pub async fn login_password(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
login: web::Json<Login>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
login: web::types::Json<Login>,
) -> Result<HttpResponse, ApiError> {
if !check_hcaptcha(&req, &login.challenge).await? {
return Err(ApiError::Turnstile);
}

let user = if let Some(user) =
crate::database::models::User::get(&login.username, &**pool, &redis)
crate::database::models::User::get(&login.username, &*pool, &redis)
.await?
{
user
} else {
let user =
crate::database::models::User::get_email(&login.username, &**pool)
crate::database::models::User::get_email(&login.username, &*pool)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;

crate::database::models::User::get_id(user, &**pool, &redis)
crate::database::models::User::get_id(user, &*pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?
};
@ -1495,7 +1491,7 @@ pub async fn login_password(
.insert(Duration::minutes(30), &redis)
.await?;

Ok(HttpResponse::Ok().json(serde_json::json!({
Ok(HttpResponse::Ok().json(&serde_json::json!({
"error": "2fa_required",
"description": "2FA is required to complete this operation.",
"flow": flow,
@ -1507,7 +1503,7 @@ pub async fn login_password(
let res = crate::models::sessions::Session::from(session, true, None);
transaction.commit().await?;

Ok(HttpResponse::Ok().json(res))
Ok(HttpResponse::Ok().json(&res))
}
}

@ -1599,9 +1595,9 @@ async fn validate_2fa_code(
#[post("login/2fa")]
pub async fn login_2fa(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
login: web::Json<Login2FA>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
login: web::types::Json<Login2FA>,
) -> Result<HttpResponse, ApiError> {
let flow = Flow::get(&login.flow, &redis)
.await?
@ -1609,7 +1605,7 @@ pub async fn login_2fa(

if let Flow::Login2FA { user_id } = flow {
let user =
crate::database::models::User::get_id(user_id, &**pool, &redis)
crate::database::models::User::get_id(user_id, &*pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;

@ -1637,7 +1633,7 @@ pub async fn login_2fa(
let res = crate::models::sessions::Session::from(session, true, None);
transaction.commit().await?;

Ok(HttpResponse::Ok().json(res))
Ok(HttpResponse::Ok().json(&res))
} else {
Err(ApiError::Authentication(
AuthenticationError::InvalidCredentials,
@ -1648,13 +1644,13 @@ pub async fn login_2fa(
#[post("2fa/get_secret")]
pub async fn begin_2fa_flow(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_AUTH_WRITE]),
@ -1673,7 +1669,7 @@ pub async fn begin_2fa_flow(
.insert(Duration::minutes(30), &redis)
.await?;

Ok(HttpResponse::Ok().json(serde_json::json!({
Ok(HttpResponse::Ok().json(&serde_json::json!({
"secret": encoded.to_string(),
"flow": flow,
})))
@ -1687,10 +1683,10 @@ pub async fn begin_2fa_flow(
#[post("2fa")]
pub async fn finish_2fa_flow(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
login: web::Json<Login2FA>,
session_queue: Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
login: web::types::Json<Login2FA>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let flow = Flow::get(&login.flow, &redis)
.await?
@ -1699,7 +1695,7 @@ pub async fn finish_2fa_flow(
if let Flow::Initialize2FA { user_id, secret } = flow {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_AUTH_WRITE]),
@ -1796,7 +1792,7 @@ pub async fn finish_2fa_flow(
)
.await?;

Ok(HttpResponse::Ok().json(serde_json::json!({
Ok(HttpResponse::Ok().json(&serde_json::json!({
"backup_codes": codes,
})))
} else {
@ -1814,15 +1810,15 @@ pub struct Remove2FA {
#[delete("2fa")]
pub async fn remove_2fa(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
login: web::Json<Remove2FA>,
session_queue: Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
login: web::types::Json<Remove2FA>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let (scopes, user) = get_user_record_from_bearer_token(
&req,
None,
&**pool,
&*pool,
&redis,
&session_queue,
)
@ -1904,9 +1900,9 @@ pub struct ResetPassword {
#[post("password/reset")]
pub async fn reset_password_begin(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
reset_password: web::Json<ResetPassword>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
reset_password: web::types::Json<ResetPassword>,
) -> Result<HttpResponse, ApiError> {
if !check_hcaptcha(&req, &reset_password.challenge).await? {
return Err(ApiError::Turnstile);
@ -1914,15 +1910,15 @@ pub async fn reset_password_begin(

let user = if let Some(user_id) = crate::database::models::User::get_email(
&reset_password.username,
&**pool,
&*pool,
)
.await?
{
crate::database::models::User::get_id(user_id, &**pool, &redis).await?
crate::database::models::User::get_id(user_id, &*pool, &redis).await?
} else {
crate::database::models::User::get(
&reset_password.username,
&**pool,
&*pool,
&redis,
)
.await?
@ -1957,17 +1953,17 @@ pub struct ChangePassword {
#[patch("password")]
pub async fn change_password(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
change_password: web::Json<ChangePassword>,
session_queue: Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
change_password: web::types::Json<ChangePassword>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = if let Some(flow) = &change_password.flow {
let flow = Flow::get(flow, &redis).await?;

if let Some(Flow::ForgotPassword { user_id }) = flow {
let user =
crate::database::models::User::get_id(user_id, &**pool, &redis)
crate::database::models::User::get_id(user_id, &*pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;

@ -1985,7 +1981,7 @@ pub async fn change_password(
let (scopes, user) = get_user_record_from_bearer_token(
&req,
None,
&**pool,
&*pool,
&redis,
&session_queue,
)
@ -2108,11 +2104,11 @@ pub struct SetEmail {
#[patch("email")]
pub async fn set_email(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
email: web::Json<SetEmail>,
session_queue: Data<AuthQueue>,
stripe_client: Data<stripe::Client>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
email: web::types::Json<SetEmail>,
session_queue: web::types::State<AuthQueue>,
stripe_client: web::types::State<stripe::Client>,
) -> Result<HttpResponse, ApiError> {
email.0.validate().map_err(|err| {
ApiError::InvalidInput(validation_errors_to_string(err, None))
@ -2120,7 +2116,7 @@ pub async fn set_email(

let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_AUTH_WRITE]),
@ -2194,13 +2190,13 @@ pub async fn set_email(
#[post("email/resend_verify")]
pub async fn resend_verify_email(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_AUTH_WRITE]),
@ -2243,9 +2239,9 @@ pub struct VerifyEmail {

#[post("email/verify")]
pub async fn verify_email(
pool: Data<PgPool>,
redis: Data<RedisPool>,
email: web::Json<VerifyEmail>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
email: web::types::Json<VerifyEmail>,
) -> Result<HttpResponse, ApiError> {
let flow = Flow::get(&email.flow, &redis).await?;

@ -2255,7 +2251,7 @@ pub async fn verify_email(
}) = flow
{
let user =
crate::database::models::User::get_id(user_id, &**pool, &redis)
crate::database::models::User::get_id(user_id, &*pool, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;

@ -2296,13 +2292,13 @@ pub async fn verify_email(
#[post("email/subscribe")]
pub async fn subscribe_newsletter(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_AUTH_WRITE]),

@ -3,7 +3,7 @@ use crate::database::redis::RedisPool;
use crate::models::pats::Scopes;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use actix_web::{post, web, HttpRequest, HttpResponse};
use ntex::web::{self, post, HttpRequest, HttpResponse};
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
@ -13,13 +13,13 @@ pub fn config(cfg: &mut web::ServiceConfig) {
#[post("/export")]
pub async fn export(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::SESSION_ACCESS]),
@ -30,11 +30,10 @@ pub async fn export(
let user_id = user.id.into();

let collection_ids =
crate::database::models::User::get_collections(user_id, &**pool)
.await?;
crate::database::models::User::get_collections(user_id, &*pool).await?;
let collections = crate::database::models::Collection::get_many(
&collection_ids,
&**pool,
&*pool,
&redis,
)
.await?
@ -42,25 +41,25 @@ pub async fn export(
.map(crate::models::collections::Collection::from)
.collect::<Vec<_>>();

let follows = crate::database::models::User::get_follows(user_id, &**pool)
let follows = crate::database::models::User::get_follows(user_id, &*pool)
.await?
.into_iter()
.map(crate::models::ids::ProjectId::from)
.collect::<Vec<_>>();

let projects =
crate::database::models::User::get_projects(user_id, &**pool, &redis)
crate::database::models::User::get_projects(user_id, &*pool, &redis)
.await?
.into_iter()
.map(crate::models::ids::ProjectId::from)
.collect::<Vec<_>>();

let org_ids =
crate::database::models::User::get_organizations(user_id, &**pool)
crate::database::models::User::get_organizations(user_id, &*pool)
.await?;
let orgs =
crate::database::models::organization_item::Organization::get_many_ids(
&org_ids, &**pool, &redis,
&org_ids, &*pool, &redis,
)
.await?
.into_iter()
@ -69,7 +68,7 @@ pub async fn export(
.collect::<Vec<_>>();

let notifs = crate::database::models::notification_item::Notification::get_many_user(
user_id, &**pool, &redis,
user_id, &*pool, &redis,
)
.await?
.into_iter()
@ -78,7 +77,7 @@ pub async fn export(

let oauth_clients =
crate::database::models::oauth_client_item::OAuthClient::get_all_user_clients(
user_id, &**pool,
user_id, &*pool,
)
.await?
.into_iter()
@ -86,7 +85,7 @@ pub async fn export(
.collect::<Vec<_>>();

let oauth_authorizations = crate::database::models::oauth_client_authorization_item::OAuthClientAuthorization::get_all_for_user(
user_id, &**pool,
user_id, &*pool,
)
.await?
.into_iter()
@ -95,12 +94,12 @@ pub async fn export(

let pat_ids =
crate::database::models::pat_item::PersonalAccessToken::get_user_pats(
user_id, &**pool, &redis,
user_id, &*pool, &redis,
)
.await?;
let pats =
crate::database::models::pat_item::PersonalAccessToken::get_many_ids(
&pat_ids, &**pool, &redis,
&pat_ids, &*pool, &redis,
)
.await?
.into_iter()
@ -109,13 +108,13 @@ pub async fn export(

let payout_ids =
crate::database::models::payout_item::Payout::get_all_for_user(
user_id, &**pool,
user_id, &*pool,
)
.await?;

let payouts = crate::database::models::payout_item::Payout::get_many(
&payout_ids,
&**pool,
&*pool,
)
.await?
.into_iter()
@ -123,11 +122,11 @@ pub async fn export(
.collect::<Vec<_>>();

let report_ids =
crate::database::models::user_item::User::get_reports(user_id, &**pool)
crate::database::models::user_item::User::get_reports(user_id, &*pool)
.await?;
let reports = crate::database::models::report_item::Report::get_many(
&report_ids,
&**pool,
&*pool,
)
.await?
.into_iter()
@ -140,7 +139,7 @@ pub async fn export(
",
user_id.0
)
.fetch_all(pool.as_ref())
.fetch_all(&*pool)
.await?
.into_iter()
.map(|x| crate::database::models::ids::ThreadMessageId(x.id))
@ -149,7 +148,7 @@ pub async fn export(
let messages =
crate::database::models::thread_item::ThreadMessage::get_many(
&message_ids,
&**pool,
&*pool,
)
.await?
.into_iter()
@ -160,7 +159,7 @@ pub async fn export(
"SELECT id FROM uploaded_images WHERE owner_id = $1",
user_id.0
)
.fetch_all(pool.as_ref())
.fetch_all(&*pool)
.await?
.into_iter()
.map(|x| crate::database::models::ids::ImageId(x.id))
@ -168,7 +167,7 @@ pub async fn export(

let uploaded_images = crate::database::models::image_item::Image::get_many(
&uploaded_images_ids,
&**pool,
&*pool,
&redis,
)
.await?
@ -178,14 +177,14 @@ pub async fn export(

let subscriptions =
crate::database::models::user_subscription_item::UserSubscriptionItem::get_all_user(
user_id, &**pool,
user_id, &*pool,
)
.await?
.into_iter()
.map(crate::models::billing::UserSubscription::from)
.collect::<Vec<_>>();

Ok(HttpResponse::Ok().json(serde_json::json!({
Ok(HttpResponse::Ok().json(&serde_json::json!({
"user": user,
"collections": collections,
"follows": follows,

@ -12,9 +12,9 @@ use super::v3::oauth_clients;
pub use super::ApiError;
use crate::util::cors::default_cors;

pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
pub fn config(cfg: &mut ntex::web::ServiceConfig) {
cfg.service(
actix_web::web::scope("_internal")
ntex::web::scope("_internal")
.wrap(default_cors())
.configure(admin::config)
.configure(oauth_clients::config)
@ -23,7 +23,6 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
.configure(pats::config)
.configure(moderation::config)
.configure(billing::config)
.configure(gdpr::config)
.configure(statuses::config),
.configure(gdpr::config), // .configure(statuses::config),
);
}

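The handler-signature rewrites repeated throughout these hunks all follow one mapping: actix-web's web::Data<T>, web::Query<T> and web::Json<T> extractors become ntex's web::types::State<T>, web::types::Query<T> and web::types::Json<T>, and the response builder's json() takes its payload by reference. A rough, hypothetical sketch (AppState, PageQuery and Greeting are illustrative names only, not part of labrinth), assuming the State value is registered on the app:

// Sketch of the extractor mapping applied in this diff:
//   actix-web web::Data<T>  -> ntex web::types::State<T>
//   actix-web web::Query<T> -> ntex web::types::Query<T>
//   actix-web web::Json<T>  -> ntex web::types::Json<T>
//   actix-web .json(value)  -> ntex .json(&value)
use ntex::web::{self, post, HttpResponse};
use serde::Deserialize;

#[derive(Clone)]
struct AppState {
    greeting: String,
}

#[derive(Deserialize)]
struct PageQuery {
    page: Option<u32>,
}

#[derive(Deserialize)]
struct Greeting {
    name: String,
}

#[post("/hello")]
async fn hello(
    state: web::types::State<AppState>,
    web::types::Query(query): web::types::Query<PageQuery>,
    body: web::types::Json<Greeting>,
) -> HttpResponse {
    // ntex's json() takes the body by reference, hence the added `&` in this diff.
    HttpResponse::Ok().json(&serde_json::json!({
        "greeting": state.greeting,
        "name": body.name,
        "page": query.page.unwrap_or(1),
    }))
}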
@ -6,7 +6,7 @@ use crate::models::projects::ProjectStatus;
|
||||
use crate::queue::moderation::{ApprovalType, IdentifiedFile, MissingMetadata};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes};
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use ntex::web::{self, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
@ -29,14 +29,14 @@ fn default_count() -> i16 {
|
||||
|
||||
pub async fn get_projects(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
count: web::Query<ResultCount>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
count: web::types::Query<ResultCount>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
check_is_moderator_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -55,31 +55,31 @@ pub async fn get_projects(
|
||||
ProjectStatus::Processing.as_str(),
|
||||
count.count as i64
|
||||
)
|
||||
.fetch(&**pool)
|
||||
.fetch(&*pool)
|
||||
.map_ok(|m| database::models::ProjectId(m.id))
|
||||
.try_collect::<Vec<database::models::ProjectId>>()
|
||||
.await?;
|
||||
|
||||
let projects: Vec<_> =
|
||||
database::Project::get_many_ids(&project_ids, &**pool, &redis)
|
||||
database::Project::get_many_ids(&project_ids, &*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(crate::models::projects::Project::from)
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(projects))
|
||||
Ok(HttpResponse::Ok().json(&projects))
|
||||
}
|
||||
|
||||
pub async fn get_project_meta(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
check_is_moderator_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -88,7 +88,7 @@ pub async fn get_project_meta(
|
||||
|
||||
let project_id = info.into_inner().0;
|
||||
let project =
|
||||
database::models::Project::get(&project_id, &**pool, &redis).await?;
|
||||
database::models::Project::get(&project_id, &*pool, &redis).await?;
|
||||
|
||||
if let Some(project) = project {
|
||||
let rows = sqlx::query!(
|
||||
@ -101,7 +101,7 @@ pub async fn get_project_meta(
|
||||
",
|
||||
project.inner.id.0
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.fetch_all(&*pool)
|
||||
.await?;
|
||||
|
||||
let mut merged = MissingMetadata {
|
||||
@ -141,7 +141,7 @@ pub async fn get_project_meta(
|
||||
.map(|x| x.as_bytes().to_vec())
|
||||
.collect::<Vec<_>>()
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.fetch_all(&*pool)
|
||||
.await?;
|
||||
|
||||
for row in rows {
|
||||
@ -176,7 +176,7 @@ pub async fn get_project_meta(
|
||||
",
|
||||
&check_flames,
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.fetch_all(&*pool)
|
||||
.await?;
|
||||
|
||||
for row in rows {
|
||||
@ -199,7 +199,7 @@ pub async fn get_project_meta(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(merged))
|
||||
Ok(HttpResponse::Ok().json(&merged))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -224,14 +224,14 @@ pub enum Judgement {
|
||||
|
||||
pub async fn set_project_meta(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
judgements: web::Json<HashMap<String, Judgement>>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
judgements: web::types::Json<HashMap<String, Judgement>>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
check_is_moderator_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
|
||||
@ -5,9 +5,8 @@ use crate::auth::get_user_from_headers;
|
||||
use crate::routes::ApiError;
|
||||
|
||||
use crate::database::redis::RedisPool;
|
||||
use actix_web::web::{self, Data};
|
||||
use actix_web::{delete, get, patch, post, HttpRequest, HttpResponse};
|
||||
use chrono::{DateTime, Utc};
|
||||
use ntex::web::{self, delete, get, patch, post, HttpRequest, HttpResponse};
|
||||
use rand::distributions::Alphanumeric;
|
||||
use rand::Rng;
|
||||
use rand_chacha::rand_core::SeedableRng;
|
||||
@ -30,13 +29,13 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
#[get("pat")]
|
||||
pub async fn get_pats(
|
||||
req: HttpRequest,
|
||||
pool: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAT_READ]),
|
||||
@ -47,17 +46,18 @@ pub async fn get_pats(
|
||||
let pat_ids =
|
||||
database::models::pat_item::PersonalAccessToken::get_user_pats(
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
let pats = database::models::pat_item::PersonalAccessToken::get_many_ids(
|
||||
&pat_ids, &**pool, &redis,
|
||||
&pat_ids, &*pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(
|
||||
pats.into_iter()
|
||||
&pats
|
||||
.into_iter()
|
||||
.map(|x| PersonalAccessToken::from(x, false))
|
||||
.collect::<Vec<_>>(),
|
||||
))
|
||||
@ -74,10 +74,10 @@ pub struct NewPersonalAccessToken {
|
||||
#[post("pat")]
|
||||
pub async fn create_pat(
|
||||
req: HttpRequest,
|
||||
info: web::Json<NewPersonalAccessToken>,
|
||||
pool: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
info: web::types::Json<NewPersonalAccessToken>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
info.0.validate().map_err(|err| {
|
||||
ApiError::InvalidInput(validation_errors_to_string(err, None))
|
||||
@ -96,7 +96,7 @@ pub async fn create_pat(
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAT_CREATE]),
|
||||
@ -136,7 +136,7 @@ pub async fn create_pat(
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(PersonalAccessToken {
|
||||
Ok(HttpResponse::Ok().json(&PersonalAccessToken {
|
||||
id: id.into(),
|
||||
name,
|
||||
access_token: Some(token),
|
||||
@ -159,11 +159,11 @@ pub struct ModifyPersonalAccessToken {
|
||||
#[patch("pat/{id}")]
|
||||
pub async fn edit_pat(
|
||||
req: HttpRequest,
|
||||
id: web::Path<(String,)>,
|
||||
info: web::Json<ModifyPersonalAccessToken>,
|
||||
pool: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
id: web::types::Path<(String,)>,
|
||||
info: web::types::Json<ModifyPersonalAccessToken>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
info.0.validate().map_err(|err| {
|
||||
ApiError::InvalidInput(validation_errors_to_string(err, None))
|
||||
@ -171,7 +171,7 @@ pub async fn edit_pat(
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAT_WRITE]),
|
||||
@ -181,7 +181,7 @@ pub async fn edit_pat(
|
||||
|
||||
let id = id.into_inner().0;
|
||||
let pat = database::models::pat_item::PersonalAccessToken::get(
|
||||
&id, &**pool, &redis,
|
||||
&id, &*pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -256,14 +256,14 @@ pub async fn edit_pat(
|
||||
#[delete("pat/{id}")]
|
||||
pub async fn delete_pat(
|
||||
req: HttpRequest,
|
||||
id: web::Path<(String,)>,
|
||||
pool: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
id: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAT_DELETE]),
|
||||
@ -272,7 +272,7 @@ pub async fn delete_pat(
|
||||
.1;
|
||||
let id = id.into_inner().0;
|
||||
let pat = database::models::pat_item::PersonalAccessToken::get(
|
||||
&id, &**pool, &redis,
|
||||
&id, &*pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@ -8,10 +8,10 @@ use crate::models::sessions::Session;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::env::parse_var;
|
||||
use actix_web::http::header::AUTHORIZATION;
|
||||
use actix_web::web::{scope, Data, ServiceConfig};
|
||||
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
use ntex::http::header::AUTHORIZATION;
|
||||
use ntex::web::{self, delete, get, post, HttpRequest, HttpResponse};
|
||||
use ntex::web::{scope, ServiceConfig};
|
||||
use rand::distributions::Alphanumeric;
|
||||
use rand::{Rng, SeedableRng};
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
@ -22,7 +22,7 @@ pub fn config(cfg: &mut ServiceConfig) {
|
||||
cfg.service(
|
||||
scope("session")
|
||||
.service(list)
|
||||
.service(delete)
|
||||
.service(delete_session)
|
||||
.service(refresh),
|
||||
);
|
||||
}
|
||||
@ -40,15 +40,14 @@ pub struct SessionMetadata {
|
||||
pub async fn get_session_metadata(
|
||||
req: &HttpRequest,
|
||||
) -> Result<SessionMetadata, AuthenticationError> {
|
||||
let conn_info = req.connection_info().clone();
|
||||
let ip_addr = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
|
||||
if let Some(header) = req.headers().get("CF-Connecting-IP") {
|
||||
header.to_str().ok()
|
||||
header.to_str().map(|x| x.to_string()).ok()
|
||||
} else {
|
||||
conn_info.peer_addr()
|
||||
req.peer_addr().map(|x| x.to_string())
|
||||
}
|
||||
} else {
|
||||
conn_info.peer_addr()
|
||||
req.peer_addr().map(|x| x.to_string())
|
||||
};
|
||||
|
||||
let country = req
|
||||
@ -132,13 +131,13 @@ pub async fn issue_session(
|
||||
#[get("list")]
|
||||
pub async fn list(
|
||||
req: HttpRequest,
|
||||
pool: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_READ]),
|
||||
@ -153,29 +152,29 @@ pub async fn list(
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
|
||||
let session_ids =
|
||||
DBSession::get_user_sessions(current_user.id.into(), &**pool, &redis)
|
||||
DBSession::get_user_sessions(current_user.id.into(), &*pool, &redis)
|
||||
.await?;
|
||||
let sessions = DBSession::get_many_ids(&session_ids, &**pool, &redis)
|
||||
let sessions = DBSession::get_many_ids(&session_ids, &*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|x| x.expires > Utc::now())
|
||||
.map(|x| Session::from(x, false, Some(session)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(sessions))
|
||||
Ok(HttpResponse::Ok().json(&sessions))
|
||||
}
|
||||
|
||||
#[delete("{id}")]
|
||||
pub async fn delete(
|
||||
info: web::Path<(String,)>,
|
||||
pub async fn delete_session(
|
||||
info: web::types::Path<(String,)>,
|
||||
req: HttpRequest,
|
||||
pool: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_DELETE]),
|
||||
@ -183,7 +182,7 @@ pub async fn delete(
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;
|
||||
let session = DBSession::get(info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(session) = session {
|
||||
if session.user_id == current_user.id.into() {
|
||||
@ -208,12 +207,12 @@ pub async fn delete(
|
||||
#[post("refresh")]
|
||||
pub async fn refresh(
|
||||
req: HttpRequest,
|
||||
pool: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user =
|
||||
get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
|
||||
get_user_from_headers(&req, &*pool, &redis, &session_queue, None)
|
||||
.await?
|
||||
.1;
|
||||
let session = req
|
||||
@ -224,7 +223,7 @@ pub async fn refresh(
|
||||
ApiError::Authentication(AuthenticationError::InvalidCredentials)
|
||||
})?;
|
||||
|
||||
let session = DBSession::get(session, &**pool, &redis).await?;
|
||||
let session = DBSession::get(session, &*pool, &redis).await?;
|
||||
|
||||
if let Some(session) = session {
|
||||
if current_user.id != session.user_id.into()
|
||||
@ -252,7 +251,7 @@ pub async fn refresh(
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(Session::from(new_session, true, None)))
|
||||
Ok(HttpResponse::Ok().json(&Session::from(new_session, true, None)))
|
||||
} else {
|
||||
Err(ApiError::Authentication(
|
||||
AuthenticationError::InvalidCredentials,
|
||||
|
||||
@ -8,17 +8,16 @@ use crate::models::users::{User, UserStatus};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::queue::socket::ActiveSockets;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::web::{Data, Payload};
|
||||
use actix_web::{get, web, HttpRequest, HttpResponse};
|
||||
use actix_ws::AggregatedMessage;
|
||||
use ntex::web::{self, get, HttpRequest, HttpResponse};
|
||||
// use actix_ws::AggregatedMessage;
|
||||
use chrono::Utc;
|
||||
use futures_util::StreamExt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(ws_init);
|
||||
}
|
||||
// pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
// cfg.service(ws_init);
|
||||
// }
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
@ -41,161 +40,161 @@ struct LauncherHeartbeatInit {
|
||||
code: String,
|
||||
}
|
||||
|
||||
#[get("launcher_socket")]
|
||||
pub async fn ws_init(
|
||||
req: HttpRequest,
|
||||
pool: Data<PgPool>,
|
||||
web::Query(auth): web::Query<LauncherHeartbeatInit>,
|
||||
body: Payload,
|
||||
db: Data<ActiveSockets>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (scopes, db_user) = get_user_record_from_bearer_token(
|
||||
&req,
|
||||
Some(&auth.code),
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::Authentication(AuthenticationError::InvalidCredentials)
|
||||
})?;
|
||||
|
||||
if !scopes.contains(Scopes::SESSION_ACCESS) {
|
||||
return Err(ApiError::Authentication(
|
||||
AuthenticationError::InvalidCredentials,
|
||||
));
|
||||
}
|
||||
|
||||
let user = User::from_full(db_user);
|
||||
|
||||
if let Some((_, (_, session))) = db.auth_sockets.remove(&user.id) {
|
||||
let _ = session.close(None).await;
|
||||
}
|
||||
|
||||
let (res, mut session, msg_stream) = match actix_ws::handle(&req, body) {
|
||||
Ok(x) => x,
|
||||
Err(e) => return Ok(e.error_response()),
|
||||
};
|
||||
|
||||
let status = UserStatus {
|
||||
user_id: user.id,
|
||||
profile_name: None,
|
||||
last_update: Utc::now(),
|
||||
};
|
||||
|
||||
let friends =
|
||||
FriendItem::get_user_friends(user.id.into(), Some(true), &**pool)
|
||||
.await?;
|
||||
|
||||
let friend_statuses = if !friends.is_empty() {
|
||||
friends
|
||||
.iter()
|
||||
.filter_map(|x| {
|
||||
db.auth_sockets.get(
|
||||
&if x.user_id == user.id.into() {
|
||||
x.friend_id
|
||||
} else {
|
||||
x.user_id
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
})
|
||||
.map(|x| x.value().0.clone())
|
||||
.collect::<Vec<_>>()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let _ = session
|
||||
.text(serde_json::to_string(
|
||||
&ServerToClientMessage::FriendStatuses {
|
||||
statuses: friend_statuses,
|
||||
},
|
||||
)?)
|
||||
.await;
|
||||
|
||||
db.auth_sockets.insert(user.id, (status.clone(), session));
|
||||
|
||||
broadcast_friends(
|
||||
user.id,
|
||||
ServerToClientMessage::StatusUpdate { status },
|
||||
&pool,
|
||||
&db,
|
||||
Some(friends),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut stream = msg_stream.aggregate_continuations();
|
||||
|
||||
actix_web::rt::spawn(async move {
|
||||
// receive messages from websocket
|
||||
while let Some(msg) = stream.next().await {
|
||||
match msg {
|
||||
Ok(AggregatedMessage::Text(text)) => {
|
||||
if let Ok(message) =
|
||||
serde_json::from_str::<ClientToServerMessage>(&text)
|
||||
{
|
||||
match message {
|
||||
ClientToServerMessage::StatusUpdate {
|
||||
profile_name,
|
||||
} => {
|
||||
if let Some(mut pair) =
|
||||
db.auth_sockets.get_mut(&user.id)
|
||||
{
|
||||
let (status, _) = pair.value_mut();
|
||||
|
||||
if status
|
||||
.profile_name
|
||||
.as_ref()
|
||||
.map(|x| x.len() > 64)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
status.profile_name = profile_name;
|
||||
status.last_update = Utc::now();
|
||||
|
||||
let _ = broadcast_friends(
|
||||
user.id,
|
||||
ServerToClientMessage::StatusUpdate {
|
||||
status: status.clone(),
|
||||
},
|
||||
&pool,
|
||||
&db,
|
||||
None,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(AggregatedMessage::Close(_)) => {
|
||||
let _ = close_socket(user.id, &pool, &db).await;
|
||||
}
|
||||
|
||||
Ok(AggregatedMessage::Ping(msg)) => {
|
||||
if let Some(mut socket) = db.auth_sockets.get_mut(&user.id)
|
||||
{
|
||||
let (_, socket) = socket.value_mut();
|
||||
let _ = socket.pong(&msg).await;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let _ = close_socket(user.id, &pool, &db).await;
|
||||
});
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
// #[get("launcher_socket")]
|
||||
// pub async fn ws_init(
|
||||
// req: HttpRequest,
|
||||
// pool: web::types::State<PgPool>,
|
||||
// web::types::Query(auth): web::types::Query<LauncherHeartbeatInit>,
|
||||
// body: Payload,
|
||||
// db: web::types::State<ActiveSockets>,
|
||||
// redis: web::types::State<RedisPool>,
|
||||
// session_queue: web::types::State<AuthQueue>,
|
||||
// ) -> Result<HttpResponse, ApiError> {
|
||||
// let (scopes, db_user) = get_user_record_from_bearer_token(
|
||||
// &req,
|
||||
// Some(&auth.code),
|
||||
// &*pool,
|
||||
// &redis,
|
||||
// &session_queue,
|
||||
// )
|
||||
// .await?
|
||||
// .ok_or_else(|| {
|
||||
// ApiError::Authentication(AuthenticationError::InvalidCredentials)
|
||||
// })?;
|
||||
//
|
||||
// if !scopes.contains(Scopes::SESSION_ACCESS) {
|
||||
// return Err(ApiError::Authentication(
|
||||
// AuthenticationError::InvalidCredentials,
|
||||
// ));
|
||||
// }
|
||||
//
|
||||
// let user = User::from_full(db_user);
|
||||
//
|
||||
// if let Some((_, (_, session))) = db.auth_sockets.remove(&user.id) {
|
||||
// let _ = session.close(None).await;
|
||||
// }
|
||||
//
|
||||
// let (res, mut session, msg_stream) = match actix_ws::handle(&req, body) {
|
||||
// Ok(x) => x,
|
||||
// Err(e) => return Ok(e.error_response()),
|
||||
// };
|
||||
//
|
||||
// let status = UserStatus {
|
||||
// user_id: user.id,
|
||||
// profile_name: None,
|
||||
// last_update: Utc::now(),
|
||||
// };
|
||||
//
|
||||
// let friends =
|
||||
// FriendItem::get_user_friends(user.id.into(), Some(true), &*pool)
|
||||
// .await?;
|
||||
//
|
||||
// let friend_statuses = if !friends.is_empty() {
|
||||
// friends
|
||||
// .iter()
|
||||
// .filter_map(|x| {
|
||||
// db.auth_sockets.get(
|
||||
// &if x.user_id == user.id.into() {
|
||||
// x.friend_id
|
||||
// } else {
|
||||
// x.user_id
|
||||
// }
|
||||
// .into(),
|
||||
// )
|
||||
// })
|
||||
// .map(|x| x.value().0.clone())
|
||||
// .collect::<Vec<_>>()
|
||||
// } else {
|
||||
// Vec::new()
|
||||
// };
|
||||
//
|
||||
// let _ = session
|
||||
// .text(serde_json::to_string(
|
||||
// &ServerToClientMessage::FriendStatuses {
|
||||
// statuses: friend_statuses,
|
||||
// },
|
||||
// )?)
|
||||
// .await;
|
||||
//
|
||||
// db.auth_sockets.insert(user.id, (status.clone(), session));
|
||||
//
|
||||
// broadcast_friends(
|
||||
// user.id,
|
||||
// ServerToClientMessage::StatusUpdate { status },
|
||||
// &pool,
|
||||
// &db,
|
||||
// Some(friends),
|
||||
// )
|
||||
// .await?;
|
||||
//
|
||||
// let mut stream = msg_stream.aggregate_continuations();
|
||||
//
|
||||
// ntex::rt::spawn(async move {
|
||||
// // receive messages from websocket
|
||||
// while let Some(msg) = stream.next().await {
|
||||
// match msg {
|
||||
// Ok(AggregatedMessage::Text(text)) => {
|
||||
// if let Ok(message) =
|
||||
// serde_json::from_str::<ClientToServerMessage>(&text)
|
||||
// {
|
||||
// match message {
|
||||
// ClientToServerMessage::StatusUpdate {
|
||||
// profile_name,
|
||||
// } => {
|
||||
// if let Some(mut pair) =
|
||||
// db.auth_sockets.get_mut(&user.id)
|
||||
// {
|
||||
// let (status, _) = pair.value_mut();
|
||||
//
|
||||
// if status
|
||||
// .profile_name
|
||||
// .as_ref()
|
||||
// .map(|x| x.len() > 64)
|
||||
// .unwrap_or(false)
|
||||
// {
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// status.profile_name = profile_name;
|
||||
// status.last_update = Utc::now();
|
||||
//
|
||||
// let _ = broadcast_friends(
|
||||
// user.id,
|
||||
// ServerToClientMessage::StatusUpdate {
|
||||
// status: status.clone(),
|
||||
// },
|
||||
// &pool,
|
||||
// &db,
|
||||
// None,
|
||||
// )
|
||||
// .await;
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// Ok(AggregatedMessage::Close(_)) => {
|
||||
// let _ = close_socket(user.id, &pool, &db).await;
|
||||
// }
|
||||
//
|
||||
// Ok(AggregatedMessage::Ping(msg)) => {
|
||||
// if let Some(mut socket) = db.auth_sockets.get_mut(&user.id)
|
||||
// {
|
||||
// let (_, socket) = socket.value_mut();
|
||||
// let _ = socket.pong(&msg).await;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// _ => {}
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// let _ = close_socket(user.id, &pool, &db).await;
|
||||
// });
|
||||
//
|
||||
// Ok(res)
|
||||
// }
|
||||
|
||||
pub async fn broadcast_friends(
|
||||
user_id: UserId,
|
||||
@ -218,13 +217,14 @@ pub async fn broadcast_friends(
|
||||
};
|
||||
|
||||
if friend.accepted {
|
||||
if let Some(mut socket) =
|
||||
sockets.auth_sockets.get_mut(&friend_id.into())
|
||||
{
|
||||
let (_, socket) = socket.value_mut();
|
||||
|
||||
let _ = socket.text(serde_json::to_string(&message)?).await;
|
||||
}
|
||||
// TODO: FIX ME
|
||||
// if let Some(mut socket) =
|
||||
// sockets.auth_sockets.get_mut(&friend_id.into())
|
||||
// {
|
||||
// let (_, socket) = socket.value_mut();
|
||||
//
|
||||
// let _ = socket.text(serde_json::to_string(&message)?).await;
|
||||
// }
|
||||
}
|
||||
}
|
||||
|
||||
@ -236,18 +236,19 @@ pub async fn close_socket(
|
||||
pool: &PgPool,
|
||||
sockets: &ActiveSockets,
|
||||
) -> Result<(), crate::database::models::DatabaseError> {
|
||||
if let Some((_, (_, socket))) = sockets.auth_sockets.remove(&id) {
|
||||
let _ = socket.close(None).await;
|
||||
|
||||
broadcast_friends(
|
||||
id,
|
||||
ServerToClientMessage::UserOffline { id },
|
||||
pool,
|
||||
sockets,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
// TODO: FIX ME
|
||||
// if let Some((_, (_, socket))) = sockets.auth_sockets.remove(&id) {
|
||||
// let _ = socket.close(None).await;
|
||||
//
|
||||
// broadcast_friends(
|
||||
// id,
|
||||
// ServerToClientMessage::UserOffline { id },
|
||||
// pool,
|
||||
// sockets,
|
||||
// None,
|
||||
// )
|
||||
// .await?;
|
||||
// }
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -9,7 +9,8 @@ use crate::models::projects::{ProjectId, VersionId};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::{auth::get_user_from_headers, database};
|
||||
use actix_web::{get, route, web, HttpRequest, HttpResponse};
|
||||
use ntex::web;
|
||||
use ntex::web::{get, HttpRequest, HttpResponse};
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashSet;
|
||||
use yaserde_derive::YaSerialize;
|
||||
@ -18,7 +19,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(maven_metadata);
|
||||
cfg.service(version_file_sha512);
|
||||
cfg.service(version_file_sha1);
|
||||
cfg.service(version_file);
|
||||
// cfg.service(version_file);
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, YaSerialize)]
|
||||
@ -69,21 +70,21 @@ pub struct MavenPom {
|
||||
#[get("maven/modrinth/{id}/maven-metadata.xml")]
|
||||
pub async fn maven_metadata(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
params: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let project_id = params.into_inner().0;
|
||||
let Some(project) =
|
||||
database::models::Project::get(&project_id, &**pool, &redis).await?
|
||||
database::models::Project::get(&project_id, &*pool, &redis).await?
|
||||
else {
|
||||
return Err(ApiError::NotFound);
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -109,7 +110,7 @@ pub async fn maven_metadata(
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.fetch_all(&*pool)
|
||||
.await?;
|
||||
|
||||
let mut new_versions = Vec::new();
|
||||
@ -268,28 +269,29 @@ fn find_file<'a>(
|
||||
None
|
||||
}
|
||||
|
||||
#[route(
|
||||
"maven/modrinth/{id}/{versionnum}/{file}",
|
||||
method = "GET",
|
||||
method = "HEAD"
|
||||
)]
|
||||
// TODO: fix me
|
||||
// #[route(
|
||||
// "maven/modrinth/{id}/{versionnum}/{file}",
|
||||
// method = "GET",
|
||||
// method = "HEAD"
|
||||
// )]
|
||||
pub async fn version_file(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String, String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
params: web::types::Path<(String, String, String)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (project_id, vnum, file) = params.into_inner();
|
||||
let Some(project) =
|
||||
database::models::Project::get(&project_id, &**pool, &redis).await?
|
||||
database::models::Project::get(&project_id, &*pool, &redis).await?
|
||||
else {
|
||||
return Err(ApiError::NotFound);
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -331,7 +333,7 @@ pub async fn version_file(
|
||||
find_file(&project_id, &vnum, &version, &file)
|
||||
{
|
||||
return Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("location", &*selected_file.url))
|
||||
.header("location", &*selected_file.url)
|
||||
.body(""));
|
||||
}
|
||||
|
||||
@ -341,21 +343,21 @@ pub async fn version_file(
|
||||
#[get("maven/modrinth/{id}/{versionnum}/{file}.sha1")]
|
||||
pub async fn version_file_sha1(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String, String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
params: web::types::Path<(String, String, String)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (project_id, vnum, file) = params.into_inner();
|
||||
let Some(project) =
|
||||
database::models::Project::get(&project_id, &**pool, &redis).await?
|
||||
database::models::Project::get(&project_id, &*pool, &redis).await?
|
||||
else {
|
||||
return Err(ApiError::NotFound);
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -386,21 +388,21 @@ pub async fn version_file_sha1(
|
||||
#[get("maven/modrinth/{id}/{versionnum}/{file}.sha512")]
|
||||
pub async fn version_file_sha512(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String, String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
params: web::types::Path<(String, String, String)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (project_id, vnum, file) = params.into_inner();
|
||||
let Some(project) =
|
||||
database::models::Project::get(&project_id, &**pool, &redis).await?
|
||||
database::models::Project::get(&project_id, &*pool, &redis).await?
|
||||
else {
|
||||
return Err(ApiError::NotFound);
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
|
||||
@ -2,11 +2,11 @@ use crate::file_hosting::FileHostingError;
use crate::routes::analytics::{page_view_ingest, playtime_ingest};
use crate::util::cors::default_cors;
use crate::util::env::parse_strings_from_var;
use actix_cors::Cors;
use actix_files::Files;
use actix_web::http::StatusCode;
use actix_web::{web, HttpResponse};
use futures::FutureExt;
use ntex::http::StatusCode;
use ntex::web::{self, HttpRequest, HttpResponse};
use ntex_cors::Cors;
use ntex_files::Files;

pub mod internal;
pub mod v2;
@ -35,45 +35,42 @@ pub fn root_config(cfg: &mut web::ServiceConfig) {
);
cfg.service(
web::scope("analytics")
.wrap(
Cors::default()
.allowed_origin_fn(|origin, _req_head| {
let allowed_origins =
parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS")
.unwrap_or_default();
.wrap({
let mut cors = Cors::new();

allowed_origins.contains(&"*".to_string())
|| allowed_origins.contains(
&origin
.to_str()
.unwrap_or_default()
.to_string(),
)
})
.allowed_methods(vec!["GET", "POST"])
for origin in
parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS")
.unwrap_or_default()
{
cors = cors.allowed_origin(&*origin);
}

cors.allowed_methods(vec!["GET", "POST"])
.allowed_headers(vec![
actix_web::http::header::AUTHORIZATION,
actix_web::http::header::ACCEPT,
actix_web::http::header::CONTENT_TYPE,
ntex::http::header::AUTHORIZATION,
ntex::http::header::ACCEPT,
ntex::http::header::CONTENT_TYPE,
])
.max_age(3600),
)
.max_age(3600)
.finish()
})
.service(page_view_ingest)
.service(playtime_ingest),
);
cfg.service(
web::scope("api/v1")
.wrap(default_cors())
.wrap_fn(|req, _srv| {
async {
Ok(req.into_response(
HttpResponse::Gone()
.content_type("application/json")
.body(r#"{"error":"api_deprecated","description":"You are using an application that uses an outdated version of Modrinth's API. Please either update it or switch to another application. For developers: https://docs.modrinth.com/api/#versioning"}"#)
))
}.boxed_local()
})
);
// TODO: FIX ME
// cfg.service(
// web::scope("api/v1")
// .wrap(default_cors())
// .wrap_fn(|req, _srv| {
// async {
// Ok(req.into_response(
// HttpResponse::Gone()
// .content_type("application/json")
// .body(r#"{"error":"api_deprecated","description":"You are using an application that uses an outdated version of Modrinth's API. Please either update it or switch to another application. For developers: https://docs.modrinth.com/api/#versioning"}"#)
// ))
// }.boxed_local()
// })
// );
cfg.service(
web::scope("")
.wrap(default_cors())
@ -177,7 +174,7 @@ impl ApiError {
}
}

impl actix_web::ResponseError for ApiError {
impl ntex::web::WebResponseError for ApiError {
fn status_code(&self) -> StatusCode {
match self {
ApiError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
@ -210,7 +207,7 @@ impl actix_web::ResponseError for ApiError {
}
}

fn error_response(&self) -> HttpResponse {
HttpResponse::build(self.status_code()).json(self.as_api_error())
fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
HttpResponse::build(self.status_code()).json(&self.as_api_error())
}
}

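The error-trait change shown above (actix-web's ResponseError replaced by ntex's WebResponseError, whose error_response additionally receives the request) follows a fixed shape; a minimal, hypothetical sketch with illustrative MyError/MyErrorBody types, not taken from the codebase:

// Sketch only: ntex passes the request into error_response, and json()
// takes the serialized body by reference.
use ntex::http::StatusCode;
use ntex::web::{HttpRequest, HttpResponse, WebResponseError};
use serde::Serialize;
use thiserror::Error;

#[derive(Error, Debug)]
enum MyError {
    #[error("resource not found")]
    NotFound,
    #[error("internal error: {0}")]
    Internal(String),
}

#[derive(Serialize)]
struct MyErrorBody<'a> {
    error: &'a str,
    description: String,
}

impl WebResponseError for MyError {
    fn status_code(&self) -> StatusCode {
        match self {
            MyError::NotFound => StatusCode::NOT_FOUND,
            MyError::Internal(..) => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }

    fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
        HttpResponse::build(self.status_code()).json(&MyErrorBody {
            error: match self {
                MyError::NotFound => "not_found",
                MyError::Internal(..) => "internal_error",
            },
            description: self.to_string(),
        })
    }
}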
@ -1,5 +1,5 @@
use crate::models::error::ApiError;
use actix_web::{HttpResponse, Responder};
use ntex::web::{HttpResponse, Responder};

pub async fn not_found() -> impl Responder {
let data = ApiError {
@ -7,5 +7,5 @@ pub async fn not_found() -> impl Responder {
description: "the requested route does not exist".to_string(),
};

HttpResponse::NotFound().json(data)
HttpResponse::NotFound().json(&data)
}

@ -1,6 +1,6 @@
use std::collections::HashMap;

use actix_web::{get, web, HttpRequest, HttpResponse};
use ntex::web::{self, get, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;

@ -15,7 +15,7 @@ use crate::queue::session::AuthQueue;

use super::ApiError;

pub fn config(cfg: &mut web::ServiceConfig) {
pub fn config(cfg: &mut ntex::web::ServiceConfig) {
cfg.service(forge_updates);
}

@ -32,23 +32,23 @@ fn default_neoforge() -> String {
#[get("{id}/forge_updates.json")]
pub async fn forge_updates(
req: HttpRequest,
web::Query(neo): web::Query<NeoForge>,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(neo): web::types::Query<NeoForge>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
const ERROR: &str = "The specified project does not exist!";

let (id,) = info.into_inner();

let project = database::models::Project::get(&id, &**pool, &redis)
let project = database::models::Project::get(&id, &*pool, &redis)
.await?
.ok_or_else(|| ApiError::InvalidInput(ERROR.to_string()))?;

let user_option = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::PROJECT_READ]),
@ -62,7 +62,7 @@ pub async fn forge_updates(
}

let versions =
database::models::Version::get_many(&project.versions, &**pool, &redis)
database::models::Version::get_many(&project.versions, &*pool, &redis)
.await?;

let loaders = match &*neo.neoforge {
@ -129,5 +129,5 @@ pub async fn forge_updates(
}
}

Ok(HttpResponse::Ok().json(response))
Ok(HttpResponse::Ok().json(&response))
}

@@ -1,6 +1,6 @@
mod moderation;
mod notifications;
pub(crate) mod project_creation;
// pub(crate) mod project_creation;
mod projects;
mod reports;
mod statistics;
@@ -8,16 +8,16 @@ pub mod tags;
mod teams;
mod threads;
mod users;
mod version_creation;
// mod version_creation;
pub mod version_file;
mod versions;

pub use super::ApiError;
use crate::util::cors::default_cors;

pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
pub fn config(cfg: &mut ntex::web::ServiceConfig) {
cfg.service(
actix_web::web::scope("v2")
ntex::web::scope("v2")
.wrap(default_cors())
.configure(super::internal::admin::config)
// Todo: separate these- they need to also follow v2-v3 conversion
@@ -26,7 +26,7 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
.configure(super::internal::pats::config)
.configure(moderation::config)
.configure(notifications::config)
.configure(project_creation::config)
// .configure(project_creation::config)
.configure(projects::config)
.configure(reports::config)
.configure(statistics::config)

@@ -4,7 +4,7 @@ use crate::models::v2::projects::LegacyProject;
use crate::queue::session::AuthQueue;
use crate::routes::internal;
use crate::{database::redis::RedisPool, routes::v2_reroute};
use actix_web::{get, web, HttpRequest, HttpResponse};
use ntex::web::{self, get, HttpRequest, HttpResponse};
use serde::Deserialize;
use sqlx::PgPool;

@@ -25,16 +25,18 @@ fn default_count() -> i16 {
#[get("projects")]
pub async fn get_projects(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
count: web::Query<ResultCount>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
count: web::types::Query<ResultCount>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = internal::moderation::get_projects(
req,
pool.clone(),
redis.clone(),
web::Query(internal::moderation::ResultCount { count: count.count }),
web::types::Query(internal::moderation::ResultCount {
count: count.count,
}),
session_queue,
)
.await
@@ -44,8 +46,8 @@ pub async fn get_projects(
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
LegacyProject::from_many(project, &*pool, &redis).await?;
Ok(HttpResponse::Ok().json(&legacy_projects))
}
Err(response) => Ok(response),
}

@@ -6,7 +6,7 @@ use crate::queue::session::AuthQueue;
use crate::routes::v2_reroute;
use crate::routes::v3;
use crate::routes::ApiError;
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
use ntex::web::{self, delete, get, patch, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;

@@ -31,14 +31,14 @@ pub struct NotificationIds {
#[get("notifications")]
pub async fn notifications_get(
req: HttpRequest,
web::Query(ids): web::Query<NotificationIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<NotificationIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let resp = v3::notifications::notifications_get(
req,
web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
web::types::Query(v3::notifications::NotificationIds { ids: ids.ids }),
pool,
redis,
session_queue,
@@ -51,7 +51,7 @@ pub async fn notifications_get(
.into_iter()
.map(LegacyNotification::from)
.collect();
Ok(HttpResponse::Ok().json(notifications))
Ok(HttpResponse::Ok().json(&notifications))
}
Err(response) => Ok(response),
}
@@ -60,10 +60,10 @@ pub async fn notifications_get(
#[get("{id}")]
pub async fn notification_get(
req: HttpRequest,
info: web::Path<(NotificationId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(NotificationId,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::notifications::notification_get(
req,
@@ -77,7 +77,7 @@ pub async fn notification_get(
match v2_reroute::extract_ok_json::<Notification>(response).await {
Ok(notification) => {
let notification = LegacyNotification::from(notification);
Ok(HttpResponse::Ok().json(notification))
Ok(HttpResponse::Ok().json(&notification))
}
Err(response) => Ok(response),
}
@@ -86,10 +86,10 @@ pub async fn notification_get(
#[patch("{id}")]
pub async fn notification_read(
req: HttpRequest,
info: web::Path<(NotificationId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(NotificationId,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::notifications::notification_read(req, info, pool, redis, session_queue)
@@ -100,10 +100,10 @@ pub async fn notification_read(
#[delete("{id}")]
pub async fn notification_delete(
req: HttpRequest,
info: web::Path<(NotificationId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(NotificationId,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::notifications::notification_delete(
@@ -120,15 +120,15 @@ pub async fn notification_delete(
#[patch("notifications")]
pub async fn notifications_read(
req: HttpRequest,
web::Query(ids): web::Query<NotificationIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<NotificationIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::notifications::notifications_read(
req,
web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
web::types::Query(v3::notifications::NotificationIds { ids: ids.ids }),
pool,
redis,
session_queue,
@@ -140,15 +140,15 @@ pub async fn notifications_read(
#[delete("notifications")]
pub async fn notifications_delete(
req: HttpRequest,
web::Query(ids): web::Query<NotificationIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<NotificationIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::notifications::notifications_delete(
req,
web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
web::types::Query(v3::notifications::NotificationIds { ids: ids.ids }),
pool,
redis,
session_queue,

@@ -11,9 +11,8 @@ use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::default_project_type;
use crate::routes::v3::project_creation::{CreateError, NewGalleryItem};
use crate::routes::{v2_reroute, v3};
use actix_multipart::Multipart;
use actix_web::web::Data;
use actix_web::{post, HttpRequest, HttpResponse};
use ntex::web::{self, post, HttpRequest, HttpResponse};
use ntex_multipart::Multipart;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::postgres::PgPool;
@@ -24,7 +23,7 @@ use validator::Validate;

use super::version_creation::InitialVersionData;

pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
pub fn config(cfg: &mut ntex::web::ServiceConfig) {
cfg.service(project_create);
}

@@ -139,10 +138,10 @@ struct ProjectCreateData {
pub async fn project_create(
req: HttpRequest,
payload: Multipart,
client: Data<PgPool>,
redis: Data<RedisPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: Data<AuthQueue>,
client: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
// Convert V2 multipart payload to V3 multipart payload
let payload = v2_reroute::alter_actix_multipart(
@@ -260,13 +259,13 @@ pub async fn project_create(
Ok(project) => {
let version_item = match project.versions.first() {
Some(vid) => {
version_item::Version::get((*vid).into(), &**client, &redis)
version_item::Version::get((*vid).into(), &*client, &redis)
.await?
}
None => None,
};
let project = LegacyProject::from(project, version_item);
Ok(HttpResponse::Ok().json(project))
Ok(HttpResponse::Ok().json(&project))
}
Err(response) => Ok(response),
}

@@ -14,7 +14,7 @@ use crate::queue::session::AuthQueue;
use crate::routes::v3::projects::ProjectIds;
use crate::routes::{v2_reroute, v3, ApiError};
use crate::search::{search_for_project, SearchConfig, SearchError};
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use ntex::web::{self, delete, get, patch, post, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::HashMap;
@@ -52,8 +52,8 @@ pub fn config(cfg: &mut web::ServiceConfig) {

#[get("search")]
pub async fn project_search(
web::Query(info): web::Query<SearchRequest>,
config: web::Data<SearchConfig>,
web::types::Query(info): web::types::Query<SearchRequest>,
config: web::types::State<SearchConfig>,
) -> Result<HttpResponse, SearchError> {
// Search now uses loader_fields instead of explicit 'client_side' and 'server_side' fields
// While the backend for this has changed, it doesnt affect much
@@ -108,7 +108,7 @@ pub async fn project_search(

let results = LegacySearchResults::from(results);

Ok(HttpResponse::Ok().json(results))
Ok(HttpResponse::Ok().json(&results))
}

/// Parses a facet into a key, operator, and value
@@ -153,14 +153,14 @@ pub struct RandomProjects {

#[get("projects_random")]
pub async fn random_projects_get(
web::Query(count): web::Query<RandomProjects>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
web::types::Query(count): web::types::Query<RandomProjects>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let count = v3::projects::RandomProjects { count: count.count };

let response = v3::projects::random_projects_get(
web::Query(count),
web::types::Query(count),
pool.clone(),
redis.clone(),
)
@@ -171,8 +171,8 @@ pub async fn random_projects_get(
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
LegacyProject::from_many(project, &*pool, &redis).await?;
Ok(HttpResponse::Ok().json(&legacy_projects))
}
Err(response) => Ok(response),
}
@@ -181,15 +181,15 @@ pub async fn random_projects_get(
#[get("projects")]
pub async fn projects_get(
req: HttpRequest,
web::Query(ids): web::Query<ProjectIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<ProjectIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Call V3 project creation
let response = v3::projects::projects_get(
req,
web::Query(ids),
web::types::Query(ids),
pool.clone(),
redis.clone(),
session_queue,
@@ -202,8 +202,8 @@ pub async fn projects_get(
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
LegacyProject::from_many(project, &*pool, &redis).await?;
Ok(HttpResponse::Ok().json(&legacy_projects))
}
Err(response) => Ok(response),
}
@@ -212,10 +212,10 @@ pub async fn projects_get(
#[get("{id}")]
pub async fn project_get(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Convert V2 data to V3 data
// Call V3 project creation
@@ -234,13 +234,13 @@ pub async fn project_get(
Ok(project) => {
let version_item = match project.versions.first() {
Some(vid) => {
version_item::Version::get((*vid).into(), &**pool, &redis)
version_item::Version::get((*vid).into(), &*pool, &redis)
.await?
}
None => None,
};
let project = LegacyProject::from(project, version_item);
Ok(HttpResponse::Ok().json(project))
Ok(HttpResponse::Ok().json(&project))
}
Err(response) => Ok(response),
}
@@ -249,9 +249,9 @@ pub async fn project_get(
//checks the validity of a project id or slug
#[get("{id}/check")]
pub async fn project_get_check(
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
// Returns an id only, do not need to convert
v3::projects::project_get_check(info, pool, redis)
@@ -268,10 +268,10 @@ struct DependencyInfo {
#[get("dependencies")]
pub async fn dependency_list(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// TODO: tests, probably
let response = v3::projects::dependency_list(
@@ -292,7 +292,7 @@ pub async fn dependency_list(
Ok(dependency_info) => {
let converted_projects = LegacyProject::from_many(
dependency_info.projects,
&**pool,
&*pool,
&redis,
)
.await?;
@@ -302,7 +302,7 @@ pub async fn dependency_list(
.map(LegacyVersion::from)
.collect();

Ok(HttpResponse::Ok().json(DependencyInfo {
Ok(HttpResponse::Ok().json(&DependencyInfo {
projects: converted_projects,
versions: converted_versions,
}))
@@ -414,13 +414,13 @@ pub struct EditProject {
#[allow(clippy::too_many_arguments)]
pub async fn project_edit(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
search_config: web::Data<SearchConfig>,
new_project: web::Json<EditProject>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
moderation_queue: web::Data<AutomatedModerationQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
search_config: web::types::State<SearchConfig>,
new_project: web::types::Json<EditProject>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
moderation_queue: web::types::State<AutomatedModerationQueue>,
) -> Result<HttpResponse, ApiError> {
let v2_new_project = new_project.into_inner();
let client_side = v2_new_project.client_side;
@@ -474,7 +474,7 @@ pub async fn project_edit(
if let Some(donation_urls) = v2_new_project.donation_urls {
// Fetch current donation links from project so we know what to delete
let fetched_example_project =
project_item::Project::get(&info.0, &**pool, &redis).await?;
project_item::Project::get(&info.0, &*pool, &redis).await?;
let donation_links = fetched_example_project
.map(|x| {
x.urls
@@ -525,7 +525,7 @@ pub async fn project_edit(
info,
pool.clone(),
search_config,
web::Json(new_project),
web::types::Json(new_project),
redis.clone(),
session_queue.clone(),
moderation_queue,
@@ -540,13 +540,13 @@ pub async fn project_edit(
{
let project_item = project_item::Project::get(
&new_slug.unwrap_or(project_id),
&**pool,
&*pool,
&redis,
)
.await?;
let version_ids = project_item.map(|x| x.versions).unwrap_or_default();
let versions =
version_item::Version::get_many(&version_ids, &**pool, &redis)
version_item::Version::get_many(&version_ids, &*pool, &redis)
.await?;
for version in versions {
let version = Version::from(version);
@@ -643,11 +643,11 @@ pub struct BulkEditProject {
#[patch("projects")]
pub async fn projects_edit(
req: HttpRequest,
web::Query(ids): web::Query<ProjectIds>,
pool: web::Data<PgPool>,
bulk_edit_project: web::Json<BulkEditProject>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<ProjectIds>,
pool: web::types::State<PgPool>,
bulk_edit_project: web::types::Json<BulkEditProject>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let bulk_edit_project = bulk_edit_project.into_inner();

@@ -656,7 +656,7 @@ pub async fn projects_edit(
// If we are *setting* donation links, we will set every possible donation link to None, as
// setting will delete all of them then 're-add' the ones we want to keep
if let Some(donation_url) = bulk_edit_project.donation_urls {
let link_platforms = LinkPlatform::list(&**pool, &redis).await?;
let link_platforms = LinkPlatform::list(&*pool, &redis).await?;
for link in link_platforms {
if link.donation {
link_urls.insert(link.name, None);
@@ -717,9 +717,9 @@ pub async fn projects_edit(
// This returns NoContent or failure so we don't need to do anything with it
v3::projects::projects_edit(
req,
web::Query(ids),
web::types::Query(ids),
pool.clone(),
web::Json(v3::projects::BulkEditProject {
web::types::Json(v3::projects::BulkEditProject {
categories: bulk_edit_project.categories,
add_categories: bulk_edit_project.add_categories,
remove_categories: bulk_edit_project.remove_categories,
@@ -745,18 +745,18 @@ pub struct Extension {
#[patch("{id}/icon")]
#[allow(clippy::too_many_arguments)]
pub async fn project_icon_edit(
web::Query(ext): web::Query<Extension>,
web::types::Query(ext): web::types::Query<Extension>,
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
payload: web::Payload,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
payload: web::types::Payload,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::project_icon_edit(
web::Query(v3::projects::Extension { ext: ext.ext }),
web::types::Query(v3::projects::Extension { ext: ext.ext }),
req,
info,
pool,
@@ -772,11 +772,11 @@ pub async fn project_icon_edit(
#[delete("{id}/icon")]
pub async fn delete_project_icon(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::delete_project_icon(
@@ -804,21 +804,21 @@ pub struct GalleryCreateQuery {
#[post("{id}/gallery")]
#[allow(clippy::too_many_arguments)]
pub async fn add_gallery_item(
web::Query(ext): web::Query<Extension>,
web::types::Query(ext): web::types::Query<Extension>,
req: HttpRequest,
web::Query(item): web::Query<GalleryCreateQuery>,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
payload: web::Payload,
session_queue: web::Data<AuthQueue>,
web::types::Query(item): web::types::Query<GalleryCreateQuery>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
payload: web::types::Payload,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::add_gallery_item(
web::Query(v3::projects::Extension { ext: ext.ext }),
web::types::Query(v3::projects::Extension { ext: ext.ext }),
req,
web::Query(v3::projects::GalleryCreateQuery {
web::types::Query(v3::projects::GalleryCreateQuery {
featured: item.featured,
name: item.title,
description: item.description,
@@ -860,15 +860,15 @@ pub struct GalleryEditQuery {
#[patch("{id}/gallery")]
pub async fn edit_gallery_item(
req: HttpRequest,
web::Query(item): web::Query<GalleryEditQuery>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(item): web::types::Query<GalleryEditQuery>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::edit_gallery_item(
req,
web::Query(v3::projects::GalleryEditQuery {
web::types::Query(v3::projects::GalleryEditQuery {
url: item.url,
featured: item.featured,
name: item.title,
@@ -891,16 +891,16 @@ pub struct GalleryDeleteQuery {
#[delete("{id}/gallery")]
pub async fn delete_gallery_item(
req: HttpRequest,
web::Query(item): web::Query<GalleryDeleteQuery>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::Data<AuthQueue>,
web::types::Query(item): web::types::Query<GalleryDeleteQuery>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::delete_gallery_item(
req,
web::Query(v3::projects::GalleryDeleteQuery { url: item.url }),
web::types::Query(v3::projects::GalleryDeleteQuery { url: item.url }),
pool,
redis,
file_host,
@@ -913,11 +913,11 @@ pub async fn delete_gallery_item(
#[delete("{id}")]
pub async fn project_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
search_config: web::Data<SearchConfig>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
search_config: web::types::State<SearchConfig>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::project_delete(
@@ -935,10 +935,10 @@ pub async fn project_delete(
#[post("{id}/follow")]
pub async fn project_follow(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::project_follow(req, info, pool, redis, session_queue)
@@ -949,10 +949,10 @@ pub async fn project_follow(
#[delete("{id}/follow")]
pub async fn project_unfollow(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::projects::project_unfollow(req, info, pool, redis, session_queue)

@@ -3,7 +3,7 @@ use crate::models::reports::Report;
use crate::models::v2::reports::LegacyReport;
use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3, ApiError};
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use ntex::web::{self, delete, get, patch, post, HttpRequest, HttpResponse};
use serde::Deserialize;
use sqlx::PgPool;
use validator::Validate;
@@ -20,10 +20,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
#[post("report")]
pub async fn report_create(
req: HttpRequest,
pool: web::Data<PgPool>,
body: web::Payload,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
body: web::types::Payload,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response =
v3::reports::report_create(req, pool, body, redis, session_queue)
@@ -34,7 +34,7 @@ pub async fn report_create(
match v2_reroute::extract_ok_json::<Report>(response).await {
Ok(report) => {
let report = LegacyReport::from(report);
Ok(HttpResponse::Ok().json(report))
Ok(HttpResponse::Ok().json(&report))
}
Err(response) => Ok(response),
}
@@ -58,16 +58,16 @@ fn default_all() -> bool {
#[get("report")]
pub async fn reports(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
count: web::Query<ReportsRequestOptions>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
count: web::types::Query<ReportsRequestOptions>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::reports::reports(
req,
pool,
redis,
web::Query(v3::reports::ReportsRequestOptions {
web::types::Query(v3::reports::ReportsRequestOptions {
count: count.count,
all: count.all,
}),
@@ -81,7 +81,7 @@ pub async fn reports(
Ok(reports) => {
let reports: Vec<_> =
reports.into_iter().map(LegacyReport::from).collect();
Ok(HttpResponse::Ok().json(reports))
Ok(HttpResponse::Ok().json(&reports))
}
Err(response) => Ok(response),
}
@@ -95,14 +95,14 @@ pub struct ReportIds {
#[get("reports")]
pub async fn reports_get(
req: HttpRequest,
web::Query(ids): web::Query<ReportIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<ReportIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::reports::reports_get(
req,
web::Query(v3::reports::ReportIds { ids: ids.ids }),
web::types::Query(v3::reports::ReportIds { ids: ids.ids }),
pool,
redis,
session_queue,
@@ -115,7 +115,7 @@ pub async fn reports_get(
Ok(report_list) => {
let report_list: Vec<_> =
report_list.into_iter().map(LegacyReport::from).collect();
Ok(HttpResponse::Ok().json(report_list))
Ok(HttpResponse::Ok().json(&report_list))
}
Err(response) => Ok(response),
}
@@ -124,10 +124,10 @@ pub async fn reports_get(
#[get("report/{id}")]
pub async fn report_get(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
info: web::types::Path<(crate::models::reports::ReportId,)>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response =
v3::reports::report_get(req, pool, redis, info, session_queue)
@@ -138,7 +138,7 @@ pub async fn report_get(
match v2_reroute::extract_ok_json::<Report>(response).await {
Ok(report) => {
let report = LegacyReport::from(report);
Ok(HttpResponse::Ok().json(report))
Ok(HttpResponse::Ok().json(&report))
}
Err(response) => Ok(response),
}
@@ -154,11 +154,11 @@ pub struct EditReport {
#[patch("report/{id}")]
pub async fn report_edit(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
session_queue: web::Data<AuthQueue>,
edit_report: web::Json<EditReport>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
info: web::types::Path<(crate::models::reports::ReportId,)>,
session_queue: web::types::State<AuthQueue>,
edit_report: web::types::Json<EditReport>,
) -> Result<HttpResponse, ApiError> {
let edit_report = edit_report.into_inner();
// Returns NoContent, so no need to convert
@@ -168,7 +168,7 @@ pub async fn report_edit(
redis,
info,
session_queue,
web::Json(v3::reports::EditReport {
web::types::Json(v3::reports::EditReport {
body: edit_report.body,
closed: edit_report.closed,
}),
@@ -180,10 +180,10 @@ pub async fn report_edit(
#[delete("report/{id}")]
pub async fn report_delete(
req: HttpRequest,
pool: web::Data<PgPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
info: web::types::Path<(crate::models::reports::ReportId,)>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so no need to convert
v3::reports::report_delete(req, pool, info, redis, session_queue)

@@ -3,7 +3,7 @@ use crate::routes::{
v3::{self, statistics::V3Stats},
ApiError,
};
use actix_web::{get, web, HttpResponse};
use ntex::web::{self, get, HttpResponse};
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
@@ -20,7 +20,7 @@ pub struct V2Stats {

#[get("statistics")]
pub async fn get_stats(
pool: web::Data<PgPool>,
pool: web::types::State<PgPool>,
) -> Result<HttpResponse, ApiError> {
let response = v3::statistics::get_stats(pool)
.await
@@ -34,7 +34,7 @@ pub async fn get_stats(
authors: stats.authors,
files: stats.files,
};
Ok(HttpResponse::Ok().json(stats))
Ok(HttpResponse::Ok().json(&stats))
}
Err(response) => Ok(response),
}

@@ -7,9 +7,9 @@ use crate::models::v2::projects::LegacySideType;
use crate::routes::v2_reroute::capitalize_first;
use crate::routes::v3::tags::{LinkPlatformQueryData, LoaderFieldsEnumQuery};
use crate::routes::{v2_reroute, v3};
use actix_web::{get, web, HttpResponse};
use chrono::{DateTime, Utc};
use itertools::Itertools;
use ntex::web::{self, get, HttpResponse};
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
@@ -37,8 +37,8 @@ pub struct CategoryData {

#[get("category")]
pub async fn category_list(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let response = v3::tags::category_list(pool, redis).await?;

@@ -56,7 +56,7 @@ pub async fn category_list(
header: c.header,
})
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(categories))
Ok(HttpResponse::Ok().json(&categories))
}
Err(response) => Ok(response),
}
@@ -71,8 +71,8 @@ pub struct LoaderData {

#[get("loader")]
pub async fn loader_list(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let response = v3::tags::loader_list(pool, redis).await?;

@@ -110,7 +110,7 @@ pub async fn loader_list(
}
})
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(loaders))
Ok(HttpResponse::Ok().json(&loaders))
}
Err(response) => Ok(response),
}
@@ -133,9 +133,9 @@ pub struct GameVersionQuery {

#[get("game_version")]
pub async fn game_version_list(
pool: web::Data<PgPool>,
query: web::Query<GameVersionQuery>,
redis: web::Data<RedisPool>,
pool: web::types::State<PgPool>,
query: web::types::Query<GameVersionQuery>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let mut filters = HashMap::new();
if let Some(type_) = &query.type_ {
@@ -146,7 +146,7 @@ pub async fn game_version_list(
}
let response = v3::tags::loader_fields_list(
pool,
web::Query(LoaderFieldsEnumQuery {
web::types::Query(LoaderFieldsEnumQuery {
loader_field: "game_versions".to_string(),
filters: Some(filters),
}),
@@ -178,7 +178,7 @@ pub async fn game_version_list(
.unwrap_or_default(),
})
.collect::<Vec<_>>();
HttpResponse::Ok().json(fields)
HttpResponse::Ok().json(&fields)
}
Err(response) => response,
},
@@ -206,7 +206,7 @@ pub async fn license_list() -> HttpResponse {
name: l.name,
})
.collect::<Vec<_>>();
HttpResponse::Ok().json(licenses)
HttpResponse::Ok().json(&licenses)
}
Err(response) => response,
}
@@ -220,7 +220,7 @@ pub struct LicenseText {

#[get("license/{id}")]
pub async fn license_text(
params: web::Path<(String,)>,
params: web::types::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
let license = v3::tags::license_text(params)
.await
@@ -231,7 +231,7 @@ pub async fn license_text(
match v2_reroute::extract_ok_json::<v3::tags::LicenseText>(license)
.await
{
Ok(license) => HttpResponse::Ok().json(LicenseText {
Ok(license) => HttpResponse::Ok().json(&LicenseText {
title: license.title,
body: license.body,
}),
@@ -251,8 +251,8 @@ pub struct DonationPlatformQueryData {

#[get("donation_platform")]
pub async fn donation_platform_list(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let response = v3::tags::link_platform_list(pool, redis).await?;

@@ -287,7 +287,7 @@ pub async fn donation_platform_list(
}
})
.collect::<Vec<_>>();
HttpResponse::Ok().json(platforms)
HttpResponse::Ok().json(&platforms)
}
Err(response) => response,
},
@@ -297,8 +297,8 @@ pub async fn donation_platform_list(

#[get("report_type")]
pub async fn report_type_list(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
// This returns a list of strings directly, so we don't need to convert to v2 format.
v3::tags::report_type_list(pool, redis)
@@ -308,8 +308,8 @@ pub async fn report_type_list(

#[get("project_type")]
pub async fn project_type_list(
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
// This returns a list of strings directly, so we don't need to convert to v2 format.
v3::tags::project_type_list(pool, redis)
@@ -328,5 +328,5 @@ pub async fn side_type_list() -> Result<HttpResponse, ApiError> {
LegacySideType::Unknown,
];
let side_types = side_types.iter().map(|s| s.to_string()).collect_vec();
Ok(HttpResponse::Ok().json(side_types))
Ok(HttpResponse::Ok().json(&side_types))
}

@@ -6,7 +6,7 @@ use crate::models::users::UserId;
use crate::models::v2::teams::LegacyTeamMember;
use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3, ApiError};
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use ntex::web::{self, delete, get, patch, post, HttpRequest, HttpResponse};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -33,10 +33,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
#[get("{id}/members")]
pub async fn team_members_get_project(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::teams::team_members_get_project(
req,
@@ -54,7 +54,7 @@ pub async fn team_members_get_project(
.into_iter()
.map(LegacyTeamMember::from)
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(members))
Ok(HttpResponse::Ok().json(&members))
}
Err(response) => Ok(response),
}
@@ -64,10 +64,10 @@ pub async fn team_members_get_project(
#[get("{id}/members")]
pub async fn team_members_get(
req: HttpRequest,
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(TeamId,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response =
v3::teams::team_members_get(req, info, pool, redis, session_queue)
@@ -80,7 +80,7 @@ pub async fn team_members_get(
.into_iter()
.map(LegacyTeamMember::from)
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(members))
Ok(HttpResponse::Ok().json(&members))
}
Err(response) => Ok(response),
}
@@ -94,14 +94,14 @@ pub struct TeamIds {
#[get("teams")]
pub async fn teams_get(
req: HttpRequest,
web::Query(ids): web::Query<TeamIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<TeamIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::teams::teams_get(
req,
web::Query(v3::teams::TeamIds { ids: ids.ids }),
web::types::Query(v3::teams::TeamIds { ids: ids.ids }),
pool,
redis,
session_queue,
@@ -120,7 +120,7 @@ pub async fn teams_get(
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(members))
Ok(HttpResponse::Ok().json(&members))
}
Err(response) => Ok(response),
}
@@ -129,10 +129,10 @@ pub async fn teams_get(
#[post("{id}/join")]
pub async fn join_team(
req: HttpRequest,
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(TeamId,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::teams::join_team(req, info, pool, redis, session_queue)
@@ -167,18 +167,18 @@ pub struct NewTeamMember {
#[post("{id}/members")]
pub async fn add_team_member(
req: HttpRequest,
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
new_member: web::Json<NewTeamMember>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(TeamId,)>,
pool: web::types::State<PgPool>,
new_member: web::types::Json<NewTeamMember>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::teams::add_team_member(
req,
info,
pool,
web::Json(v3::teams::NewTeamMember {
web::types::Json(v3::teams::NewTeamMember {
user_id: new_member.user_id,
role: new_member.role.clone(),
permissions: new_member.permissions,
@@ -205,18 +205,18 @@ pub struct EditTeamMember {
#[patch("{id}/members/{user_id}")]
pub async fn edit_team_member(
req: HttpRequest,
info: web::Path<(TeamId, UserId)>,
pool: web::Data<PgPool>,
edit_member: web::Json<EditTeamMember>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(TeamId, UserId)>,
pool: web::types::State<PgPool>,
edit_member: web::types::Json<EditTeamMember>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::teams::edit_team_member(
req,
info,
pool,
web::Json(v3::teams::EditTeamMember {
web::types::Json(v3::teams::EditTeamMember {
permissions: edit_member.permissions,
organization_permissions: edit_member.organization_permissions,
role: edit_member.role.clone(),
@@ -238,18 +238,18 @@ pub struct TransferOwnership {
#[patch("{id}/owner")]
pub async fn transfer_ownership(
req: HttpRequest,
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
new_owner: web::Json<TransferOwnership>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(TeamId,)>,
pool: web::types::State<PgPool>,
new_owner: web::types::Json<TransferOwnership>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::teams::transfer_ownership(
req,
info,
pool,
web::Json(v3::teams::TransferOwnership {
web::types::Json(v3::teams::TransferOwnership {
user_id: new_owner.user_id,
}),
redis,
@@ -262,10 +262,10 @@ pub async fn transfer_ownership(
#[delete("{id}/members/{user_id}")]
pub async fn remove_team_member(
req: HttpRequest,
info: web::Path<(TeamId, UserId)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(TeamId, UserId)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::teams::remove_team_member(req, info, pool, redis, session_queue)

@@ -7,7 +7,7 @@ use crate::models::threads::{MessageBody, Thread, ThreadId};
use crate::models::v2::threads::LegacyThread;
use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3, ApiError};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use ntex::web::{self, delete, get, post, HttpRequest, HttpResponse};
use serde::Deserialize;
use sqlx::PgPool;

@@ -24,10 +24,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
#[get("{id}")]
pub async fn thread_get(
req: HttpRequest,
info: web::Path<(ThreadId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(ThreadId,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::threads::thread_get(req, info, pool, redis, session_queue)
.await
@@ -42,14 +42,14 @@ pub struct ThreadIds {
#[get("threads")]
pub async fn threads_get(
req: HttpRequest,
web::Query(ids): web::Query<ThreadIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<ThreadIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::threads::threads_get(
req,
web::Query(v3::threads::ThreadIds { ids: ids.ids }),
web::types::Query(v3::threads::ThreadIds { ids: ids.ids }),
pool,
redis,
session_queue,
@@ -64,7 +64,7 @@ pub async fn threads_get(
.into_iter()
.map(LegacyThread::from)
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(threads))
Ok(HttpResponse::Ok().json(&threads))
}
Err(response) => Ok(response),
}
@@ -78,11 +78,11 @@ pub struct NewThreadMessage {
#[post("{id}")]
pub async fn thread_send_message(
req: HttpRequest,
info: web::Path<(ThreadId,)>,
pool: web::Data<PgPool>,
new_message: web::Json<NewThreadMessage>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(ThreadId,)>,
pool: web::types::State<PgPool>,
new_message: web::types::Json<NewThreadMessage>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let new_message = new_message.into_inner();
// Returns NoContent, so we don't need to convert the response
@@ -90,7 +90,7 @@ pub async fn thread_send_message(
req,
info,
pool,
web::Json(v3::threads::NewThreadMessage {
web::types::Json(v3::threads::NewThreadMessage {
body: new_message.body,
}),
redis,
@@ -103,11 +103,11 @@ pub async fn thread_send_message(
#[delete("{id}")]
pub async fn message_delete(
req: HttpRequest,
info: web::Path<(ThreadMessageId,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
info: web::types::Path<(ThreadMessageId,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert the response
v3::threads::message_delete(

@@ -8,8 +8,8 @@ use crate::models::v2::projects::LegacyProject;
use crate::models::v2::user::LegacyUser;
use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3, ApiError};
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
use lazy_static::lazy_static;
use ntex::web::{self, delete, get, patch, HttpRequest, HttpResponse};
use regex::Regex;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -35,9 +35,9 @@ pub fn config(cfg: &mut web::ServiceConfig) {
#[get("user")]
pub async fn user_auth_get(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::users::user_auth_get(req, pool, redis, session_queue)
.await
@@ -47,7 +47,7 @@ pub async fn user_auth_get(
match v2_reroute::extract_ok_json::<User>(response).await {
Ok(user) => {
let user = LegacyUser::from(user);
Ok(HttpResponse::Ok().json(user))
Ok(HttpResponse::Ok().json(&user))
}
Err(response) => Ok(response),
}
@@ -60,12 +60,12 @@ pub struct UserIds {

#[get("users")]
pub async fn users_get(
web::Query(ids): web::Query<UserIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
web::types::Query(ids): web::types::Query<UserIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let response = v3::users::users_get(
web::Query(v3::users::UserIds { ids: ids.ids }),
web::types::Query(v3::users::UserIds { ids: ids.ids }),
pool,
redis,
)
@@ -77,7 +77,7 @@ pub async fn users_get(
Ok(users) => {
let legacy_users: Vec<LegacyUser> =
users.into_iter().map(LegacyUser::from).collect();
Ok(HttpResponse::Ok().json(legacy_users))
Ok(HttpResponse::Ok().json(&legacy_users))
}
Err(response) => Ok(response),
}
@@ -85,9 +85,9 @@ pub async fn users_get(

#[get("{id}")]
pub async fn user_get(
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let response = v3::users::user_get(info, pool, redis)
.await
@@ -97,7 +97,7 @@ pub async fn user_get(
match v2_reroute::extract_ok_json::<User>(response).await {
Ok(user) => {
let user = LegacyUser::from(user);
Ok(HttpResponse::Ok().json(user))
Ok(HttpResponse::Ok().json(&user))
}
Err(response) => Ok(response),
}
@@ -106,10 +106,10 @@ pub async fn user_get(
#[get("{user_id}/projects")]
pub async fn projects_list(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::users::projects_list(
req,
@@ -125,8 +125,8 @@ pub async fn projects_list(
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects =
LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
LegacyProject::from_many(project, &*pool, &redis).await?;
Ok(HttpResponse::Ok().json(&legacy_projects))
}
Err(response) => Ok(response),
}
@@ -162,18 +162,18 @@ pub struct EditUser {
#[patch("{id}")]
pub async fn user_edit(
req: HttpRequest,
info: web::Path<(String,)>,
new_user: web::Json<EditUser>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
new_user: web::types::Json<EditUser>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let new_user = new_user.into_inner();
// Returns NoContent, so we don't need to convert to V2
v3::users::user_edit(
req,
info,
web::Json(v3::users::EditUser {
web::types::Json(v3::users::EditUser {
username: new_user.username,
bio: new_user.bio,
role: new_user.role,
@@ -197,18 +197,18 @@ pub struct Extension {
#[patch("{id}/icon")]
#[allow(clippy::too_many_arguments)]
pub async fn user_icon_edit(
web::Query(ext): web::Query<Extension>,
web::types::Query(ext): web::types::Query<Extension>,
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
payload: web::Payload,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
payload: web::types::Payload,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert to V2
v3::users::user_icon_edit(
web::Query(v3::users::Extension { ext: ext.ext }),
web::types::Query(v3::users::Extension { ext: ext.ext }),
req,
info,
pool,
@@ -224,10 +224,10 @@ pub async fn user_icon_edit(
#[delete("{id}")]
pub async fn user_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
// Returns NoContent, so we don't need to convert to V2
v3::users::user_delete(req, info, pool, redis, session_queue)
@@ -238,10 +238,10 @@ pub async fn user_delete(
#[get("{id}/follows")]
pub async fn user_follows(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::users::user_follows(
|
||||
req,
|
||||
@ -257,8 +257,8 @@ pub async fn user_follows(
|
||||
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
|
||||
Ok(project) => {
|
||||
let legacy_projects =
|
||||
LegacyProject::from_many(project, &**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(legacy_projects))
|
||||
LegacyProject::from_many(project, &*pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(&legacy_projects))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -267,10 +267,10 @@ pub async fn user_follows(
|
||||
#[get("{id}/notifications")]
|
||||
pub async fn user_notifications(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response =
|
||||
v3::users::user_notifications(req, info, pool, redis, session_queue)
|
||||
@ -283,7 +283,7 @@ pub async fn user_notifications(
|
||||
.into_iter()
|
||||
.map(LegacyNotification::from)
|
||||
.collect();
|
||||
Ok(HttpResponse::Ok().json(legacy_notifications))
|
||||
Ok(HttpResponse::Ok().json(&legacy_notifications))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
|
||||
@ -13,10 +13,8 @@ use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::CreateError;
use crate::routes::v3::version_creation;
use crate::routes::{v2_reroute, v3};
use actix_multipart::Multipart;
use actix_web::http::header::ContentDisposition;
use actix_web::web::Data;
use actix_web::{post, web, HttpRequest, HttpResponse};
use ntex::web::{self, post, HttpRequest, HttpResponse};
use ntex_multipart::Multipart;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::postgres::PgPool;
@ -85,11 +83,11 @@ struct InitialFileData {
|
||||
pub async fn version_create(
|
||||
req: HttpRequest,
|
||||
payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
moderation_queue: Data<AutomatedModerationQueue>,
|
||||
client: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
moderation_queue: web::types::State<AutomatedModerationQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
let payload = v2_reroute::alter_actix_multipart(
|
||||
payload,
|
||||
@ -272,7 +270,7 @@ pub async fn version_create(
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
Ok(HttpResponse::Ok().json(&v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -281,7 +279,7 @@ pub async fn version_create(
|
||||
// Gets version fields of an example version of a project, if one exists.
|
||||
async fn get_example_version_fields(
|
||||
project_id: Option<ProjectId>,
|
||||
pool: Data<PgPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: &RedisPool,
|
||||
) -> Result<Option<Vec<VersionField>>, CreateError> {
|
||||
let project_id = match project_id {
|
||||
@ -290,7 +288,7 @@ async fn get_example_version_fields(
|
||||
};
|
||||
|
||||
let vid =
|
||||
match project_item::Project::get_id(project_id.into(), &**pool, redis)
|
||||
match project_item::Project::get_id(project_id.into(), &*pool, redis)
|
||||
.await?
|
||||
.and_then(|p| p.versions.first().cloned())
|
||||
{
|
||||
@ -299,7 +297,7 @@ async fn get_example_version_fields(
|
||||
};
|
||||
|
||||
let example_version =
|
||||
match version_item::Version::get(vid, &**pool, redis).await? {
|
||||
match version_item::Version::get(vid, &*pool, redis).await? {
|
||||
Some(version) => version,
|
||||
None => return Ok(None),
|
||||
};
|
||||
@ -310,12 +308,12 @@ async fn get_example_version_fields(
|
||||
#[post("{version_id}/file")]
|
||||
pub async fn upload_file_to_version(
|
||||
req: HttpRequest,
|
||||
url_data: web::Path<(VersionId,)>,
|
||||
url_data: web::types::Path<(VersionId,)>,
|
||||
payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
client: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
// Returns NoContent, so no need to convert to V2
|
||||
let response = v3::version_creation::upload_file_to_version(
|
||||
|
||||
@ -5,7 +5,7 @@ use crate::models::v2::projects::{LegacyProject, LegacyVersion};
use crate::queue::session::AuthQueue;
use crate::routes::v3::version_file::HashQuery;
use crate::routes::{v2_reroute, v3};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use ntex::web::{self, delete, get, post, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::HashMap;
@ -32,11 +32,11 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
#[get("{version_id}")]
|
||||
pub async fn get_version_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::version_file::get_version_from_hash(
|
||||
req,
|
||||
@ -53,7 +53,7 @@ pub async fn get_version_from_hash(
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
Ok(HttpResponse::Ok().json(&v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -63,11 +63,11 @@ pub async fn get_version_from_hash(
|
||||
#[get("{version_id}/download")]
|
||||
pub async fn download_version(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns TemporaryRedirect, so no need to convert to V2
|
||||
v3::version_file::download_version(
|
||||
@ -86,11 +86,11 @@ pub async fn download_version(
|
||||
#[delete("{version_id}")]
|
||||
pub async fn delete_file(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert to V2
|
||||
v3::version_file::delete_file(
|
||||
@ -115,12 +115,12 @@ pub struct UpdateData {
|
||||
#[post("{version_id}/update")]
|
||||
pub async fn get_update_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
update_data: web::Json<UpdateData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
update_data: web::types::Json<UpdateData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let update_data = update_data.into_inner();
|
||||
let mut loader_fields = HashMap::new();
|
||||
@ -143,7 +143,7 @@ pub async fn get_update_from_hash(
|
||||
pool,
|
||||
redis,
|
||||
hash_query,
|
||||
web::Json(update_data),
|
||||
web::types::Json(update_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
@ -153,7 +153,7 @@ pub async fn get_update_from_hash(
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
Ok(HttpResponse::Ok().json(&v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -170,10 +170,10 @@ pub struct FileHashes {
|
||||
#[post("")]
|
||||
pub async fn get_versions_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_data: web::types::Json<FileHashes>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let file_data = file_data.into_inner();
|
||||
let file_data = v3::version_file::FileHashes {
|
||||
@ -184,7 +184,7 @@ pub async fn get_versions_from_hashes(
|
||||
req,
|
||||
pool,
|
||||
redis,
|
||||
web::Json(file_data),
|
||||
web::types::Json(file_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
@ -202,7 +202,7 @@ pub async fn get_versions_from_hashes(
|
||||
(hash, v2_version)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
Ok(HttpResponse::Ok().json(v2_versions))
|
||||
Ok(HttpResponse::Ok().json(&v2_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -211,10 +211,10 @@ pub async fn get_versions_from_hashes(
|
||||
#[post("project")]
|
||||
pub async fn get_projects_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_data: web::types::Json<FileHashes>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let file_data = file_data.into_inner();
|
||||
let file_data = v3::version_file::FileHashes {
|
||||
@ -225,7 +225,7 @@ pub async fn get_projects_from_hashes(
|
||||
req,
|
||||
pool.clone(),
|
||||
redis.clone(),
|
||||
web::Json(file_data),
|
||||
web::types::Json(file_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
@ -245,7 +245,7 @@ pub async fn get_projects_from_hashes(
|
||||
.collect::<HashMap<_, _>>();
|
||||
let legacy_projects = LegacyProject::from_many(
|
||||
projects_hashes.into_values().collect(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -260,7 +260,7 @@ pub async fn get_projects_from_hashes(
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(legacy_projects_hashes))
|
||||
Ok(HttpResponse::Ok().json(&legacy_projects_hashes))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -277,9 +277,9 @@ pub struct ManyUpdateData {
|
||||
|
||||
#[post("update")]
|
||||
pub async fn update_files(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
update_data: web::Json<ManyUpdateData>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
update_data: web::types::Json<ManyUpdateData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let update_data = update_data.into_inner();
|
||||
let update_data = v3::version_file::ManyUpdateData {
|
||||
@ -290,10 +290,13 @@ pub async fn update_files(
|
||||
hashes: update_data.hashes,
|
||||
};
|
||||
|
||||
let response =
|
||||
v3::version_file::update_files(pool, redis, web::Json(update_data))
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
let response = v3::version_file::update_files(
|
||||
pool,
|
||||
redis,
|
||||
web::types::Json(update_data),
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response)
|
||||
@ -307,7 +310,7 @@ pub async fn update_files(
|
||||
(hash, v2_version)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
Ok(HttpResponse::Ok().json(v3_versions))
|
||||
Ok(HttpResponse::Ok().json(&v3_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -330,10 +333,10 @@ pub struct ManyFileUpdateData {
|
||||
#[post("update_individual")]
|
||||
pub async fn update_individual_files(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
update_data: web::Json<ManyFileUpdateData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
update_data: web::types::Json<ManyFileUpdateData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let update_data = update_data.into_inner();
|
||||
let update_data = v3::version_file::ManyFileUpdateData {
|
||||
@ -365,7 +368,7 @@ pub async fn update_individual_files(
|
||||
req,
|
||||
pool,
|
||||
redis,
|
||||
web::Json(update_data),
|
||||
web::types::Json(update_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
@ -383,7 +386,7 @@ pub async fn update_individual_files(
|
||||
(hash, v2_version)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
Ok(HttpResponse::Ok().json(v3_versions))
|
||||
Ok(HttpResponse::Ok().json(&v3_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
|
||||
@ -11,21 +11,21 @@ use crate::models::v2::projects::LegacyVersion;
use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3};
use crate::search::SearchConfig;
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
use ntex::web::{self, delete, get, patch, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;

pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(versions_get);
cfg.service(super::version_creation::version_create);
// TODO: fix me
// cfg.service(super::version_creation::version_create);

cfg.service(
web::scope("version")
.service(version_get)
.service(version_delete)
.service(version_edit)
.service(super::version_creation::upload_file_to_version),
.service(version_edit), // .service(super::version_creation::upload_file_to_version),
);
}
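Route registration itself keeps the same shape under ntex: macro-generated handlers are mounted with `cfg.service(...)` and nested paths still go through `web::scope`; only the two multipart-backed services are parked behind the `TODO` above until the `ntex-multipart` port lands. A stripped-down sketch of the pattern, with a stub handler standing in for the real version routes:

```rust
use ntex::web::{self, get, HttpResponse};

// Stub standing in for version_get / version_edit / version_delete.
#[get("{version_id}")]
async fn version_stub() -> HttpResponse {
    HttpResponse::Ok().body("")
}

// Mirrors the registration above: handlers mounted inside a nested "version" scope.
pub fn config_sketch(cfg: &mut web::ServiceConfig) {
    cfg.service(web::scope("version").service(version_stub));
}
```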
@ -42,11 +42,11 @@ pub struct VersionListFilters {
|
||||
#[get("version")]
|
||||
pub async fn version_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
web::Query(filters): web::Query<VersionListFilters>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
web::types::Query(filters): web::types::Query<VersionListFilters>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let loaders = if let Some(loaders) = filters.loaders {
|
||||
if let Ok(mut loaders) = serde_json::from_str::<Vec<String>>(&loaders) {
|
||||
@ -100,7 +100,7 @@ pub async fn version_list(
|
||||
let response = v3::versions::version_list(
|
||||
req,
|
||||
info,
|
||||
web::Query(filters),
|
||||
web::types::Query(filters),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
@ -115,7 +115,7 @@ pub async fn version_list(
|
||||
.into_iter()
|
||||
.map(LegacyVersion::from)
|
||||
.collect::<Vec<_>>();
|
||||
Ok(HttpResponse::Ok().json(v2_versions))
|
||||
Ok(HttpResponse::Ok().json(&v2_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -125,10 +125,10 @@ pub async fn version_list(
|
||||
#[get("version/{slug}")]
|
||||
pub async fn version_project_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String, String)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner();
|
||||
let response = v3::versions::version_project_get_helper(
|
||||
@ -144,7 +144,7 @@ pub async fn version_project_get(
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
Ok(HttpResponse::Ok().json(&v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -158,15 +158,15 @@ pub struct VersionIds {
|
||||
#[get("versions")]
|
||||
pub async fn versions_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<VersionIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<VersionIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids = v3::versions::VersionIds { ids: ids.ids };
|
||||
let response = v3::versions::versions_get(
|
||||
req,
|
||||
web::Query(ids),
|
||||
web::types::Query(ids),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
@ -181,7 +181,7 @@ pub async fn versions_get(
|
||||
.into_iter()
|
||||
.map(LegacyVersion::from)
|
||||
.collect::<Vec<_>>();
|
||||
Ok(HttpResponse::Ok().json(v2_versions))
|
||||
Ok(HttpResponse::Ok().json(&v2_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -190,10 +190,10 @@ pub async fn versions_get(
|
||||
#[get("{version_id}")]
|
||||
pub async fn version_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(models::ids::VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(models::ids::VersionId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner().0;
|
||||
let response =
|
||||
@ -204,7 +204,7 @@ pub async fn version_get(
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
Ok(HttpResponse::Ok().json(&v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
@ -248,11 +248,11 @@ pub struct EditVersionFileType {
|
||||
#[patch("{id}")]
|
||||
pub async fn version_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
new_version: web::Json<EditVersion>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(VersionId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
new_version: web::types::Json<EditVersion>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let new_version = new_version.into_inner();
|
||||
|
||||
@ -322,7 +322,7 @@ pub async fn version_edit(
|
||||
info,
|
||||
pool,
|
||||
redis,
|
||||
web::Json(serde_json::to_value(new_version)?),
|
||||
web::types::Json(serde_json::to_value(new_version)?),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
@ -333,11 +333,11 @@ pub async fn version_edit(
|
||||
#[delete("{version_id}")]
|
||||
pub async fn version_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
search_config: web::Data<SearchConfig>,
|
||||
info: web::types::Path<(VersionId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
search_config: web::types::State<SearchConfig>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so we don't need to convert the response
|
||||
v3::versions::version_delete(
|
||||
|
||||
@ -1,17 +1,15 @@
use std::collections::HashMap;

use super::v3::project_creation::CreateError;
// use super::v3::project_creation::CreateError;
use super::ApiError;
use crate::models::v2::projects::LegacySideType;
use crate::util::actix::{
generate_multipart, MultipartSegment, MultipartSegmentData,
};
use actix_multipart::Multipart;
use actix_web::http::header::{
ContentDisposition, HeaderMap, TryIntoHeaderPair,
};
use actix_web::HttpResponse;
use futures::{stream, Future, StreamExt};
use ntex::http::header::HeaderMap;
use ntex::web::HttpResponse;
use ntex_multipart::Multipart;
use serde_json::{json, Value};

pub async fn extract_ok_json<T>(
@ -21,21 +19,24 @@ where
T: serde::de::DeserializeOwned,
{
// If the response is StatusCode::OK, parse the json and return it
if response.status() == actix_web::http::StatusCode::OK {
if response.status() == ntex::http::StatusCode::OK {
let failure_http_response = || {
HttpResponse::InternalServerError().json(json!({
HttpResponse::InternalServerError().json(&json!({
"error": "reroute_error",
"description": "Could not parse response from V2 redirection of route."
}))
};

// TODO: fix me
Err(failure_http_response())
// Takes json out of HttpResponse, mutates it, then regenerates the HttpResponse
let body = response.into_body();
let bytes = actix_web::body::to_bytes(body)
.await
.map_err(|_| failure_http_response())?;
let json_value: T = serde_json::from_slice(&bytes)
.map_err(|_| failure_http_response())?;
Ok(json_value)
// let body = response.into_body();
// let bytes = actix_web::body::to_bytes(body)
// .await
// .map_err(|_| failure_http_response())?;
// let json_value: T = serde_json::from_slice(&bytes)
// .map_err(|_| failure_http_response())?;
// Ok(json_value)
} else {
Err(response)
}
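`extract_ok_json` is the one piece of the reroute layer that did not port mechanically: `actix_web::body::to_bytes` has no equivalent wired up here yet, so the OK branch currently short-circuits into the `reroute_error` response (hence the `TODO: fix me`). One possible shape for the eventual fix is sketched below; `body_to_bytes` is a hypothetical helper, not an existing ntex API, and would need to drain the ntex response body into a buffer, while the rest mirrors the commented-out actix version:

```rust
use ntex::web::HttpResponse;
use serde_json::json;

// Hypothetical helper: collect the ntex response body into a single buffer.
// This is the part that still needs a real implementation on top of ntex's body types.
async fn body_to_bytes(_response: HttpResponse) -> Result<Vec<u8>, ()> {
    unimplemented!("drain the response body stream here")
}

async fn extract_ok_json_sketch<T>(response: HttpResponse) -> Result<T, HttpResponse>
where
    T: serde::de::DeserializeOwned,
{
    if response.status() == ntex::http::StatusCode::OK {
        let failure = || {
            HttpResponse::InternalServerError().json(&json!({
                "error": "reroute_error",
                "description": "Could not parse response from V2 redirection of route."
            }))
        };
        // Parse the body as T, falling back to the reroute_error response on failure.
        let bytes = body_to_bytes(response).await.map_err(|_| failure())?;
        serde_json::from_slice(&bytes).map_err(|_| failure())
    } else {
        Err(response)
    }
}
```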
@ -55,115 +56,115 @@ pub fn flatten_404_error(res: ApiError) -> Result<HttpResponse, ApiError> {
|
||||
// 1. A json segment
|
||||
// 2. Any number of other binary segments
|
||||
// 'closure' is called with the json value, and the content disposition of the other segments
|
||||
pub async fn alter_actix_multipart<T, U, Fut>(
|
||||
mut multipart: Multipart,
|
||||
mut headers: HeaderMap,
|
||||
mut closure: impl FnMut(T, Vec<ContentDisposition>) -> Fut,
|
||||
) -> Result<Multipart, CreateError>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
U: serde::Serialize,
|
||||
Fut: Future<Output = Result<U, CreateError>>,
|
||||
{
|
||||
let mut segments: Vec<MultipartSegment> = Vec::new();
|
||||
|
||||
let mut json = None;
|
||||
let mut json_segment = None;
|
||||
let mut content_dispositions = Vec::new();
|
||||
|
||||
if let Some(field) = multipart.next().await {
|
||||
let mut field = field?;
|
||||
let content_disposition = field.content_disposition().clone();
|
||||
let field_name = content_disposition.get_name().unwrap_or("");
|
||||
let field_filename = content_disposition.get_filename();
|
||||
let field_content_type = field.content_type();
|
||||
let field_content_type = field_content_type.map(|ct| ct.to_string());
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
while let Some(chunk) = field.next().await {
|
||||
let data = chunk?;
|
||||
buffer.extend_from_slice(&data);
|
||||
}
|
||||
|
||||
{
|
||||
let json_value: T = serde_json::from_slice(&buffer)?;
|
||||
json = Some(json_value);
|
||||
}
|
||||
|
||||
json_segment = Some(MultipartSegment {
|
||||
name: field_name.to_string(),
|
||||
filename: field_filename.map(|s| s.to_string()),
|
||||
content_type: field_content_type,
|
||||
data: MultipartSegmentData::Binary(vec![]), // Initialize to empty, will be finished after
|
||||
});
|
||||
}
|
||||
|
||||
while let Some(field) = multipart.next().await {
|
||||
let mut field = field?;
|
||||
let content_disposition = field.content_disposition().clone();
|
||||
let field_name = content_disposition.get_name().unwrap_or("");
|
||||
let field_filename = content_disposition.get_filename();
|
||||
let field_content_type = field.content_type();
|
||||
let field_content_type = field_content_type.map(|ct| ct.to_string());
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
while let Some(chunk) = field.next().await {
|
||||
let data = chunk?;
|
||||
buffer.extend_from_slice(&data);
|
||||
}
|
||||
|
||||
content_dispositions.push(content_disposition.clone());
|
||||
segments.push(MultipartSegment {
|
||||
name: field_name.to_string(),
|
||||
filename: field_filename.map(|s| s.to_string()),
|
||||
content_type: field_content_type,
|
||||
data: MultipartSegmentData::Binary(buffer),
|
||||
})
|
||||
}
|
||||
|
||||
// Finishes the json segment, with aggregated content dispositions
|
||||
{
|
||||
let json_value = json.ok_or(CreateError::InvalidInput(
|
||||
"No json segment found in multipart.".to_string(),
|
||||
))?;
|
||||
let mut json_segment =
|
||||
json_segment.ok_or(CreateError::InvalidInput(
|
||||
"No json segment found in multipart.".to_string(),
|
||||
))?;
|
||||
|
||||
// Call closure, with the json value and names of the other segments
|
||||
let json_value: U = closure(json_value, content_dispositions).await?;
|
||||
let buffer = serde_json::to_vec(&json_value)?;
|
||||
json_segment.data = MultipartSegmentData::Binary(buffer);
|
||||
|
||||
// Insert the json segment at the beginning
|
||||
segments.insert(0, json_segment);
|
||||
}
|
||||
|
||||
let (boundary, payload) = generate_multipart(segments);
|
||||
|
||||
match (
|
||||
"Content-Type",
|
||||
format!("multipart/form-data; boundary={}", boundary).as_str(),
|
||||
)
|
||||
.try_into_pair()
|
||||
{
|
||||
Ok((key, value)) => {
|
||||
headers.insert(key, value);
|
||||
}
|
||||
Err(err) => {
|
||||
CreateError::InvalidInput(format!(
|
||||
"Error inserting test header: {:?}.",
|
||||
err
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let new_multipart =
|
||||
Multipart::new(&headers, stream::once(async { Ok(payload) }));
|
||||
|
||||
Ok(new_multipart)
|
||||
}
|
||||
// pub async fn alter_actix_multipart<T, U, Fut>(
|
||||
// mut multipart: Multipart,
|
||||
// mut headers: HeaderMap,
|
||||
// mut closure: impl FnMut(T, Vec<ContentDisposition>) -> Fut,
|
||||
// ) -> Result<Multipart, CreateError>
|
||||
// where
|
||||
// T: serde::de::DeserializeOwned,
|
||||
// U: serde::Serialize,
|
||||
// Fut: Future<Output = Result<U, CreateError>>,
|
||||
// {
|
||||
// let mut segments: Vec<MultipartSegment> = Vec::new();
|
||||
//
|
||||
// let mut json = None;
|
||||
// let mut json_segment = None;
|
||||
// let mut content_dispositions = Vec::new();
|
||||
//
|
||||
// if let Some(field) = multipart.next().await {
|
||||
// let mut field = field?;
|
||||
// let content_disposition = field.content_disposition().clone();
|
||||
// let field_name = content_disposition.get_name().unwrap_or("");
|
||||
// let field_filename = content_disposition.get_filename();
|
||||
// let field_content_type = field.content_type();
|
||||
// let field_content_type = field_content_type.map(|ct| ct.to_string());
|
||||
//
|
||||
// let mut buffer = Vec::new();
|
||||
// while let Some(chunk) = field.next().await {
|
||||
// let data = chunk?;
|
||||
// buffer.extend_from_slice(&data);
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// let json_value: T = serde_json::from_slice(&buffer)?;
|
||||
// json = Some(json_value);
|
||||
// }
|
||||
//
|
||||
// json_segment = Some(MultipartSegment {
|
||||
// name: field_name.to_string(),
|
||||
// filename: field_filename.map(|s| s.to_string()),
|
||||
// content_type: field_content_type,
|
||||
// data: MultipartSegmentData::Binary(vec![]), // Initialize to empty, will be finished after
|
||||
// });
|
||||
// }
|
||||
//
|
||||
// while let Some(field) = multipart.next().await {
|
||||
// let mut field = field?;
|
||||
// let content_disposition = field.content_disposition().clone();
|
||||
// let field_name = content_disposition.get_name().unwrap_or("");
|
||||
// let field_filename = content_disposition.get_filename();
|
||||
// let field_content_type = field.content_type();
|
||||
// let field_content_type = field_content_type.map(|ct| ct.to_string());
|
||||
//
|
||||
// let mut buffer = Vec::new();
|
||||
// while let Some(chunk) = field.next().await {
|
||||
// let data = chunk?;
|
||||
// buffer.extend_from_slice(&data);
|
||||
// }
|
||||
//
|
||||
// content_dispositions.push(content_disposition.clone());
|
||||
// segments.push(MultipartSegment {
|
||||
// name: field_name.to_string(),
|
||||
// filename: field_filename.map(|s| s.to_string()),
|
||||
// content_type: field_content_type,
|
||||
// data: MultipartSegmentData::Binary(buffer),
|
||||
// })
|
||||
// }
|
||||
//
|
||||
// // Finishes the json segment, with aggregated content dispositions
|
||||
// {
|
||||
// let json_value = json.ok_or(CreateError::InvalidInput(
|
||||
// "No json segment found in multipart.".to_string(),
|
||||
// ))?;
|
||||
// let mut json_segment =
|
||||
// json_segment.ok_or(CreateError::InvalidInput(
|
||||
// "No json segment found in multipart.".to_string(),
|
||||
// ))?;
|
||||
//
|
||||
// // Call closure, with the json value and names of the other segments
|
||||
// let json_value: U = closure(json_value, content_dispositions).await?;
|
||||
// let buffer = serde_json::to_vec(&json_value)?;
|
||||
// json_segment.data = MultipartSegmentData::Binary(buffer);
|
||||
//
|
||||
// // Insert the json segment at the beginning
|
||||
// segments.insert(0, json_segment);
|
||||
// }
|
||||
//
|
||||
// let (boundary, payload) = generate_multipart(segments);
|
||||
//
|
||||
// match (
|
||||
// "Content-Type",
|
||||
// format!("multipart/form-data; boundary={}", boundary).as_str(),
|
||||
// )
|
||||
// .try_into_pair()
|
||||
// {
|
||||
// Ok((key, value)) => {
|
||||
// headers.insert(key, value);
|
||||
// }
|
||||
// Err(err) => {
|
||||
// CreateError::InvalidInput(format!(
|
||||
// "Error inserting test header: {:?}.",
|
||||
// err
|
||||
// ));
|
||||
// }
|
||||
// };
|
||||
//
|
||||
// let new_multipart =
|
||||
// Multipart::new(&headers, stream::once(async { Ok(payload) }));
|
||||
//
|
||||
// Ok(new_multipart)
|
||||
// }
|
||||
|
||||
// Converts a "client_side" and "server_side" pair into the new v3 corresponding fields
|
||||
pub fn convert_side_types_v3(
|
||||
|
||||
@ -11,8 +11,8 @@ use crate::{
},
queue::session::AuthQueue,
};
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::{DateTime, Duration, Utc};
use ntex::web::{self, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::postgres::types::PgInterval;
use sqlx::PgPool;
@ -69,15 +69,15 @@ pub struct FetchedPlaytime {
|
||||
}
|
||||
pub async fn playtimes_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
clickhouse: web::types::State<clickhouse::Client>,
|
||||
data: web::types::Query<GetData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
@ -107,7 +107,7 @@ pub async fn playtimes_get(
|
||||
start_date,
|
||||
end_date,
|
||||
resolution_minutes,
|
||||
clickhouse.into_inner(),
|
||||
&*clickhouse,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -122,7 +122,7 @@ pub async fn playtimes_get(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
Ok(HttpResponse::Ok().json(&hm))
|
||||
}
|
||||
|
||||
/// Get view data for a set of projects or versions
|
||||
@ -136,15 +136,15 @@ pub async fn playtimes_get(
|
||||
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
|
||||
pub async fn views_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
clickhouse: web::types::State<clickhouse::Client>,
|
||||
data: web::types::Query<GetData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
@ -174,7 +174,7 @@ pub async fn views_get(
|
||||
start_date,
|
||||
end_date,
|
||||
resolution_minutes,
|
||||
clickhouse.into_inner(),
|
||||
&*clickhouse,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -189,7 +189,7 @@ pub async fn views_get(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
Ok(HttpResponse::Ok().json(&hm))
|
||||
}
|
||||
|
||||
/// Get download data for a set of projects or versions
|
||||
@ -203,15 +203,15 @@ pub async fn views_get(
|
||||
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
|
||||
pub async fn downloads_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
clickhouse: web::types::State<clickhouse::Client>,
|
||||
data: web::types::Query<GetData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
@ -242,7 +242,7 @@ pub async fn downloads_get(
|
||||
start_date,
|
||||
end_date,
|
||||
resolution_minutes,
|
||||
clickhouse.into_inner(),
|
||||
&*clickhouse,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -257,7 +257,7 @@ pub async fn downloads_get(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
Ok(HttpResponse::Ok().json(&hm))
|
||||
}
|
||||
|
||||
/// Get payout data for a set of projects
|
||||
@ -271,14 +271,14 @@ pub async fn downloads_get(
|
||||
/// ONLY project IDs can be used. Unauthorized projects will be filtered out.
|
||||
pub async fn revenue_get(
|
||||
req: HttpRequest,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
data: web::types::Query<GetData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAYOUTS_READ]),
|
||||
@ -345,7 +345,7 @@ pub async fn revenue_get(
|
||||
end_date,
|
||||
duration,
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.fetch_all(&*pool)
|
||||
.await?.into_iter().map(|x| PayoutValue {
|
||||
mod_id: x.mod_id,
|
||||
amount_sum: x.amount_sum,
|
||||
@ -364,7 +364,7 @@ pub async fn revenue_get(
|
||||
end_date,
|
||||
duration,
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.fetch_all(&*pool)
|
||||
.await?.into_iter().map(|x| PayoutValue {
|
||||
mod_id: x.mod_id,
|
||||
amount_sum: x.amount_sum,
|
||||
@ -392,7 +392,7 @@ pub async fn revenue_get(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
Ok(HttpResponse::Ok().json(&hm))
|
||||
}
|
||||
|
||||
/// Get country data for a set of projects or versions
|
||||
@ -409,15 +409,15 @@ pub async fn revenue_get(
|
||||
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
|
||||
pub async fn countries_downloads_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
clickhouse: web::types::State<clickhouse::Client>,
|
||||
data: web::types::Query<GetData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
@ -445,7 +445,7 @@ pub async fn countries_downloads_get(
|
||||
project_ids.unwrap_or_default(),
|
||||
start_date,
|
||||
end_date,
|
||||
clickhouse.into_inner(),
|
||||
&*clickhouse,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -465,7 +465,7 @@ pub async fn countries_downloads_get(
|
||||
.map(|(key, value)| (key, condense_countries(value)))
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
Ok(HttpResponse::Ok().json(&hm))
|
||||
}
|
||||
|
||||
/// Get country data for a set of projects or versions
|
||||
@ -482,15 +482,15 @@ pub async fn countries_downloads_get(
|
||||
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
|
||||
pub async fn countries_views_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
clickhouse: web::types::State<clickhouse::Client>,
|
||||
data: web::types::Query<GetData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
@ -518,7 +518,7 @@ pub async fn countries_views_get(
|
||||
project_ids.unwrap_or_default(),
|
||||
start_date,
|
||||
end_date,
|
||||
clickhouse.into_inner(),
|
||||
&*clickhouse,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -538,7 +538,7 @@ pub async fn countries_views_get(
|
||||
.map(|(key, value)| (key, condense_countries(value)))
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
Ok(HttpResponse::Ok().json(&hm))
|
||||
}
|
||||
|
||||
fn condense_countries(countries: HashMap<String, u64>) -> HashMap<String, u64> {
|
||||
@ -561,14 +561,14 @@ fn condense_countries(countries: HashMap<String, u64>) -> HashMap<String, u64> {
|
||||
async fn filter_allowed_ids(
|
||||
mut project_ids: Option<Vec<String>>,
|
||||
user: crate::models::users::User,
|
||||
pool: &web::Data<PgPool>,
|
||||
pool: &web::types::State<PgPool>,
|
||||
redis: &RedisPool,
|
||||
remove_defaults: Option<bool>,
|
||||
) -> Result<Option<Vec<ProjectId>>, ApiError> {
|
||||
// If no project_ids or version_ids are provided, we default to all projects the user has *public* access to
|
||||
if project_ids.is_none() && !remove_defaults.unwrap_or(false) {
|
||||
project_ids = Some(
|
||||
user_item::User::get_projects(user.id.into(), &***pool, redis)
|
||||
user_item::User::get_projects(user.id.into(), &**pool, redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| ProjectId::from(x).to_string())
|
||||
@ -581,7 +581,7 @@ async fn filter_allowed_ids(
|
||||
let project_ids = if let Some(project_strings) = project_ids {
|
||||
let projects_data = database::models::Project::get_many(
|
||||
&project_strings,
|
||||
&***pool,
|
||||
&**pool,
|
||||
redis,
|
||||
)
|
||||
.await?;
|
||||
@ -592,7 +592,7 @@ async fn filter_allowed_ids(
|
||||
.collect::<Vec<database::models::TeamId>>();
|
||||
let team_members =
|
||||
database::models::TeamMember::get_from_team_full_many(
|
||||
&team_ids, &***pool, redis,
|
||||
&team_ids, &**pool, redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -602,7 +602,7 @@ async fn filter_allowed_ids(
|
||||
.collect::<Vec<database::models::OrganizationId>>();
|
||||
let organizations = database::models::Organization::get_many_ids(
|
||||
&organization_ids,
|
||||
&***pool,
|
||||
&**pool,
|
||||
redis,
|
||||
)
|
||||
.await?;
|
||||
@ -614,7 +614,7 @@ async fn filter_allowed_ids(
|
||||
let organization_team_members =
|
||||
database::models::TeamMember::get_from_team_full_many(
|
||||
&organization_team_ids,
|
||||
&***pool,
|
||||
&**pool,
|
||||
redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
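One pattern worth calling out in the hunks above: every pool dereference loses a `*`. actix-web's `Data<T>` dereferences to `Arc<T>`, so reaching a `&PgPool` took `&**pool` (and `&***pool` through a `&Data<PgPool>` parameter), whereas ntex's `State<T>` appears to dereference straight to `T`, so `&*pool` and `&**pool` are enough. A tiny sketch of the two cases, with a placeholder `Pool` type standing in for `sqlx::PgPool`:

```rust
use ntex::web;

struct Pool; // placeholder for sqlx::PgPool in this sketch

fn run_query(_pool: &Pool) {}

fn deref_examples(pool: web::types::State<Pool>, pool_ref: &web::types::State<Pool>) {
    // State<Pool> derefs directly to Pool, so one deref reaches &Pool.
    run_query(&*pool);
    // Through a &State<Pool> parameter there is one extra reference layer to strip.
    run_query(&**pool_ref);
}
```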
@ -10,16 +10,14 @@ use crate::models::ids::base62_impl::parse_base62;
use crate::models::ids::{CollectionId, ProjectId};
use crate::models::pats::Scopes;
use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::CreateError;
use crate::routes::ApiError;
use crate::util::img::delete_old_images;
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use crate::{database, models};
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::Utc;
use itertools::Itertools;
use ntex::web::{self, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
@ -58,17 +56,17 @@ pub struct CollectionCreateData {
|
||||
|
||||
pub async fn collection_create(
|
||||
req: HttpRequest,
|
||||
collection_create_data: web::Json<CollectionCreateData>,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
collection_create_data: web::types::Json<CollectionCreateData>,
|
||||
client: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let collection_create_data = collection_create_data.into_inner();
|
||||
|
||||
// The currently logged in user
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**client,
|
||||
&*client,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::COLLECTION_CREATE]),
|
||||
@ -77,7 +75,7 @@ pub async fn collection_create(
|
||||
.1;
|
||||
|
||||
collection_create_data.validate().map_err(|err| {
|
||||
CreateError::InvalidInput(validation_errors_to_string(err, None))
|
||||
ApiError::InvalidInput(validation_errors_to_string(err, None))
|
||||
})?;
|
||||
|
||||
let mut transaction = client.begin().await?;
|
||||
@ -126,7 +124,7 @@ pub async fn collection_create(
|
||||
};
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
@ -135,10 +133,10 @@ pub struct CollectionIds {
|
||||
}
|
||||
pub async fn collections_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<CollectionIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<CollectionIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
|
||||
let ids = ids
|
||||
@ -149,11 +147,11 @@ pub async fn collections_get(
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let collections_data =
|
||||
database::models::Collection::get_many(&ids, &**pool, &redis).await?;
|
||||
database::models::Collection::get_many(&ids, &*pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::COLLECTION_READ]),
|
||||
@ -165,24 +163,24 @@ pub async fn collections_get(
|
||||
let collections =
|
||||
filter_visible_collections(collections_data, &user_option).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(collections))
|
||||
Ok(HttpResponse::Ok().json(&collections))
|
||||
}
|
||||
|
||||
pub async fn collection_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let id = database::models::CollectionId(parse_base62(&string)? as i64);
|
||||
let collection_data =
|
||||
database::models::Collection::get(id, &**pool, &redis).await?;
|
||||
database::models::Collection::get(id, &*pool, &redis).await?;
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::COLLECTION_READ]),
|
||||
@ -193,7 +191,7 @@ pub async fn collection_get(
|
||||
|
||||
if let Some(data) = collection_data {
|
||||
if is_visible_collection(&data, &user_option).await? {
|
||||
return Ok(HttpResponse::Ok().json(Collection::from(data)));
|
||||
return Ok(HttpResponse::Ok().json(&Collection::from(data)));
|
||||
}
|
||||
}
|
||||
Err(ApiError::NotFound)
|
||||
@ -220,15 +218,15 @@ pub struct EditCollection {
|
||||
|
||||
pub async fn collection_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
new_collection: web::Json<EditCollection>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
new_collection: web::types::Json<EditCollection>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::COLLECTION_WRITE]),
|
||||
@ -242,7 +240,7 @@ pub async fn collection_edit(
|
||||
|
||||
let string = info.into_inner().0;
|
||||
let id = database::models::CollectionId(parse_base62(&string)? as i64);
|
||||
let result = database::models::Collection::get(id, &**pool, &redis).await?;
|
||||
let result = database::models::Collection::get(id, &*pool, &redis).await?;
|
||||
|
||||
if let Some(collection_item) = result {
|
||||
if !can_modify_collection(&collection_item, &user) {
|
||||
@ -323,7 +321,7 @@ pub async fn collection_edit(
|
||||
let mut validated_project_ids = Vec::new();
|
||||
for project_id in new_project_ids {
|
||||
let project =
|
||||
database::models::Project::get(project_id, &**pool, &redis)
|
||||
database::models::Project::get(project_id, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
@ -374,18 +372,18 @@ pub struct Extension {
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn collection_icon_edit(
|
||||
web::Query(ext): web::Query<Extension>,
|
||||
web::types::Query(ext): web::types::Query<Extension>,
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::types::Payload,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::COLLECTION_WRITE]),
|
@ -395,14 +393,13 @@ pub async fn collection_icon_edit(

let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_item =
database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified collection does not exist!".to_string(),
)
})?;
let collection_item = database::models::Collection::get(id, &*pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified collection does not exist!".to_string(),
)
})?;

if !can_modify_collection(&collection_item, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
@ -411,7 +408,7 @@ pub async fn collection_icon_edit(
delete_old_images(
collection_item.icon_url,
collection_item.raw_icon_url,
&***file_host,
&**file_host,
)
.await?;

@ -429,7 +426,7 @@ pub async fn collection_icon_edit(
&ext.ext,
Some(96),
Some(1.0),
&***file_host,
&**file_host,
)
.await?;

@ -458,15 +455,15 @@ pub async fn collection_icon_edit(

pub async fn delete_collection_icon(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_WRITE]),
@ -476,14 +473,13 @@ pub async fn delete_collection_icon(

let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_item =
database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified collection does not exist!".to_string(),
)
})?;
let collection_item = database::models::Collection::get(id, &*pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(
"The specified collection does not exist!".to_string(),
)
})?;
if !can_modify_collection(&collection_item, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
}
@ -491,7 +487,7 @@ pub async fn delete_collection_icon(
delete_old_images(
collection_item.icon_url,
collection_item.raw_icon_url,
&***file_host,
&**file_host,
)
.await?;
let mut transaction = pool.begin().await?;
@ -516,14 +512,14 @@ pub async fn delete_collection_icon(

pub async fn collection_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_DELETE]),
@ -533,7 +529,7 @@ pub async fn collection_delete(

let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection = database::models::Collection::get(id, &**pool, &redis)
let collection = database::models::Collection::get(id, &*pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(

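The recurring `&**pool` → `&*pool` and `&***file_host` → `&**file_host` edits all come from the same difference in wrapper types: actix's `web::Data<T>` dereferences to an inner `Arc<T>`, so reaching `&T` takes two stars (three when the payload is itself an `Arc<dyn FileHost>`), while ntex's `web::types::State<T>` dereferences straight to `T`, so one level disappears everywhere. A small illustration, using an invented `Config` type in place of `PgPool` and the `FileHost` trait object:

```rust
use std::sync::Arc;
use ntex::web;

// Invented stand-in for the real shared types.
struct Config {
    name: &'static str,
}

fn wants_ref(cfg: &Config) {
    println!("using {}", cfg.name);
}

// State<Config> derefs to Config, so a single `*` yields `&Config`
// (actix's Data<Config> needed `&**data`).
fn plain(state: web::types::State<Config>) {
    wants_ref(&*state);
}

// State<Arc<Config>> derefs to Arc<Config>, so two `*`s reach `&Config`
// (actix's Data<Arc<Config>> needed `&***data`).
fn arced(state: web::types::State<Arc<Config>>) {
    wants_ref(&**state);
}
```

This only shows the deref chain; in real handlers `State` is produced by the extractor, exactly as in the hunks above.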
@ -7,8 +7,8 @@ use crate::queue::session::AuthQueue;
use crate::queue::socket::ActiveSockets;
use crate::routes::internal::statuses::{close_socket, ServerToClientMessage};
use crate::routes::ApiError;
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use chrono::Utc;
use ntex::web::{self, delete, get, post, HttpRequest, HttpResponse};
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
@ -20,15 +20,15 @@ pub fn config(cfg: &mut web::ServiceConfig) {
#[post("friend/{id}")]
pub async fn add_friend(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
db: web::Data<ActiveSockets>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
db: web::types::State<ActiveSockets>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_WRITE]),
@ -38,7 +38,7 @@ pub async fn add_friend(

let string = info.into_inner().0;
let friend =
crate::database::models::User::get(&string, &**pool, &redis).await?;
crate::database::models::User::get(&string, &*pool, &redis).await?;

if let Some(friend) = friend {
let mut transaction = pool.begin().await?;
@ -47,7 +47,7 @@ pub async fn add_friend(
crate::database::models::friend_item::FriendItem::get_friend(
user.id.into(),
friend.id,
&**pool,
&*pool,
)
.await?
{
@ -76,22 +76,23 @@ pub async fn add_friend(
friend_id: UserId,
sockets: &ActiveSockets,
) -> Result<(), ApiError> {
if let Some(pair) = sockets.auth_sockets.get(&user_id.into()) {
let (friend_status, _) = pair.value();
if let Some(mut socket) =
sockets.auth_sockets.get_mut(&friend_id.into())
{
let (_, socket) = socket.value_mut();

let _ = socket
.text(serde_json::to_string(
&ServerToClientMessage::StatusUpdate {
status: friend_status.clone(),
},
)?)
.await;
}
}
// TODO: FIX ME
// if let Some(pair) = sockets.auth_sockets.get(&user_id.into()) {
// let (friend_status, _) = pair.value();
// if let Some(mut socket) =
// sockets.auth_sockets.get_mut(&friend_id.into())
// {
// let (_, socket) = socket.value_mut();
//
// let _ = socket
// .text(serde_json::to_string(
// &ServerToClientMessage::StatusUpdate {
// status: friend_status.clone(),
// },
// )?)
// .await;
// }
// }

Ok(())
}
@ -120,20 +121,21 @@ pub async fn add_friend(
.insert(&mut transaction)
.await?;

if let Some(mut socket) = db.auth_sockets.get_mut(&friend.id.into())
{
let (_, socket) = socket.value_mut();

if socket
.text(serde_json::to_string(
&ServerToClientMessage::FriendRequest { from: user.id },
)?)
.await
.is_err()
{
close_socket(user.id, &pool, &db).await?;
}
}
// TODO: FIX ME
// if let Some(mut socket) = db.auth_sockets.get_mut(&friend.id.into())
// {
// let (_, socket) = socket.value_mut();
//
// if socket
// .text(serde_json::to_string(
// &ServerToClientMessage::FriendRequest { from: user.id },
// )?)
// .await
// .is_err()
// {
// close_socket(user.id, &pool, &db).await?;
// }
// }
}

transaction.commit().await?;
@ -147,15 +149,15 @@ pub async fn add_friend(
#[delete("friend/{id}")]
pub async fn remove_friend(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
db: web::Data<ActiveSockets>,
info: web::types::Path<(String,)>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
db: web::types::State<ActiveSockets>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_WRITE]),
@ -165,7 +167,7 @@ pub async fn remove_friend(

let string = info.into_inner().0;
let friend =
crate::database::models::User::get(&string, &**pool, &redis).await?;
crate::database::models::User::get(&string, &*pool, &redis).await?;

if let Some(friend) = friend {
let mut transaction = pool.begin().await?;
@ -177,17 +179,18 @@ pub async fn remove_friend(
)
.await?;

if let Some(mut socket) = db.auth_sockets.get_mut(&friend.id.into()) {
let (_, socket) = socket.value_mut();

let _ = socket
.text(serde_json::to_string(
&ServerToClientMessage::FriendRequestRejected {
from: user.id,
},
)?)
.await;
}
// TODO: FIX ME
// if let Some(mut socket) = db.auth_sockets.get_mut(&friend.id.into()) {
// let (_, socket) = socket.value_mut();
//
// let _ = socket
// .text(serde_json::to_string(
// &ServerToClientMessage::FriendRequestRejected {
// from: user.id,
// },
// )?)
// .await;
// }

transaction.commit().await?;

@ -200,13 +203,13 @@ pub async fn remove_friend(
#[get("friends")]
pub async fn friends(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::USER_READ]),
@ -218,12 +221,12 @@ pub async fn friends(
crate::database::models::friend_item::FriendItem::get_user_friends(
user.id.into(),
None,
&**pool,
&*pool,
)
.await?
.into_iter()
.map(UserFriend::from)
.collect::<Vec<_>>();

Ok(HttpResponse::Ok().json(friends))
Ok(HttpResponse::Ok().json(&friends))
}

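The other change that repeats through the rest of the diff is the extra `&` in `.json(&friends)`, `.json(&image)`, and so on: ntex's `HttpResponse` builder serializes from a reference rather than taking ownership of the value, so every `.json(x)` call site gains a borrow. A minimal illustration with an invented `Friend` type (assuming serde's `derive` feature, which the project already uses):

```rust
use ntex::web::HttpResponse;
use serde::Serialize;

// Invented response type; the real handlers return models like UserFriend.
#[derive(Serialize)]
struct Friend {
    id: u64,
    username: String,
}

fn respond(friends: Vec<Friend>) -> HttpResponse {
    // actix-web: HttpResponse::Ok().json(friends)   -- consumes the value
    // ntex:      HttpResponse::Ok().json(&friends)  -- borrows it while serializing
    HttpResponse::Ok().json(&friends)
}
```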
||||
@ -16,7 +16,7 @@ use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::img::upload_image_optimized;
|
||||
use crate::util::routes::read_from_payload;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use ntex::web::{self, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
@ -41,12 +41,12 @@ pub struct ImageUpload {
|
||||
|
||||
pub async fn images_add(
|
||||
req: HttpRequest,
|
||||
web::Query(data): web::Query<ImageUpload>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(data): web::types::Query<ImageUpload>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::types::Payload,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let mut context = ImageContext::from_str(&data.context, None);
|
||||
|
||||
@ -54,7 +54,7 @@ pub async fn images_add(
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&scopes),
|
||||
@ -68,7 +68,7 @@ pub async fn images_add(
|
||||
ImageContext::Project { project_id } => {
|
||||
if let Some(id) = data.project_id {
|
||||
let project =
|
||||
project_item::Project::get(&id, &**pool, &redis).await?;
|
||||
project_item::Project::get(&id, &*pool, &redis).await?;
|
||||
if let Some(project) = project {
|
||||
if is_team_member_project(
|
||||
&project.inner,
|
||||
@ -93,7 +93,7 @@ pub async fn images_add(
|
||||
ImageContext::Version { version_id } => {
|
||||
if let Some(id) = data.version_id {
|
||||
let version =
|
||||
version_item::Version::get(id.into(), &**pool, &redis)
|
||||
version_item::Version::get(id.into(), &*pool, &redis)
|
||||
.await?;
|
||||
if let Some(version) = version {
|
||||
if is_team_member_version(
|
||||
@ -120,15 +120,14 @@ pub async fn images_add(
|
||||
ImageContext::ThreadMessage { thread_message_id } => {
|
||||
if let Some(id) = data.thread_message_id {
|
||||
let thread_message =
|
||||
thread_item::ThreadMessage::get(id.into(), &**pool)
|
||||
thread_item::ThreadMessage::get(id.into(), &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
"The thread message could not found."
|
||||
.to_string(),
|
||||
)
|
||||
})?;
|
||||
let thread = thread_item::Thread::get(thread_message.thread_id, &**pool)
|
||||
ApiError::InvalidInput(
|
||||
"The thread message could not found.".to_string(),
|
||||
)
|
||||
})?;
|
||||
let thread = thread_item::Thread::get(thread_message.thread_id, &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -148,14 +147,14 @@ pub async fn images_add(
|
||||
}
|
||||
ImageContext::Report { report_id } => {
|
||||
if let Some(id) = data.report_id {
|
||||
let report = report_item::Report::get(id.into(), &**pool)
|
||||
let report = report_item::Report::get(id.into(), &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
"The report could not be found.".to_string(),
|
||||
)
|
||||
})?;
|
||||
let thread = thread_item::Thread::get(report.thread_id, &**pool)
|
||||
let thread = thread_item::Thread::get(report.thread_id, &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -193,7 +192,7 @@ pub async fn images_add(
|
||||
&data.ext,
|
||||
None,
|
||||
None,
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -255,5 +254,5 @@ pub async fn images_add(
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(image))
|
||||
Ok(HttpResponse::Ok().json(&image))
|
||||
}
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
pub use super::ApiError;
|
||||
use crate::util::cors::default_cors;
|
||||
use actix_web::{web, HttpResponse};
|
||||
use ntex::web::{self, HttpResponse};
|
||||
use serde_json::json;
|
||||
|
||||
pub mod analytics_get;
|
||||
@ -10,7 +10,7 @@ pub mod images;
|
||||
pub mod notifications;
|
||||
pub mod organizations;
|
||||
pub mod payouts;
|
||||
pub mod project_creation;
|
||||
// pub mod project_creation;
|
||||
pub mod projects;
|
||||
pub mod reports;
|
||||
pub mod statistics;
|
||||
@ -18,7 +18,7 @@ pub mod tags;
|
||||
pub mod teams;
|
||||
pub mod threads;
|
||||
pub mod users;
|
||||
pub mod version_creation;
|
||||
// pub mod version_creation;
|
||||
pub mod version_file;
|
||||
pub mod versions;
|
||||
|
||||
@ -33,7 +33,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
.configure(images::config)
|
||||
.configure(notifications::config)
|
||||
.configure(organizations::config)
|
||||
.configure(project_creation::config)
|
||||
// .configure(project_creation::config)
|
||||
.configure(projects::config)
|
||||
.configure(reports::config)
|
||||
.configure(statistics::config)
|
||||
@ -49,7 +49,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
}
|
||||
|
||||
pub async fn hello_world() -> Result<HttpResponse, ApiError> {
|
||||
Ok(HttpResponse::Ok().json(json!({
|
||||
Ok(HttpResponse::Ok().json(&json!({
|
||||
"hello": "world",
|
||||
})))
|
||||
}
|
||||
|
||||
@ -6,7 +6,7 @@ use crate::models::notifications::Notification;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use ntex::web::{self, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
@ -30,14 +30,14 @@ pub struct NotificationIds {
|
||||
|
||||
pub async fn notifications_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<NotificationIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<NotificationIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::NOTIFICATION_READ]),
|
||||
@ -57,7 +57,7 @@ pub async fn notifications_get(
|
||||
let notifications_data: Vec<DBNotification> =
|
||||
database::models::notification_item::Notification::get_many(
|
||||
¬ification_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -67,19 +67,19 @@ pub async fn notifications_get(
|
||||
.map(Notification::from)
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(notifications))
|
||||
Ok(HttpResponse::Ok().json(¬ifications))
|
||||
}
|
||||
|
||||
pub async fn notification_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(NotificationId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(NotificationId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::NOTIFICATION_READ]),
|
||||
@ -92,13 +92,13 @@ pub async fn notification_get(
|
||||
let notification_data =
|
||||
database::models::notification_item::Notification::get(
|
||||
id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Some(data) = notification_data {
|
||||
if user.id == data.user_id.into() || user.role.is_admin() {
|
||||
Ok(HttpResponse::Ok().json(Notification::from(data)))
|
||||
Ok(HttpResponse::Ok().json(&Notification::from(data)))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -109,14 +109,14 @@ pub async fn notification_get(
|
||||
|
||||
pub async fn notification_read(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(NotificationId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(NotificationId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||
@ -129,7 +129,7 @@ pub async fn notification_read(
|
||||
let notification_data =
|
||||
database::models::notification_item::Notification::get(
|
||||
id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -159,14 +159,14 @@ pub async fn notification_read(
|
||||
|
||||
pub async fn notification_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(NotificationId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(NotificationId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||
@ -179,7 +179,7 @@ pub async fn notification_delete(
|
||||
let notification_data =
|
||||
database::models::notification_item::Notification::get(
|
||||
id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -210,14 +210,14 @@ pub async fn notification_delete(
|
||||
|
||||
pub async fn notifications_read(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<NotificationIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<NotificationIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||
@ -236,7 +236,7 @@ pub async fn notifications_read(
|
||||
let notifications_data =
|
||||
database::models::notification_item::Notification::get_many(
|
||||
¬ification_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -263,14 +263,14 @@ pub async fn notifications_read(
|
||||
|
||||
pub async fn notifications_delete(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<NotificationIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<NotificationIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::NOTIFICATION_WRITE]),
|
||||
@ -289,7 +289,7 @@ pub async fn notifications_delete(
|
||||
let notifications_data =
|
||||
database::models::notification_item::Notification::get_many(
|
||||
¬ification_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@ -1,12 +1,8 @@
|
||||
use std::{collections::HashSet, fmt::Display, sync::Arc};
|
||||
|
||||
use actix_web::{
|
||||
delete, get, patch, post,
|
||||
web::{self, scope},
|
||||
HttpRequest, HttpResponse,
|
||||
};
|
||||
use chrono::Utc;
|
||||
use itertools::Itertools;
|
||||
use ntex::web::{self, delete, get, patch, post, HttpRequest, HttpResponse};
|
||||
use rand::{distributions::Alphanumeric, Rng, SeedableRng};
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -31,7 +27,6 @@ use crate::{
|
||||
pats::Scopes,
|
||||
},
|
||||
queue::session::AuthQueue,
|
||||
routes::v3::project_creation::CreateError,
|
||||
util::validate::validation_errors_to_string,
|
||||
};
|
||||
use crate::{
|
||||
@ -49,7 +44,7 @@ use crate::util::img::{delete_old_images, upload_image_optimized};
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
scope("oauth")
|
||||
web::scope("oauth")
|
||||
.configure(crate::auth::oauth::config)
|
||||
.service(revoke_oauth_authorization)
|
||||
.service(oauth_client_create)
|
||||
@ -65,14 +60,14 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
|
||||
pub async fn get_user_clients(
|
||||
req: HttpRequest,
|
||||
info: web::Path<String>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<String>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -80,7 +75,7 @@ pub async fn get_user_clients(
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let target_user = User::get(&info.into_inner(), &**pool, &redis).await?;
|
||||
let target_user = User::get(&info.into_inner(), &*pool, &redis).await?;
|
||||
|
||||
if let Some(target_user) = target_user {
|
||||
if target_user.id != current_user.id.into()
|
||||
@ -92,14 +87,14 @@ pub async fn get_user_clients(
|
||||
}
|
||||
|
||||
let clients =
|
||||
OAuthClient::get_all_user_clients(target_user.id, &**pool).await?;
|
||||
OAuthClient::get_all_user_clients(target_user.id, &*pool).await?;
|
||||
|
||||
let response = clients
|
||||
.into_iter()
|
||||
.map(models::oauth_clients::OAuthClient::from)
|
||||
.collect_vec();
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -107,12 +102,12 @@ pub async fn get_user_clients(
|
||||
|
||||
#[get("app/{id}")]
|
||||
pub async fn get_client(
|
||||
id: web::Path<ApiOAuthClientId>,
|
||||
pool: web::Data<PgPool>,
|
||||
id: web::types::Path<ApiOAuthClientId>,
|
||||
pool: web::types::State<PgPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let clients = get_clients_inner(&[id.into_inner()], pool).await?;
|
||||
if let Some(client) = clients.into_iter().next() {
|
||||
Ok(HttpResponse::Ok().json(client))
|
||||
Ok(HttpResponse::Ok().json(&client))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -120,8 +115,8 @@ pub async fn get_client(
|
||||
|
||||
#[get("apps")]
|
||||
pub async fn get_clients(
|
||||
info: web::Query<GetOAuthClientsRequest>,
|
||||
pool: web::Data<PgPool>,
|
||||
info: web::types::Query<GetOAuthClientsRequest>,
|
||||
pool: web::types::State<PgPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids: Vec<_> = info
|
||||
.ids
|
||||
@ -131,7 +126,7 @@ pub async fn get_clients(
|
||||
|
||||
let clients = get_clients_inner(&ids, pool).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(clients))
|
||||
Ok(HttpResponse::Ok().json(&clients))
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
@ -162,14 +157,14 @@ pub struct NewOAuthApp {
|
||||
#[post("app")]
|
||||
pub async fn oauth_client_create<'a>(
|
||||
req: HttpRequest,
|
||||
new_oauth_app: web::Json<NewOAuthApp>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
new_oauth_app: web::types::Json<NewOAuthApp>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -178,7 +173,7 @@ pub async fn oauth_client_create<'a>(
|
||||
.1;
|
||||
|
||||
new_oauth_app.validate().map_err(|e| {
|
||||
CreateError::ValidationError(validation_errors_to_string(e, None))
|
||||
ApiError::InvalidInput(validation_errors_to_string(e, None))
|
||||
})?;
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
@ -214,7 +209,7 @@ pub async fn oauth_client_create<'a>(
|
||||
|
||||
let client = models::oauth_clients::OAuthClient::from(client);
|
||||
|
||||
Ok(HttpResponse::Ok().json(OAuthClientCreationResult {
|
||||
Ok(HttpResponse::Ok().json(&OAuthClientCreationResult {
|
||||
client,
|
||||
client_secret,
|
||||
}))
|
||||
@ -223,14 +218,14 @@ pub async fn oauth_client_create<'a>(
|
||||
#[delete("app/{id}")]
|
||||
pub async fn oauth_client_delete<'a>(
|
||||
req: HttpRequest,
|
||||
client_id: web::Path<ApiOAuthClientId>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
client_id: web::types::Path<ApiOAuthClientId>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -239,10 +234,10 @@ pub async fn oauth_client_delete<'a>(
|
||||
.1;
|
||||
|
||||
let client =
|
||||
OAuthClient::get(client_id.into_inner().into(), &**pool).await?;
|
||||
OAuthClient::get(client_id.into_inner().into(), &*pool).await?;
|
||||
if let Some(client) = client {
|
||||
client.validate_authorized(Some(¤t_user))?;
|
||||
OAuthClient::remove(client.id, &**pool).await?;
|
||||
OAuthClient::remove(client.id, &*pool).await?;
|
||||
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
} else {
|
||||
@ -279,15 +274,15 @@ pub struct OAuthClientEdit {
|
||||
#[patch("app/{id}")]
|
||||
pub async fn oauth_client_edit(
|
||||
req: HttpRequest,
|
||||
client_id: web::Path<ApiOAuthClientId>,
|
||||
client_updates: web::Json<OAuthClientEdit>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
client_id: web::types::Path<ApiOAuthClientId>,
|
||||
client_updates: web::types::Json<OAuthClientEdit>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -300,7 +295,7 @@ pub async fn oauth_client_edit(
|
||||
})?;
|
||||
|
||||
if let Some(existing_client) =
|
||||
OAuthClient::get(client_id.into_inner().into(), &**pool).await?
|
||||
OAuthClient::get(client_id.into_inner().into(), &*pool).await?
|
||||
{
|
||||
existing_client.validate_authorized(Some(¤t_user))?;
|
||||
|
||||
@ -354,18 +349,18 @@ pub struct Extension {
|
||||
#[patch("app/{id}/icon")]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn oauth_client_icon_edit(
|
||||
web::Query(ext): web::Query<Extension>,
|
||||
web::types::Query(ext): web::types::Query<Extension>,
|
||||
req: HttpRequest,
|
||||
client_id: web::Path<ApiOAuthClientId>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
client_id: web::types::Path<ApiOAuthClientId>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::types::Payload,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -373,7 +368,7 @@ pub async fn oauth_client_icon_edit(
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let client = OAuthClient::get((*client_id).into(), &**pool)
|
||||
let client = OAuthClient::get((*client_id).into(), &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -386,7 +381,7 @@ pub async fn oauth_client_icon_edit(
|
||||
delete_old_images(
|
||||
client.icon_url.clone(),
|
||||
client.raw_icon_url.clone(),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -402,7 +397,7 @@ pub async fn oauth_client_icon_edit(
|
||||
&ext.ext,
|
||||
Some(96),
|
||||
Some(1.0),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -424,15 +419,15 @@ pub async fn oauth_client_icon_edit(
|
||||
#[delete("app/{id}/icon")]
|
||||
pub async fn oauth_client_icon_delete(
|
||||
req: HttpRequest,
|
||||
client_id: web::Path<ApiOAuthClientId>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
client_id: web::types::Path<ApiOAuthClientId>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -440,7 +435,7 @@ pub async fn oauth_client_icon_delete(
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let client = OAuthClient::get((*client_id).into(), &**pool)
|
||||
let client = OAuthClient::get((*client_id).into(), &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -452,7 +447,7 @@ pub async fn oauth_client_icon_delete(
|
||||
delete_old_images(
|
||||
client.icon_url.clone(),
|
||||
client.raw_icon_url.clone(),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -473,13 +468,13 @@ pub async fn oauth_client_icon_delete(
|
||||
#[get("authorizations")]
|
||||
pub async fn get_user_oauth_authorizations(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -489,27 +484,27 @@ pub async fn get_user_oauth_authorizations(
|
||||
|
||||
let authorizations = OAuthClientAuthorization::get_all_for_user(
|
||||
current_user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mapped: Vec<models::oauth_clients::OAuthClientAuthorization> =
|
||||
authorizations.into_iter().map(|a| a.into()).collect_vec();
|
||||
|
||||
Ok(HttpResponse::Ok().json(mapped))
|
||||
Ok(HttpResponse::Ok().json(&mapped))
|
||||
}
|
||||
|
||||
#[delete("authorizations")]
|
||||
pub async fn revoke_oauth_authorization(
|
||||
req: HttpRequest,
|
||||
info: web::Query<DeleteOAuthClientQueryParam>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Query<DeleteOAuthClientQueryParam>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
@ -520,7 +515,7 @@ pub async fn revoke_oauth_authorization(
|
||||
OAuthClientAuthorization::remove(
|
||||
info.client_id.into(),
|
||||
current_user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -587,10 +582,10 @@ async fn edit_redirects(
|
||||
|
||||
pub async fn get_clients_inner(
|
||||
ids: &[ApiOAuthClientId],
|
||||
pool: web::Data<PgPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
) -> Result<Vec<models::oauth_clients::OAuthClient>, ApiError> {
|
||||
let ids: Vec<OAuthClientId> = ids.iter().map(|i| (*i).into()).collect();
|
||||
let clients = OAuthClient::get_many(&ids, &**pool).await?;
|
||||
let clients = OAuthClient::get_many(&ids, &*pool).await?;
|
||||
|
||||
Ok(clients.into_iter().map(|c| c.into()).collect_vec())
|
||||
}
|
||||
|
||||
@ -15,13 +15,13 @@ use crate::models::organizations::OrganizationId;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::v3::project_creation::CreateError;
|
||||
// use crate::routes::v3::project_creation::CreateError;
|
||||
use crate::util::img::delete_old_images;
|
||||
use crate::util::routes::read_from_payload;
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use crate::{database, models};
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use futures::TryStreamExt;
|
||||
use ntex::web::{self, HttpRequest, HttpResponse};
|
||||
use rust_decimal::Decimal;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
@ -52,15 +52,15 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
|
||||
pub async fn organization_projects_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let info = info.into_inner().0;
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_READ, Scopes::PROJECT_READ]),
|
||||
@ -80,14 +80,14 @@ pub async fn organization_projects_get(
|
||||
possible_organization_id.map(|x| x as i64),
|
||||
info
|
||||
)
|
||||
.fetch(&**pool)
|
||||
.fetch(&*pool)
|
||||
.map_ok(|m| database::models::ProjectId(m.id))
|
||||
.try_collect::<Vec<database::models::ProjectId>>()
|
||||
.await?;
|
||||
|
||||
let projects_data = crate::database::models::Project::get_many_ids(
|
||||
&project_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -95,7 +95,7 @@ pub async fn organization_projects_get(
|
||||
let projects =
|
||||
filter_visible_projects(projects_data, ¤t_user, &pool, true)
|
||||
.await?;
|
||||
Ok(HttpResponse::Ok().json(projects))
|
||||
Ok(HttpResponse::Ok().json(&projects))
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
@ -114,14 +114,14 @@ pub struct NewOrganization {
|
||||
|
||||
pub async fn organization_create(
|
||||
req: HttpRequest,
|
||||
new_organization: web::Json<NewOrganization>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
new_organization: web::types::Json<NewOrganization>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_CREATE]),
|
||||
@ -130,7 +130,7 @@ pub async fn organization_create(
|
||||
.1;
|
||||
|
||||
new_organization.validate().map_err(|err| {
|
||||
CreateError::ValidationError(validation_errors_to_string(err, None))
|
||||
ApiError::InvalidInput(validation_errors_to_string(err, None))
|
||||
})?;
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
@ -150,7 +150,7 @@ pub async fn organization_create(
|
||||
)
|
||||
.await?;
|
||||
if !results.is_empty() {
|
||||
return Err(CreateError::SlugCollision);
|
||||
return Err(ApiError::InvalidInput("Slug collision".to_owned()));
|
||||
}
|
||||
|
||||
let organization_id = generate_organization_id(&mut transaction).await?;
|
||||
@ -185,7 +185,7 @@ pub async fn organization_create(
|
||||
transaction.commit().await?;
|
||||
|
||||
// Only member is the owner, the logged in one
|
||||
let member_data = TeamMember::get_from_team_full(team_id, &**pool, &redis)
|
||||
let member_data = TeamMember::get_from_team_full(team_id, &*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.next();
|
||||
@ -196,7 +196,7 @@ pub async fn organization_create(
|
||||
false,
|
||||
)]
|
||||
} else {
|
||||
return Err(CreateError::InvalidInput(
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Failed to get created team.".to_owned(), // should never happen
|
||||
));
|
||||
};
|
||||
@ -204,20 +204,20 @@ pub async fn organization_create(
|
||||
let organization =
|
||||
models::organizations::Organization::from(organization, members_data);
|
||||
|
||||
Ok(HttpResponse::Ok().json(organization))
|
||||
Ok(HttpResponse::Ok().json(&organization))
|
||||
}
|
||||
|
||||
pub async fn organization_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner().0;
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_READ]),
|
||||
@ -227,15 +227,15 @@ pub async fn organization_get(
|
||||
.ok();
|
||||
let user_id = current_user.as_ref().map(|x| x.id.into());
|
||||
|
||||
let organization_data = Organization::get(&id, &**pool, &redis).await?;
|
||||
let organization_data = Organization::get(&id, &*pool, &redis).await?;
|
||||
if let Some(data) = organization_data {
|
||||
let members_data =
|
||||
TeamMember::get_from_team_full(data.team_id, &**pool, &redis)
|
||||
TeamMember::get_from_team_full(data.team_id, &*pool, &redis)
|
||||
.await?;
|
||||
|
||||
let users = crate::database::models::User::get_many_ids(
|
||||
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -271,7 +271,7 @@ pub async fn organization_get(
|
||||
|
||||
let organization =
|
||||
models::organizations::Organization::from(data, team_members);
|
||||
return Ok(HttpResponse::Ok().json(organization));
|
||||
return Ok(HttpResponse::Ok().json(&organization));
|
||||
}
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -283,31 +283,31 @@ pub struct OrganizationIds {
|
||||
|
||||
pub async fn organizations_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<OrganizationIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<OrganizationIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
|
||||
let organizations_data =
|
||||
Organization::get_many(&ids, &**pool, &redis).await?;
|
||||
Organization::get_many(&ids, &*pool, &redis).await?;
|
||||
let team_ids = organizations_data
|
||||
.iter()
|
||||
.map(|x| x.team_id)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let teams_data =
|
||||
TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
|
||||
TeamMember::get_from_team_full_many(&team_ids, &*pool, &redis).await?;
|
||||
let users = crate::database::models::User::get_many_ids(
|
||||
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_READ]),
|
||||
@ -362,7 +362,7 @@ pub async fn organizations_get(
|
||||
organizations.push(organization);
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(organizations))
|
||||
Ok(HttpResponse::Ok().json(&organizations))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Validate)]
|
||||
@ -380,15 +380,15 @@ pub struct OrganizationEdit {
|
||||
|
||||
pub async fn organizations_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
new_organization: web::Json<OrganizationEdit>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
new_organization: web::types::Json<OrganizationEdit>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_WRITE]),
|
||||
@ -402,14 +402,14 @@ pub async fn organizations_edit(
|
||||
|
||||
let string = info.into_inner().0;
|
||||
let result =
|
||||
database::models::Organization::get(&string, &**pool, &redis).await?;
|
||||
database::models::Organization::get(&string, &*pool, &redis).await?;
|
||||
if let Some(organization_item) = result {
|
||||
let id = organization_item.id;
|
||||
|
||||
let team_member = database::models::TeamMember::get_from_user_id(
|
||||
organization_item.team_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -544,14 +544,14 @@ pub async fn organizations_edit(
|
||||
|
||||
pub async fn organization_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_DELETE]),
|
||||
@ -561,7 +561,7 @@ pub async fn organization_delete(
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let organization =
|
||||
database::models::Organization::get(&string, &**pool, &redis)
|
||||
database::models::Organization::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -575,7 +575,7 @@ pub async fn organization_delete(
|
||||
organization.id,
|
||||
user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await
|
||||
.map_err(ApiError::Database)?
|
||||
@ -606,7 +606,7 @@ pub async fn organization_delete(
|
||||
",
|
||||
organization.team_id as database::models::ids::TeamId
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.fetch_one(&*pool)
|
||||
.await?
|
||||
.user_id;
|
||||
let owner_id = database::models::ids::UserId(owner_id);
|
||||
@ -683,16 +683,16 @@ pub struct OrganizationProjectAdd {
|
||||
}
|
||||
pub async fn organization_projects_add(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
project_info: web::Json<OrganizationProjectAdd>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
project_info: web::types::Json<OrganizationProjectAdd>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let info = info.into_inner().0;
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]),
|
||||
@ -701,7 +701,7 @@ pub async fn organization_projects_add(
|
||||
.1;
|
||||
|
||||
let organization =
|
||||
database::models::Organization::get(&info, &**pool, &redis)
|
||||
database::models::Organization::get(&info, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -711,7 +711,7 @@ pub async fn organization_projects_add(
|
||||
|
||||
let project_item = database::models::Project::get(
|
||||
&project_info.project_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -732,7 +732,7 @@ pub async fn organization_projects_add(
|
||||
project_item.inner.id,
|
||||
current_user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
@ -745,7 +745,7 @@ pub async fn organization_projects_add(
|
||||
organization.id,
|
||||
current_user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
@ -846,16 +846,16 @@ pub struct OrganizationProjectRemoval {
|
||||
|
||||
pub async fn organization_projects_remove(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
data: web::Json<OrganizationProjectRemoval>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String, String)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
data: web::types::Json<OrganizationProjectRemoval>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (organization_id, project_id) = info.into_inner();
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]),
|
||||
@ -864,7 +864,7 @@ pub async fn organization_projects_remove(
|
||||
.1;
|
||||
|
||||
let organization =
|
||||
database::models::Organization::get(&organization_id, &**pool, &redis)
|
||||
database::models::Organization::get(&organization_id, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -873,7 +873,7 @@ pub async fn organization_projects_remove(
|
||||
})?;
|
||||
|
||||
let project_item =
|
||||
database::models::Project::get(&project_id, &**pool, &redis)
|
||||
database::models::Project::get(&project_id, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -897,7 +897,7 @@ pub async fn organization_projects_remove(
|
||||
organization.id,
|
||||
current_user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
@ -917,7 +917,7 @@ pub async fn organization_projects_remove(
|
||||
organization.id,
|
||||
data.new_owner.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
@ -933,7 +933,7 @@ pub async fn organization_projects_remove(
|
||||
project_item.inner.id,
|
||||
data.new_owner.into(),
|
||||
true,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1028,18 +1028,18 @@ pub struct Extension {
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn organization_icon_edit(
|
||||
web::Query(ext): web::Query<Extension>,
|
||||
web::types::Query(ext): web::types::Query<Extension>,
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::types::Payload,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_WRITE]),
|
||||
@ -1049,7 +1049,7 @@ pub async fn organization_icon_edit(
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let organization_item =
|
||||
database::models::Organization::get(&string, &**pool, &redis)
|
||||
database::models::Organization::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -1061,7 +1061,7 @@ pub async fn organization_icon_edit(
|
||||
let team_member = database::models::TeamMember::get_from_user_id(
|
||||
organization_item.team_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await
|
||||
.map_err(ApiError::Database)?;
|
||||
@ -1083,7 +1083,7 @@ pub async fn organization_icon_edit(
|
||||
delete_old_images(
|
||||
organization_item.icon_url,
|
||||
organization_item.raw_icon_url,
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1101,7 +1101,7 @@ pub async fn organization_icon_edit(
|
||||
&ext.ext,
|
||||
Some(96),
|
||||
Some(1.0),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1134,15 +1134,15 @@ pub async fn organization_icon_edit(
|
||||
|
||||
pub async fn delete_organization_icon(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_WRITE]),
|
||||
@ -1152,7 +1152,7 @@ pub async fn delete_organization_icon(
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let organization_item =
|
||||
database::models::Organization::get(&string, &**pool, &redis)
|
||||
database::models::Organization::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -1164,7 +1164,7 @@ pub async fn delete_organization_icon(
|
||||
let team_member = database::models::TeamMember::get_from_user_id(
|
||||
organization_item.team_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await
|
||||
.map_err(ApiError::Database)?;
|
||||
@ -1186,7 +1186,7 @@ pub async fn delete_organization_icon(
|
||||
delete_old_images(
|
||||
organization_item.icon_url,
|
||||
organization_item.raw_icon_url,
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@ -8,10 +8,10 @@ use crate::models::payouts::{PayoutMethodType, PayoutStatus};
|
||||
use crate::queue::payouts::{make_aditude_request, PayoutsQueue};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
|
||||
use chrono::{Datelike, Duration, TimeZone, Utc, Weekday};
|
||||
use hex::ToHex;
|
||||
use hmac::{Hmac, Mac, NewMac};
|
||||
use ntex::web::{self, delete, get, post, HttpRequest, HttpResponse};
|
||||
use reqwest::Method;
|
||||
use rust_decimal::Decimal;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -37,9 +37,9 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
#[post("_paypal")]
|
||||
pub async fn paypal_webhook(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
payouts: web::Data<PayoutsQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
payouts: web::types::State<PayoutsQueue>,
|
||||
body: String,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let auth_algo = req
|
||||
@ -191,8 +191,8 @@ pub async fn paypal_webhook(
|
||||
#[post("_tremendous")]
|
||||
pub async fn tremendous_webhook(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
body: String,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let signature = req
|
||||
@ -298,13 +298,13 @@ pub async fn tremendous_webhook(
|
||||
#[get("")]
|
||||
pub async fn user_payouts(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAYOUTS_READ]),
|
||||
@ -315,17 +315,17 @@ pub async fn user_payouts(
|
||||
let payout_ids =
|
||||
crate::database::models::payout_item::Payout::get_all_for_user(
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
let payouts = crate::database::models::payout_item::Payout::get_many(
|
||||
&payout_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(
|
||||
payouts
|
||||
&payouts
|
||||
.into_iter()
|
||||
.map(crate::models::payouts::Payout::from)
|
||||
.collect::<Vec<_>>(),
|
||||
@ -343,16 +343,16 @@ pub struct Withdrawal {
|
||||
#[post("")]
|
||||
pub async fn create_payout(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
body: web::Json<Withdrawal>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
payouts_queue: web::Data<PayoutsQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
body: web::types::Json<Withdrawal>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
payouts_queue: web::types::State<PayoutsQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (scopes, user) = get_user_record_from_bearer_token(
|
||||
&req,
|
||||
None,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
)
|
||||
@ -635,16 +635,16 @@ pub async fn create_payout(
|
||||
|
||||
#[delete("{id}")]
|
||||
pub async fn cancel_payout(
|
||||
info: web::Path<(PayoutId,)>,
|
||||
info: web::types::Path<(PayoutId,)>,
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
payouts: web::Data<PayoutsQueue>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
payouts: web::types::State<PayoutsQueue>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAYOUTS_WRITE]),
|
||||
@ -654,7 +654,7 @@ pub async fn cancel_payout(
|
||||
|
||||
let id = info.into_inner().0;
|
||||
let payout =
|
||||
crate::database::models::payout_item::Payout::get(id.into(), &**pool)
|
||||
crate::database::models::payout_item::Payout::get(id.into(), &*pool)
|
||||
.await?;
|
||||
|
||||
if let Some(payout) = payout {
|
||||
@ -738,8 +738,8 @@ pub struct MethodFilter {
|
||||
|
||||
#[get("methods")]
|
||||
pub async fn payment_methods(
|
||||
payouts_queue: web::Data<PayoutsQueue>,
|
||||
filter: web::Query<MethodFilter>,
|
||||
payouts_queue: web::types::State<PayoutsQueue>,
|
||||
filter: web::types::Query<MethodFilter>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let methods = payouts_queue
|
||||
.get_payout_methods()
|
||||
@ -756,7 +756,7 @@ pub async fn payment_methods(
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(methods))
|
||||
Ok(HttpResponse::Ok().json(&methods))
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
@ -768,13 +768,13 @@ pub struct UserBalance {
|
||||
#[get("balance")]
|
||||
pub async fn get_balance(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAYOUTS_READ]),
|
||||
@ -784,7 +784,7 @@ pub async fn get_balance(
|
||||
|
||||
let balance = get_user_balance(user.id.into(), &pool).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(balance))
|
||||
Ok(HttpResponse::Ok().json(&balance))
|
||||
}
|
||||
|
||||
async fn get_user_balance(
|
||||
@ -862,8 +862,8 @@ pub struct RevenueData {
|
||||
|
||||
#[get("platform_revenue")]
|
||||
pub async fn platform_revenue(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let mut redis = redis.connect().await?;
|
||||
|
||||
@ -874,7 +874,7 @@ pub async fn platform_revenue(
|
||||
.await?;
|
||||
|
||||
if let Some(res) = res {
|
||||
return Ok(HttpResponse::Ok().json(res));
|
||||
return Ok(HttpResponse::Ok().json(&res));
|
||||
}
|
||||
|
||||
let all_time_payouts = sqlx::query!(
|
||||
@ -882,7 +882,7 @@ pub async fn platform_revenue(
|
||||
SELECT SUM(amount) from payouts_values
|
||||
",
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.fetch_optional(&*pool)
|
||||
.await?
|
||||
.and_then(|x| x.sum)
|
||||
.unwrap_or(Decimal::ZERO);
|
||||
@ -968,7 +968,7 @@ pub async fn platform_revenue(
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(res))
|
||||
Ok(HttpResponse::Ok().json(&res))
|
||||
}
|
||||
|
||||
fn get_legacy_data_point(timestamp: u64) -> RevenueData {
|
||||
|
||||
@ -24,14 +24,13 @@ use crate::search::indexing::IndexingError;
use crate::util::img::upload_image_optimized;
use crate::util::routes::read_from_field;
use crate::util::validate::validation_errors_to_string;
use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use chrono::Utc;
use futures::stream::StreamExt;
use image::ImageError;
use itertools::Itertools;
use ntex::http::StatusCode;
use ntex::web::{self, HttpRequest, HttpResponse};
use ntex_multipart::{Field, Multipart};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
@ -40,7 +39,7 @@ use std::sync::Arc;
use thiserror::Error;
use validator::Validate;

pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
pub fn config(cfg: &mut ntex::web::ServiceConfig) {
cfg.route("project", web::post().to(project_create));
}

@ -55,7 +54,7 @@ pub enum CreateError {
#[error("Indexing Error: {0}")]
IndexingError(#[from] IndexingError),
#[error("Error while parsing multipart payload: {0}")]
MultipartError(#[from] actix_multipart::MultipartError),
MultipartError(#[from] ntex_multipart::MultipartError),
#[error("Error while parsing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
#[error("Error while validating input: {0}")]
@ -90,7 +89,7 @@ pub enum CreateError {
RerouteError(#[from] reqwest::Error),
}

impl actix_web::ResponseError for CreateError {
impl ntex::web::WebResponseError for CreateError {
fn status_code(&self) -> StatusCode {
match self {
CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
@ -123,8 +122,8 @@ impl actix_web::ResponseError for CreateError {
}
}

fn error_response(&self) -> HttpResponse {
HttpResponse::build(self.status_code()).json(ApiError {
fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
HttpResponse::build(self.status_code()).json(&ApiError {
error: match self {
CreateError::EnvError(..) => "environment_error",
CreateError::SqlxDatabaseError(..) => "database_error",
@ -265,10 +264,10 @@ pub async fn undo_uploads(
pub async fn project_create(
req: HttpRequest,
mut payload: Multipart,
client: Data<PgPool>,
redis: Data<RedisPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: Data<AuthQueue>,
client: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
let mut transaction = client.begin().await?;
let mut uploaded_files = Vec::new();
@ -277,7 +276,7 @@ pub async fn project_create(
req,
&mut payload,
&mut transaction,
&***file_host,
&**file_host,
&mut uploaded_files,
&client,
&redis,
@ -286,7 +285,7 @@ pub async fn project_create(
.await;

if result.is_err() {
let undo_result = undo_uploads(&***file_host, &uploaded_files).await;
let undo_result = undo_uploads(&**file_host, &uploaded_files).await;
let rollback_result = transaction.rollback().await;

undo_result?;
@ -377,10 +376,12 @@ async fn project_create_inner(
)))
})?;

let content_disposition = field.content_disposition();
let name = content_disposition.get_name().ok_or_else(|| {
CreateError::MissingValueError(String::from("Missing content name"))
})?;
// TODO: fix me
// let content_disposition = field.content_disposition();
// let name = content_disposition.get_name().ok_or_else(|| {
// CreateError::MissingValueError(String::from("Missing content name"))
// })?;
let name = "TODO FIX ME";

if name != "data" {
return Err(CreateError::InvalidInput(String::from(
@ -476,14 +477,17 @@ async fn project_create_inner(
}

let result = async {
let content_disposition = field.content_disposition().clone();
// let content_disposition = field.content_disposition().clone();
//
// let name = content_disposition.get_name().ok_or_else(|| {
// CreateError::MissingValueError("Missing content name".to_string())
// })?;
// TODO: fix me
let name = "TODO FIX ME".to_string();

let name = content_disposition.get_name().ok_or_else(|| {
CreateError::MissingValueError("Missing content name".to_string())
})?;

let (file_name, file_extension) =
super::version_creation::get_name_ext(&content_disposition)?;
let (file_name, file_extension) = ("TODO FIX ME", "TODO FIX ME");
// let (file_name, file_extension) =
// super::version_creation::get_name_ext(&content_disposition)?;

if name == "icon" {
if icon_data.is_some() {
@ -915,7 +919,7 @@ async fn project_create_inner(
fields: HashMap::new(), // Fields instantiate to empty
};

Ok(HttpResponse::Ok().json(response))
Ok(HttpResponse::Ok().json(&response))
}
}

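The `TODO FIX ME` placeholders above drop the multipart field name and filename that the actix version read from `Field::content_disposition()`. One possible way to recover them under ntex, assuming `ntex_multipart::Field` still exposes the raw part headers via `headers()` like its actix-multipart ancestor (an assumption, not something this commit does), is to parse the `Content-Disposition` header by hand:

```rust
use ntex::http::header::CONTENT_DISPOSITION;
use ntex_multipart::Field;

// Hypothetical helper (not part of this commit): pull `name` or `filename`
// out of a multipart part's Content-Disposition header. Assumes
// ntex_multipart::Field exposes the part headers via `headers()`.
fn disposition_param(field: &Field, key: &str) -> Option<String> {
    let raw = field.headers().get(&CONTENT_DISPOSITION)?.to_str().ok()?;
    // e.g. `form-data; name="data"; filename="my-project.zip"`
    raw.split(';').map(str::trim).find_map(|part| {
        let value = part.strip_prefix(key)?.strip_prefix('=')?;
        Some(value.trim_matches('"').to_string())
    })
}
```

A helper along these lines would let the `name` and `file_name` lookups fail with `CreateError::MissingValueError` again instead of returning the hard-coded placeholders.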
@ -29,10 +29,10 @@ use crate::util::img;
use crate::util::img::{delete_old_images, upload_image_optimized};
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::TryStreamExt;
use itertools::Itertools;
use ntex::web::{self, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::PgPool;
@ -84,9 +84,9 @@ pub struct RandomProjects {
}

pub async fn random_projects_get(
web::Query(count): web::Query<RandomProjects>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
web::types::Query(count): web::types::Query<RandomProjects>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
) -> Result<HttpResponse, ApiError> {
count.validate().map_err(|err| {
ApiError::Validation(validation_errors_to_string(err, None))
@ -102,19 +102,19 @@ pub async fn random_projects_get(
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch(&**pool)
|
||||
.fetch(&*pool)
|
||||
.map_ok(|m| db_ids::ProjectId(m.id))
|
||||
.try_collect::<Vec<_>>()
|
||||
.await?;
|
||||
|
||||
let projects_data =
|
||||
db_models::Project::get_many_ids(&project_ids, &**pool, &redis)
|
||||
db_models::Project::get_many_ids(&project_ids, &*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Project::from)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(projects_data))
|
||||
Ok(HttpResponse::Ok().json(&projects_data))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
@ -124,18 +124,18 @@ pub struct ProjectIds {
|
||||
|
||||
pub async fn projects_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ProjectIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<ProjectIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
|
||||
let projects_data =
|
||||
db_models::Project::get_many(&ids, &**pool, &redis).await?;
|
||||
db_models::Project::get_many(&ids, &*pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -148,23 +148,22 @@ pub async fn projects_get(
|
||||
filter_visible_projects(projects_data, &user_option, &pool, false)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(projects))
|
||||
Ok(HttpResponse::Ok().json(&projects))
|
||||
}
|
||||
|
||||
pub async fn project_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let project_data =
|
||||
db_models::Project::get(&string, &**pool, &redis).await?;
|
||||
let project_data = db_models::Project::get(&string, &*pool, &redis).await?;
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -175,7 +174,7 @@ pub async fn project_get(
|
||||
|
||||
if let Some(data) = project_data {
|
||||
if is_visible_project(&data.inner, &user_option, &pool, false).await? {
|
||||
return Ok(HttpResponse::Ok().json(Project::from(data)));
|
||||
return Ok(HttpResponse::Ok().json(&Project::from(data)));
|
||||
}
|
||||
}
|
||||
Err(ApiError::NotFound)
|
||||
@ -244,17 +243,17 @@ pub struct EditProject {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn project_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
search_config: web::Data<SearchConfig>,
|
||||
new_project: web::Json<EditProject>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
moderation_queue: web::Data<AutomatedModerationQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
search_config: web::types::State<SearchConfig>,
|
||||
new_project: web::types::Json<EditProject>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
moderation_queue: web::types::State<AutomatedModerationQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -267,7 +266,7 @@ pub async fn project_edit(
|
||||
})?;
|
||||
|
||||
let string = info.into_inner().0;
|
||||
let result = db_models::Project::get(&string, &**pool, &redis).await?;
|
||||
let result = db_models::Project::get(&string, &*pool, &redis).await?;
|
||||
if let Some(project_item) = result {
|
||||
let id = project_item.inner.id;
|
||||
|
||||
@ -275,7 +274,7 @@ pub async fn project_edit(
|
||||
db_models::TeamMember::get_for_project_permissions(
|
||||
&project_item.inner,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -949,8 +948,8 @@ pub async fn edit_project_categories(
|
||||
// }
|
||||
|
||||
pub async fn project_search(
|
||||
web::Query(info): web::Query<SearchRequest>,
|
||||
config: web::Data<SearchConfig>,
|
||||
web::types::Query(info): web::types::Query<SearchRequest>,
|
||||
config: web::types::State<SearchConfig>,
|
||||
) -> Result<HttpResponse, SearchError> {
|
||||
let results = search_for_project(&info, &config).await?;
|
||||
|
||||
@ -966,21 +965,21 @@ pub async fn project_search(
|
||||
// total_hits: results.total_hits,
|
||||
// };
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
//checks the validity of a project id or slug
|
||||
pub async fn project_get_check(
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let slug = info.into_inner().0;
|
||||
|
||||
let project_data = db_models::Project::get(&slug, &**pool, &redis).await?;
|
||||
let project_data = db_models::Project::get(&slug, &*pool, &redis).await?;
|
||||
|
||||
if let Some(project) = project_data {
|
||||
Ok(HttpResponse::Ok().json(json! ({
|
||||
Ok(HttpResponse::Ok().json(&json! ({
|
||||
"id": models::ids::ProjectId::from(project.inner.id)
|
||||
})))
|
||||
} else {
|
||||
@ -996,18 +995,18 @@ pub struct DependencyInfo {
|
||||
|
||||
pub async fn dependency_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let result = db_models::Project::get(&string, &**pool, &redis).await?;
|
||||
let result = db_models::Project::get(&string, &*pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -1025,7 +1024,7 @@ pub async fn dependency_list(
|
||||
|
||||
let dependencies = database::Project::get_dependencies(
|
||||
project.inner.id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -1051,8 +1050,8 @@ pub async fn dependency_list(
|
||||
.unique()
|
||||
.collect::<Vec<db_models::VersionId>>();
|
||||
let (projects_result, versions_result) = futures::future::try_join(
|
||||
database::Project::get_many_ids(&project_ids, &**pool, &redis),
|
||||
database::Version::get_many(&dep_version_ids, &**pool, &redis),
|
||||
database::Project::get_many_ids(&project_ids, &*pool, &redis),
|
||||
database::Version::get_many(&dep_version_ids, &*pool, &redis),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1077,7 +1076,7 @@ pub async fn dependency_list(
|
||||
versions.sort_by(|a, b| b.date_published.cmp(&a.date_published));
|
||||
versions.dedup_by(|a, b| a.id == b.id);
|
||||
|
||||
Ok(HttpResponse::Ok().json(DependencyInfo { projects, versions }))
|
||||
Ok(HttpResponse::Ok().json(&DependencyInfo { projects, versions }))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -1112,15 +1111,15 @@ pub struct BulkEditProject {
|
||||
|
||||
pub async fn projects_edit(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ProjectIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
bulk_edit_project: web::Json<BulkEditProject>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<ProjectIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
bulk_edit_project: web::types::Json<BulkEditProject>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -1139,7 +1138,7 @@ pub async fn projects_edit(
|
||||
.collect();
|
||||
|
||||
let projects_data =
|
||||
db_models::Project::get_many_ids(&project_ids, &**pool, &redis).await?;
|
||||
db_models::Project::get_many_ids(&project_ids, &*pool, &redis).await?;
|
||||
|
||||
if let Some(id) = project_ids
|
||||
.iter()
|
||||
@ -1156,7 +1155,7 @@ pub async fn projects_edit(
|
||||
.map(|x| x.inner.team_id)
|
||||
.collect::<Vec<db_models::TeamId>>();
|
||||
let team_members = db_models::TeamMember::get_from_team_full_many(
|
||||
&team_ids, &**pool, &redis,
|
||||
&team_ids, &*pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1166,7 +1165,7 @@ pub async fn projects_edit(
|
||||
.collect::<Vec<db_models::OrganizationId>>();
|
||||
let organizations = db_models::Organization::get_many_ids(
|
||||
&organization_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -1178,15 +1177,15 @@ pub async fn projects_edit(
|
||||
let organization_team_members =
|
||||
db_models::TeamMember::get_from_team_full_many(
|
||||
&organization_team_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let categories =
|
||||
db_models::categories::Category::list(&**pool, &redis).await?;
|
||||
db_models::categories::Category::list(&*pool, &redis).await?;
|
||||
let link_platforms =
|
||||
db_models::categories::LinkPlatform::list(&**pool, &redis).await?;
|
||||
db_models::categories::LinkPlatform::list(&*pool, &redis).await?;
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
@ -1407,18 +1406,18 @@ pub struct Extension {
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn project_icon_edit(
|
||||
web::Query(ext): web::Query<Extension>,
|
||||
web::types::Query(ext): web::types::Query<Extension>,
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::types::Payload,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -1427,7 +1426,7 @@ pub async fn project_icon_edit(
|
||||
.1;
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let project_item = db_models::Project::get(&string, &**pool, &redis)
|
||||
let project_item = db_models::Project::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -1440,7 +1439,7 @@ pub async fn project_icon_edit(
|
||||
db_models::TeamMember::get_for_project_permissions(
|
||||
&project_item.inner,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1469,7 +1468,7 @@ pub async fn project_icon_edit(
|
||||
delete_old_images(
|
||||
project_item.inner.icon_url,
|
||||
project_item.inner.raw_icon_url,
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1487,7 +1486,7 @@ pub async fn project_icon_edit(
|
||||
&ext.ext,
|
||||
Some(96),
|
||||
Some(1.0),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1521,15 +1520,15 @@ pub async fn project_icon_edit(
|
||||
|
||||
pub async fn delete_project_icon(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -1538,7 +1537,7 @@ pub async fn delete_project_icon(
|
||||
.1;
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let project_item = db_models::Project::get(&string, &**pool, &redis)
|
||||
let project_item = db_models::Project::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -1551,7 +1550,7 @@ pub async fn delete_project_icon(
|
||||
db_models::TeamMember::get_for_project_permissions(
|
||||
&project_item.inner,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1579,7 +1578,7 @@ pub async fn delete_project_icon(
|
||||
delete_old_images(
|
||||
project_item.inner.icon_url,
|
||||
project_item.inner.raw_icon_url,
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1620,15 +1619,15 @@ pub struct GalleryCreateQuery {
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn add_gallery_item(
|
||||
web::Query(ext): web::Query<Extension>,
|
||||
web::types::Query(ext): web::types::Query<Extension>,
|
||||
req: HttpRequest,
|
||||
web::Query(item): web::Query<GalleryCreateQuery>,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(item): web::types::Query<GalleryCreateQuery>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::types::Payload,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
item.validate().map_err(|err| {
|
||||
ApiError::Validation(validation_errors_to_string(err, None))
|
||||
@ -1636,7 +1635,7 @@ pub async fn add_gallery_item(
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -1645,7 +1644,7 @@ pub async fn add_gallery_item(
|
||||
.1;
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let project_item = db_models::Project::get(&string, &**pool, &redis)
|
||||
let project_item = db_models::Project::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -1665,7 +1664,7 @@ pub async fn add_gallery_item(
|
||||
db_models::TeamMember::get_for_project_permissions(
|
||||
&project_item.inner,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1705,7 +1704,7 @@ pub async fn add_gallery_item(
|
||||
&ext.ext,
|
||||
Some(350),
|
||||
Some(1.0),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1787,14 +1786,14 @@ pub struct GalleryEditQuery {
|
||||
|
||||
pub async fn edit_gallery_item(
|
||||
req: HttpRequest,
|
||||
web::Query(item): web::Query<GalleryEditQuery>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(item): web::types::Query<GalleryEditQuery>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -1813,7 +1812,7 @@ pub async fn edit_gallery_item(
|
||||
",
|
||||
item.url
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.fetch_optional(&*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
@ -1824,7 +1823,7 @@ pub async fn edit_gallery_item(
|
||||
|
||||
let project_item = db_models::Project::get_id(
|
||||
database::models::ProjectId(result.mod_id),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -1839,7 +1838,7 @@ pub async fn edit_gallery_item(
|
||||
db_models::TeamMember::get_for_project_permissions(
|
||||
&project_item.inner,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -1953,15 +1952,15 @@ pub struct GalleryDeleteQuery {
|
||||
|
||||
pub async fn delete_gallery_item(
|
||||
req: HttpRequest,
|
||||
web::Query(item): web::Query<GalleryDeleteQuery>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(item): web::types::Query<GalleryDeleteQuery>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -1976,7 +1975,7 @@ pub async fn delete_gallery_item(
|
||||
",
|
||||
item.url
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.fetch_optional(&*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
@ -1987,7 +1986,7 @@ pub async fn delete_gallery_item(
|
||||
|
||||
let project_item = db_models::Project::get_id(
|
||||
database::models::ProjectId(item.mod_id),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -2002,7 +2001,7 @@ pub async fn delete_gallery_item(
|
||||
db_models::TeamMember::get_for_project_permissions(
|
||||
&project_item.inner,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -2031,7 +2030,7 @@ pub async fn delete_gallery_item(
|
||||
delete_old_images(
|
||||
Some(item.image_url),
|
||||
Some(item.raw_image_url),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -2062,15 +2061,15 @@ pub async fn delete_gallery_item(
|
||||
|
||||
pub async fn project_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
search_config: web::Data<SearchConfig>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
search_config: web::types::State<SearchConfig>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_DELETE]),
|
||||
@ -2079,7 +2078,7 @@ pub async fn project_delete(
|
||||
.1;
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let project = db_models::Project::get(&string, &**pool, &redis)
|
||||
let project = db_models::Project::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -2092,7 +2091,7 @@ pub async fn project_delete(
|
||||
db_models::TeamMember::get_for_project_permissions(
|
||||
&project.inner,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -2162,14 +2161,14 @@ pub async fn project_delete(
|
||||
|
||||
pub async fn project_follow(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_WRITE]),
|
||||
@ -2178,7 +2177,7 @@ pub async fn project_follow(
|
||||
.1;
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let result = db_models::Project::get(&string, &**pool, &redis)
|
||||
let result = db_models::Project::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -2200,7 +2199,7 @@ pub async fn project_follow(
|
||||
user_id as db_ids::UserId,
|
||||
project_id as db_ids::ProjectId
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.fetch_one(&*pool)
|
||||
.await?
|
||||
.exists
|
||||
.unwrap_or(false);
|
||||
@ -2242,14 +2241,14 @@ pub async fn project_follow(
|
||||
|
||||
pub async fn project_unfollow(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_WRITE]),
|
||||
@ -2258,7 +2257,7 @@ pub async fn project_unfollow(
|
||||
.1;
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let result = db_models::Project::get(&string, &**pool, &redis)
|
||||
let result = db_models::Project::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -2276,7 +2275,7 @@ pub async fn project_unfollow(
|
||||
user_id as db_ids::UserId,
|
||||
project_id as db_ids::ProjectId
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.fetch_one(&*pool)
|
||||
.await?
|
||||
.exists
|
||||
.unwrap_or(false);
|
||||
@ -2318,14 +2317,14 @@ pub async fn project_unfollow(
|
||||
|
||||
pub async fn project_get_organization(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ, Scopes::ORGANIZATION_READ]),
|
||||
@ -2336,7 +2335,7 @@ pub async fn project_get_organization(
|
||||
let user_id = current_user.as_ref().map(|x| x.id.into());
|
||||
|
||||
let string = info.into_inner().0;
|
||||
let result = db_models::Project::get(&string, &**pool, &redis)
|
||||
let result = db_models::Project::get(&string, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -2350,7 +2349,7 @@ pub async fn project_get_organization(
|
||||
))
|
||||
} else if let Some(organization_id) = result.inner.organization_id {
|
||||
let organization =
|
||||
db_models::Organization::get_id(organization_id, &**pool, &redis)
|
||||
db_models::Organization::get_id(organization_id, &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -2360,14 +2359,14 @@ pub async fn project_get_organization(
|
||||
|
||||
let members_data = TeamMember::get_from_team_full(
|
||||
organization.team_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let users = crate::database::models::User::get_many_ids(
|
||||
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -2405,7 +2404,7 @@ pub async fn project_get_organization(
|
||||
organization,
|
||||
team_members,
|
||||
);
|
||||
return Ok(HttpResponse::Ok().json(organization));
|
||||
return Ok(HttpResponse::Ok().json(&organization));
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
|
||||
@ -16,9 +16,10 @@ use crate::models::threads::{MessageBody, ThreadType};
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::util::img;
use actix_web::{web, HttpRequest, HttpResponse};
use bytes::BytesMut;
use chrono::Utc;
use futures::StreamExt;
use ntex::web::{self, HttpRequest, HttpResponse};
use serde::Deserialize;
use sqlx::PgPool;
use validator::Validate;
@ -46,16 +47,16 @@ pub struct CreateReport {

pub async fn report_create(
req: HttpRequest,
pool: web::Data<PgPool>,
mut body: web::Payload,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
pool: web::types::State<PgPool>,
mut body: web::types::Payload,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let mut transaction = pool.begin().await?;

let current_user = get_user_from_headers(
&req,
&**pool,
&*pool,
&redis,
&session_queue,
Some(&[Scopes::REPORT_CREATE]),
@ -63,7 +64,7 @@ pub async fn report_create(
.await?
.1;

let mut bytes = web::BytesMut::new();
let mut bytes = BytesMut::new();
while let Some(item) = body.next().await {
bytes.extend_from_slice(&item.map_err(|_| {
ApiError::InvalidInput(
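This file also swaps `web::BytesMut` (re-exported by actix-web) for the `bytes` crate's `BytesMut`, since ntex's `web` module does not re-export it; the raw body still arrives as a stream that is buffered chunk by chunk. A condensed sketch of that buffering step, with the error mapping from the handler above elided (illustrative, not a drop-in replacement):

```rust
use bytes::BytesMut;
use futures::StreamExt;
use ntex::web;

// Sketch only: collect an ntex payload into BytesMut, mirroring the loop in
// report_create but silently skipping failed chunks instead of erroring.
async fn read_body(mut body: web::types::Payload) -> BytesMut {
    let mut bytes = BytesMut::new();
    while let Some(chunk) = body.next().await {
        if let Ok(chunk) = chunk {
            bytes.extend_from_slice(&chunk);
        }
    }
    bytes
}
```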
@ -216,7 +217,7 @@ pub async fn report_create(
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(Report {
|
||||
Ok(HttpResponse::Ok().json(&Report {
|
||||
id: id.into(),
|
||||
report_type: new_report.report_type.clone(),
|
||||
item_id: new_report.item_id.clone(),
|
||||
@ -246,14 +247,14 @@ fn default_all() -> bool {
|
||||
|
||||
pub async fn reports(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
count: web::Query<ReportsRequestOptions>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
count: web::types::Query<ReportsRequestOptions>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::REPORT_READ]),
|
||||
@ -273,7 +274,7 @@ pub async fn reports(
|
||||
",
|
||||
count.count as i64
|
||||
)
|
||||
.fetch(&**pool)
|
||||
.fetch(&*pool)
|
||||
.map_ok(|m| crate::database::models::ids::ReportId(m.id))
|
||||
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
|
||||
.await?
|
||||
@ -288,7 +289,7 @@ pub async fn reports(
|
||||
user.id.0 as i64,
|
||||
count.count as i64
|
||||
)
|
||||
.fetch(&**pool)
|
||||
.fetch(&*pool)
|
||||
.map_ok(|m| crate::database::models::ids::ReportId(m.id))
|
||||
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
|
||||
.await?
|
||||
@ -296,7 +297,7 @@ pub async fn reports(
|
||||
|
||||
let query_reports = crate::database::models::report_item::Report::get_many(
|
||||
&report_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -306,7 +307,7 @@ pub async fn reports(
|
||||
reports.push(x.into());
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(reports))
|
||||
Ok(HttpResponse::Ok().json(&reports))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@ -316,10 +317,10 @@ pub struct ReportIds {
|
||||
|
||||
pub async fn reports_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ReportIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<ReportIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let report_ids: Vec<crate::database::models::ids::ReportId> =
|
||||
serde_json::from_str::<Vec<crate::models::ids::ReportId>>(&ids.ids)?
|
||||
@ -329,13 +330,13 @@ pub async fn reports_get(
|
||||
|
||||
let reports_data = crate::database::models::report_item::Report::get_many(
|
||||
&report_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::REPORT_READ]),
|
||||
@ -349,19 +350,19 @@ pub async fn reports_get(
|
||||
.map(|x| x.into())
|
||||
.collect::<Vec<Report>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(all_reports))
|
||||
Ok(HttpResponse::Ok().json(&all_reports))
|
||||
}
|
||||
|
||||
pub async fn report_get(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
info: web::types::Path<(crate::models::reports::ReportId,)>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::REPORT_READ]),
|
||||
@ -371,7 +372,7 @@ pub async fn report_get(
|
||||
let id = info.into_inner().0.into();
|
||||
|
||||
let report =
|
||||
crate::database::models::report_item::Report::get(id, &**pool).await?;
|
||||
crate::database::models::report_item::Report::get(id, &*pool).await?;
|
||||
|
||||
if let Some(report) = report {
|
||||
if !user.role.is_mod() && report.reporter != user.id.into() {
|
||||
@ -379,7 +380,7 @@ pub async fn report_get(
|
||||
}
|
||||
|
||||
let report: Report = report.into();
|
||||
Ok(HttpResponse::Ok().json(report))
|
||||
Ok(HttpResponse::Ok().json(&report))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -394,15 +395,15 @@ pub struct EditReport {
|
||||
|
||||
pub async fn report_edit(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
edit_report: web::Json<EditReport>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
info: web::types::Path<(crate::models::reports::ReportId,)>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
edit_report: web::types::Json<EditReport>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::REPORT_WRITE]),
|
||||
@ -412,7 +413,7 @@ pub async fn report_edit(
|
||||
let id = info.into_inner().0.into();
|
||||
|
||||
let report =
|
||||
crate::database::models::report_item::Report::get(id, &**pool).await?;
|
||||
crate::database::models::report_item::Report::get(id, &*pool).await?;
|
||||
|
||||
if let Some(report) = report {
|
||||
if !user.role.is_mod() && report.reporter != user.id.into() {
|
||||
@ -494,14 +495,14 @@ pub async fn report_edit(
|
||||
|
||||
pub async fn report_delete(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
info: web::types::Path<(crate::models::reports::ReportId,)>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
check_is_moderator_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::REPORT_DELETE]),
|
||||
|
||||
@ -1,5 +1,5 @@
use crate::routes::ApiError;
use actix_web::{web, HttpResponse};
use ntex::web::{self, HttpResponse};
use sqlx::PgPool;

pub fn config(cfg: &mut web::ServiceConfig) {
@ -15,7 +15,7 @@ pub struct V3Stats {
}

pub async fn get_stats(
pool: web::Data<PgPool>,
pool: web::types::State<PgPool>,
) -> Result<HttpResponse, ApiError> {
let projects = sqlx::query!(
|
||||
"
|
||||
@ -28,7 +28,7 @@ pub async fn get_stats(
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.fetch_one(&*pool)
|
||||
.await?;
|
||||
|
||||
let versions = sqlx::query!(
|
||||
@ -47,7 +47,7 @@ pub async fn get_stats(
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.fetch_one(&*pool)
|
||||
.await?;
|
||||
|
||||
let authors = sqlx::query!(
|
||||
@ -62,7 +62,7 @@ pub async fn get_stats(
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.fetch_one(&*pool)
|
||||
.await?;
|
||||
|
||||
let files = sqlx::query!(
|
||||
@ -80,7 +80,7 @@ pub async fn get_stats(
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.fetch_one(&*pool)
|
||||
.await?;
|
||||
|
||||
let v3_stats = V3Stats {
|
||||
@ -90,5 +90,5 @@ pub async fn get_stats(
|
||||
files: files.count,
|
||||
};
|
||||
|
||||
Ok(HttpResponse::Ok().json(v3_stats))
|
||||
Ok(HttpResponse::Ok().json(&v3_stats))
|
||||
}
|
||||
|
||||
@ -8,7 +8,7 @@ use crate::database::models::loader_fields::{
Game, Loader, LoaderField, LoaderFieldEnumValue, LoaderFieldType,
};
use crate::database::redis::RedisPool;
use actix_web::{web, HttpResponse};
use ntex::web::{self, HttpResponse};

use itertools::Itertools;
use serde_json::Value;
@ -38,10 +38,10 @@ pub struct GameData {
|
||||
}
|
||||
|
||||
pub async fn games_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = Game::list(&**pool, &redis)
|
||||
let results = Game::list(&*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| GameData {
|
||||
@ -52,7 +52,7 @@ pub async fn games_list(
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
@ -64,10 +64,10 @@ pub struct CategoryData {
|
||||
}
|
||||
|
||||
pub async fn category_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = Category::list(&**pool, &redis)
|
||||
let results = Category::list(&*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| CategoryData {
|
||||
@ -78,7 +78,7 @@ pub async fn category_list(
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
@ -92,14 +92,14 @@ pub struct LoaderData {
|
||||
}
|
||||
|
||||
pub async fn loader_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let loaders = Loader::list(&**pool, &redis).await?;
|
||||
let loaders = Loader::list(&*pool, &redis).await?;
|
||||
|
||||
let loader_fields = LoaderField::get_fields_per_loader(
|
||||
&loaders.iter().map(|x| x.id).collect_vec(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -121,7 +121,7 @@ pub async fn loader_list(
|
||||
|
||||
results.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize, serde::Serialize)]
|
||||
@ -132,12 +132,12 @@ pub struct LoaderFieldsEnumQuery {
|
||||
|
||||
// Provides the variants for any enumerable loader field.
|
||||
pub async fn loader_fields_list(
|
||||
pool: web::Data<PgPool>,
|
||||
query: web::Query<LoaderFieldsEnumQuery>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
query: web::types::Query<LoaderFieldsEnumQuery>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let query = query.into_inner();
|
||||
let loader_field = LoaderField::get_fields_all(&**pool, &redis)
|
||||
let loader_field = LoaderField::get_fields_all(&*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.find(|x| x.field == query.loader_field)
|
||||
@ -164,16 +164,15 @@ pub async fn loader_fields_list(
|
||||
LoaderFieldEnumValue::list_filter(
|
||||
loader_field_enum_id,
|
||||
filters,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
LoaderFieldEnumValue::list(loader_field_enum_id, &**pool, &redis)
|
||||
.await?
|
||||
LoaderFieldEnumValue::list(loader_field_enum_id, &*pool, &redis).await?
|
||||
};
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
@ -193,7 +192,7 @@ pub async fn license_list() -> HttpResponse {
|
||||
});
|
||||
}
|
||||
|
||||
HttpResponse::Ok().json(results)
|
||||
HttpResponse::Ok().json(&results)
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
@ -203,19 +202,19 @@ pub struct LicenseText {
|
||||
}
|
||||
|
||||
pub async fn license_text(
|
||||
params: web::Path<(String,)>,
|
||||
params: web::types::Path<(String,)>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let license_id = params.into_inner().0;
|
||||
|
||||
if license_id == *crate::models::projects::DEFAULT_LICENSE_ID {
|
||||
return Ok(HttpResponse::Ok().json(LicenseText {
|
||||
return Ok(HttpResponse::Ok().json(&LicenseText {
|
||||
title: "All Rights Reserved".to_string(),
|
||||
body: "All rights reserved unless explicitly stated.".to_string(),
|
||||
}));
|
||||
}
|
||||
|
||||
if let Some(license) = spdx::license_id(&license_id) {
|
||||
return Ok(HttpResponse::Ok().json(LicenseText {
|
||||
return Ok(HttpResponse::Ok().json(&LicenseText {
|
||||
title: license.full_name.to_string(),
|
||||
body: license.text().to_string(),
|
||||
}));
|
||||
@ -233,11 +232,11 @@ pub struct LinkPlatformQueryData {
|
||||
}
|
||||
|
||||
pub async fn link_platform_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results: Vec<LinkPlatformQueryData> =
|
||||
LinkPlatform::list(&**pool, &redis)
|
||||
LinkPlatform::list(&*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| LinkPlatformQueryData {
|
||||
@ -245,21 +244,21 @@ pub async fn link_platform_list(
|
||||
donation: x.donation,
|
||||
})
|
||||
.collect();
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
pub async fn report_type_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = ReportType::list(&**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
let results = ReportType::list(&*pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
pub async fn project_type_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = ProjectType::list(&**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
let results = ProjectType::list(&*pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(&results))
|
||||
}
|
||||
|
||||
@ -13,7 +13,7 @@ use crate::models::teams::{
use crate::models::users::UserId;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use actix_web::{web, HttpRequest, HttpResponse};
use ntex::web::{self, HttpRequest, HttpResponse};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@ -42,19 +42,19 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
// They can be differentiated by the "organization_permissions" field being null or not
|
||||
pub async fn team_members_get_project(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
let project_data =
|
||||
crate::database::models::Project::get(&string, &**pool, &redis).await?;
|
||||
crate::database::models::Project::get(&string, &*pool, &redis).await?;
|
||||
|
||||
if let Some(project) = project_data {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -70,13 +70,13 @@ pub async fn team_members_get_project(
|
||||
}
|
||||
let members_data = TeamMember::get_from_team_full(
|
||||
project.inner.team_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
let users = User::get_many_ids(
|
||||
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -87,7 +87,7 @@ pub async fn team_members_get_project(
|
||||
TeamMember::get_for_project_permissions(
|
||||
&project.inner,
|
||||
user_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -118,7 +118,7 @@ pub async fn team_members_get_project(
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(team_members))
|
||||
Ok(HttpResponse::Ok().json(&team_members))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -126,20 +126,20 @@ pub async fn team_members_get_project(
|
||||
|
||||
pub async fn team_members_get_organization(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
let organization_data =
|
||||
crate::database::models::Organization::get(&string, &**pool, &redis)
|
||||
crate::database::models::Organization::get(&string, &*pool, &redis)
|
||||
.await?;
|
||||
|
||||
if let Some(organization) = organization_data {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ORGANIZATION_READ]),
|
||||
@ -150,13 +150,13 @@ pub async fn team_members_get_organization(
|
||||
|
||||
let members_data = TeamMember::get_from_team_full(
|
||||
organization.team_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
let users = crate::database::models::User::get_many_ids(
|
||||
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -193,7 +193,7 @@ pub async fn team_members_get_organization(
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(team_members))
|
||||
Ok(HttpResponse::Ok().json(&team_members))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -202,24 +202,24 @@ pub async fn team_members_get_organization(
|
||||
// Returns all members of a team, but not necessarily those of a project-team's organization (unlike team_members_get_project)
|
||||
pub async fn team_members_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(TeamId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(TeamId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner().0;
|
||||
let members_data =
|
||||
TeamMember::get_from_team_full(id.into(), &**pool, &redis).await?;
|
||||
TeamMember::get_from_team_full(id.into(), &*pool, &redis).await?;
|
||||
let users = crate::database::models::User::get_many_ids(
|
||||
&members_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -257,7 +257,7 @@ pub async fn team_members_get(
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(team_members))
|
||||
Ok(HttpResponse::Ok().json(&team_members))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
@ -267,10 +267,10 @@ pub struct TeamIds {

pub async fn teams_get(
req: HttpRequest,
web::Query(ids): web::Query<TeamIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
web::types::Query(ids): web::types::Query<TeamIds>,
pool: web::types::State<PgPool>,
redis: web::types::State<RedisPool>,
session_queue: web::types::State<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
use itertools::Itertools;

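`teams_get` keeps the destructuring style in its signature, just with the renamed extractor. A small sketch of that pattern in isolation, assuming a `TeamIds` shape like the other `*Ids` query structs in this diff (a single serialized-ID string field); the names are illustrative:

```rust
use ntex::web::{self, HttpResponse};
use serde::Deserialize;

#[derive(Deserialize)]
struct TeamIds {
    // Matches the `ids` query-parameter style used by the other bulk-get
    // endpoints in this diff (a JSON-encoded list passed as one string).
    ids: String,
}

// Destructuring the extractor in the signature hands the body a plain value.
async fn teams_get(web::types::Query(ids): web::types::Query<TeamIds>) -> HttpResponse {
    HttpResponse::Ok().json(&ids.ids)
}
```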
@ -280,17 +280,17 @@ pub async fn teams_get(
|
||||
.collect::<Vec<crate::database::models::ids::TeamId>>();
|
||||
|
||||
let teams_data =
|
||||
TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
|
||||
TeamMember::get_from_team_full_many(&team_ids, &*pool, &redis).await?;
|
||||
let users = crate::database::models::User::get_many_ids(
|
||||
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -331,20 +331,20 @@ pub async fn teams_get(
|
||||
teams.push(team_members.collect());
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(teams))
|
||||
Ok(HttpResponse::Ok().json(&teams))
|
||||
}
|
||||
|
||||
pub async fn join_team(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(TeamId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(TeamId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let team_id = info.into_inner().0.into();
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -355,7 +355,7 @@ pub async fn join_team(
|
||||
let member = TeamMember::get_from_user_id_pending(
|
||||
team_id,
|
||||
current_user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -421,11 +421,11 @@ pub struct NewTeamMember {
|
||||
|
||||
pub async fn add_team_member(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(TeamId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
new_member: web::Json<NewTeamMember>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(TeamId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
new_member: web::types::Json<NewTeamMember>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let team_id = info.into_inner().0.into();
|
||||
|
||||
@ -433,14 +433,14 @@ pub async fn add_team_member(
|
||||
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let team_association = Team::get_association(team_id, &**pool)
|
||||
let team_association = Team::get_association(team_id, &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -448,14 +448,14 @@ pub async fn add_team_member(
|
||||
)
|
||||
})?;
|
||||
let member =
|
||||
TeamMember::get_from_user_id(team_id, current_user.id.into(), &**pool)
|
||||
TeamMember::get_from_user_id(team_id, current_user.id.into(), &*pool)
|
||||
.await?;
|
||||
match team_association {
|
||||
// If team is associated with a project, check if they have permissions to invite users to that project
|
||||
TeamAssociationId::Project(pid) => {
|
||||
let organization =
|
||||
Organization::get_associated_organization_project_id(
|
||||
pid, &**pool,
|
||||
pid, &*pool,
|
||||
)
|
||||
.await?;
|
||||
let organization_team_member =
|
||||
@ -463,7 +463,7 @@ pub async fn add_team_member(
|
||||
TeamMember::get_from_user_id(
|
||||
organization.team_id,
|
||||
current_user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
@ -541,7 +541,7 @@ pub async fn add_team_member(
|
||||
let request = TeamMember::get_from_user_id_pending(
|
||||
team_id,
|
||||
new_member.user_id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -559,7 +559,7 @@ pub async fn add_team_member(
|
||||
}
|
||||
let new_user = crate::database::models::User::get_id(
|
||||
new_member.user_id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -571,14 +571,14 @@ pub async fn add_team_member(
|
||||
if let TeamAssociationId::Project(pid) = team_association {
|
||||
// We cannot add the owner to a project team in their own org
|
||||
let organization =
|
||||
Organization::get_associated_organization_project_id(pid, &**pool)
|
||||
Organization::get_associated_organization_project_id(pid, &*pool)
|
||||
.await?;
|
||||
let new_user_organization_team_member =
|
||||
if let Some(organization) = &organization {
|
||||
TeamMember::get_from_user_id(
|
||||
organization.team_id,
|
||||
new_user.id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
@ -671,11 +671,11 @@ pub struct EditTeamMember {
|
||||
|
||||
pub async fn edit_team_member(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(TeamId, UserId)>,
|
||||
pool: web::Data<PgPool>,
|
||||
edit_member: web::Json<EditTeamMember>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(TeamId, UserId)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
edit_member: web::types::Json<EditTeamMember>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids = info.into_inner();
|
||||
let id = ids.0.into();
|
||||
@ -683,7 +683,7 @@ pub async fn edit_team_member(
|
||||
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -692,16 +692,16 @@ pub async fn edit_team_member(
|
||||
.1;
|
||||
|
||||
let team_association =
|
||||
Team::get_association(id, &**pool).await?.ok_or_else(|| {
|
||||
Team::get_association(id, &*pool).await?.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
"The team specified does not exist".to_string(),
|
||||
)
|
||||
})?;
|
||||
let member =
|
||||
TeamMember::get_from_user_id(id, current_user.id.into(), &**pool)
|
||||
TeamMember::get_from_user_id(id, current_user.id.into(), &*pool)
|
||||
.await?;
|
||||
let edit_member_db =
|
||||
TeamMember::get_from_user_id_pending(id, user_id, &**pool)
|
||||
TeamMember::get_from_user_id_pending(id, user_id, &*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::CustomAuthentication(
|
||||
@ -725,7 +725,7 @@ pub async fn edit_team_member(
|
||||
TeamAssociationId::Project(project_id) => {
|
||||
let organization =
|
||||
Organization::get_associated_organization_project_id(
|
||||
project_id, &**pool,
|
||||
project_id, &*pool,
|
||||
)
|
||||
.await?;
|
||||
let organization_team_member =
|
||||
@ -733,7 +733,7 @@ pub async fn edit_team_member(
|
||||
TeamMember::get_from_user_id(
|
||||
organization.team_id,
|
||||
current_user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
@ -859,17 +859,17 @@ pub struct TransferOwnership {
|
||||
|
||||
pub async fn transfer_ownership(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(TeamId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
new_owner: web::Json<TransferOwnership>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(TeamId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
new_owner: web::types::Json<TransferOwnership>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner().0;
|
||||
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -880,9 +880,9 @@ pub async fn transfer_ownership(
// Forbid transferring ownership of a project team that is owned by an organization
// These are owned by the organization owner, and must be removed from the organization first
// There shouldn't be an owner on these projects in these cases, but just in case.
let team_association_id = Team::get_association(id.into(), &**pool).await?;
let team_association_id = Team::get_association(id.into(), &*pool).await?;
if let Some(TeamAssociationId::Project(pid)) = team_association_id {
let result = Project::get_id(pid, &**pool, &redis).await?;
let result = Project::get_id(pid, &*pool, &redis).await?;
if let Some(project_item) = result {
if project_item.inner.organization_id.is_some() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
@ -897,7 +897,7 @@ pub async fn transfer_ownership(
|
||||
let member = TeamMember::get_from_user_id(
|
||||
id.into(),
|
||||
current_user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
@ -918,7 +918,7 @@ pub async fn transfer_ownership(
|
||||
let new_member = TeamMember::get_from_user_id(
|
||||
id.into(),
|
||||
new_owner.user_id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
@ -1021,10 +1021,10 @@ pub async fn transfer_ownership(
|
||||
|
||||
pub async fn remove_team_member(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(TeamId, UserId)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(TeamId, UserId)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids = info.into_inner();
|
||||
let id = ids.0.into();
|
||||
@ -1032,7 +1032,7 @@ pub async fn remove_team_member(
|
||||
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_WRITE]),
|
||||
@ -1041,17 +1041,17 @@ pub async fn remove_team_member(
|
||||
.1;
|
||||
|
||||
let team_association =
|
||||
Team::get_association(id, &**pool).await?.ok_or_else(|| {
|
||||
Team::get_association(id, &*pool).await?.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
"The team specified does not exist".to_string(),
|
||||
)
|
||||
})?;
|
||||
let member =
|
||||
TeamMember::get_from_user_id(id, current_user.id.into(), &**pool)
|
||||
TeamMember::get_from_user_id(id, current_user.id.into(), &*pool)
|
||||
.await?;
|
||||
|
||||
let delete_member =
|
||||
TeamMember::get_from_user_id_pending(id, user_id, &**pool).await?;
|
||||
TeamMember::get_from_user_id_pending(id, user_id, &*pool).await?;
|
||||
|
||||
if let Some(delete_member) = delete_member {
|
||||
if delete_member.is_owner {
|
||||
@ -1068,7 +1068,7 @@ pub async fn remove_team_member(
|
||||
TeamAssociationId::Project(pid) => {
|
||||
let organization =
|
||||
Organization::get_associated_organization_project_id(
|
||||
pid, &**pool,
|
||||
pid, &*pool,
|
||||
)
|
||||
.await?;
|
||||
let organization_team_member =
|
||||
@ -1076,7 +1076,7 @@ pub async fn remove_team_member(
|
||||
TeamMember::get_from_user_id(
|
||||
organization.team_id,
|
||||
current_user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
|
||||
@ -16,8 +16,8 @@ use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType};
use crate::models::users::User;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use actix_web::{web, HttpRequest, HttpResponse};
use futures::TryStreamExt;
use ntex::web::{self, HttpRequest, HttpResponse};
use serde::Deserialize;
use sqlx::PgPool;

@ -96,7 +96,7 @@ pub async fn is_authorized_thread(
pub async fn filter_authorized_threads(
threads: Vec<database::models::Thread>,
user: &User,
pool: &web::Data<PgPool>,
pool: &web::types::State<PgPool>,
redis: &RedisPool,
) -> Result<Vec<Thread>, ApiError> {
let user_id: database::models::UserId = user.id.into();
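Because `filter_authorized_threads` receives the state by reference, the executor argument in the query calls below drops from `&***pool` to `&**pool`: one `*` removes the `&State`, the next reaches the `PgPool` inside. A hedged sketch of the same bookkeeping outside the handler, with an illustrative table name:

```rust
use ntex::web;
use sqlx::PgPool;

async fn count_rows(pool: &web::types::State<PgPool>) -> Result<i64, sqlx::Error> {
    // `*pool` is the State wrapper, `**pool` is the PgPool, so the sqlx
    // executor is passed as `&**pool` (the old actix Data wrapper kept an
    // extra Arc layer, which is where the third `*` came from).
    let row: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM threads")
        .fetch_one(&**pool)
        .await?;
    Ok(row.0)
}
```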
@ -132,7 +132,7 @@ pub async fn filter_authorized_threads(
|
||||
&*project_thread_ids,
|
||||
user_id as database::models::ids::UserId,
|
||||
)
|
||||
.fetch(&***pool)
|
||||
.fetch(&**pool)
|
||||
.map_ok(|row| {
|
||||
check_threads.retain(|x| {
|
||||
let bool = x.project_id.map(|x| x.0) == Some(row.id);
|
||||
@ -165,7 +165,7 @@ pub async fn filter_authorized_threads(
|
||||
&*project_thread_ids,
|
||||
user_id as database::models::ids::UserId,
|
||||
)
|
||||
.fetch(&***pool)
|
||||
.fetch(&**pool)
|
||||
.map_ok(|row| {
|
||||
check_threads.retain(|x| {
|
||||
let bool = x.project_id.map(|x| x.0) == Some(row.id);
|
||||
@ -196,7 +196,7 @@ pub async fn filter_authorized_threads(
|
||||
&*report_thread_ids,
|
||||
user_id as database::models::ids::UserId,
|
||||
)
|
||||
.fetch(&***pool)
|
||||
.fetch(&**pool)
|
||||
.map_ok(|row| {
|
||||
check_threads.retain(|x| {
|
||||
let bool = x.report_id.map(|x| x.0) == Some(row.id);
|
||||
@ -230,7 +230,7 @@ pub async fn filter_authorized_threads(
|
||||
);
|
||||
|
||||
let users: Vec<User> =
|
||||
database::models::User::get_many_ids(&user_ids, &***pool, redis)
|
||||
database::models::User::get_many_ids(&user_ids, &**pool, redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(From::from)
|
||||
@ -271,18 +271,18 @@ pub async fn filter_authorized_threads(
|
||||
|
||||
pub async fn thread_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(ThreadId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(ThreadId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0.into();
|
||||
|
||||
let thread_data = database::models::Thread::get(string, &**pool).await?;
|
||||
let thread_data = database::models::Thread::get(string, &*pool).await?;
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::THREAD_READ]),
|
||||
@ -309,14 +309,14 @@ pub async fn thread_get(
|
||||
);
|
||||
|
||||
let users: Vec<User> =
|
||||
database::models::User::get_many_ids(authors, &**pool, &redis)
|
||||
database::models::User::get_many_ids(authors, &*pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(From::from)
|
||||
.collect();
|
||||
|
||||
return Ok(
|
||||
HttpResponse::Ok().json(Thread::from(data, users, &user))
|
||||
HttpResponse::Ok().json(&Thread::from(data, users, &user))
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -330,14 +330,14 @@ pub struct ThreadIds {
|
||||
|
||||
pub async fn threads_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ThreadIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<ThreadIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::THREAD_READ]),
|
||||
@ -352,12 +352,12 @@ pub async fn threads_get(
|
||||
.collect();
|
||||
|
||||
let threads_data =
|
||||
database::models::Thread::get_many(&thread_ids, &**pool).await?;
|
||||
database::models::Thread::get_many(&thread_ids, &*pool).await?;
|
||||
|
||||
let threads =
|
||||
filter_authorized_threads(threads_data, &user, &pool, &redis).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(threads))
|
||||
Ok(HttpResponse::Ok().json(&threads))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@ -367,15 +367,15 @@ pub struct NewThreadMessage {
|
||||
|
||||
pub async fn thread_send_message(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(ThreadId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
new_message: web::Json<NewThreadMessage>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(ThreadId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
new_message: web::types::Json<NewThreadMessage>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::THREAD_WRITE]),
|
||||
@ -407,7 +407,7 @@ pub async fn thread_send_message(
|
||||
if let Some(replying_to) = replying_to {
|
||||
let thread_message = database::models::ThreadMessage::get(
|
||||
(*replying_to).into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -430,7 +430,7 @@ pub async fn thread_send_message(
|
||||
));
|
||||
}
|
||||
|
||||
let result = database::models::Thread::get(string, &**pool).await?;
|
||||
let result = database::models::Thread::get(string, &*pool).await?;
|
||||
|
||||
if let Some(thread) = result {
|
||||
if !is_authorized_thread(&thread, &user, &pool).await? {
|
||||
@ -450,7 +450,7 @@ pub async fn thread_send_message(
|
||||
|
||||
if let Some(project_id) = thread.project_id {
|
||||
let project =
|
||||
database::models::Project::get_id(project_id, &**pool, &redis)
|
||||
database::models::Project::get_id(project_id, &*pool, &redis)
|
||||
.await?;
|
||||
|
||||
if let Some(project) = project {
|
||||
@ -460,7 +460,7 @@ pub async fn thread_send_message(
|
||||
let members =
|
||||
database::models::TeamMember::get_from_team_full(
|
||||
project.inner.team_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -483,7 +483,7 @@ pub async fn thread_send_message(
|
||||
}
|
||||
} else if let Some(report_id) = thread.report_id {
|
||||
let report =
|
||||
database::models::report_item::Report::get(report_id, &**pool)
|
||||
database::models::report_item::Report::get(report_id, &*pool)
|
||||
.await?;
|
||||
|
||||
if let Some(report) = report {
|
||||
@ -565,15 +565,15 @@ pub async fn thread_send_message(
|
||||
|
||||
pub async fn message_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(ThreadMessageId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
info: web::types::Path<(ThreadMessageId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::THREAD_WRITE]),
|
||||
@ -583,7 +583,7 @@ pub async fn message_delete(
|
||||
|
||||
let result = database::models::ThreadMessage::get(
|
||||
info.into_inner().0.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
use std::{collections::HashMap, sync::Arc};

use actix_web::{web, HttpRequest, HttpResponse};
use lazy_static::lazy_static;
use ntex::web::{self, HttpRequest, HttpResponse};
use regex::Regex;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@ -46,14 +46,14 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
|
||||
pub async fn projects_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -62,20 +62,20 @@ pub async fn projects_list(
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
let project_data = User::get_projects(id, &**pool, &redis).await?;
|
||||
let project_data = User::get_projects(id, &*pool, &redis).await?;
|
||||
|
||||
let projects: Vec<_> = crate::database::Project::get_many_ids(
|
||||
&project_data,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
let projects =
|
||||
filter_visible_projects(projects, &user, &pool, true).await?;
|
||||
Ok(HttpResponse::Ok().json(projects))
|
||||
Ok(HttpResponse::Ok().json(&projects))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -83,13 +83,13 @@ pub async fn projects_list(
|
||||
|
||||
pub async fn user_auth_get(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (scopes, mut user) = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_READ]),
|
||||
@ -104,7 +104,7 @@ pub async fn user_auth_get(
|
||||
user.payout_data = None;
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(user))
|
||||
Ok(HttpResponse::Ok().json(&user))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
@ -113,30 +113,30 @@ pub struct UserIds {
|
||||
}
|
||||
|
||||
pub async fn users_get(
|
||||
web::Query(ids): web::Query<UserIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
web::types::Query(ids): web::types::Query<UserIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_ids = serde_json::from_str::<Vec<String>>(&ids.ids)?;
|
||||
|
||||
let users_data = User::get_many(&user_ids, &**pool, &redis).await?;
|
||||
let users_data = User::get_many(&user_ids, &*pool, &redis).await?;
|
||||
|
||||
let users: Vec<crate::models::users::User> =
|
||||
users_data.into_iter().map(From::from).collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(users))
|
||||
Ok(HttpResponse::Ok().json(&users))
|
||||
}
|
||||
|
||||
pub async fn user_get(
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_data = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let user_data = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(data) = user_data {
|
||||
let response: crate::models::users::User = data.into();
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -144,14 +144,14 @@ pub async fn user_get(
|
||||
|
||||
pub async fn collections_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::COLLECTION_READ]),
|
||||
@ -160,7 +160,7 @@ pub async fn collections_list(
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
let user_id: UserId = id.into();
|
||||
@ -169,11 +169,11 @@ pub async fn collections_list(
|
||||
.map(|y| y.role.is_mod() || y.id == user_id)
|
||||
.unwrap_or(false);
|
||||
|
||||
let project_data = User::get_collections(id, &**pool).await?;
|
||||
let project_data = User::get_collections(id, &*pool).await?;
|
||||
|
||||
let response: Vec<_> = crate::database::models::Collection::get_many(
|
||||
&project_data,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -184,7 +184,7 @@ pub async fn collections_list(
|
||||
.map(Collection::from)
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -192,14 +192,14 @@ pub async fn collections_list(
|
||||
|
||||
pub async fn orgs_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
@ -208,14 +208,14 @@ pub async fn orgs_list(
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
let org_data = User::get_organizations(id, &**pool).await?;
|
||||
let org_data = User::get_organizations(id, &*pool).await?;
|
||||
|
||||
let organizations_data =
|
||||
crate::database::models::organization_item::Organization::get_many_ids(
|
||||
&org_data, &**pool, &redis,
|
||||
&org_data, &*pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -226,12 +226,12 @@ pub async fn orgs_list(
|
||||
|
||||
let teams_data =
|
||||
crate::database::models::TeamMember::get_from_team_full_many(
|
||||
&team_ids, &**pool, &redis,
|
||||
&team_ids, &*pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
let users = User::get_many_ids(
|
||||
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -275,7 +275,7 @@ pub async fn orgs_list(
|
||||
organizations.push(organization);
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(organizations))
|
||||
Ok(HttpResponse::Ok().json(&organizations))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -305,15 +305,15 @@ pub struct EditUser {
|
||||
|
||||
pub async fn user_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
new_user: web::Json<EditUser>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
new_user: web::types::Json<EditUser>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (scopes, user) = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_WRITE]),
|
||||
@ -324,7 +324,7 @@ pub async fn user_edit(
|
||||
ApiError::Validation(validation_errors_to_string(err, None))
|
||||
})?;
|
||||
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(actual_user) = id_option {
|
||||
let id = actual_user.id;
|
||||
@ -335,7 +335,7 @@ pub async fn user_edit(
|
||||
|
||||
if let Some(username) = &new_user.username {
|
||||
let existing_user_id_option =
|
||||
User::get(username, &**pool, &redis).await?;
|
||||
User::get(username, &*pool, &redis).await?;
|
||||
|
||||
if existing_user_id_option
|
||||
.map(|x| UserId::from(x.id))
|
||||
@ -474,25 +474,25 @@ pub struct Extension {
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn user_icon_edit(
|
||||
web::Query(ext): web::Query<Extension>,
|
||||
web::types::Query(ext): web::types::Query<Extension>,
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::types::Payload,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_WRITE]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(actual_user) = id_option {
|
||||
if user.id != actual_user.id.into() && !user.role.is_mod() {
|
||||
@ -505,7 +505,7 @@ pub async fn user_icon_edit(
|
||||
delete_old_images(
|
||||
actual_user.avatar_url,
|
||||
actual_user.raw_avatar_url,
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -523,7 +523,7 @@ pub async fn user_icon_edit(
|
||||
&ext.ext,
|
||||
Some(96),
|
||||
Some(1.0),
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -537,7 +537,7 @@ pub async fn user_icon_edit(
|
||||
upload_result.raw_url,
|
||||
actual_user.id as crate::database::models::ids::UserId,
|
||||
)
|
||||
.execute(&**pool)
|
||||
.execute(&*pool)
|
||||
.await?;
|
||||
User::clear_caches(&[(actual_user.id, None)], &redis).await?;
|
||||
|
||||
@ -549,21 +549,21 @@ pub async fn user_icon_edit(
|
||||
|
||||
pub async fn user_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_DELETE]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
@ -590,21 +590,21 @@ pub async fn user_delete(
|
||||
|
||||
pub async fn user_follows(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
@ -613,10 +613,10 @@ pub async fn user_follows(
|
||||
));
|
||||
}
|
||||
|
||||
let project_ids = User::get_follows(id, &**pool).await?;
|
||||
let project_ids = User::get_follows(id, &*pool).await?;
|
||||
let projects: Vec<_> = crate::database::Project::get_many_ids(
|
||||
&project_ids,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -624,7 +624,7 @@ pub async fn user_follows(
|
||||
.map(Project::from)
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(projects))
|
||||
Ok(HttpResponse::Ok().json(&projects))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -632,21 +632,21 @@ pub async fn user_follows(
|
||||
|
||||
pub async fn user_notifications(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::NOTIFICATION_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &*pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
@ -657,7 +657,7 @@ pub async fn user_notifications(
|
||||
|
||||
let mut notifications: Vec<Notification> =
|
||||
crate::database::models::notification_item::Notification::get_many_user(
|
||||
id, &**pool, &redis,
|
||||
id, &*pool, &redis,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
@ -665,7 +665,7 @@ pub async fn user_notifications(
|
||||
.collect();
|
||||
|
||||
notifications.sort_by(|a, b| b.created.cmp(&a.created));
|
||||
Ok(HttpResponse::Ok().json(notifications))
|
||||
Ok(HttpResponse::Ok().json(¬ifications))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
|
||||
@ -25,12 +25,11 @@ use crate::queue::session::AuthQueue;
use crate::util::routes::read_from_field;
use crate::util::validate::validation_errors_to_string;
use crate::validate::{validate_file, ValidationResult};
use actix_multipart::{Field, Multipart};
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::stream::StreamExt;
use itertools::Itertools;
use ntex::web::{self, HttpRequest, HttpResponse};
use ntex_multipart::{Field, Multipart};
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::collections::{HashMap, HashSet};
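The upload path swaps `actix_multipart` for `ntex_multipart` while keeping the same `Field`/`Multipart` names. A cautious sketch of draining such a payload that relies only on the `Stream` implementations (together with the `futures::stream::StreamExt` import this file already has) and avoids any field-metadata helpers whose ntex-side names are not confirmed by this diff:

```rust
use futures::stream::StreamExt;
use ntex_multipart::Multipart;

// Counts the uploaded bytes; errors are flattened into io::Error purely to
// keep the sketch self-contained.
async fn drain_upload(mut payload: Multipart) -> Result<usize, std::io::Error> {
    let mut total = 0usize;
    while let Some(field) = payload.next().await {
        let mut field = field
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;
        while let Some(chunk) = field.next().await {
            let chunk = chunk
                .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;
            total += chunk.len();
        }
    }
    Ok(total)
}
```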
@ -101,11 +100,11 @@ struct InitialFileData {
|
||||
pub async fn version_create(
|
||||
req: HttpRequest,
|
||||
mut payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
moderation_queue: web::Data<AutomatedModerationQueue>,
|
||||
client: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
moderation_queue: web::types::State<AutomatedModerationQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
let mut transaction = client.begin().await?;
|
||||
let mut uploaded_files = Vec::new();
|
||||
@ -115,7 +114,7 @@ pub async fn version_create(
&mut payload,
&mut transaction,
&redis,
&***file_host,
&**file_host,
&mut uploaded_files,
&client,
&session_queue,
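The same deref arithmetic explains the file-host edit just above: the handler state is an `Arc` around a trait object, and with the ntex wrapper one fewer `*` is needed to reach it. A sketch with a stand-in trait of the same name (its method is invented for illustration; the real `FileHost` lives in `crate::file_hosting`):

```rust
use std::sync::Arc;
use ntex::web;

// Stand-in trait for the sketch only.
trait FileHost {
    fn backend_name(&self) -> &str;
}

fn backend(file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>) -> String {
    // `*file_host` is the Arc, `**file_host` is the trait object, so a
    // trait-object reference is written `&**file_host`; the old actix
    // `Data` wrapper added one more layer, hence the former `&***`.
    let host: &(dyn FileHost + Send + Sync) = &**file_host;
    host.backend_name().to_string()
}
```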
@ -125,7 +124,7 @@ pub async fn version_create(
|
||||
|
||||
if result.is_err() {
|
||||
let undo_result = super::project_creation::undo_uploads(
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
&uploaded_files,
|
||||
)
|
||||
.await;
|
||||
@ -526,17 +525,17 @@ async fn version_create_inner(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
}
|
||||
|
||||
pub async fn upload_file_to_version(
|
||||
req: HttpRequest,
|
||||
url_data: web::Path<(VersionId,)>,
|
||||
url_data: web::types::Path<(VersionId,)>,
|
||||
mut payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
client: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: web::types::State<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
let mut transaction = client.begin().await?;
|
||||
let mut uploaded_files = Vec::new();
|
||||
@ -549,7 +548,7 @@ pub async fn upload_file_to_version(
|
||||
client,
|
||||
&mut transaction,
|
||||
redis,
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
&mut uploaded_files,
|
||||
version_id,
|
||||
&session_queue,
|
||||
@ -558,7 +557,7 @@ pub async fn upload_file_to_version(
|
||||
|
||||
if result.is_err() {
|
||||
let undo_result = super::project_creation::undo_uploads(
|
||||
&***file_host,
|
||||
&**file_host,
|
||||
&uploaded_files,
|
||||
)
|
||||
.await;
|
||||
@ -579,9 +578,9 @@ pub async fn upload_file_to_version(
|
||||
async fn upload_file_to_version_inner(
|
||||
req: HttpRequest,
|
||||
payload: &mut Multipart,
|
||||
client: Data<PgPool>,
|
||||
client: web::types::State<PgPool>,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
redis: Data<RedisPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_host: &dyn FileHost,
|
||||
uploaded_files: &mut Vec<UploadedFile>,
|
||||
version_id: models::VersionId,
|
||||
@ -594,7 +593,7 @@ async fn upload_file_to_version_inner(
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**client,
|
||||
&*client,
|
||||
&redis,
|
||||
session_queue,
|
||||
Some(&[Scopes::VERSION_WRITE]),
|
||||
@ -602,7 +601,7 @@ async fn upload_file_to_version_inner(
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let result = models::Version::get(version_id, &**client, &redis).await?;
|
||||
let result = models::Version::get(version_id, &*client, &redis).await?;
|
||||
|
||||
let version = match result {
|
||||
Some(v) => v,
|
||||
@ -652,7 +651,7 @@ async fn upload_file_to_version_inner(
|
||||
let organization =
|
||||
Organization::get_associated_organization_project_id(
|
||||
version.inner.project_id,
|
||||
&**client,
|
||||
&*client,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@ -8,10 +8,10 @@ use crate::models::projects::VersionType;
use crate::models::teams::ProjectPermissions;
use crate::queue::session::AuthQueue;
use crate::{database, models};
use actix_web::{web, HttpRequest, HttpResponse};
use dashmap::DashMap;
use futures::TryStreamExt;
use itertools::Itertools;
use ntex::web::{self, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::HashMap;
@ -35,15 +35,15 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
|
||||
pub async fn get_version_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
@ -60,13 +60,13 @@ pub async fn get_version_from_hash(
|
||||
algorithm,
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
if let Some(file) = file {
|
||||
let version =
|
||||
database::models::Version::get(file.version_id, &**pool, &redis)
|
||||
database::models::Version::get(file.version_id, &*pool, &redis)
|
||||
.await?;
|
||||
if let Some(version) = version {
|
||||
if !is_visible_version(&version.inner, &user_option, &pool, &redis)
|
||||
@ -76,7 +76,7 @@ pub async fn get_version_from_hash(
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok()
|
||||
.json(models::projects::Version::from(version)))
|
||||
.json(&models::projects::Version::from(version)))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -121,16 +121,16 @@ pub struct UpdateData {
|
||||
|
||||
pub async fn get_update_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
update_data: web::Json<UpdateData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
update_data: web::types::Json<UpdateData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
@ -146,18 +146,18 @@ pub async fn get_update_from_hash(
|
||||
.unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
if let Some(project) =
|
||||
database::models::Project::get_id(file.project_id, &**pool, &redis)
|
||||
database::models::Project::get_id(file.project_id, &*pool, &redis)
|
||||
.await?
|
||||
{
|
||||
let versions = database::models::Version::get_many(
|
||||
&project.versions,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -204,7 +204,7 @@ pub async fn get_update_from_hash(
|
||||
}
|
||||
|
||||
return Ok(HttpResponse::Ok()
|
||||
.json(models::projects::Version::from(first)));
|
||||
.json(&models::projects::Version::from(first)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -220,14 +220,14 @@ pub struct FileHashes {
|
||||
|
||||
pub async fn get_versions_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_data: web::types::Json<FileHashes>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
@ -244,14 +244,14 @@ pub async fn get_versions_from_hashes(
|
||||
let files = database::models::Version::get_files_from_hash(
|
||||
algorithm.clone(),
|
||||
&file_data.hashes,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let version_ids = files.iter().map(|x| x.version_id).collect::<Vec<_>>();
|
||||
let versions_data = filter_visible_versions(
|
||||
database::models::Version::get_many(&version_ids, &**pool, &redis)
|
||||
database::models::Version::get_many(&version_ids, &*pool, &redis)
|
||||
.await?,
|
||||
&user_option,
|
||||
&pool,
|
||||
@ -269,19 +269,19 @@ pub async fn get_versions_from_hashes(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
}
|
||||
|
||||
pub async fn get_projects_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
file_data: web::types::Json<FileHashes>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
|
||||
@ -297,7 +297,7 @@ pub async fn get_projects_from_hashes(
|
||||
let files = database::models::Version::get_files_from_hash(
|
||||
algorithm.clone(),
|
||||
&file_data.hashes,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -305,7 +305,7 @@ pub async fn get_projects_from_hashes(
|
||||
let project_ids = files.iter().map(|x| x.project_id).collect::<Vec<_>>();
|
||||
|
||||
let projects_data = filter_visible_projects(
|
||||
database::models::Project::get_many_ids(&project_ids, &**pool, &redis)
|
||||
database::models::Project::get_many_ids(&project_ids, &*pool, &redis)
|
||||
.await?,
|
||||
&user_option,
|
||||
&pool,
|
||||
@ -323,7 +323,7 @@ pub async fn get_projects_from_hashes(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@ -335,9 +335,9 @@ pub struct ManyUpdateData {
|
||||
pub version_types: Option<Vec<VersionType>>,
|
||||
}
|
||||
pub async fn update_files(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
update_data: web::Json<ManyUpdateData>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
update_data: web::types::Json<ManyUpdateData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let algorithm = update_data
|
||||
.algorithm
|
||||
@ -346,7 +346,7 @@ pub async fn update_files(
|
||||
let files = database::models::Version::get_files_from_hash(
|
||||
algorithm.clone(),
|
||||
&update_data.hashes,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -369,7 +369,7 @@ pub async fn update_files(
|
||||
&update_data.loaders.clone().unwrap_or_default(),
|
||||
&update_data.version_types.clone().unwrap_or_default().iter().map(|x| x.to_string()).collect::<Vec<_>>(),
|
||||
)
|
||||
.fetch(&**pool)
|
||||
.fetch(&*pool)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<database::models::ids::VersionId>>, m| {
|
||||
acc.entry(database::models::ProjectId(m.mod_id))
|
||||
.or_default()
|
||||
@ -383,7 +383,7 @@ pub async fn update_files(
|
||||
.into_iter()
|
||||
.filter_map(|x| x.1.last().copied())
|
||||
.collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -403,7 +403,7 @@ pub async fn update_files(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
@ -422,14 +422,14 @@ pub struct ManyFileUpdateData {
|
||||
|
||||
pub async fn update_individual_files(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
update_data: web::Json<ManyFileUpdateData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
update_data: web::types::Json<ManyFileUpdateData>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
@ -454,14 +454,14 @@ pub async fn update_individual_files(
|
||||
.iter()
|
||||
.map(|x| x.hash.clone())
|
||||
.collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let projects = database::models::Project::get_many_ids(
|
||||
&files.iter().map(|x| x.project_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -470,7 +470,7 @@ pub async fn update_individual_files(
|
||||
.iter()
|
||||
.flat_map(|x| x.versions.clone())
|
||||
.collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -547,21 +547,21 @@ pub async fn update_individual_files(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
}
|
||||
|
||||
// under /api/v1/version_file/{hash}
|
||||
pub async fn delete_file(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_WRITE]),
|
||||
@ -578,7 +578,7 @@ pub async fn delete_file(
|
||||
algorithm.clone(),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -589,7 +589,7 @@ pub async fn delete_file(
|
||||
database::models::TeamMember::get_from_user_id_version(
|
||||
row.version_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await
|
||||
.map_err(ApiError::Database)?;
|
||||
@ -597,7 +597,7 @@ pub async fn delete_file(
|
||||
let organization =
|
||||
database::models::Organization::get_associated_organization_project_id(
|
||||
row.project_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await
|
||||
.map_err(ApiError::Database)?;
|
||||
@ -608,7 +608,7 @@ pub async fn delete_file(
|
||||
organization.id,
|
||||
user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await
|
||||
.map_err(ApiError::Database)?
|
||||
@ -632,7 +632,7 @@ pub async fn delete_file(
|
||||
}
|
||||
|
||||
let version =
|
||||
database::models::Version::get(row.version_id, &**pool, &redis)
|
||||
database::models::Version::get(row.version_id, &*pool, &redis)
|
||||
.await?;
|
||||
if let Some(version) = version {
|
||||
if version.files.len() < 2 {
|
||||
@ -683,15 +683,15 @@ pub struct DownloadRedirect {
|
||||
// under /api/v1/version_file/{hash}/download
|
||||
pub async fn download_version(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
hash_query: web::types::Query<HashQuery>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
@ -709,14 +709,14 @@ pub async fn download_version(
|
||||
algorithm.clone(),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Some(file) = file {
|
||||
let version =
|
||||
database::models::Version::get(file.version_id, &**pool, &redis)
|
||||
database::models::Version::get(file.version_id, &*pool, &redis)
|
||||
.await?;
|
||||
|
||||
if let Some(version) = version {
|
||||
@ -727,8 +727,8 @@ pub async fn download_version(
}

Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*file.url))
.json(DownloadRedirect { url: file.url }))
.header("Location", &*file.url)
.json(&DownloadRedirect { url: file.url }))
} else {
Err(ApiError::NotFound)
}

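The redirect above also shows the header-builder difference: ntex's builder takes the header name and value as two arguments rather than actix's single tuple, and the JSON body is again passed by reference. A compact sketch of that response shape, reusing the `DownloadRedirect` struct name from this hunk with a placeholder URL:

```rust
use ntex::web::HttpResponse;
use serde::Serialize;

#[derive(Serialize)]
struct DownloadRedirect {
    url: String,
}

fn redirect_to(url: String) -> HttpResponse {
    HttpResponse::TemporaryRedirect()
        // Name and value are separate arguments (actix used a single tuple).
        .header("Location", &*url)
        // The body is serialized from a reference, as elsewhere in the diff.
        .json(&DownloadRedirect { url })
}
```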
@ -27,38 +27,37 @@ use crate::search::indexing::remove_documents;
use crate::search::SearchConfig;
use crate::util::img;
use crate::util::validate::validation_errors_to_string;
use actix_web::{web, HttpRequest, HttpResponse};
use itertools::Itertools;
use ntex::web::{self, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;

pub fn config(cfg: &mut web::ServiceConfig) {
cfg.route(
"version",
web::post().to(super::version_creation::version_create),
);
// cfg.route(
//     "version",
//     web::post().to(super::version_creation::version_create),
// );
cfg.route("versions", web::get().to(versions_get));

cfg.service(
web::scope("version")
.route("{id}", web::get().to(version_get))
.route("{id}", web::patch().to(version_edit))
.route("{id}", web::delete().to(version_delete))
.route(
"{version_id}/file",
web::post().to(super::version_creation::upload_file_to_version),
),
.route("{id}", web::delete().to(version_delete)), // .route(
//     "{version_id}/file",
//     web::post().to(super::version_creation::upload_file_to_version),
// ),
);
}

|
||||
pub async fn version_project_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String, String)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let info = info.into_inner();
|
||||
version_project_get_helper(req, info, pool, redis, session_queue).await
|
||||
@ -66,15 +65,15 @@ pub async fn version_project_get(
|
||||
pub async fn version_project_get_helper(
|
||||
req: HttpRequest,
|
||||
id: (String, String),
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let result = database::models::Project::get(&id.0, &**pool, &redis).await?;
|
||||
let result = database::models::Project::get(&id.0, &*pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
|
||||
@ -92,7 +91,7 @@ pub async fn version_project_get_helper(
|
||||
|
||||
let versions = database::models::Version::get_many(
|
||||
&project.versions,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
@ -108,7 +107,7 @@ pub async fn version_project_get_helper(
|
||||
.await?
|
||||
{
|
||||
return Ok(HttpResponse::Ok()
|
||||
.json(models::projects::Version::from(version)));
|
||||
.json(&models::projects::Version::from(version)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -123,10 +122,10 @@ pub struct VersionIds {
|
||||
|
||||
pub async fn versions_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<VersionIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
web::types::Query(ids): web::types::Query<VersionIds>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let version_ids =
|
||||
serde_json::from_str::<Vec<models::ids::VersionId>>(&ids.ids)?
|
||||
@ -134,12 +133,12 @@ pub async fn versions_get(
|
||||
.map(|x| x.into())
|
||||
.collect::<Vec<database::models::VersionId>>();
|
||||
let versions_data =
|
||||
database::models::Version::get_many(&version_ids, &**pool, &redis)
|
||||
database::models::Version::get_many(&version_ids, &*pool, &redis)
|
||||
.await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
@ -152,15 +151,15 @@ pub async fn versions_get(
|
||||
filter_visible_versions(versions_data, &user_option, &pool, &redis)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(versions))
|
||||
Ok(HttpResponse::Ok().json(&versions))
|
||||
}
|
||||
|
||||
pub async fn version_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(models::ids::VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(models::ids::VersionId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner().0;
|
||||
version_get_helper(req, id, pool, redis, session_queue).await
|
||||
@ -169,16 +168,16 @@ pub async fn version_get(
|
||||
pub async fn version_get_helper(
|
||||
req: HttpRequest,
|
||||
id: models::ids::VersionId,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let version_data =
|
||||
database::models::Version::get(id.into(), &**pool, &redis).await?;
|
||||
database::models::Version::get(id.into(), &*pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
@ -190,7 +189,7 @@ pub async fn version_get_helper(
|
||||
if let Some(data) = version_data {
|
||||
if is_visible_version(&data.inner, &user_option, &pool, &redis).await? {
|
||||
return Ok(
|
||||
HttpResponse::Ok().json(models::projects::Version::from(data))
|
||||
HttpResponse::Ok().json(&models::projects::Version::from(data))
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -247,11 +246,11 @@ pub struct EditVersionFileType {
|
||||
|
||||
pub async fn version_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
new_version: web::Json<serde_json::Value>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(VersionId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
new_version: web::types::Json<serde_json::Value>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let new_version: EditVersion =
|
||||
serde_json::from_value(new_version.into_inner())?;
|
||||
@ -268,14 +267,14 @@ pub async fn version_edit(
|
||||
pub async fn version_edit_helper(
|
||||
req: HttpRequest,
|
||||
info: (VersionId,),
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
new_version: EditVersion,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_WRITE]),
|
||||
@ -290,7 +289,7 @@ pub async fn version_edit_helper(
|
||||
let version_id = info.0;
|
||||
let id = version_id.into();
|
||||
|
||||
let result = database::models::Version::get(id, &**pool, &redis).await?;
|
||||
let result = database::models::Version::get(id, &*pool, &redis).await?;
|
||||
|
||||
if let Some(version_item) = result {
|
||||
let team_member =
|
||||
@ -298,14 +297,14 @@ pub async fn version_edit_helper(
|
||||
version_item.inner.project_id,
|
||||
user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let organization =
|
||||
Organization::get_associated_organization_project_id(
|
||||
version_item.inner.project_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -314,7 +313,7 @@ pub async fn version_edit_helper(
|
||||
database::models::TeamMember::get_from_user_id(
|
||||
organization.team_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
@ -621,7 +620,7 @@ pub async fn version_edit_helper(
|
||||
file_type.hash.as_bytes(),
|
||||
file_type.algorithm
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.fetch_optional(&*pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
@ -714,20 +713,20 @@ pub struct VersionListFilters {
|
||||
|
||||
pub async fn version_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
web::Query(filters): web::Query<VersionListFilters>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
info: web::types::Path<(String,)>,
|
||||
web::types::Query(filters): web::types::Query<VersionListFilters>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let result =
|
||||
database::models::Project::get(&string, &**pool, &redis).await?;
|
||||
database::models::Project::get(&string, &*pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
|
||||
@ -752,7 +751,7 @@ pub async fn version_list(
|
||||
});
|
||||
let mut versions = database::models::Version::get_many(
|
||||
&project.versions,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
@ -806,11 +805,11 @@ pub async fn version_list(
|
||||
// TODO: This is a bandaid fix for detecting auto-featured versions.
|
||||
// In the future, not all versions will have 'game_versions' fields, so this will need to be changed.
|
||||
let (loaders, game_versions) = futures::future::try_join(
|
||||
database::models::loader_fields::Loader::list(&**pool, &redis),
|
||||
database::models::loader_fields::Loader::list(&*pool, &redis),
|
||||
database::models::legacy_loader_fields::MinecraftGameVersion::list(
|
||||
None,
|
||||
Some(true),
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
),
|
||||
)
|
||||
@ -858,7 +857,7 @@ pub async fn version_list(
|
||||
filter_visible_versions(response, &user_option, &pool, &redis)
|
||||
.await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
Ok(HttpResponse::Ok().json(&response))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@ -866,15 +865,15 @@ pub async fn version_list(
|
||||
|
||||
pub async fn version_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
search_config: web::Data<SearchConfig>,
|
||||
info: web::types::Path<(VersionId,)>,
|
||||
pool: web::types::State<PgPool>,
|
||||
redis: web::types::State<RedisPool>,
|
||||
session_queue: web::types::State<AuthQueue>,
|
||||
search_config: web::types::State<SearchConfig>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&*pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_DELETE]),
|
||||
@ -883,7 +882,7 @@ pub async fn version_delete(
|
||||
.1;
|
||||
let id = info.into_inner().0;
|
||||
|
||||
let version = database::models::Version::get(id.into(), &**pool, &redis)
|
||||
let version = database::models::Version::get(id.into(), &*pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
@ -897,7 +896,7 @@ pub async fn version_delete(
|
||||
version.inner.project_id,
|
||||
user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await
|
||||
.map_err(ApiError::Database)?;
|
||||
@ -905,7 +904,7 @@ pub async fn version_delete(
|
||||
let organization =
|
||||
Organization::get_associated_organization_project_id(
|
||||
version.inner.project_id,
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@ -914,7 +913,7 @@ pub async fn version_delete(
|
||||
database::models::TeamMember::get_from_user_id(
|
||||
organization.team_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
&*pool,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
|
||||
@ -1,53 +1,25 @@
use actix_rt::Arbiter;
use futures::StreamExt;
use std::time::Duration;

pub struct Scheduler {
    arbiter: Arbiter,
}

impl Default for Scheduler {
    fn default() -> Self {
        Self::new()
    }
}

impl Scheduler {
    pub fn new() -> Self {
        Scheduler {
            arbiter: Arbiter::new(),
pub fn schedule<F, R>(interval: Duration, mut task: F)
where
    F: FnMut() -> R + Send + 'static,
    R: std::future::Future<Output = ()> + Send + 'static,
{
    tokio::task::spawn(async move {
        let mut interval_stream = tokio::time::interval(interval);
        loop {
            interval_stream.tick().await;
            task().await;
        }
        }

    pub fn run<F, R>(&mut self, interval: std::time::Duration, mut task: F)
    where
        F: FnMut() -> R + Send + 'static,
        R: std::future::Future<Output = ()> + Send + 'static,
    {
        let future = IntervalStream::new(actix_rt::time::interval(interval))
            .for_each_concurrent(2, move |_| task());

        self.arbiter.spawn(future);
    }
    });
}

impl Drop for Scheduler {
    fn drop(&mut self) {
        self.arbiter.stop();
    }
}

use log::{info, warn};

pub fn schedule_versions(
    scheduler: &mut Scheduler,
    pool: sqlx::Pool<sqlx::Postgres>,
    redis: RedisPool,
) {
pub fn schedule_versions(pool: sqlx::Pool<sqlx::Postgres>, redis: RedisPool) {
    let version_index_interval = std::time::Duration::from_secs(
        parse_var("VERSION_INDEX_INTERVAL").unwrap_or(1800),
    );

    scheduler.run(version_index_interval, move || {
    schedule(version_index_interval, move || {
        let pool_ref = pool.clone();
        let redis = redis.clone();
        async move {
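Taken on its own, the new scheduler is just a detached tokio task driven by `tokio::time::interval`. A self-contained sketch of the pattern with an illustrative caller; the two-second interval, the stub job, and the `main` wrapper are assumptions for demonstration. One behavioural note grounded in the diff: the old `for_each_concurrent(2, ..)` stream allowed two overlapping runs, while the new loop awaits each run before scheduling the next.

```rust
use std::time::Duration;

// Same shape as the schedule() added in this file: spawn a detached task that
// ticks on an interval and awaits the job each time.
pub fn schedule<F, R>(interval: Duration, mut task: F)
where
    F: FnMut() -> R + Send + 'static,
    R: std::future::Future<Output = ()> + Send + 'static,
{
    tokio::task::spawn(async move {
        let mut ticker = tokio::time::interval(interval);
        loop {
            ticker.tick().await; // the first tick fires immediately
            task().await;
        }
    });
}

#[ntex::main]
async fn main() {
    // Illustrative caller, mirroring how schedule_versions() now uses it.
    schedule(Duration::from_secs(2), || async {
        println!("reindexing versions...");
    });
    tokio::time::sleep(Duration::from_secs(5)).await;
}
```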
@ -78,8 +50,8 @@ use crate::{
    util::env::parse_var,
};
use chrono::{DateTime, Utc};
use log::{info, warn};
use serde::Deserialize;
use tokio_stream::wrappers::IntervalStream;

#[derive(Deserialize)]
struct InputFormat<'a> {

@ -54,8 +54,8 @@ pub async fn remove_documents(
}

pub async fn index_projects(
    pool: PgPool,
    redis: RedisPool,
    pool: &PgPool,
    redis: &RedisPool,
    config: &SearchConfig,
) -> Result<(), IndexingError> {
    info!("Indexing projects.");
@ -73,7 +73,7 @@ pub async fn index_projects(

    let all_loader_fields =
        crate::database::models::loader_fields::LoaderField::get_fields_all(
            &pool, &redis,
            pool, redis,
        )
        .await?
        .into_iter()

@ -1,10 +1,10 @@
use crate::models::error::ApiError;
use crate::models::projects::SearchRequest;
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use chrono::{DateTime, Utc};
use itertools::Itertools;
use meilisearch_sdk::client::Client;
use ntex::http::StatusCode;
use ntex::web::{HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::borrow::Cow;
@ -30,7 +30,7 @@ pub enum SearchError {
    InvalidIndex(String),
}

impl actix_web::ResponseError for SearchError {
impl ntex::web::WebResponseError for SearchError {
    fn status_code(&self) -> StatusCode {
        match self {
            SearchError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
@ -42,8 +42,8 @@ impl actix_web::ResponseError for SearchError {
        }
    }

    fn error_response(&self) -> HttpResponse {
        HttpResponse::build(self.status_code()).json(ApiError {
    fn error_response(&self, _req: &HttpRequest) -> HttpResponse {
        HttpResponse::build(self.status_code()).json(&ApiError {
            error: match self {
                SearchError::Env(..) => "environment_error",
                SearchError::MeiliSearch(..) => "meilisearch_error",

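Since `SearchError` now implements `ntex::web::WebResponseError`, handlers can keep returning `Result<HttpResponse, SearchError>` and ntex renders the JSON body defined in `error_response` above. An illustrative handler, not taken from the PR (the route shape and the emptiness check are assumptions):

```rust
use ntex::web::{self, HttpResponse};

async fn get_index(
    info: web::types::Path<(String,)>,
) -> Result<HttpResponse, SearchError> {
    let (index,) = info.into_inner();
    if index.is_empty() {
        // Rendered through error_response() above into the ApiError JSON shape.
        return Err(SearchError::InvalidIndex(index));
    }
    Ok(HttpResponse::Ok().json(&index))
}
```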
@ -1,5 +1,6 @@
use actix_web::test::TestRequest;
use bytes::{Bytes, BytesMut};
use image::EncodableLayout;
use ntex::web::test::TestRequest;

// Multipart functionality for actix
// Primarily for testing or some implementations of route-redirection
@ -31,12 +32,14 @@ impl AppendsMultipart for TestRequest {
        self,
        data: impl IntoIterator<Item = MultipartSegment>,
    ) -> Self {
        let (boundary, payload) = generate_multipart(data);
        self.append_header((
            "Content-Type",
            format!("multipart/form-data; boundary={}", boundary),
        ))
        .set_payload(payload)
        // TODO: fix me
        // let (boundary, payload) = generate_multipart(data);
        // self.header(
        //     "Content-Type",
        //     format!("multipart/form-data; boundary={}", boundary),
        // )
        // .set_payload(payload.as_bytes())
        self
    }
}

@ -1,6 +1,6 @@
use crate::routes::ApiError;
use crate::util::env::parse_var;
use actix_web::HttpRequest;
use ntex::web::HttpRequest;
use serde::Deserialize;
use std::collections::HashMap;

@ -8,15 +8,14 @@ pub async fn check_hcaptcha(
    req: &HttpRequest,
    challenge: &str,
) -> Result<bool, ApiError> {
    let conn_info = req.connection_info().clone();
    let ip_addr = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
        if let Some(header) = req.headers().get("CF-Connecting-IP") {
            header.to_str().ok()
            header.to_str().ok().map(|x| x.to_string())
        } else {
            conn_info.peer_addr()
            req.peer_addr().map(|x| x.to_string())
        }
    } else {
        conn_info.peer_addr()
        req.peer_addr().map(|x| x.to_string())
    };

    let ip_addr = ip_addr.ok_or(ApiError::Turnstile)?;
@ -33,7 +32,7 @@ pub async fn check_hcaptcha(
    let secret = dotenvy::var("HCAPTCHA_SECRET")?;
    form.insert("response", challenge);
    form.insert("secret", &*secret);
    form.insert("remoteip", ip_addr);
    form.insert("remoteip", &ip_addr);

    let val: Response = client
        .post("https://api.hcaptcha.com/siteverify")
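The same client-IP lookup (Cloudflare header first, then the socket peer address) also appears in the rate limiter later in this diff, so it is worth seeing on its own. The helper name below is ours, not from the PR; the logic mirrors the hunk above:

```rust
use ntex::web::HttpRequest;

// Resolve the client IP: trust CF-Connecting-IP when Cloudflare fronts the app,
// otherwise fall back to the TCP peer address of the connection.
fn client_ip(req: &HttpRequest, cloudflare: bool) -> Option<String> {
    if cloudflare {
        if let Some(header) = req.headers().get("CF-Connecting-IP") {
            return header.to_str().ok().map(|x| x.to_string());
        }
    }
    req.peer_addr().map(|x| x.to_string())
}
```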
@ -1,10 +1,5 @@
use actix_cors::Cors;
use ntex_cors::{Cors, CorsFactory};

pub fn default_cors() -> Cors {
pub fn default_cors<Err>() -> CorsFactory<Err> {
    Cors::default()
        .allow_any_origin()
        .allow_any_header()
        .allow_any_method()
        .max_age(3600)
        .send_wildcard()
}

@ -1,12 +1,13 @@
use actix_web::guard::GuardContext;

pub const ADMIN_KEY_HEADER: &str = "Modrinth-Admin";
pub fn admin_key_guard(ctx: &GuardContext) -> bool {
    let admin_key = std::env::var("LABRINTH_ADMIN_KEY").expect(
        "No admin key provided, this should have been caught by check_env_vars",
    );
    ctx.head()
        .headers()
        .get(ADMIN_KEY_HEADER)
        .map_or(false, |it| it.as_bytes() == admin_key.as_bytes())
}
// TODO: fix me
// use actix_web::guard::GuardContext;
//
// pub const ADMIN_KEY_HEADER: &str = "Modrinth-Admin";
// pub fn admin_key_guard(ctx: &GuardContext) -> bool {
//     let admin_key = std::env::var("LABRINTH_ADMIN_KEY").expect(
//         "No admin key provided, this should have been caught by check_env_vars",
//     );
//     ctx.head()
//         .headers()
//         .get(ADMIN_KEY_HEADER)
//         .map_or(false, |it| it.as_bytes() == admin_key.as_bytes())
// }

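Because the actix `Guard` API has no direct ntex counterpart in this PR (the guard is TODO'd out above), one possible stand-in is to check the admin header inside the handler itself. The handler below is illustrative only and not part of the PR:

```rust
use ntex::web::{HttpRequest, HttpResponse};

pub const ADMIN_KEY_HEADER: &str = "Modrinth-Admin";

// Same check the guard performed, expressed as a plain helper.
fn is_admin(req: &HttpRequest) -> bool {
    let admin_key = std::env::var("LABRINTH_ADMIN_KEY").expect(
        "No admin key provided, this should have been caught by check_env_vars",
    );
    req.headers()
        .get(ADMIN_KEY_HEADER)
        .map_or(false, |it| it.as_bytes() == admin_key.as_bytes())
}

async fn force_reindex(req: HttpRequest) -> HttpResponse {
    if !is_admin(&req) {
        return HttpResponse::Unauthorized().finish();
    }
    HttpResponse::NoContent().finish()
}
```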
@ -5,13 +5,9 @@ use std::sync::Arc;
|
||||
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::env::parse_var;
|
||||
use actix_web::{
|
||||
body::EitherBody,
|
||||
dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform},
|
||||
Error, ResponseError,
|
||||
};
|
||||
use futures_util::future::LocalBoxFuture;
|
||||
use futures_util::future::{ready, Ready};
|
||||
use ntex::service::{Middleware, Service, ServiceCtx};
|
||||
use ntex::web;
|
||||
use ntex::web::{WebResponse, WebResponseError};
|
||||
|
||||
pub type KeyedRateLimiter<
|
||||
K = String,
|
||||
@ -22,23 +18,14 @@ pub type KeyedRateLimiter<
|
||||
|
||||
pub struct RateLimit(pub KeyedRateLimiter);
|
||||
|
||||
impl<S, B> Transform<S, ServiceRequest> for RateLimit
|
||||
where
|
||||
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
||||
S::Future: 'static,
|
||||
B: 'static,
|
||||
{
|
||||
type Response = ServiceResponse<EitherBody<B>>;
|
||||
type Error = Error;
|
||||
type Transform = RateLimitService<S>;
|
||||
type InitError = ();
|
||||
type Future = Ready<Result<Self::Transform, Self::InitError>>;
|
||||
impl<S> Middleware<S> for RateLimit {
|
||||
type Service = RateLimitService<S>;
|
||||
|
||||
fn new_transform(&self, service: S) -> Self::Future {
|
||||
ready(Ok(RateLimitService {
|
||||
fn create(&self, service: S) -> Self::Service {
|
||||
RateLimitService {
|
||||
service,
|
||||
rate_limiter: Arc::clone(&self.0),
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -48,41 +35,42 @@ pub struct RateLimitService<S> {
|
||||
rate_limiter: KeyedRateLimiter,
|
||||
}
|
||||
|
||||
impl<S, B> Service<ServiceRequest> for RateLimitService<S>
|
||||
impl<S, Err> Service<web::WebRequest<Err>> for RateLimitService<S>
|
||||
where
|
||||
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
||||
S::Future: 'static,
|
||||
B: 'static,
|
||||
S: Service<
|
||||
web::WebRequest<Err>,
|
||||
Response = web::WebResponse,
|
||||
Error = web::Error,
|
||||
>,
|
||||
Err: web::ErrorRenderer,
|
||||
{
|
||||
type Response = ServiceResponse<EitherBody<B>>;
|
||||
type Error = Error;
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
|
||||
type Response = web::WebResponse;
|
||||
type Error = web::Error;
|
||||
|
||||
forward_ready!(service);
|
||||
ntex::forward_ready!(service);
|
||||
|
||||
fn call(&self, req: ServiceRequest) -> Self::Future {
|
||||
async fn call(
|
||||
&self,
|
||||
req: web::WebRequest<Err>,
|
||||
ctx: ServiceCtx<'_, Self>,
|
||||
) -> Result<Self::Response, Self::Error> {
|
||||
if let Some(key) = req.headers().get("x-ratelimit-key") {
|
||||
if key.to_str().ok()
|
||||
== dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
|
||||
{
|
||||
let res = self.service.call(req);
|
||||
|
||||
return Box::pin(async move {
|
||||
let service_response = res.await?;
|
||||
Ok(service_response.map_into_left_body())
|
||||
});
|
||||
let res = ctx.call(&self.service, req).await?;
|
||||
return Ok(res);
|
||||
}
|
||||
}
|
||||
|
||||
let conn_info = req.connection_info().clone();
|
||||
let ip = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
|
||||
if let Some(header) = req.headers().get("CF-Connecting-IP") {
|
||||
header.to_str().ok()
|
||||
header.to_str().ok().map(|x| x.to_string())
|
||||
} else {
|
||||
conn_info.peer_addr()
|
||||
req.peer_addr().map(|x| x.to_string())
|
||||
}
|
||||
} else {
|
||||
conn_info.peer_addr()
|
||||
req.peer_addr().map(|x| x.to_string())
|
||||
};
|
||||
|
||||
if let Some(ip) = ip {
|
||||
@ -90,98 +78,85 @@ where
|
||||
|
||||
match self.rate_limiter.check_key(&ip) {
|
||||
Ok(snapshot) => {
|
||||
let fut = self.service.call(req);
|
||||
let mut service_response =
|
||||
ctx.call(&self.service, req).await?;
|
||||
|
||||
Box::pin(async move {
|
||||
match fut.await {
|
||||
Ok(mut service_response) => {
|
||||
// Now you have a mutable reference to the ServiceResponse, so you can modify its headers.
|
||||
let headers = service_response.headers_mut();
|
||||
headers.insert(
|
||||
actix_web::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-limit",
|
||||
)
|
||||
.unwrap(),
|
||||
snapshot.quota().burst_size().get().into(),
|
||||
);
|
||||
headers.insert(
|
||||
actix_web::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-remaining",
|
||||
)
|
||||
.unwrap(),
|
||||
snapshot.remaining_burst_capacity().into(),
|
||||
);
|
||||
let headers = service_response.headers_mut();
|
||||
headers.insert(
|
||||
ntex::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-limit",
|
||||
)
|
||||
.unwrap(),
|
||||
snapshot.quota().burst_size().get().into(),
|
||||
);
|
||||
headers.insert(
|
||||
ntex::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-remaining",
|
||||
)
|
||||
.unwrap(),
|
||||
snapshot.remaining_burst_capacity().into(),
|
||||
);
|
||||
|
||||
headers.insert(
|
||||
actix_web::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-reset",
|
||||
)
|
||||
.unwrap(),
|
||||
snapshot
|
||||
.quota()
|
||||
.burst_size_replenished_in()
|
||||
.as_secs()
|
||||
.into(),
|
||||
);
|
||||
headers.insert(
|
||||
ntex::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-reset",
|
||||
)
|
||||
.unwrap(),
|
||||
snapshot
|
||||
.quota()
|
||||
.burst_size_replenished_in()
|
||||
.as_secs()
|
||||
.into(),
|
||||
);
|
||||
|
||||
// Return the modified response as Ok.
|
||||
Ok(service_response.map_into_left_body())
|
||||
}
|
||||
Err(e) => {
|
||||
// Handle error case
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
})
|
||||
Ok(service_response)
|
||||
}
|
||||
Err(negative) => {
|
||||
let wait_time =
|
||||
negative.wait_time_from(DefaultClock::default().now());
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let mut response = ApiError::RateLimitError(
|
||||
wait_time.as_millis(),
|
||||
negative.quota().burst_size().get(),
|
||||
)
|
||||
.error_response();
|
||||
.error_response(&req);
|
||||
|
||||
let headers = response.headers_mut();
|
||||
|
||||
headers.insert(
|
||||
actix_web::http::header::HeaderName::from_str(
|
||||
ntex::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-limit",
|
||||
)
|
||||
.unwrap(),
|
||||
negative.quota().burst_size().get().into(),
|
||||
);
|
||||
headers.insert(
|
||||
actix_web::http::header::HeaderName::from_str(
|
||||
ntex::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-remaining",
|
||||
)
|
||||
.unwrap(),
|
||||
0.into(),
|
||||
);
|
||||
headers.insert(
|
||||
actix_web::http::header::HeaderName::from_str(
|
||||
ntex::http::header::HeaderName::from_str(
|
||||
"x-ratelimit-reset",
|
||||
)
|
||||
.unwrap(),
|
||||
wait_time.as_secs().into(),
|
||||
);
|
||||
|
||||
Box::pin(async {
|
||||
Ok(req.into_response(response.map_into_right_body()))
|
||||
})
|
||||
Ok(WebResponse::new(response, req))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let (req, _) = req.into_parts();
|
||||
let response = ApiError::CustomAuthentication(
|
||||
"Unable to obtain user IP address!".to_string(),
|
||||
)
|
||||
.error_response();
|
||||
.error_response(&req);
|
||||
|
||||
Box::pin(async {
|
||||
Ok(req.into_response(response.map_into_right_body()))
|
||||
})
|
||||
Ok(WebResponse::new(response, req))
|
||||
}
|
||||
}
|
||||
}
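The shape this file migrates to is worth seeing in isolation: actix-web's `Transform`/`Service` pair becomes ntex's `Middleware::create` plus an `async fn call` that goes through `ServiceCtx`. A self-contained sketch with a trivial timing middleware, illustrative only and not part of the PR:

```rust
use ntex::service::{Middleware, Service, ServiceCtx};
use ntex::web;

pub struct Timing;

impl<S> Middleware<S> for Timing {
    type Service = TimingService<S>;

    // Replaces actix-web's Transform::new_transform.
    fn create(&self, service: S) -> Self::Service {
        TimingService { service }
    }
}

pub struct TimingService<S> {
    service: S,
}

impl<S, Err> Service<web::WebRequest<Err>> for TimingService<S>
where
    S: Service<web::WebRequest<Err>, Response = web::WebResponse, Error = web::Error>,
    Err: web::ErrorRenderer,
{
    type Response = web::WebResponse;
    type Error = web::Error;

    ntex::forward_ready!(service);

    // Replaces the boxed-future call() of the actix version with a plain async fn.
    async fn call(
        &self,
        req: web::WebRequest<Err>,
        ctx: ServiceCtx<'_, Self>,
    ) -> Result<Self::Response, Self::Error> {
        let started = std::time::Instant::now();
        let res = ctx.call(&self.service, req).await?;
        log::debug!("handled request in {:?}", started.elapsed());
        Ok(res)
    }
}
```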
|
||||
|
||||
@ -1,9 +1,9 @@
|
||||
use crate::routes::v3::project_creation::CreateError;
|
||||
// use crate::routes::v3::project_creation::CreateError;
|
||||
use crate::routes::ApiError;
|
||||
use actix_multipart::Field;
|
||||
use actix_web::web::Payload;
|
||||
use bytes::BytesMut;
|
||||
use futures::StreamExt;
|
||||
use ntex::web::types::Payload;
|
||||
use ntex_multipart::Field;
|
||||
|
||||
pub async fn read_from_payload(
|
||||
payload: &mut Payload,
|
||||
@ -25,18 +25,19 @@ pub async fn read_from_payload(
|
||||
Ok(bytes)
|
||||
}
|
||||
|
||||
pub async fn read_from_field(
|
||||
field: &mut Field,
|
||||
cap: usize,
|
||||
err_msg: &'static str,
|
||||
) -> Result<BytesMut, CreateError> {
|
||||
let mut bytes = BytesMut::new();
|
||||
while let Some(chunk) = field.next().await {
|
||||
if bytes.len() >= cap {
|
||||
return Err(CreateError::InvalidInput(String::from(err_msg)));
|
||||
} else {
|
||||
bytes.extend_from_slice(&chunk?);
|
||||
}
|
||||
}
|
||||
Ok(bytes)
|
||||
}
|
||||
// TODO: fix me
|
||||
// pub async fn read_from_field(
|
||||
// field: &mut Field,
|
||||
// cap: usize,
|
||||
// err_msg: &'static str,
|
||||
// ) -> Result<BytesMut, CreateError> {
|
||||
// let mut bytes = BytesMut::new();
|
||||
// while let Some(chunk) = field.next().await {
|
||||
// if bytes.len() >= cap {
|
||||
// return Err(CreateError::InvalidInput(String::from(err_msg)));
|
||||
// } else {
|
||||
// bytes.extend_from_slice(&chunk?);
|
||||
// }
|
||||
// }
|
||||
// Ok(bytes)
|
||||
// }
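`read_from_field` is TODO'd out above. A sketch of what an ntex-multipart version could look like, assuming `ntex_multipart::Field` still yields `Result<Bytes, _>` chunks like the actix version did, and using the crate's `ApiError::InvalidInput` instead of `CreateError` since that import is commented out; the function name and error mapping are ours:

```rust
use crate::routes::ApiError;
use bytes::BytesMut;
use futures::StreamExt;
use ntex_multipart::Field;

pub async fn read_from_field_capped(
    field: &mut Field,
    cap: usize,
    err_msg: &'static str,
) -> Result<BytesMut, ApiError> {
    let mut bytes = BytesMut::new();
    while let Some(chunk) = field.next().await {
        let chunk =
            chunk.map_err(|e| ApiError::InvalidInput(format!("{}: {}", err_msg, e)))?;
        if bytes.len() >= cap {
            // Same cap semantics as the commented-out original.
            return Err(ApiError::InvalidInput(String::from(err_msg)));
        } else {
            bytes.extend_from_slice(&chunk);
        }
    }
    Ok(bytes)
}
```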
|
||||
|
||||
@ -45,7 +45,7 @@ pub enum ValidationError {
    #[error("Invalid Input: {0}")]
    InvalidInput(std::borrow::Cow<'static, str>),
    #[error("Error while managing threads")]
    Blocking(#[from] actix_web::error::BlockingError),
    Blocking,
    #[error("Error while querying database")]
    Database(#[from] DatabaseError),
}
@ -152,7 +152,7 @@ async fn validate_minecraft_file(
    all_game_versions: Vec<MinecraftGameVersion>,
    file_type: Option<FileType>,
) -> Result<ValidationResult, ValidationError> {
    actix_web::web::block(move || {
    ntex::web::block(move || {
        let reader = Cursor::new(data);
        let mut zip = ZipArchive::new(reader)?;

@ -216,7 +216,7 @@ async fn validate_minecraft_file(
            Ok(ValidationResult::Pass)
        }
    })
    .await?
    .await.map_err(|_| ValidationError::Blocking)
}
|
||||
|
||||
// Write tests for this
|
||||
|
||||
@ -14,7 +14,7 @@ use rust_decimal::{prelude::ToPrimitive, Decimal};
|
||||
|
||||
mod common;
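The remaining test files are largely the mechanical `#[actix_rt::test]` → `#[ntex::test]` swap, with `ntex::web::test` replacing `actix_web::test`. A minimal, self-contained example of that harness, using a throwaway app rather than the labrinth test environment (the route and handler are assumptions for illustration):

```rust
use ntex::web::{self, test, App, HttpResponse};

async fn status() -> HttpResponse {
    HttpResponse::Ok().finish()
}

#[ntex::test]
async fn status_endpoint_returns_ok() {
    // Build an in-memory service and drive a request through it.
    let app =
        test::init_service(App::new().route("/status", web::get().to(status))).await;
    let req = test::TestRequest::get().uri("/status").to_request();
    let resp = test::call_service(&app, req).await;
    assert!(resp.status().is_success());
}
```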
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn analytics_revenue() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -153,7 +153,7 @@ fn to_f64_vec_rounded_up(d: Vec<Decimal>) -> Vec<f64> {
|
||||
d.into_iter().map(to_f64_rounded_up).collect_vec()
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn permissions_analytics_revenue() {
|
||||
with_test_environment(
|
||||
None,
|
||||
|
||||
@ -479,8 +479,8 @@ pub trait ApiVersion {
|
||||
pub trait AppendsOptionalPat {
|
||||
fn append_pat(self, pat: Option<&str>) -> Self;
|
||||
}
|
||||
// Impl this on all actix_web::test::TestRequest
|
||||
impl AppendsOptionalPat for actix_web::test::TestRequest {
|
||||
// Impl this on all ntex::web::test::TestRequest
|
||||
impl AppendsOptionalPat for ntex::web::test::TestRequest {
|
||||
fn append_pat(self, pat: Option<&str>) -> Self {
|
||||
if let Some(pat) = pat {
|
||||
self.append_header(("Authorization", pat))
|
||||
|
||||
@ -41,7 +41,7 @@ impl Api for ApiV2 {
|
||||
}
|
||||
|
||||
async fn reset_search_index(&self) -> ServiceResponse {
|
||||
let req = actix_web::test::TestRequest::post()
|
||||
let req = ntex::web::test::TestRequest::post()
|
||||
.uri("/v2/admin/_force_reindex")
|
||||
.append_header((
|
||||
"Modrinth-Admin",
|
||||
|
||||
@ -45,7 +45,7 @@ impl Api for ApiV3 {
|
||||
}
|
||||
|
||||
async fn reset_search_index(&self) -> ServiceResponse {
|
||||
let req = actix_web::test::TestRequest::post()
|
||||
let req = ntex::web::test::TestRequest::post()
|
||||
.uri("/_internal/admin/_force_reindex")
|
||||
.append_header((
|
||||
"Modrinth-Admin",
|
||||
|
||||
@ -154,7 +154,7 @@ pub async fn get_access_token(response: ServiceResponse) -> String {
|
||||
|
||||
pub fn get_redirect_location_query_params(
|
||||
response: &ServiceResponse,
|
||||
) -> actix_web::web::Query<HashMap<String, String>> {
|
||||
) -> actix_web::web::types::Query<HashMap<String, String>> {
|
||||
let redirect_location = response
|
||||
.headers()
|
||||
.get(LOCATION)
|
||||
|
||||
@ -2,13 +2,13 @@
|
||||
use std::io::{Cursor, Write};
|
||||
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test::{self, TestRequest};
|
||||
use labrinth::models::{
|
||||
oauth_clients::OAuthClient,
|
||||
organizations::Organization,
|
||||
pats::Scopes,
|
||||
projects::{Project, ProjectId, Version},
|
||||
};
|
||||
use ntex::web::test::{self, TestRequest};
|
||||
use serde_json::json;
|
||||
use sqlx::Executor;
|
||||
use zip::{write::FileOptions, CompressionMethod, ZipWriter};
|
||||
|
||||
@ -61,7 +61,7 @@ pub async fn with_test_environment_all<Fut, F>(
|
||||
}
|
||||
|
||||
// A complete test environment, with a test actix app and a database.
|
||||
// Must be called in an #[actix_rt::test] context. It also simulates a
|
||||
// Must be called in an #[ntex::test] context. It also simulates a
|
||||
// temporary sqlx db like #[sqlx::test] would.
|
||||
// Use .call(req) on it directly to make a test call as if test::call_service(req) were being used.
|
||||
#[derive(Clone)]
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test;
|
||||
use bytes::Bytes;
|
||||
use common::api_common::ApiProject;
|
||||
use ntex::web::test;
|
||||
|
||||
use common::api_v3::ApiV3;
|
||||
use common::database::USER_USER_PAT;
|
||||
@ -9,7 +9,7 @@ use common::environment::{with_test_environment, TestEnvironment};
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn error_404_body() {
|
||||
with_test_environment(
|
||||
None,
|
||||
|
||||
@ -7,7 +7,7 @@ use common::{
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_games() {
|
||||
with_test_environment(
|
||||
None,
|
||||
|
||||
@ -1,12 +1,12 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test;
|
||||
use common::api_v3::ApiV3;
|
||||
use common::environment::{with_test_environment, TestEnvironment};
|
||||
use itertools::Itertools;
|
||||
use labrinth::database::models::legacy_loader_fields::MinecraftGameVersion;
|
||||
use labrinth::models::v3;
|
||||
use ntex::web::test;
|
||||
use serde_json::json;
|
||||
|
||||
use crate::common::api_common::{ApiProject, ApiVersion};
|
||||
@ -20,7 +20,7 @@ use crate::common::dummy_data::{
|
||||
// importing common module.
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
|
||||
async fn creating_loader_fields() {
|
||||
with_test_environment(None, |test_env: TestEnvironment<ApiV3>| async move {
|
||||
@ -378,7 +378,7 @@ async fn creating_loader_fields() {
|
||||
.await
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_loader_fields_variants() {
|
||||
with_test_environment(None, |test_env: TestEnvironment<ApiV3>| async move {
|
||||
let api = &test_env.api;
|
||||
@ -414,7 +414,7 @@ async fn get_loader_fields_variants() {
|
||||
.await
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_available_loader_fields() {
|
||||
// Get available loader fields for a given loader
|
||||
// (ie: which fields are relevant for 'fabric', etc)
|
||||
@ -475,7 +475,7 @@ async fn get_available_loader_fields() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn test_multi_get_redis_cache() {
|
||||
// Ensures a multi-project get including both modpacks and mods ddoes not
|
||||
// incorrectly cache loader fields
|
||||
@ -576,7 +576,7 @@ async fn test_multi_get_redis_cache() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn minecraft_game_version_update() {
|
||||
// We simulate adding a Minecraft game version, to ensure other data doesn't get overwritten
|
||||
// This is basically a test for the insertion/concatenation query
|
||||
|
||||
@ -7,7 +7,7 @@ use crate::common::api_common::ApiTeams;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn get_user_notifications_after_team_invitation_returns_notification()
|
||||
{
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -39,7 +39,7 @@ pub async fn get_user_notifications_after_team_invitation_returns_notification()
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn get_user_notifications_after_reading_indicates_notification_read()
|
||||
{
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -69,7 +69,7 @@ pub async fn get_user_notifications_after_reading_indicates_notification_read()
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn get_user_notifications_after_deleting_does_not_show_notification()
|
||||
{
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test;
|
||||
use common::{
|
||||
api_v3::oauth::get_redirect_location_query_params,
|
||||
api_v3::{
|
||||
@ -14,11 +13,12 @@ use common::{
|
||||
environment::{with_test_environment, TestEnvironment},
|
||||
};
|
||||
use labrinth::auth::oauth::TokenResponse;
|
||||
use ntex::web::test;
|
||||
use reqwest::header::{CACHE_CONTROL, PRAGMA};
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn oauth_flow_happy_path() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha {
|
||||
@ -80,7 +80,7 @@ async fn oauth_flow_happy_path() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn oauth_authorize_for_already_authorized_scopes_returns_auth_code() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha { client_id, .. } =
|
||||
@ -114,7 +114,7 @@ async fn oauth_authorize_for_already_authorized_scopes_returns_auth_code() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_oauth_token_with_already_used_auth_code_fails() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha {
|
||||
@ -152,7 +152,7 @@ async fn get_oauth_token_with_already_used_auth_code_fails() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn authorize_with_broader_scopes_can_complete_flow() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha {
|
||||
@ -213,7 +213,7 @@ async fn authorize_with_broader_scopes_can_complete_flow() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn oauth_authorize_with_broader_scopes_requires_user_accept() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let client_id = env.dummy.oauth_client_alpha.client_id;
|
||||
@ -247,7 +247,7 @@ async fn oauth_authorize_with_broader_scopes_requires_user_accept() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn reject_authorize_ends_authorize_flow() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let client_id = env.dummy.oauth_client_alpha.client_id;
|
||||
@ -266,7 +266,7 @@ async fn reject_authorize_ends_authorize_flow() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn accept_authorize_after_already_accepting_fails() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let client_id = env.dummy.oauth_client_alpha.client_id;
|
||||
@ -284,7 +284,7 @@ async fn accept_authorize_after_already_accepting_fails() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn revoke_authorization_after_issuing_token_revokes_token() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha {
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test;
|
||||
use common::{
|
||||
api_v3::ApiV3,
|
||||
database::{FRIEND_USER_ID, FRIEND_USER_PAT, USER_USER_ID, USER_USER_PAT},
|
||||
@ -14,12 +13,13 @@ use labrinth::{
|
||||
},
|
||||
routes::v3::oauth_clients::OAuthClientEdit,
|
||||
};
|
||||
use ntex::web::test;
|
||||
|
||||
use common::database::USER_USER_ID_PARSED;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn can_create_edit_get_oauth_client() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let client_name = "test_client".to_string();
|
||||
@ -75,7 +75,7 @@ async fn can_create_edit_get_oauth_client() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn create_oauth_client_with_restricted_scopes_fails() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let resp = env
|
||||
@ -93,7 +93,7 @@ async fn create_oauth_client_with_restricted_scopes_fails() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_oauth_client_for_client_creator_succeeds() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha { client_id, .. } =
|
||||
@ -111,7 +111,7 @@ async fn get_oauth_client_for_client_creator_succeeds() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn can_delete_oauth_client() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let client_id = env.dummy.oauth_client_alpha.client_id.clone();
|
||||
@ -127,7 +127,7 @@ async fn can_delete_oauth_client() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn delete_oauth_client_after_issuing_access_tokens_revokes_tokens() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha {
|
||||
@ -159,7 +159,7 @@ async fn delete_oauth_client_after_issuing_access_tokens_revokes_tokens() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn can_list_user_oauth_authorizations() {
|
||||
with_test_environment(None, |env: TestEnvironment<ApiV3>| async move {
|
||||
let DummyOAuthClientAlpha {
|
||||
|
||||
@ -26,7 +26,7 @@ use serde_json::json;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn create_organization() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -125,7 +125,7 @@ async fn create_organization() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_project_organization() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -158,7 +158,7 @@ async fn get_project_organization() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn patch_organization() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -262,7 +262,7 @@ async fn patch_organization() {
|
||||
}
|
||||
|
||||
// add/remove icon
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn add_remove_icon() {
|
||||
with_test_environment(
|
||||
Some(10),
|
||||
@ -326,7 +326,7 @@ async fn add_remove_icon() {
|
||||
}
|
||||
|
||||
// delete org
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn delete_org() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -351,7 +351,7 @@ async fn delete_org() {
|
||||
}
|
||||
|
||||
// add/remove organization projects
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn add_remove_organization_projects() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -457,7 +457,7 @@ async fn add_remove_organization_projects() {
|
||||
}
|
||||
|
||||
// Like above, but specifically regarding ownership transferring
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn add_remove_organization_project_ownership_to_user() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -731,7 +731,7 @@ async fn add_remove_organization_project_ownership_to_user() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn delete_organization_means_all_projects_to_org_owner() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -910,7 +910,7 @@ async fn delete_organization_means_all_projects_to_org_owner() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_patch_organization() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -952,7 +952,7 @@ async fn permissions_patch_organization() {
|
||||
}
|
||||
|
||||
// Not covered by PATCH /organization
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_edit_details() {
|
||||
with_test_environment(
|
||||
Some(12),
|
||||
@ -1002,7 +1002,7 @@ async fn permissions_edit_details() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_manage_invites() {
|
||||
// Add member, remove member, edit member
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -1104,7 +1104,7 @@ async fn permissions_manage_invites() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_add_remove_project() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -1181,7 +1181,7 @@ async fn permissions_add_remove_project() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_delete_organization() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -1211,7 +1211,7 @@ async fn permissions_delete_organization() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_add_default_project_permissions() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
let zeta_organization_id =
|
||||
@ -1291,7 +1291,7 @@ async fn permissions_add_default_project_permissions() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_organization_permissions_consistency_test() {
|
||||
with_test_environment(
|
||||
None,
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test;
|
||||
use chrono::{Duration, Utc};
|
||||
use common::{database::*, environment::with_test_environment_all};
|
||||
use ntex::web::test;
|
||||
|
||||
use labrinth::models::pats::Scopes;
|
||||
use serde_json::json;
|
||||
@ -17,7 +17,7 @@ mod common;
|
||||
// - ensure PAT can be patched to change expiry
|
||||
// - ensure expired PATs cannot be used
|
||||
// - ensure PATs can be deleted
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn pat_full_test() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
// Create a PAT for a full test
|
||||
@ -165,7 +165,7 @@ pub async fn pat_full_test() {
|
||||
}
|
||||
|
||||
// Test illegal PAT setting, both in POST and PATCH
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn bad_pats() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
// Creating a PAT with no name should fail
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test;
|
||||
use common::api_v3::ApiV3;
|
||||
use common::database::*;
|
||||
use common::dummy_data::DUMMY_CATEGORIES;
|
||||
use ntex::web::test;
|
||||
|
||||
use common::environment::{
|
||||
with_test_environment, with_test_environment_all, TestEnvironment,
|
||||
@ -27,7 +27,7 @@ use crate::common::dummy_data::{
|
||||
};
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn test_get_project() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -99,7 +99,7 @@ async fn test_get_project() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn test_add_remove_project() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment(
|
||||
@ -303,7 +303,7 @@ async fn test_add_remove_project() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn test_patch_project() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -489,7 +489,7 @@ pub async fn test_patch_project() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn test_patch_v3() {
|
||||
// Hits V3-specific patchable fields
|
||||
with_test_environment(
|
||||
@ -525,7 +525,7 @@ pub async fn test_patch_v3() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn test_bulk_edit_categories() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
let api = &test_env.api;
|
||||
@ -566,7 +566,7 @@ pub async fn test_bulk_edit_categories() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn test_bulk_edit_links() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -627,7 +627,7 @@ pub async fn test_bulk_edit_links() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_patch_project_v3() {
|
||||
with_test_environment(Some(8), |test_env: TestEnvironment<ApiV3>| async move {
|
||||
let alpha_project_id = &test_env.dummy.project_alpha.project_id;
|
||||
@ -746,7 +746,7 @@ async fn permissions_patch_project_v3() {
|
||||
}
|
||||
|
||||
// TODO: Project scheduling has been temporarily disabled, so this test is disabled as well
|
||||
// #[actix_rt::test]
|
||||
// #[ntex::test]
|
||||
// async fn permissions_schedule() {
|
||||
// with_test_environment(None, |test_env : TestEnvironment<ApiV3>| async move {
|
||||
// let DummyProjectAlpha {
|
||||
@ -796,7 +796,7 @@ async fn permissions_patch_project_v3() {
|
||||
// }
|
||||
|
||||
// Not covered by PATCH /project
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_edit_details() {
|
||||
with_test_environment_all(Some(10), |test_env| async move {
|
||||
let DummyProjectAlpha {
|
||||
@ -907,7 +907,7 @@ async fn permissions_edit_details() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_upload_version() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -1010,7 +1010,7 @@ async fn permissions_upload_version() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_manage_invites() {
|
||||
// Add member, remove member, edit member
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -1112,7 +1112,7 @@ async fn permissions_manage_invites() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn permissions_delete_project() {
|
||||
// Add member, remove member, edit member
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -1136,7 +1136,7 @@ async fn permissions_delete_project() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn project_permissions_consistency_test() {
|
||||
with_test_environment_all(Some(10), |test_env| async move {
|
||||
// Test that the permissions are consistent with each other
|
||||
@ -1183,7 +1183,7 @@ async fn project_permissions_consistency_test() {
|
||||
}
|
||||
|
||||
// TODO: Re-add this if we want to match v3 Projects structure to v3 Search Result structure, otherwise, delete
|
||||
// #[actix_rt::test]
|
||||
// #[ntex::test]
|
||||
// async fn align_search_projects() {
|
||||
// // Test setup and dummy data
|
||||
// with_test_environment(Some(10), |test_env: TestEnvironment<ApiV3>| async move {
|
||||
@ -1219,7 +1219,7 @@ async fn project_permissions_consistency_test() {
|
||||
// .await
|
||||
// }
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn projects_various_visibility() {
|
||||
// For testing the filter_visible_projects and is_visible_project
|
||||
with_test_environment(
|
||||
|
||||
@ -7,7 +7,6 @@ use crate::common::dummy_data::{
|
||||
DummyImage, DummyProjectAlpha, DummyProjectBeta,
|
||||
};
|
||||
use actix_http::StatusCode;
|
||||
use actix_web::test;
|
||||
use chrono::{Duration, Utc};
|
||||
use common::api_common::models::CommonItemType;
|
||||
use common::api_common::Api;
|
||||
@ -22,6 +21,7 @@ use labrinth::models::ids::base62_impl::parse_base62;
|
||||
use labrinth::models::pats::Scopes;
|
||||
use labrinth::models::projects::ProjectId;
|
||||
use labrinth::models::users::UserId;
|
||||
use ntex::web::test;
|
||||
use serde_json::json;
|
||||
|
||||
// For each scope, we (using test_scope):
|
||||
@ -33,7 +33,7 @@ use serde_json::json;
|
||||
mod common;
|
||||
|
||||
// Test for users, emails, and payout scopes (not user auth scope or notifs)
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn user_scopes() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -113,7 +113,7 @@ async fn user_scopes() {
|
||||
}
|
||||
|
||||
// Notifications
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn notifications_scopes() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
let api = &test_env.api;
|
||||
@ -237,7 +237,7 @@ pub async fn notifications_scopes() {
|
||||
}
|
||||
|
||||
// Project version creation scopes
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn project_version_create_scopes_v3() {
|
||||
with_test_environment(
|
||||
None,
|
||||
@ -284,7 +284,7 @@ pub async fn project_version_create_scopes_v3() {
|
||||
}
|
||||
|
||||
// Project management scopes
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn project_version_reads_scopes() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
let api = &test_env.api;
|
||||
@ -511,7 +511,7 @@ pub async fn project_version_reads_scopes() {
|
||||
}
|
||||
|
||||
// Project writing
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn project_write_scopes() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -689,7 +689,7 @@ pub async fn project_write_scopes() {
|
||||
}
|
||||
|
||||
// Version write
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn version_write_scopes() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -759,7 +759,7 @@ pub async fn version_write_scopes() {
|
||||
}
|
||||
|
||||
// Report scopes
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn report_scopes() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -841,7 +841,7 @@ pub async fn report_scopes() {
|
||||
}
|
||||
|
||||
// Thread scopes
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn thread_scopes() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
@ -889,7 +889,7 @@ pub async fn thread_scopes() {
|
||||
}
|
||||
|
||||
// Pat scopes
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn pat_scopes() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
let api = &test_env.api;
|
||||
@ -960,7 +960,7 @@ pub async fn pat_scopes() {
|
||||
}
|
||||
|
||||
// Collection scopes
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn collections_scopes() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment(
|
||||
@ -1065,7 +1065,7 @@ pub async fn collections_scopes() {
|
||||
}
|
||||
|
||||
// Organization scopes (and a couple PROJECT_WRITE scopes that are only allowed for orgs)
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
pub async fn organization_scopes() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment(
|
||||
|
||||
@ -19,7 +19,7 @@ mod common;
|
||||
// TODO: Revisit this wit h the new modify_json in the version maker
|
||||
// That change here should be able to simplify it vastly
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn search_projects() {
|
||||
// Test setup and dummy data
|
||||
with_test_environment(
|
||||
@ -125,7 +125,7 @@ async fn search_projects() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn index_swaps() {
|
||||
with_test_environment(
|
||||
Some(10),
|
||||
|
||||
@ -11,7 +11,7 @@ use crate::common::api_common::ApiTags;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_tags() {
|
||||
with_test_environment_all(None, |test_env| async move {
|
||||
let api = &test_env.api;
|
||||
@ -40,7 +40,7 @@ async fn get_tags() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ntex::test]
|
||||
async fn get_tags_v3() {
|
||||
with_test_environment(
|
||||
None,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.