Public Webhook Fixes (#493)
* Public discord webhook
* Switch to jsonb for most queries + make gallery featured first
* Run fmt + clippy + prepare
This commit is contained in:
parent e809f77461
commit 4da1871567

sqlx-data.json (2346): file diff suppressed because it is too large
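The recurring pattern in this diff is worth calling out: columns that used to be built with ARRAY_AGG and a ' |||| ' separator, then split and parsed by hand in Rust, now come back as JSONB_AGG(...) / TO_JSONB(...) columns and are deserialized in one step into structs that derive Deserialize. Below is a minimal standalone sketch of that deserialization step only; the Action struct and the sample JSON here are illustrative stand-ins, not the real schema or a live sqlx query.

// Sketch of the JSONB-column deserialization pattern used throughout this commit.
// Assumes the serde (with derive) and serde_json crates; names are illustrative.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Action {
    id: i32,
    title: String,
    action_route: String,
    action_route_method: String,
}

fn main() {
    // A `JSONB_AGG(DISTINCT TO_JSONB(na)) ... actions` column comes back as an
    // optional JSON array with one object per joined row (None when no rows matched).
    let column: Option<serde_json::Value> = Some(serde_json::json!([
        { "id": 1, "title": "View", "action_route": "/x", "action_route_method": "GET" }
    ]));

    // Deserialize the whole array at once, falling back to an empty Vec when the
    // column is NULL or malformed.
    let actions: Vec<Action> = serde_json::from_value(column.unwrap_or_default())
        .ok()
        .unwrap_or_default();

    println!("{actions:?}");
}

The .ok().unwrap_or_default() chain mirrors the diff's choice to fall back to an empty list when the aggregate column is NULL or fails to deserialize, much like the old split-and-parse code silently skipped malformed entries.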
@@ -117,7 +117,7 @@ pub struct TeamId(pub i64);
 #[sqlx(transparent)]
 pub struct TeamMemberId(pub i64);

-#[derive(Copy, Clone, Debug, Type, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Deserialize)]
 #[sqlx(transparent)]
 pub struct ProjectId(pub i64);
 #[derive(Copy, Clone, Debug, Type)]
@@ -130,11 +130,11 @@ pub struct StatusId(pub i32);
 #[derive(Copy, Clone, Debug, Type)]
 #[sqlx(transparent)]
 pub struct SideTypeId(pub i32);
-#[derive(Copy, Clone, Debug, Type)]
+#[derive(Copy, Clone, Debug, Type, Deserialize)]
 #[sqlx(transparent)]
 pub struct DonationPlatformId(pub i32);

-#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Deserialize)]
 #[sqlx(transparent)]
 pub struct VersionId(pub i64);
 #[derive(Copy, Clone, Debug, Type, Deserialize)]
@@ -154,7 +154,7 @@ pub struct ReportId(pub i64);
 #[sqlx(transparent)]
 pub struct ReportTypeId(pub i32);

-#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq)]
+#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize)]
 #[sqlx(transparent)]
 pub struct FileId(pub i64);

@@ -162,10 +162,10 @@ pub struct FileId(pub i64);
 #[sqlx(transparent)]
 pub struct StateId(pub i64);

-#[derive(Copy, Clone, Debug, Type)]
+#[derive(Copy, Clone, Debug, Type, Deserialize)]
 #[sqlx(transparent)]
 pub struct NotificationId(pub i64);
-#[derive(Copy, Clone, Debug, Type)]
+#[derive(Copy, Clone, Debug, Type, Deserialize)]
 #[sqlx(transparent)]
 pub struct NotificationActionId(pub i32);

@@ -1,6 +1,7 @@
 use super::ids::*;
 use crate::database::models::DatabaseError;
 use chrono::{DateTime, Utc};
+use serde::Deserialize;

 pub struct NotificationBuilder {
 pub notification_type: Option<String>,
@@ -27,6 +28,7 @@ pub struct Notification {
 pub actions: Vec<NotificationAction>,
 }

+#[derive(Deserialize)]
 pub struct NotificationAction {
 pub id: NotificationActionId,
 pub notification_id: NotificationId,
@@ -124,7 +126,7 @@ impl Notification {
 let result = sqlx::query!(
 "
 SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
-ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
+JSONB_AGG(DISTINCT TO_JSONB(na)) filter (where na.id is not null) actions
 FROM notifications n
 LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
 WHERE n.id = $1
@@ -136,24 +138,6 @@ impl Notification {
 .await?;

 if let Some(row) = result {
-let mut actions: Vec<NotificationAction> = Vec::new();
-
-row.actions.unwrap_or_default().into_iter().for_each(|x| {
-let action: Vec<&str> = x.split(" |||| ").collect();
-
-if action.len() >= 3 {
-actions.push(NotificationAction {
-id: NotificationActionId(
-action[0].parse().unwrap_or(0),
-),
-notification_id: id,
-title: action[1].to_string(),
-action_route_method: action[3].to_string(),
-action_route: action[2].to_string(),
-});
-}
-});
-
 Ok(Some(Notification {
 id,
 user_id: UserId(row.user_id),
@@ -163,7 +147,11 @@ impl Notification {
 link: row.link,
 read: row.read,
 created: row.created,
-actions,
+actions: serde_json::from_value(
+row.actions.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default(),
 }))
 } else {
 Ok(None)
@@ -184,7 +172,7 @@ impl Notification {
 sqlx::query!(
 "
 SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
-ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
+JSONB_AGG(DISTINCT TO_JSONB(na)) filter (where na.id is not null) actions
 FROM notifications n
 LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
 WHERE n.id = ANY($1)
@@ -197,21 +185,6 @@ impl Notification {
 .try_filter_map(|e| async {
 Ok(e.right().map(|row| {
 let id = NotificationId(row.id);
-let mut actions: Vec<NotificationAction> = Vec::new();
-
-row.actions.unwrap_or_default().into_iter().for_each(|x| {
-let action: Vec<&str> = x.split(" |||| ").collect();
-
-if action.len() >= 3 {
-actions.push(NotificationAction {
-id: NotificationActionId(action[0].parse().unwrap_or(0)),
-notification_id: id,
-title: action[1].to_string(),
-action_route_method: action[3].to_string(),
-action_route: action[2].to_string(),
-});
-}
-});

 Notification {
 id,
@@ -222,7 +195,11 @@ impl Notification {
 link: row.link,
 read: row.read,
 created: row.created,
-actions,
+actions: serde_json::from_value(
+row.actions.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default(),
 }
 }))
 })
@@ -242,7 +219,7 @@ impl Notification {
 sqlx::query!(
 "
 SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
-ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
+JSONB_AGG(DISTINCT TO_JSONB(na)) filter (where na.id is not null) actions
 FROM notifications n
 LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
 WHERE n.user_id = $1
@@ -254,21 +231,6 @@ impl Notification {
 .try_filter_map(|e| async {
 Ok(e.right().map(|row| {
 let id = NotificationId(row.id);
-let mut actions: Vec<NotificationAction> = Vec::new();
-
-row.actions.unwrap_or_default().into_iter().for_each(|x| {
-let action: Vec<&str> = x.split(" |||| ").collect();
-
-if action.len() >= 3 {
-actions.push(NotificationAction {
-id: NotificationActionId(action[0].parse().unwrap_or(0)),
-notification_id: id,
-title: action[1].to_string(),
-action_route_method: action[3].to_string(),
-action_route: action[2].to_string(),
-});
-}
-});

 Notification {
 id,
@@ -279,7 +241,11 @@ impl Notification {
 link: row.link,
 read: row.read,
 created: row.created,
-actions,
+actions: serde_json::from_value(
+row.actions.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default(),
 }
 }))
 })
@@ -1,11 +1,10 @@
 use super::ids::*;
-use crate::database::models::convert_postgres_date;
 use crate::models::projects::ProjectStatus;
 use chrono::{DateTime, Utc};
+use serde::Deserialize;

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Deserialize)]
 pub struct DonationUrl {
-pub project_id: ProjectId,
 pub platform_id: DonationPlatformId,
 pub platform_short: String,
 pub platform_name: String,
@@ -15,6 +14,7 @@ pub struct DonationUrl {
 impl DonationUrl {
 pub async fn insert(
 &self,
+project_id: ProjectId,
 transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
 ) -> Result<(), sqlx::error::Error> {
 sqlx::query!(
@@ -26,7 +26,7 @@ impl DonationUrl {
 $1, $2, $3
 )
 ",
-self.project_id as ProjectId,
+project_id as ProjectId,
 self.platform_id as DonationPlatformId,
 self.url,
 )
@@ -37,9 +37,8 @@ impl DonationUrl {
 }
 }

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Deserialize)]
 pub struct GalleryItem {
-pub project_id: ProjectId,
 pub image_url: String,
 pub featured: bool,
 pub title: Option<String>,
@@ -50,6 +49,7 @@ pub struct GalleryItem {
 impl GalleryItem {
 pub async fn insert(
 &self,
+project_id: ProjectId,
 transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
 ) -> Result<(), sqlx::error::Error> {
 sqlx::query!(
@@ -61,7 +61,7 @@ impl GalleryItem {
 $1, $2, $3, $4, $5
 )
 ",
-self.project_id as ProjectId,
+project_id as ProjectId,
 self.image_url,
 self.featured,
 self.title,
@@ -143,14 +143,12 @@ impl ProjectBuilder {
 version.insert(&mut *transaction).await?;
 }

-for mut donation in self.donation_urls {
-donation.project_id = self.project_id;
-donation.insert(&mut *transaction).await?;
+for donation in self.donation_urls {
+donation.insert(self.project_id, &mut *transaction).await?;
 }

-for mut gallery in self.gallery_items {
-gallery.project_id = self.project_id;
-gallery.insert(&mut *transaction).await?;
+for gallery in self.gallery_items {
+gallery.insert(self.project_id, &mut *transaction).await?;
 }

 for category in self.categories {
@@ -667,10 +665,11 @@ impl Project {
 m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
 m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
 cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, m.flame_anvil_project flame_anvil_project, m.flame_anvil_user flame_anvil_user, m.webhook_sent webhook_sent,
-ARRAY_AGG(DISTINCT c.category || ' |||| ' || mc.is_additional) filter (where c.category is not null) categories,
-ARRAY_AGG(DISTINCT v.id || ' |||| ' || v.date_published) filter (where v.id is not null) versions,
-ARRAY_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ')) filter (where mg.image_url is not null) gallery,
-ARRAY_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url) filter (where md.joining_platform_id is not null) donations
+ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
+ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
+JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,
+JSONB_AGG(DISTINCT TO_JSONB(mg)) filter (where mg.image_url is not null) gallery,
+JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations
 FROM mods m
 INNER JOIN project_types pt ON pt.id = m.project_type
 INNER JOIN side_types cs ON m.client_side = cs.id
@@ -691,23 +690,6 @@ impl Project {
 .await?;

 if let Some(m) = result {
-let categories_raw = m.categories.unwrap_or_default();
-
-let mut categories = Vec::new();
-let mut additional_categories = Vec::new();
-
-for category in categories_raw {
-let category: Vec<&str> = category.split(" |||| ").collect();
-
-if category.len() >= 2 {
-if category[1].parse::<bool>().ok().unwrap_or_default() {
-additional_categories.push(category[0].to_string());
-} else {
-categories.push(category[0].to_string());
-}
-}
-}
-
 Ok(Some(QueryProject {
 inner: Project {
 id: ProjectId(m.id),
@@ -743,86 +725,38 @@ impl Project {
 webhook_sent: m.webhook_sent,
 },
 project_type: m.project_type_name,
-categories,
-additional_categories,
+categories: m.categories.unwrap_or_default(),
+additional_categories: m
+.additional_categories
+.unwrap_or_default(),
 versions: {
-let versions = m.versions.unwrap_or_default();
-
-let mut v = versions
-.into_iter()
-.flat_map(|x| {
-let version: Vec<&str> =
-x.split(" |||| ").collect();
-
-if version.len() >= 2 {
-Some((
-VersionId(
-version[0].parse().unwrap_or_default(),
-),
-convert_postgres_date(version[1])
-.timestamp(),
-))
-} else {
-None
-}
-})
-.collect::<Vec<(VersionId, i64)>>();
-
-v.sort_by(|a, b| a.1.cmp(&b.1));
-
-v.into_iter().map(|x| x.0).collect()
+#[derive(Deserialize)]
+struct Version {
+pub id: VersionId,
+pub date_published: DateTime<Utc>,
+}
+
+let mut versions: Vec<Version> =
+serde_json::from_value(m.versions.unwrap_or_default())
+.ok()
+.unwrap_or_default();
+
+versions.sort_by(|a, b| {
+a.date_published.cmp(&b.date_published)
+});
+
+versions.into_iter().map(|x| x.id).collect()
 },
-donation_urls: m
-.donations
-.unwrap_or_default()
-.into_iter()
-.flat_map(|d| {
-let strings: Vec<&str> = d.split(" |||| ").collect();
-
-if strings.len() >= 3 {
-Some(DonationUrl {
-project_id: id,
-platform_id: DonationPlatformId(
-strings[0].parse().unwrap_or(0),
-),
-platform_short: strings[1].to_string(),
-platform_name: strings[2].to_string(),
-url: strings[3].to_string(),
-})
-} else {
-None
-}
-})
-.collect(),
-gallery_items: m
-.gallery
-.unwrap_or_default()
-.into_iter()
-.flat_map(|d| {
-let strings: Vec<&str> = d.split(" |||| ").collect();
-
-if strings.len() >= 5 {
-Some(GalleryItem {
-project_id: id,
-image_url: strings[0].to_string(),
-featured: strings[1].parse().unwrap_or(false),
-title: if strings[3] == " " {
-None
-} else {
-Some(strings[3].to_string())
-},
-description: if strings[4] == " " {
-None
-} else {
-Some(strings[4].to_string())
-},
-created: convert_postgres_date(strings[2]),
-})
-} else {
-None
-}
-})
-.collect(),
+gallery_items: serde_json::from_value(
+m.gallery.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default(),
+donation_urls: serde_json::from_value(
+m.donations.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default(),
 client_side: crate::models::projects::SideType::from_str(
 &m.client_side_type,
 ),
@@ -854,10 +788,11 @@ impl Project {
 m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
 m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
 cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, m.flame_anvil_project flame_anvil_project, m.flame_anvil_user flame_anvil_user, m.webhook_sent,
-ARRAY_AGG(DISTINCT c.category || ' |||| ' || mc.is_additional) filter (where c.category is not null) categories,
-ARRAY_AGG(DISTINCT v.id || ' |||| ' || v.date_published) filter (where v.id is not null) versions,
-ARRAY_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ')) filter (where mg.image_url is not null) gallery,
-ARRAY_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url) filter (where md.joining_platform_id is not null) donations
+ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
+ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
+JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,
+JSONB_AGG(DISTINCT TO_JSONB(mg)) filter (where mg.image_url is not null) gallery,
+JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations
 FROM mods m
 INNER JOIN project_types pt ON pt.id = m.project_type
 INNER JOIN side_types cs ON m.client_side = cs.id
@@ -879,24 +814,6 @@ impl Project {
 Ok(e.right().map(|m| {
 let id = m.id;

-let categories_raw = m.categories.unwrap_or_default();
-
-let mut categories = Vec::new();
-let mut additional_categories = Vec::new();
-
-for category in categories_raw {
-let category: Vec<&str> =
-category.split(" |||| ").collect();
-
-if category.len() >= 2 {
-if category[1].parse::<bool>().ok().unwrap_or_default() {
-additional_categories.push(category[0].to_string());
-} else {
-categories.push(category[0].to_string());
-}
-}
-}
-
 QueryProject {
 inner: Project {
 id: ProjectId(id),
@@ -934,74 +851,31 @@ impl Project {
 webhook_sent: m.webhook_sent,
 },
 project_type: m.project_type_name,
-categories,
-additional_categories,
+categories: m.categories.unwrap_or_default(),
+additional_categories: m.additional_categories.unwrap_or_default(),
 versions: {
-let versions = m.versions.unwrap_or_default();
-
-let mut v = versions
-.into_iter()
-.flat_map(|x| {
-let version: Vec<&str> =
-x.split(" |||| ").collect();
-
-if version.len() >= 2 {
-Some((
-VersionId(version[0].parse().unwrap_or_default()),
-convert_postgres_date(version[1])
-.timestamp(),
-))
-} else {
-None
-}
-})
-.collect::<Vec<(VersionId, i64)>>();
-
-v.sort_by(|a, b| a.1.cmp(&b.1));
-
-v.into_iter().map(|x| x.0).collect()
+#[derive(Deserialize)]
+struct Version {
+pub id: VersionId,
+pub date_published: DateTime<Utc>,
+}
+
+let mut versions: Vec<Version> = serde_json::from_value(
+m.versions.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default();
+
+versions.sort_by(|a, b| a.date_published.cmp(&b.date_published));
+
+versions.into_iter().map(|x| x.id).collect()
 },
-gallery_items: m
-.gallery
-.unwrap_or_default()
-.into_iter()
-.flat_map(|d| {
-let strings: Vec<&str> = d.split(" |||| ").collect();
-
-if strings.len() >= 5 {
-Some(GalleryItem {
-project_id: ProjectId(id),
-image_url: strings[0].to_string(),
-featured: strings[1].parse().unwrap_or(false),
-title: if strings[3] == " " { None } else { Some(strings[3].to_string()) },
-description: if strings[4] == " " { None } else { Some(strings[4].to_string()) },
-created: convert_postgres_date(strings[2])
-})
-} else {
-None
-}
-})
-.collect(),
-donation_urls: m
-.donations
-.unwrap_or_default()
-.into_iter()
-.flat_map(|d| {
-let strings: Vec<&str> = d.split(" |||| ").collect();
-
-if strings.len() >= 3 {
-Some(DonationUrl {
-project_id: ProjectId(id),
-platform_id: DonationPlatformId(strings[0].parse().unwrap_or(0)),
-platform_short: strings[1].to_string(),
-platform_name: strings[2].to_string(),
-url: strings[3].to_string(),
-})
-} else {
-None
-}
-})
-.collect(),
+gallery_items: serde_json::from_value(
+m.gallery.unwrap_or_default(),
+).ok().unwrap_or_default(),
+donation_urls: serde_json::from_value(
+m.donations.unwrap_or_default(),
+).ok().unwrap_or_default(),
 client_side: crate::models::projects::SideType::from_str(&m.client_side_type),
 server_side: crate::models::projects::SideType::from_str(&m.server_side_type),
 }}))
@@ -1,8 +1,8 @@
 use super::ids::*;
 use super::DatabaseError;
-use crate::database::models::convert_postgres_date;
 use crate::models::projects::VersionStatus;
 use chrono::{DateTime, Utc};
+use serde::Deserialize;
 use std::cmp::Ordering;
 use std::collections::HashMap;

@@ -601,10 +601,11 @@ impl Version {
 SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
 v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
 v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
-ARRAY_AGG(DISTINCT gv.version || ' |||| ' || gv.created) filter (where gv.version is not null) game_versions, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
-ARRAY_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename) filter (where f.id is not null) files,
-ARRAY_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id) filter (where h.hash is not null) hashes,
-ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' ')) filter (where d.dependency_type is not null) dependencies
+JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
+ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
+JSONB_AGG(DISTINCT TO_JSONB(f)) filter (where f.id is not null) files,
+JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
+JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
 FROM versions v
 LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
 LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
@@ -641,59 +642,57 @@ impl Version {
 .map(|x| VersionStatus::from_str(&x)),
 },
 files: {
-let hashes: Vec<(FileId, String, Vec<u8>)> = v
-.hashes
-.unwrap_or_default()
-.into_iter()
-.flat_map(|f| {
-let hash: Vec<&str> = f.split(" |||| ").collect();
-
-if hash.len() >= 3 {
-Some((
-FileId(hash[2].parse().unwrap_or(0)),
-hash[0].to_string(),
-hash[1].to_string().into_bytes(),
-))
-} else {
-None
-}
-})
-.collect();
-
-let mut files: Vec<QueryFile> = v
-.files
-.unwrap_or_default()
-.into_iter()
-.flat_map(|f| {
-let file: Vec<&str> = f.split(" |||| ").collect();
-
-if file.len() >= 5 {
-let file_id =
-FileId(file[0].parse().unwrap_or(0));
-let mut file_hashes = HashMap::new();
-
-for hash in &hashes {
-if (hash.0).0 == file_id.0 {
-file_hashes.insert(
-hash.1.clone(),
-hash.2.clone(),
-);
-}
-}
-
-Some(QueryFile {
-id: file_id,
-url: file[3].to_string(),
-filename: file[4].to_string(),
-hashes: file_hashes,
-primary: file[1].parse().unwrap_or(false),
-size: file[2].parse().unwrap_or(0),
-})
-} else {
-None
-}
-})
-.collect();
+#[derive(Deserialize)]
+struct Hash {
+pub file_id: FileId,
+pub algorithm: String,
+pub hash: Vec<u8>,
+}
+
+#[derive(Deserialize)]
+struct File {
+pub id: FileId,
+pub url: String,
+pub filename: String,
+pub primary: bool,
+pub size: u32,
+}
+
+let hashes: Vec<Hash> =
+serde_json::from_value(v.hashes.unwrap_or_default())
+.ok()
+.unwrap_or_default();
+
+let files: Vec<File> =
+serde_json::from_value(v.files.unwrap_or_default())
+.ok()
+.unwrap_or_default();
+
+let mut files = files
+.into_iter()
+.map(|x| {
+let mut file_hashes = HashMap::new();
+
+for hash in &hashes {
+if hash.file_id == x.id {
+file_hashes.insert(
+hash.algorithm.clone(),
+hash.hash.clone(),
+);
+}
+}
+
+QueryFile {
+id: x.id,
+url: x.url,
+filename: x.filename,
+hashes: file_hashes,
+primary: x.primary,
+size: x.size,
+}
+})
+.collect::<Vec<_>>();

 files.sort_by(|a, b| {
 if a.primary {
 Ordering::Less
@@ -703,69 +702,33 @@ impl Version {
 a.filename.cmp(&b.filename)
 }
 });

 files
 },
 game_versions: {
-let game_versions = v.game_versions.unwrap_or_default();
-
-let mut gv = game_versions
-.into_iter()
-.flat_map(|x| {
-let version: Vec<&str> =
-x.split(" |||| ").collect();
-
-if version.len() >= 2 {
-Some((
-version[0].to_string(),
-convert_postgres_date(version[1])
-.timestamp(),
-))
-} else {
-None
-}
-})
-.collect::<Vec<(String, i64)>>();
-
-gv.sort_by(|a, b| a.1.cmp(&b.1));
-
-gv.into_iter().map(|x| x.0).collect()
+#[derive(Deserialize)]
+struct GameVersion {
+pub version: String,
+pub created: DateTime<Utc>,
+}
+
+let mut game_versions: Vec<GameVersion> =
+serde_json::from_value(
+v.game_versions.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default();
+
+game_versions.sort_by(|a, b| a.created.cmp(&b.created));
+
+game_versions.into_iter().map(|x| x.version).collect()
 },
 loaders: v.loaders.unwrap_or_default(),
-dependencies: v
-.dependencies
-.unwrap_or_default()
-.into_iter()
-.flat_map(|f| {
-let dependency: Vec<&str> = f.split(" |||| ").collect();
-
-if dependency.len() >= 4 {
-Some(QueryDependency {
-project_id: match dependency[1] {
-"0" => None,
-_ => match dependency[1].parse() {
-Ok(x) => Some(ProjectId(x)),
-Err(_) => None,
-},
-},
-version_id: match dependency[0] {
-"0" => None,
-_ => match dependency[0].parse() {
-Ok(x) => Some(VersionId(x)),
-Err(_) => None,
-},
-},
-file_name: if dependency[3] == " " {
-None
-} else {
-Some(dependency[3].to_string())
-},
-dependency_type: dependency[2].to_string(),
-})
-} else {
-None
-}
-})
-.collect(),
+dependencies: serde_json::from_value(
+v.dependencies.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default(),
 }))
 } else {
 Ok(None)
@@ -788,10 +751,11 @@ impl Version {
 SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
 v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
 v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
-ARRAY_AGG(DISTINCT gv.version || ' |||| ' || gv.created) filter (where gv.version is not null) game_versions, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
-ARRAY_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename) filter (where f.id is not null) files,
-ARRAY_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id) filter (where h.hash is not null) hashes,
-ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' ')) filter (where d.dependency_type is not null) dependencies
+JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
+ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
+JSONB_AGG(DISTINCT TO_JSONB(f)) filter (where f.id is not null) files,
+JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
+JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
 FROM versions v
 LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
 LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
@@ -827,50 +791,56 @@ impl Version {
 .map(|x| VersionStatus::from_str(&x)),
 },
 files: {
-let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default()
-.into_iter()
-.flat_map(|f| {
-let hash: Vec<&str> = f.split(" |||| ").collect();
-
-if hash.len() >= 3 {
-Some((
-FileId(hash[2].parse().unwrap_or(0)),
-hash[0].to_string(),
-hash[1].to_string().into_bytes(),
-))
-} else {
-None
-}
-}).collect();
-
-let mut files: Vec<QueryFile> = v.files.unwrap_or_default()
-.into_iter()
-.flat_map(|f| {
-let file: Vec<&str> = f.split(" |||| ").collect();
-
-if file.len() >= 5 {
-let file_id = FileId(file[0].parse().unwrap_or(0));
-let mut file_hashes = HashMap::new();
-
-for hash in &hashes {
-if (hash.0).0 == file_id.0 {
-file_hashes.insert(hash.1.clone(), hash.2.clone());
-}
-}
-
-Some(QueryFile {
-id: file_id,
-url: file[3].to_string(),
-filename: file[4].to_string(),
-hashes: file_hashes,
-primary: file[1].parse().unwrap_or(false),
-size: file[2].parse().unwrap_or(0),
-})
-} else {
-None
-}
-})
-.collect();
+#[derive(Deserialize)]
+struct Hash {
+pub file_id: FileId,
+pub algorithm: String,
+pub hash: Vec<u8>,
+}
+
+#[derive(Deserialize)]
+struct File {
+pub id: FileId,
+pub url: String,
+pub filename: String,
+pub primary: bool,
+pub size: u32,
+}
+
+let hashes: Vec<Hash> = serde_json::from_value(
+v.hashes.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default();
+
+let files: Vec<File> = serde_json::from_value(
+v.files.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default();
+
+let mut files = files.into_iter().map(|x| {
+let mut file_hashes = HashMap::new();
+
+for hash in &hashes {
+if hash.file_id == x.id {
+file_hashes.insert(
+hash.algorithm.clone(),
+hash.hash.clone(),
+);
+}
+}
+
+QueryFile {
+id: x.id,
+url: x.url,
+filename: x.filename,
+hashes: file_hashes,
+primary: x.primary,
+size: x.size,
+}
+}).collect::<Vec<_>>();

 files.sort_by(|a, b| {
 if a.primary {
 Ordering::Less
@@ -880,68 +850,32 @@ impl Version {
 a.filename.cmp(&b.filename)
 }
 });

 files
 },
 game_versions: {
-let game_versions = v
-.game_versions
-.unwrap_or_default();
-
-let mut gv = game_versions
-.into_iter()
-.flat_map(|x| {
-let version: Vec<&str> = x.split(" |||| ").collect();
-
-if version.len() >= 2 {
-Some((version[0].to_string(), convert_postgres_date(version[1]).timestamp()))
-} else {
-None
-}
-})
-.collect::<Vec<(String, i64)>>();
-
-gv.sort_by(|a, b| a.1.cmp(&b.1));
-
-gv.into_iter()
-.map(|x| x.0)
-.collect()
+#[derive(Deserialize)]
+struct GameVersion {
+pub version: String,
+pub created: DateTime<Utc>,
+}
+
+let mut game_versions: Vec<GameVersion> = serde_json::from_value(
+v.game_versions.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default();
+
+game_versions.sort_by(|a, b| a.created.cmp(&b.created));
+
+game_versions.into_iter().map(|x| x.version).collect()
 },
 loaders: v.loaders.unwrap_or_default(),
-dependencies: v.dependencies
-.unwrap_or_default()
-.into_iter()
-.flat_map(|f| {
-let dependency: Vec<&str> = f.split(" |||| ").collect();
-
-if dependency.len() >= 4 {
-Some(QueryDependency {
-project_id: match dependency[1] {
-"0" => None,
-_ => match dependency[1].parse() {
-Ok(x) => Some(ProjectId(x)),
-Err(_) => None,
-},
-},
-version_id: match dependency[0] {
-"0" => None,
-_ => match dependency[0].parse() {
-Ok(x) => Some(VersionId(x)),
-Err(_) => None,
-},
-},
-file_name: if dependency[3] == " " {
-None
-} else {
-Some(dependency[3].to_string())
-},
-dependency_type: dependency[2].to_string(),
-})
-} else {
-None
-}
-}).collect(),
+dependencies: serde_json::from_value(
+v.dependencies.unwrap_or_default(),
+)
+.ok()
+.unwrap_or_default(),
 }
 ))
 })
@@ -960,7 +894,7 @@ pub struct QueryVersion {
 pub dependencies: Vec<QueryDependency>,
 }

-#[derive(Clone)]
+#[derive(Clone, Deserialize)]
 pub struct QueryDependency {
 pub project_id: Option<ProjectId>,
 pub version_id: Option<VersionId>,
@@ -277,7 +277,7 @@ pub async fn project_create(
 &***file_host,
 &flame_anvil_queue,
 &mut uploaded_files,
-&*client,
+&client,
 )
 .await;

@@ -723,7 +723,6 @@ pub async fn project_create_inner(
 })?;

 donation_urls.push(models::project_item::DonationUrl {
-project_id: project_id.into(),
 platform_id,
 platform_short: "".to_string(),
 platform_name: "".to_string(),
@@ -759,7 +758,6 @@ pub async fn project_create_inner(
 gallery_items: gallery_urls
 .iter()
 .map(|x| models::project_item::GalleryItem {
-project_id: project_id.into(),
 image_url: x.url.clone(),
 featured: x.featured,
 title: x.title.clone(),
@@ -500,7 +500,7 @@ pub async fn project_edit(
 {
 crate::util::webhook::send_discord_webhook(
 project_item.inner.id.into(),
-&*pool,
+&pool,
 webhook_url,
 )
 .await
@@ -529,7 +529,7 @@ pub async fn project_edit(
 {
 crate::util::webhook::send_discord_webhook(
 project_item.inner.id.into(),
-&*pool,
+&pool,
 webhook_url,
 )
 .await
@@ -1483,14 +1483,13 @@ pub async fn add_gallery_item(
 }

 database::models::project_item::GalleryItem {
-project_id: project_item.id,
 image_url: format!("{}/{}", cdn_url, url),
 featured: item.featured,
 title: item.title,
 description: item.description,
 created: Utc::now(),
 }
-.insert(&mut transaction)
+.insert(project_item.id, &mut transaction)
 .await?;

 transaction.commit().await?;
@@ -133,7 +133,7 @@ pub async fn mod_create(
 &***file_host,
 &flame_anvil_queue,
 &mut uploaded_files,
-&*client,
+&client,
 )
 .await;

@@ -18,10 +18,12 @@ pub async fn index_local(
 m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,
 m.team_id team_id, m.license license, m.slug slug, m.status status_name,
 cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, u.username username,
-ARRAY_AGG(DISTINCT c.category || ' |||| ' || mc.is_additional) filter (where c.category is not null) categories,
+ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
+ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
 ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,
 ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null) versions,
-ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null) gallery
+ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,
+ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery
 FROM mods m
 LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
 LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
@@ -46,27 +48,13 @@ pub async fn index_local(
 .fetch_many(&pool)
 .try_filter_map(|e| async {
 Ok(e.right().map(|m| {
-let categories_raw = m.categories.unwrap_or_default();
-
-let mut additional_categories = Vec::new();
-let mut categories = Vec::new();
-
-for category in categories_raw {
-let category: Vec<&str> = category.split(" |||| ").collect();
-
-if category.len() >= 2 {
-if category[1].parse::<bool>().ok().unwrap_or_default() {
-additional_categories.push(category[0].to_string());
-} else {
-categories.push(category[0].to_string());
-}
-}
-}
-
+let mut additional_categories = m.additional_categories.unwrap_or_default();
+let mut categories = m.categories.unwrap_or_default();
 categories.append(&mut m.loaders.unwrap_or_default());

 let display_categories = categories.clone();
-categories.append(&mut additional_categories.clone());
+categories.append(&mut additional_categories);

 let versions = m.versions.unwrap_or_default();

@@ -82,8 +70,11 @@ pub async fn index_local(
 _ => false,
 };

+let mut gallery = m.featured_gallery.unwrap_or_default();
+gallery.append(&mut m.gallery.unwrap_or_default());
+
 UploadSearchProject {
-project_id: format!("{}", project_id),
+project_id: project_id.to_string(),
 title: m.title,
 description: m.description,
 categories,
@@ -102,7 +93,7 @@ pub async fn index_local(
 server_side: m.server_side_type,
 slug: m.slug,
 project_type: m.project_type_name,
-gallery: m.gallery.unwrap_or_default(),
+gallery,
 display_categories,
 open_source,
 }
@@ -72,7 +72,8 @@ pub async fn send_discord_webhook(
 ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,
 JSONB_AGG(DISTINCT TO_JSONB(gv)) filter (where gv.version is not null) versions,
 JSONB_AGG(DISTINCT TO_JSONB(agv)) filter (where gv.version is not null) all_game_versions,
-ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null) gallery
+ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,
+ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery
 FROM mods m
 LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE
 LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
@@ -95,7 +96,7 @@ pub async fn send_discord_webhook(
 &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::<Vec<String>>(),
 crate::models::teams::OWNER_ROLE,
 )
-.fetch_optional(&*pool)
+.fetch_optional(pool)
 .await?;

 if let Some(project) = row {
@@ -106,19 +107,13 @@ pub async fn send_discord_webhook(

 let versions: Vec<GameVersion> =
 serde_json::from_value(project.versions.unwrap_or_default())
-.map_err(|err| {
-ApiError::DiscordError(
-"Error while sending projects webhook".to_string(),
-)
-})?;
+.ok()
+.unwrap_or_default();
 let all_game_versions: Vec<GameVersion> = serde_json::from_value(
 project.all_game_versions.unwrap_or_default(),
 )
-.map_err(|err| {
-ApiError::DiscordError(
-"Error while sending projects webhook".to_string(),
-)
-})?;
+.ok()
+.unwrap_or_default();

 if !categories.is_empty() {
 fields.push(DiscordEmbedField {
@@ -170,7 +165,7 @@ pub async fn send_discord_webhook(
 }

 if !versions.is_empty() {
-let mut formatted_game_versions: String =
+let formatted_game_versions: String =
 get_gv_range(versions, all_game_versions);

 fields.push(DiscordEmbedField {
@@ -204,11 +199,14 @@ pub async fn send_discord_webhook(
 thumbnail: DiscordEmbedThumbnail {
 url: project.icon_url,
 },
-image: project.gallery.unwrap_or_default().first().map(|x| {
-DiscordEmbedImage {
-url: Some(x.to_string()),
-}
-}),
+image: if let Some(first) =
+project.featured_gallery.unwrap_or_default().first()
+{
+Some(first.clone())
+} else {
+project.gallery.unwrap_or_default().first().cloned()
+}
+.map(|x| DiscordEmbedImage { url: Some(x) }),
 footer: Some(DiscordEmbedFooter {
 text: "Modrinth".to_string(),
 icon_url: Some(
@@ -231,7 +229,7 @@ pub async fn send_discord_webhook(
 })
 .send()
 .await
-.map_err(|err| {
+.map_err(|_| {
 ApiError::DiscordError(
 "Error while sending projects webhook".to_string(),
 )
@@ -261,8 +259,8 @@ fn get_gv_range(

 const MAX_VALUE: usize = 1000000;

-for i in 0..game_versions.len() {
-let current_version = &*game_versions[i].version;
+for (i, current_version) in game_versions.iter().enumerate() {
+let current_version = &current_version.version;

 let index = all_game_versions
 .iter()