Public Webhook Fixes (#493)
* Public discord webhook
* Switch to jsonb for most queries + make gallery featured first
* Run fmt + clippy + prepare
This commit is contained in:
parent e809f77461
commit 4da1871567
2346 sqlx-data.json
File diff suppressed because it is too large
@@ -117,7 +117,7 @@ pub struct TeamId(pub i64);
#[sqlx(transparent)]
pub struct TeamMemberId(pub i64);

#[derive(Copy, Clone, Debug, Type, PartialEq, Eq)]
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Deserialize)]
#[sqlx(transparent)]
pub struct ProjectId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
@@ -130,11 +130,11 @@ pub struct StatusId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
pub struct SideTypeId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
#[derive(Copy, Clone, Debug, Type, Deserialize)]
#[sqlx(transparent)]
pub struct DonationPlatformId(pub i32);

#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Deserialize)]
#[sqlx(transparent)]
pub struct VersionId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize)]
@@ -154,7 +154,7 @@ pub struct ReportId(pub i64);
#[sqlx(transparent)]
pub struct ReportTypeId(pub i32);

#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq)]
#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize)]
#[sqlx(transparent)]
pub struct FileId(pub i64);

@@ -162,10 +162,10 @@ pub struct FileId(pub i64);
#[sqlx(transparent)]
pub struct StateId(pub i64);

#[derive(Copy, Clone, Debug, Type)]
#[derive(Copy, Clone, Debug, Type, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
#[derive(Copy, Clone, Debug, Type, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationActionId(pub i32);
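The hunks above add Deserialize to the #[sqlx(transparent)] ID newtypes so that values coming back through the new JSONB aggregates can be decoded straight into them. A minimal sketch of why that works, using stand-in types rather than the repo's exact definitions (sqlx attributes omitted): serde treats a one-field tuple struct as its inner value, so objects built with jsonb_build_object('id', v.id, ...) map directly onto fields typed with these newtypes.

use serde::Deserialize;

// Stand-in for the repo's transparent ID newtypes; sqlx attributes omitted.
#[derive(Copy, Clone, Debug, Deserialize, PartialEq)]
struct VersionId(pub i64);

#[derive(Deserialize, Debug)]
struct VersionRef {
    id: VersionId,
}

fn main() {
    // A newtype struct deserializes from the bare inner value, so a JSON
    // object like { "id": 1234 } lands directly in the typed field.
    let v: VersionRef = serde_json::from_str(r#"{ "id": 1234 }"#).unwrap();
    assert_eq!(v.id, VersionId(1234));
}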
@@ -1,6 +1,7 @@
use super::ids::*;
use crate::database::models::DatabaseError;
use chrono::{DateTime, Utc};
use serde::Deserialize;

pub struct NotificationBuilder {
pub notification_type: Option<String>,
@@ -27,6 +28,7 @@ pub struct Notification {
pub actions: Vec<NotificationAction>,
}

#[derive(Deserialize)]
pub struct NotificationAction {
pub id: NotificationActionId,
pub notification_id: NotificationId,
@@ -124,7 +126,7 @@ impl Notification {
let result = sqlx::query!(
"
SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
JSONB_AGG(DISTINCT TO_JSONB(na)) filter (where na.id is not null) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = $1
@@ -136,24 +138,6 @@ impl Notification {
.await?;

if let Some(row) = result {
let mut actions: Vec<NotificationAction> = Vec::new();

row.actions.unwrap_or_default().into_iter().for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();

if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(
action[0].parse().unwrap_or(0),
),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});

Ok(Some(Notification {
id,
user_id: UserId(row.user_id),
@@ -163,7 +147,11 @@ impl Notification {
link: row.link,
read: row.read,
created: row.created,
actions,
actions: serde_json::from_value(
row.actions.unwrap_or_default(),
)
.ok()
.unwrap_or_default(),
}))
} else {
Ok(None)
@@ -184,7 +172,7 @@ impl Notification {
sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
JSONB_AGG(DISTINCT TO_JSONB(na)) filter (where na.id is not null) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = ANY($1)
@@ -197,21 +185,6 @@ impl Notification {
.try_filter_map(|e| async {
Ok(e.right().map(|row| {
let id = NotificationId(row.id);
let mut actions: Vec<NotificationAction> = Vec::new();

row.actions.unwrap_or_default().into_iter().for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();

if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});

Notification {
id,
@@ -222,7 +195,11 @@ impl Notification {
link: row.link,
read: row.read,
created: row.created,
actions,
actions: serde_json::from_value(
row.actions.unwrap_or_default(),
)
.ok()
.unwrap_or_default(),
}
}))
})
@@ -242,7 +219,7 @@ impl Notification {
sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
JSONB_AGG(DISTINCT TO_JSONB(na)) filter (where na.id is not null) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.user_id = $1
@@ -254,21 +231,6 @@ impl Notification {
.try_filter_map(|e| async {
Ok(e.right().map(|row| {
let id = NotificationId(row.id);
let mut actions: Vec<NotificationAction> = Vec::new();

row.actions.unwrap_or_default().into_iter().for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();

if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});

Notification {
id,
@@ -279,7 +241,11 @@ impl Notification {
link: row.link,
read: row.read,
created: row.created,
actions,
actions: serde_json::from_value(
row.actions.unwrap_or_default(),
)
.ok()
.unwrap_or_default(),
}
}))
})
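The notification queries now aggregate the joined notifications_actions rows with JSONB_AGG(DISTINCT TO_JSONB(na)), and the hand-rolled ' |||| ' splitting is replaced by a single serde_json::from_value call. The .ok().unwrap_or_default() chain means a NULL aggregate (a notification with no actions) or an unexpected shape just becomes an empty Vec. A small self-contained sketch of that fallback behaviour, with a reduced field set rather than the repo's full struct:

use serde::Deserialize;
use serde_json::{json, Value};

#[derive(Deserialize, Debug)]
struct NotificationAction {
    title: String,
    action_route: String,
    action_route_method: String,
}

// Mirrors the pattern used above: a missing/NULL aggregate or an
// unexpected shape degrades to an empty list instead of an error.
fn decode_actions(column: Option<Value>) -> Vec<NotificationAction> {
    serde_json::from_value(column.unwrap_or_default())
        .ok()
        .unwrap_or_default()
}

fn main() {
    assert!(decode_actions(None).is_empty());

    let rows = decode_actions(Some(json!([{
        "title": "View notification",
        "action_route": "/notifications",
        "action_route_method": "GET"
    }])));
    assert_eq!(rows.len(), 1);
}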
@@ -1,11 +1,10 @@
use super::ids::*;
use crate::database::models::convert_postgres_date;
use crate::models::projects::ProjectStatus;
use chrono::{DateTime, Utc};
use serde::Deserialize;

#[derive(Clone, Debug)]
#[derive(Clone, Debug, Deserialize)]
pub struct DonationUrl {
pub project_id: ProjectId,
pub platform_id: DonationPlatformId,
pub platform_short: String,
pub platform_name: String,
@@ -15,6 +14,7 @@ pub struct DonationUrl {
impl DonationUrl {
pub async fn insert(
&self,
project_id: ProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
sqlx::query!(
@@ -26,7 +26,7 @@ impl DonationUrl {
$1, $2, $3
)
",
self.project_id as ProjectId,
project_id as ProjectId,
self.platform_id as DonationPlatformId,
self.url,
)
@@ -37,9 +37,8 @@ impl DonationUrl {
}
}

#[derive(Clone, Debug)]
#[derive(Clone, Debug, Deserialize)]
pub struct GalleryItem {
pub project_id: ProjectId,
pub image_url: String,
pub featured: bool,
pub title: Option<String>,
@@ -50,6 +49,7 @@ pub struct GalleryItem {
impl GalleryItem {
pub async fn insert(
&self,
project_id: ProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
sqlx::query!(
@@ -61,7 +61,7 @@ impl GalleryItem {
$1, $2, $3, $4, $5
)
",
self.project_id as ProjectId,
project_id as ProjectId,
self.image_url,
self.featured,
self.title,
@@ -143,14 +143,12 @@ impl ProjectBuilder {
version.insert(&mut *transaction).await?;
}

for mut donation in self.donation_urls {
donation.project_id = self.project_id;
donation.insert(&mut *transaction).await?;
for donation in self.donation_urls {
donation.insert(self.project_id, &mut *transaction).await?;
}

for mut gallery in self.gallery_items {
gallery.project_id = self.project_id;
gallery.insert(&mut *transaction).await?;
for gallery in self.gallery_items {
gallery.insert(self.project_id, &mut *transaction).await?;
}

for category in self.categories {
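DonationUrl::insert and GalleryItem::insert now take the owning project_id as a parameter instead of requiring the caller to patch a project_id field on the struct first, so ProjectBuilder::insert can iterate without mut. A rough sketch of the call-site shape with the database write stubbed out (the real methods run sqlx queries inside a transaction; the sink here is purely illustrative):

// Stand-ins for the repo's types; the sqlx transaction is replaced by a Vec.
#[derive(Copy, Clone, Debug, PartialEq)]
struct ProjectId(i64);

#[derive(Clone, Debug)]
struct GalleryItem {
    image_url: String,
    featured: bool,
}

impl GalleryItem {
    // Stand-in for the real `insert`: the project id is passed in
    // explicitly rather than read from a field on the item.
    fn insert(&self, project_id: ProjectId, sink: &mut Vec<(ProjectId, String)>) {
        sink.push((project_id, self.image_url.clone()));
    }
}

fn main() {
    let project_id = ProjectId(42);
    let gallery_items = vec![
        GalleryItem { image_url: "https://cdn.example/one.png".into(), featured: true },
        GalleryItem { image_url: "https://cdn.example/two.png".into(), featured: false },
    ];

    let mut rows = Vec::new();
    // No `for mut gallery in ...` and no field patching needed any more.
    for gallery in &gallery_items {
        gallery.insert(project_id, &mut rows);
    }
    assert_eq!(rows.len(), 2);
    assert_eq!(rows[0].0, ProjectId(42));
}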
@@ -667,10 +665,11 @@ impl Project {
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, m.flame_anvil_project flame_anvil_project, m.flame_anvil_user flame_anvil_user, m.webhook_sent webhook_sent,
ARRAY_AGG(DISTINCT c.category || ' |||| ' || mc.is_additional) filter (where c.category is not null) categories,
ARRAY_AGG(DISTINCT v.id || ' |||| ' || v.date_published) filter (where v.id is not null) versions,
ARRAY_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ')) filter (where mg.image_url is not null) gallery,
ARRAY_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url) filter (where md.joining_platform_id is not null) donations
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,
JSONB_AGG(DISTINCT TO_JSONB(mg)) filter (where mg.image_url is not null) gallery,
JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations
FROM mods m
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN side_types cs ON m.client_side = cs.id
@@ -691,23 +690,6 @@ impl Project {
.await?;

if let Some(m) = result {
let categories_raw = m.categories.unwrap_or_default();

let mut categories = Vec::new();
let mut additional_categories = Vec::new();

for category in categories_raw {
let category: Vec<&str> = category.split(" |||| ").collect();

if category.len() >= 2 {
if category[1].parse::<bool>().ok().unwrap_or_default() {
additional_categories.push(category[0].to_string());
} else {
categories.push(category[0].to_string());
}
}
}

Ok(Some(QueryProject {
inner: Project {
id: ProjectId(m.id),
@@ -743,86 +725,38 @@ impl Project {
webhook_sent: m.webhook_sent,
},
project_type: m.project_type_name,
categories,
additional_categories,
categories: m.categories.unwrap_or_default(),
additional_categories: m
.additional_categories
.unwrap_or_default(),
versions: {
let versions = m.versions.unwrap_or_default();

let mut v = versions
.into_iter()
.flat_map(|x| {
let version: Vec<&str> =
x.split(" |||| ").collect();

if version.len() >= 2 {
Some((
VersionId(
version[0].parse().unwrap_or_default(),
),
convert_postgres_date(version[1])
.timestamp(),
))
} else {
None
#[derive(Deserialize)]
struct Version {
pub id: VersionId,
pub date_published: DateTime<Utc>,
}
})
.collect::<Vec<(VersionId, i64)>>();

v.sort_by(|a, b| a.1.cmp(&b.1));
let mut versions: Vec<Version> =
serde_json::from_value(m.versions.unwrap_or_default())
.ok()
.unwrap_or_default();

v.into_iter().map(|x| x.0).collect()
versions.sort_by(|a, b| {
a.date_published.cmp(&b.date_published)
});

versions.into_iter().map(|x| x.id).collect()
},
donation_urls: m
.donations
.unwrap_or_default()
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();

if strings.len() >= 3 {
Some(DonationUrl {
project_id: id,
platform_id: DonationPlatformId(
strings[0].parse().unwrap_or(0),
),
platform_short: strings[1].to_string(),
platform_name: strings[2].to_string(),
url: strings[3].to_string(),
})
} else {
None
}
})
.collect(),
gallery_items: m
.gallery
.unwrap_or_default()
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();

if strings.len() >= 5 {
Some(GalleryItem {
project_id: id,
image_url: strings[0].to_string(),
featured: strings[1].parse().unwrap_or(false),
title: if strings[3] == " " {
None
} else {
Some(strings[3].to_string())
},
description: if strings[4] == " " {
None
} else {
Some(strings[4].to_string())
},
created: convert_postgres_date(strings[2]),
})
} else {
None
}
})
.collect(),
gallery_items: serde_json::from_value(
m.gallery.unwrap_or_default(),
)
.ok()
.unwrap_or_default(),
donation_urls: serde_json::from_value(
m.donations.unwrap_or_default(),
)
.ok()
.unwrap_or_default(),
client_side: crate::models::projects::SideType::from_str(
&m.client_side_type,
),
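In the single-project query, versions now comes back as a JSONB array of {id, date_published} objects; a throwaway #[derive(Deserialize)] struct decodes it, the list is sorted chronologically, and only the ids are kept. A compact sketch of that decode-and-sort step, assuming chrono is built with its serde feature (as the crate's imports suggest); types here are simplified stand-ins:

use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde_json::json;

#[derive(Copy, Clone, Debug, Deserialize, PartialEq)]
struct VersionId(i64);

// Local helper mirroring the ad-hoc struct declared inside the query code.
#[derive(Deserialize)]
struct Version {
    id: VersionId,
    date_published: DateTime<Utc>,
}

fn main() {
    // Shape produced by JSONB_AGG(DISTINCT jsonb_build_object('id', v.id,
    // 'date_published', v.date_published)).
    let value = json!([
        { "id": 2, "date_published": "2022-03-01T00:00:00Z" },
        { "id": 1, "date_published": "2022-01-01T00:00:00Z" }
    ]);

    let mut versions: Vec<Version> =
        serde_json::from_value(value).ok().unwrap_or_default();
    versions.sort_by(|a, b| a.date_published.cmp(&b.date_published));

    let ids: Vec<VersionId> = versions.into_iter().map(|x| x.id).collect();
    assert_eq!(ids, vec![VersionId(1), VersionId(2)]);
}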
@@ -854,10 +788,11 @@ impl Project {
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, m.flame_anvil_project flame_anvil_project, m.flame_anvil_user flame_anvil_user, m.webhook_sent,
ARRAY_AGG(DISTINCT c.category || ' |||| ' || mc.is_additional) filter (where c.category is not null) categories,
ARRAY_AGG(DISTINCT v.id || ' |||| ' || v.date_published) filter (where v.id is not null) versions,
ARRAY_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ')) filter (where mg.image_url is not null) gallery,
ARRAY_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url) filter (where md.joining_platform_id is not null) donations
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,
JSONB_AGG(DISTINCT TO_JSONB(mg)) filter (where mg.image_url is not null) gallery,
JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations
FROM mods m
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN side_types cs ON m.client_side = cs.id
@@ -879,24 +814,6 @@ impl Project {
Ok(e.right().map(|m| {
let id = m.id;

let categories_raw = m.categories.unwrap_or_default();

let mut categories = Vec::new();
let mut additional_categories = Vec::new();

for category in categories_raw {
let category: Vec<&str> =
category.split(" |||| ").collect();

if category.len() >= 2 {
if category[1].parse::<bool>().ok().unwrap_or_default() {
additional_categories.push(category[0].to_string());
} else {
categories.push(category[0].to_string());
}
}
}

QueryProject {
inner: Project {
id: ProjectId(id),
@@ -934,74 +851,31 @@ impl Project {
webhook_sent: m.webhook_sent,
},
project_type: m.project_type_name,
categories,
additional_categories,
categories: m.categories.unwrap_or_default(),
additional_categories: m.additional_categories.unwrap_or_default(),
versions: {
let versions = m.versions.unwrap_or_default();

let mut v = versions
.into_iter()
.flat_map(|x| {
let version: Vec<&str> =
x.split(" |||| ").collect();

if version.len() >= 2 {
Some((
VersionId(version[0].parse().unwrap_or_default()),
convert_postgres_date(version[1])
.timestamp(),
))
} else {
None
#[derive(Deserialize)]
struct Version {
pub id: VersionId,
pub date_published: DateTime<Utc>,
}
})
.collect::<Vec<(VersionId, i64)>>();

v.sort_by(|a, b| a.1.cmp(&b.1));
let mut versions: Vec<Version> = serde_json::from_value(
m.versions.unwrap_or_default(),
)
.ok()
.unwrap_or_default();

v.into_iter().map(|x| x.0).collect()
versions.sort_by(|a, b| a.date_published.cmp(&b.date_published));

versions.into_iter().map(|x| x.id).collect()
},
gallery_items: m
.gallery
.unwrap_or_default()
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();

if strings.len() >= 5 {
Some(GalleryItem {
project_id: ProjectId(id),
image_url: strings[0].to_string(),
featured: strings[1].parse().unwrap_or(false),
title: if strings[3] == " " { None } else { Some(strings[3].to_string()) },
description: if strings[4] == " " { None } else { Some(strings[4].to_string()) },
created: convert_postgres_date(strings[2])
})
} else {
None
}
})
.collect(),
donation_urls: m
.donations
.unwrap_or_default()
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();

if strings.len() >= 3 {
Some(DonationUrl {
project_id: ProjectId(id),
platform_id: DonationPlatformId(strings[0].parse().unwrap_or(0)),
platform_short: strings[1].to_string(),
platform_name: strings[2].to_string(),
url: strings[3].to_string(),
})
} else {
None
}
})
.collect(),
gallery_items: serde_json::from_value(
m.gallery.unwrap_or_default(),
).ok().unwrap_or_default(),
donation_urls: serde_json::from_value(
m.donations.unwrap_or_default(),
).ok().unwrap_or_default(),
client_side: crate::models::projects::SideType::from_str(&m.client_side_type),
server_side: crate::models::projects::SideType::from_str(&m.server_side_type),
}}))
@@ -1,8 +1,8 @@
use super::ids::*;
use super::DatabaseError;
use crate::database::models::convert_postgres_date;
use crate::models::projects::VersionStatus;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use std::cmp::Ordering;
use std::collections::HashMap;
@@ -601,10 +601,11 @@ impl Version {
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
ARRAY_AGG(DISTINCT gv.version || ' |||| ' || gv.created) filter (where gv.version is not null) game_versions, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
ARRAY_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename) filter (where f.id is not null) files,
ARRAY_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id) filter (where h.hash is not null) hashes,
ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' ')) filter (where d.dependency_type is not null) dependencies
JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
JSONB_AGG(DISTINCT TO_JSONB(f)) filter (where f.id is not null) files,
JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
FROM versions v
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
@@ -641,59 +642,57 @@ impl Version {
.map(|x| VersionStatus::from_str(&x)),
},
files: {
let hashes: Vec<(FileId, String, Vec<u8>)> = v
.hashes
.unwrap_or_default()
.into_iter()
.flat_map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();

if hash.len() >= 3 {
Some((
FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
hash[1].to_string().into_bytes(),
))
} else {
None
#[derive(Deserialize)]
struct Hash {
pub file_id: FileId,
pub algorithm: String,
pub hash: Vec<u8>,
}
})
.collect();

let mut files: Vec<QueryFile> = v
.files
.unwrap_or_default()
#[derive(Deserialize)]
struct File {
pub id: FileId,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u32,
}

let hashes: Vec<Hash> =
serde_json::from_value(v.hashes.unwrap_or_default())
.ok()
.unwrap_or_default();

let files: Vec<File> =
serde_json::from_value(v.files.unwrap_or_default())
.ok()
.unwrap_or_default();

let mut files = files
.into_iter()
.flat_map(|f| {
let file: Vec<&str> = f.split(" |||| ").collect();

if file.len() >= 5 {
let file_id =
FileId(file[0].parse().unwrap_or(0));
.map(|x| {
let mut file_hashes = HashMap::new();

for hash in &hashes {
if (hash.0).0 == file_id.0 {
if hash.file_id == x.id {
file_hashes.insert(
hash.1.clone(),
hash.2.clone(),
hash.algorithm.clone(),
hash.hash.clone(),
);
}
}

Some(QueryFile {
id: file_id,
url: file[3].to_string(),
filename: file[4].to_string(),
QueryFile {
id: x.id,
url: x.url,
filename: x.filename,
hashes: file_hashes,
primary: file[1].parse().unwrap_or(false),
size: file[2].parse().unwrap_or(0),
})
} else {
None
primary: x.primary,
size: x.size,
}
})
.collect();
.collect::<Vec<_>>();

files.sort_by(|a, b| {
if a.primary {
Ordering::Less
@@ -703,69 +702,33 @@ impl Version {
a.filename.cmp(&b.filename)
}
});

files
},
game_versions: {
let game_versions = v.game_versions.unwrap_or_default();

let mut gv = game_versions
.into_iter()
.flat_map(|x| {
let version: Vec<&str> =
x.split(" |||| ").collect();

if version.len() >= 2 {
Some((
version[0].to_string(),
convert_postgres_date(version[1])
.timestamp(),
))
} else {
None
#[derive(Deserialize)]
struct GameVersion {
pub version: String,
pub created: DateTime<Utc>,
}
})
.collect::<Vec<(String, i64)>>();

gv.sort_by(|a, b| a.1.cmp(&b.1));
let mut game_versions: Vec<GameVersion> =
serde_json::from_value(
v.game_versions.unwrap_or_default(),
)
.ok()
.unwrap_or_default();

gv.into_iter().map(|x| x.0).collect()
game_versions.sort_by(|a, b| a.created.cmp(&b.created));

game_versions.into_iter().map(|x| x.version).collect()
},
loaders: v.loaders.unwrap_or_default(),
dependencies: v
.dependencies
.unwrap_or_default()
.into_iter()
.flat_map(|f| {
let dependency: Vec<&str> = f.split(" |||| ").collect();

if dependency.len() >= 4 {
Some(QueryDependency {
project_id: match dependency[1] {
"0" => None,
_ => match dependency[1].parse() {
Ok(x) => Some(ProjectId(x)),
Err(_) => None,
},
},
version_id: match dependency[0] {
"0" => None,
_ => match dependency[0].parse() {
Ok(x) => Some(VersionId(x)),
Err(_) => None,
},
},
file_name: if dependency[3] == " " {
None
} else {
Some(dependency[3].to_string())
},
dependency_type: dependency[2].to_string(),
})
} else {
None
}
})
.collect(),
dependencies: serde_json::from_value(
v.dependencies.unwrap_or_default(),
)
.ok()
.unwrap_or_default(),
}))
} else {
Ok(None)
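The version query decodes files and hashes from their JSONB aggregates into ad-hoc structs and then builds each file's hash map by matching hash.file_id against file.id, replacing the old delimited-string parsing. A reduced sketch of that grouping step (field set and types simplified; in the real code the hash value is kept as bytes and more file columns are carried along):

use std::collections::HashMap;

use serde::Deserialize;
use serde_json::json;

#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Eq)]
struct FileId(i64);

// Reduced versions of the ad-hoc structs declared inside the query code.
#[derive(Deserialize)]
struct HashRow {
    file_id: FileId,
    algorithm: String,
    hash: String,
}

#[derive(Deserialize)]
struct FileRow {
    id: FileId,
    filename: String,
}

fn main() {
    let hashes: Vec<HashRow> = serde_json::from_value(json!([
        { "file_id": 1, "algorithm": "sha1", "hash": "abc" },
        { "file_id": 1, "algorithm": "sha512", "hash": "def" }
    ]))
    .ok()
    .unwrap_or_default();

    let files: Vec<FileRow> = serde_json::from_value(json!([
        { "id": 1, "filename": "mod.jar" }
    ]))
    .ok()
    .unwrap_or_default();

    // Same grouping as above: collect every hash whose file_id matches.
    let grouped: Vec<(String, HashMap<String, String>)> = files
        .into_iter()
        .map(|f| {
            let mut file_hashes = HashMap::new();
            for h in &hashes {
                if h.file_id == f.id {
                    file_hashes.insert(h.algorithm.clone(), h.hash.clone());
                }
            }
            (f.filename, file_hashes)
        })
        .collect();

    assert_eq!(grouped[0].1.len(), 2);
}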
@@ -788,10 +751,11 @@ impl Version {
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
ARRAY_AGG(DISTINCT gv.version || ' |||| ' || gv.created) filter (where gv.version is not null) game_versions, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
ARRAY_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename) filter (where f.id is not null) files,
ARRAY_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id) filter (where h.hash is not null) hashes,
ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' ')) filter (where d.dependency_type is not null) dependencies
JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
JSONB_AGG(DISTINCT TO_JSONB(f)) filter (where f.id is not null) files,
JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
FROM versions v
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
@@ -827,50 +791,56 @@ impl Version {
.map(|x| VersionStatus::from_str(&x)),
},
files: {
let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default()
.into_iter()
.flat_map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();

if hash.len() >= 3 {
Some((
FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
hash[1].to_string().into_bytes(),
))
} else {
None
#[derive(Deserialize)]
struct Hash {
pub file_id: FileId,
pub algorithm: String,
pub hash: Vec<u8>,
}
}).collect();

let mut files: Vec<QueryFile> = v.files.unwrap_or_default()
.into_iter()
.flat_map(|f| {
let file: Vec<&str> = f.split(" |||| ").collect();
#[derive(Deserialize)]
struct File {
pub id: FileId,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u32,
}

if file.len() >= 5 {
let file_id = FileId(file[0].parse().unwrap_or(0));
let hashes: Vec<Hash> = serde_json::from_value(
v.hashes.unwrap_or_default(),
)
.ok()
.unwrap_or_default();

let files: Vec<File> = serde_json::from_value(
v.files.unwrap_or_default(),
)
.ok()
.unwrap_or_default();

let mut files = files.into_iter().map(|x| {
let mut file_hashes = HashMap::new();

for hash in &hashes {
if (hash.0).0 == file_id.0 {
file_hashes.insert(hash.1.clone(), hash.2.clone());
if hash.file_id == x.id {
file_hashes.insert(
hash.algorithm.clone(),
hash.hash.clone(),
);
}
}

Some(QueryFile {
id: file_id,
url: file[3].to_string(),
filename: file[4].to_string(),
QueryFile {
id: x.id,
url: x.url,
filename: x.filename,
hashes: file_hashes,
primary: file[1].parse().unwrap_or(false),
size: file[2].parse().unwrap_or(0),
})
} else {
None
primary: x.primary,
size: x.size,
}
})
.collect();
}).collect::<Vec<_>>();

files.sort_by(|a, b| {
if a.primary {
Ordering::Less
@@ -880,68 +850,32 @@ impl Version {
a.filename.cmp(&b.filename)
}
});

files
},
game_versions: {
let game_versions = v
.game_versions
#[derive(Deserialize)]
struct GameVersion {
pub version: String,
pub created: DateTime<Utc>,
}

let mut game_versions: Vec<GameVersion> = serde_json::from_value(
v.game_versions.unwrap_or_default(),
)
.ok()
.unwrap_or_default();

let mut gv = game_versions
.into_iter()
game_versions.sort_by(|a, b| a.created.cmp(&b.created));

.flat_map(|x| {
let version: Vec<&str> = x.split(" |||| ").collect();

if version.len() >= 2 {
Some((version[0].to_string(), convert_postgres_date(version[1]).timestamp()))
} else {
None
}
})
.collect::<Vec<(String, i64)>>();

gv.sort_by(|a, b| a.1.cmp(&b.1));

gv.into_iter()
.map(|x| x.0)
.collect()
game_versions.into_iter().map(|x| x.version).collect()
},
loaders: v.loaders.unwrap_or_default(),
dependencies: v.dependencies
.unwrap_or_default()
.into_iter()

.flat_map(|f| {
let dependency: Vec<&str> = f.split(" |||| ").collect();

if dependency.len() >= 4 {
Some(QueryDependency {
project_id: match dependency[1] {
"0" => None,
_ => match dependency[1].parse() {
Ok(x) => Some(ProjectId(x)),
Err(_) => None,
},
},
version_id: match dependency[0] {
"0" => None,
_ => match dependency[0].parse() {
Ok(x) => Some(VersionId(x)),
Err(_) => None,
},
},
file_name: if dependency[3] == " " {
None
} else {
Some(dependency[3].to_string())
},
dependency_type: dependency[2].to_string(),
})
} else {
None
}
}).collect(),
dependencies: serde_json::from_value(
v.dependencies.unwrap_or_default(),
)
.ok()
.unwrap_or_default(),
}
))
})
@@ -960,7 +894,7 @@ pub struct QueryVersion {
pub dependencies: Vec<QueryDependency>,
}

#[derive(Clone)]
#[derive(Clone, Deserialize)]
pub struct QueryDependency {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
@@ -277,7 +277,7 @@ pub async fn project_create(
&***file_host,
&flame_anvil_queue,
&mut uploaded_files,
&*client,
&client,
)
.await;

@@ -723,7 +723,6 @@ pub async fn project_create_inner(
})?;

donation_urls.push(models::project_item::DonationUrl {
project_id: project_id.into(),
platform_id,
platform_short: "".to_string(),
platform_name: "".to_string(),
@@ -759,7 +758,6 @@ pub async fn project_create_inner(
gallery_items: gallery_urls
.iter()
.map(|x| models::project_item::GalleryItem {
project_id: project_id.into(),
image_url: x.url.clone(),
featured: x.featured,
title: x.title.clone(),

@@ -500,7 +500,7 @@ pub async fn project_edit(
{
crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(),
&*pool,
&pool,
webhook_url,
)
.await
@@ -529,7 +529,7 @@ pub async fn project_edit(
{
crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(),
&*pool,
&pool,
webhook_url,
)
.await
@@ -1483,14 +1483,13 @@ pub async fn add_gallery_item(
}

database::models::project_item::GalleryItem {
project_id: project_item.id,
image_url: format!("{}/{}", cdn_url, url),
featured: item.featured,
title: item.title,
description: item.description,
created: Utc::now(),
}
.insert(&mut transaction)
.insert(project_item.id, &mut transaction)
.await?;

transaction.commit().await?;

@@ -133,7 +133,7 @@ pub async fn mod_create(
&***file_host,
&flame_anvil_queue,
&mut uploaded_files,
&*client,
&client,
)
.await;
@@ -18,10 +18,12 @@ pub async fn index_local(
m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,
m.team_id team_id, m.license license, m.slug slug, m.status status_name,
cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, u.username username,
ARRAY_AGG(DISTINCT c.category || ' |||| ' || mc.is_additional) filter (where c.category is not null) categories,
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,
ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null) versions,
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null) gallery
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
@@ -46,27 +48,13 @@ pub async fn index_local(
.fetch_many(&pool)
.try_filter_map(|e| async {
Ok(e.right().map(|m| {
let categories_raw = m.categories.unwrap_or_default();

let mut additional_categories = Vec::new();
let mut categories = Vec::new();

for category in categories_raw {
let category: Vec<&str> = category.split(" |||| ").collect();

if category.len() >= 2 {
if category[1].parse::<bool>().ok().unwrap_or_default() {
additional_categories.push(category[0].to_string());
} else {
categories.push(category[0].to_string());
}
}
}
let mut additional_categories = m.additional_categories.unwrap_or_default();
let mut categories = m.categories.unwrap_or_default();

categories.append(&mut m.loaders.unwrap_or_default());

let display_categories = categories.clone();
categories.append(&mut additional_categories.clone());
categories.append(&mut additional_categories);

let versions = m.versions.unwrap_or_default();

@@ -82,8 +70,11 @@ pub async fn index_local(
_ => false,
};

let mut gallery = m.featured_gallery.unwrap_or_default();
gallery.append(&mut m.gallery.unwrap_or_default());

UploadSearchProject {
project_id: format!("{}", project_id),
project_id: project_id.to_string(),
title: m.title,
description: m.description,
categories,
@@ -102,7 +93,7 @@ pub async fn index_local(
server_side: m.server_side_type,
slug: m.slug,
project_type: m.project_type_name,
gallery: m.gallery.unwrap_or_default(),
gallery,
display_categories,
open_source,
}
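index_local now selects non-featured and featured gallery images into separate columns and puts the featured ones first when building the search document. The merge itself is just a prepend, as in this sketch (standalone helper, not the repo's function):

// Sketch of the featured-first gallery ordering used for the search index:
// featured images come back in their own column and are simply put first.
fn featured_first(
    featured_gallery: Option<Vec<String>>,
    gallery: Option<Vec<String>>,
) -> Vec<String> {
    let mut merged = featured_gallery.unwrap_or_default();
    merged.append(&mut gallery.unwrap_or_default());
    merged
}

fn main() {
    let merged = featured_first(
        Some(vec!["featured.png".to_string()]),
        Some(vec!["a.png".to_string(), "b.png".to_string()]),
    );
    assert_eq!(merged, vec!["featured.png", "a.png", "b.png"]);
}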
@@ -72,7 +72,8 @@ pub async fn send_discord_webhook(
ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,
JSONB_AGG(DISTINCT TO_JSONB(gv)) filter (where gv.version is not null) versions,
JSONB_AGG(DISTINCT TO_JSONB(agv)) filter (where gv.version is not null) all_game_versions,
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null) gallery
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
@@ -95,7 +96,7 @@ pub async fn send_discord_webhook(
&*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::<Vec<String>>(),
crate::models::teams::OWNER_ROLE,
)
.fetch_optional(&*pool)
.fetch_optional(pool)
.await?;

if let Some(project) = row {
@@ -106,19 +107,13 @@ pub async fn send_discord_webhook(

let versions: Vec<GameVersion> =
serde_json::from_value(project.versions.unwrap_or_default())
.map_err(|err| {
ApiError::DiscordError(
"Error while sending projects webhook".to_string(),
)
})?;
.ok()
.unwrap_or_default();
let all_game_versions: Vec<GameVersion> = serde_json::from_value(
project.all_game_versions.unwrap_or_default(),
)
.map_err(|err| {
ApiError::DiscordError(
"Error while sending projects webhook".to_string(),
)
})?;
.ok()
.unwrap_or_default();

if !categories.is_empty() {
fields.push(DiscordEmbedField {
@@ -170,7 +165,7 @@ pub async fn send_discord_webhook(
}

if !versions.is_empty() {
let mut formatted_game_versions: String =
let formatted_game_versions: String =
get_gv_range(versions, all_game_versions);

fields.push(DiscordEmbedField {
@@ -204,11 +199,14 @@ pub async fn send_discord_webhook(
thumbnail: DiscordEmbedThumbnail {
url: project.icon_url,
},
image: project.gallery.unwrap_or_default().first().map(|x| {
DiscordEmbedImage {
url: Some(x.to_string()),
image: if let Some(first) =
project.featured_gallery.unwrap_or_default().first()
{
Some(first.clone())
} else {
project.gallery.unwrap_or_default().first().cloned()
}
}),
.map(|x| DiscordEmbedImage { url: Some(x) }),
footer: Some(DiscordEmbedFooter {
text: "Modrinth".to_string(),
icon_url: Some(
@@ -231,7 +229,7 @@ pub async fn send_discord_webhook(
})
.send()
.await
.map_err(|err| {
.map_err(|_| {
ApiError::DiscordError(
"Error while sending projects webhook".to_string(),
)
@@ -261,8 +259,8 @@ fn get_gv_range(

const MAX_VALUE: usize = 1000000;

for i in 0..game_versions.len() {
let current_version = &*game_versions[i].version;
for (i, current_version) in game_versions.iter().enumerate() {
let current_version = &current_version.version;

let index = all_game_versions
.iter()
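For the Discord embed, the webhook now prefers the first featured gallery image and falls back to the first regular gallery image. A standalone sketch of that selection, using the same "if let ... first()" shape as the hunk above (types reduced to plain strings):

// Prefer the first featured gallery image; otherwise take the first
// regular gallery image, if any.
fn embed_image(
    featured_gallery: Option<Vec<String>>,
    gallery: Option<Vec<String>>,
) -> Option<String> {
    if let Some(first) = featured_gallery.unwrap_or_default().first() {
        Some(first.clone())
    } else {
        gallery.unwrap_or_default().first().cloned()
    }
}

fn main() {
    assert_eq!(
        embed_image(None, Some(vec!["shot.png".to_string()])),
        Some("shot.png".to_string())
    );
    assert_eq!(embed_image(None, None), None);
}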