Switch to alternate query strategy which simplifies code (#244)

Geometrically 2021-09-01 06:04:38 -07:00 committed by GitHub
parent efb82847cb
commit c87e72e08e
7 changed files with 1029 additions and 1564 deletions
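
The strategy switch in brief: the old queries collapsed child rows into ', '-delimited strings with ARRAY_AGG and re-parsed them in Rust (x.split(", "), parse().unwrap_or(0)); the new code issues one small typed query per child table and awaits them concurrently with futures::join!, which is why the executor bounds below gain + Copy, since the same executor is handed to several queries. A minimal sketch of the new pattern follows; it is not the project's actual code: it assumes a hypothetical notifications/notifications_actions schema and uses the runtime-checked sqlx::query_scalar/query_as instead of the compile-time query! macro used in the diff.

use sqlx::PgPool;

#[derive(sqlx::FromRow)]
struct ActionRow {
    id: i64,
    title: String,
}

async fn title_and_actions(
    pool: &PgPool, // &PgPool is Copy, so it can be reused across both queries
    notification_id: i64,
) -> Result<Option<(String, Vec<ActionRow>)>, sqlx::Error> {
    // Both round-trips run concurrently; each returns typed rows, so no
    // ", "-joined string parsing is needed afterwards.
    let (title, actions) = futures::join!(
        sqlx::query_scalar::<_, String>("SELECT title FROM notifications WHERE id = $1")
            .bind(notification_id)
            .fetch_optional(pool),
        sqlx::query_as::<_, ActionRow>(
            "SELECT id, title FROM notifications_actions WHERE notification_id = $1",
        )
        .bind(notification_id)
        .fetch_all(pool),
    );
    match title? {
        Some(title) => Ok(Some((title, actions?))),
        None => Ok(None),
    }
}

This trades one large grouped query for several simple ones per entity (and, in the get_many paths, one set per id), but the per-query plans are trivial and the fragile delimiter parsing disappears.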

File diff suppressed because it is too large


@@ -153,7 +153,7 @@ pub struct ReportId(pub i64);
#[sqlx(transparent)]
pub struct ReportTypeId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq)]
#[sqlx(transparent)]
pub struct FileId(pub i64);
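
The added Hash, Eq, PartialEq derives are what let FileId serve as a HashMap key in the rewritten version query further down (the hashes_map that groups hash rows by file id). A minimal illustration of why those bounds are needed, assuming only the same newtype shape:

use std::collections::HashMap;

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct FileId(pub i64);

fn main() {
    // HashMap keys must implement Hash + Eq (and Eq implies PartialEq);
    // without the new derives the insert below would not compile.
    let mut hashes_map: HashMap<FileId, Vec<u8>> = HashMap::new();
    hashes_map.insert(FileId(42), b"sha1".to_vec());
    assert!(hashes_map.contains_key(&FileId(42)));
}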


@@ -118,39 +118,32 @@ impl Notification {
executor: E,
) -> Result<Option<Self>, sqlx::error::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let result = sqlx::query!(
"
SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
ARRAY_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = $1
GROUP BY n.id, n.user_id;
",
id as NotificationId,
)
.fetch_optional(executor)
.await?;
if let Some(row) = result {
let mut actions: Vec<NotificationAction> = Vec::new();
row.actions.unwrap_or_default().iter().for_each(|x| {
let action: Vec<&str> = x.split(", ").collect();
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
let (notifications, actions) = futures::join!(
sqlx::query!(
"
SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
ARRAY_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = $1
GROUP BY n.id, n.user_id;
",
id as NotificationId,
)
.fetch_optional(executor),
sqlx::query!(
"
SELECT id, title, notification_id, action_route, action_route_method
FROM notifications_actions
WHERE notification_id = $1
",
id as NotificationId,
).fetch_all(executor),
);
if let Some(row) = notifications? {
Ok(Some(Notification {
id,
user_id: UserId(row.user_id),
@@ -160,7 +153,16 @@ impl Notification {
link: row.link,
read: row.read,
created: row.created,
actions,
actions: actions?
.into_iter()
.map(|x| NotificationAction {
id: NotificationActionId(x.id),
notification_id: NotificationId(x.notification_id),
title: x.title,
action_route_method: x.action_route_method,
action_route: x.action_route,
})
.collect(),
}))
} else {
Ok(None)
@@ -174,56 +176,9 @@ impl Notification {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::stream::TryStreamExt;
let notification_ids_parsed: Vec<i64> = notification_ids.into_iter().map(|x| x.0).collect();
sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
ARRAY_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = ANY($1)
GROUP BY n.id, n.user_id
ORDER BY n.created DESC;
",
&notification_ids_parsed
)
.fetch_many(exec)
.try_filter_map(|e| async {
Ok(e.right().map(|row| {
let id = NotificationId(row.id);
let mut actions: Vec<NotificationAction> = Vec::new();
row.actions.unwrap_or_default().iter().for_each(|x| {
let action: Vec<&str> = x.split(", ").collect();
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
Notification {
id,
user_id: UserId(row.user_id),
notification_type: row.notification_type,
title: row.title,
text: row.text,
link: row.link,
read: row.read,
created: row.created,
actions,
}
}))
})
.try_collect::<Vec<Notification>>()
.await
futures::future::try_join_all(notification_ids.into_iter().map(|id| Self::get(id, exec)))
.await
.map(|x| x.into_iter().flatten().collect())
}
pub async fn get_many_user<'a, E>(
@@ -233,54 +188,21 @@ impl Notification {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::stream::TryStreamExt;
sqlx::query!(
let notification_ids = sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
ARRAY_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.user_id = $1
GROUP BY n.id, n.user_id;
SELECT id
FROM notifications
WHERE user_id = $1
",
user_id as UserId
)
.fetch_many(exec)
.try_filter_map(|e| async {
Ok(e.right().map(|row| {
let id = NotificationId(row.id);
let mut actions: Vec<NotificationAction> = Vec::new();
.fetch_all(exec)
.await?
.into_iter()
.map(|x| NotificationId(x.id))
.collect();
row.actions.unwrap_or_default().iter().for_each(|x| {
let action: Vec<&str> = x.split(", ").collect();
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
Notification {
id,
user_id: UserId(row.user_id),
notification_type: row.notification_type,
title: row.title,
text: row.text,
link: row.link,
read: row.read,
created: row.created,
actions,
}
}))
})
.try_collect::<Vec<Notification>>()
.await
Self::get_many(notification_ids, exec).await
}
pub async fn remove(


@@ -589,43 +589,69 @@ impl Project {
executor: E,
) -> Result<Option<QueryProject>, sqlx::error::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let result = sqlx::query!(
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
m.updated updated, m.status status,
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
ARRAY_AGG(DISTINCT c.category) categories, ARRAY_AGG(DISTINCT v.id::text) versions,
ARRAY_AGG(DISTINCT mg.image_url || ', ' || mg.featured || ', ' || COALESCE(mg.title, '') || ', ' || COALESCE(mg.description, '') || ', ' || mg.created) gallery,
ARRAY_AGG(DISTINCT md.joining_platform_id || ', ' || md.url || ', ' || dp.short || ', ' || dp.name) donations
FROM mods m
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN statuses s ON s.id = m.status
INNER JOIN side_types cs ON m.client_side = cs.id
INNER JOIN side_types ss ON m.server_side = ss.id
INNER JOIN licenses l ON m.license = l.id
LEFT JOIN mods_donations md ON md.joining_mod_id = m.id
LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id
LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
LEFT JOIN categories c ON mc.joining_category_id = c.id
LEFT JOIN versions v ON v.mod_id = m.id
LEFT JOIN mods_gallery mg ON mg.mod_id = m.id
WHERE m.id = $1
GROUP BY pt.id, s.id, cs.id, ss.id, l.id, m.id;
",
id as ProjectId,
)
.fetch_optional(executor)
.await?;
let (project, versions, categories, gallery, donations) = futures::join!(
sqlx::query!(
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
m.updated updated, m.status status,
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name
FROM mods m
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN statuses s ON s.id = m.status
INNER JOIN side_types cs ON m.client_side = cs.id
INNER JOIN side_types ss ON m.server_side = ss.id
INNER JOIN licenses l ON m.license = l.id
WHERE m.id = $1
",
id as ProjectId,
).fetch_optional(executor),
sqlx::query!(
"
SELECT id
FROM versions
WHERE mod_id = $1
",
id as ProjectId,
).fetch_all(executor),
sqlx::query!(
"
SELECT c.category category
FROM mods_categories mc
INNER JOIN categories c ON mc.joining_category_id = c.id
WHERE mc.joining_mod_id = $1
",
id as ProjectId,
).fetch_all(executor),
sqlx::query!(
"
SELECT image_url, featured, title, description, created
FROM mods_gallery
WHERE mod_id = $1
",
id as ProjectId,
).fetch_all(executor),
sqlx::query!(
"
SELECT md.url url, dp.id platform_id, dp.name dp_name, dp.short short
FROM mods_donations md
INNER JOIN donation_platforms dp ON md.joining_platform_id = dp.id
WHERE md.joining_mod_id = $1
",
id as ProjectId,
).fetch_all(executor)
);
if let Some(m) = project? {
let project_id = ProjectId(m.id);
if let Some(m) = result {
Ok(Some(QueryProject {
inner: Project {
id: ProjectId(m.id),
id: project_id,
project_type: ProjectTypeId(m.project_type),
team_id: TeamId(m.team_id),
title: m.title.clone(),
@@ -651,70 +677,28 @@ impl Project {
moderation_message_body: m.moderation_message_body,
},
project_type: m.project_type_name,
categories: m
.categories
.map(|x| x.iter().map(|x| x.to_string()).collect())
.unwrap_or_default(),
versions: m
.versions
.map(|x| {
x.iter()
.map(|x| VersionId(x.parse().unwrap_or_default()))
.collect()
categories: categories?.into_iter().map(|x| x.category).collect(),
versions: versions?.into_iter().map(|x| VersionId(x.id)).collect(),
donation_urls: donations?
.into_iter()
.map(|x| DonationUrl {
project_id,
platform_id: DonationPlatformId(x.platform_id),
platform_short: x.short,
platform_name: x.dp_name,
url: x.url,
})
.unwrap_or_default(),
donation_urls: m
.donations
.unwrap_or_default()
.iter()
.map(|d| {
let strings: Vec<&str> = d.split(", ").collect();
if strings.len() >= 3 {
Some(DonationUrl {
project_id: id,
platform_id: DonationPlatformId(strings[0].parse().unwrap_or(0)),
platform_short: strings[2].to_string(),
platform_name: strings[3].to_string(),
url: strings[1].to_string(),
})
} else {
None
}
})
.flatten()
.collect(),
gallery_items: m
.gallery
.unwrap_or_default()
.iter()
.map(|d| {
let strings: Vec<&str> = d.split(", ").collect();
if strings.len() >= 5 {
Some(GalleryItem {
project_id: id,
image_url: strings[0].to_string(),
featured: strings[1].parse().unwrap_or(false),
title: if strings[2] == "" {
None
} else {
Some(strings[2].to_string())
},
description: if strings[3] == "" {
None
} else {
Some(strings[3].to_string())
},
created: chrono::DateTime::parse_from_rfc3339(strings[4])
.map(|x| x.with_timezone(&chrono::Utc))
.unwrap_or_else(|_| chrono::Utc::now()),
})
} else {
None
}
gallery_items: gallery?
.into_iter()
.map(|x| GalleryItem {
project_id,
image_url: x.image_url,
featured: x.featured.unwrap_or(false),
title: x.title,
description: x.description,
created: x.created,
})
.flatten()
.collect(),
status: crate::models::projects::ProjectStatus::from_str(&m.status_name),
license_id: m.short,
@@ -734,123 +718,9 @@ impl Project {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::TryStreamExt;
let project_ids_parsed: Vec<i64> = project_ids.into_iter().map(|x| x.0).collect();
sqlx::query!(
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
m.updated updated, m.status status,
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
ARRAY_AGG(DISTINCT c.category) categories, ARRAY_AGG(DISTINCT v.id::text) versions,
ARRAY_AGG(DISTINCT mg.image_url || ', ' || mg.featured || ', ' || COALESCE(mg.title, '') || ', ' || COALESCE(mg.description, '') || ', ' || mg.created) gallery,
ARRAY_AGG(DISTINCT md.joining_platform_id || ', ' || md.url || ', ' || dp.short || ', ' || dp.name) donations
FROM mods m
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN statuses s ON s.id = m.status
INNER JOIN side_types cs ON m.client_side = cs.id
INNER JOIN side_types ss ON m.server_side = ss.id
INNER JOIN licenses l ON m.license = l.id
LEFT JOIN mods_donations md ON md.joining_mod_id = m.id
LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id
LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
LEFT JOIN categories c ON mc.joining_category_id = c.id
LEFT JOIN versions v ON v.mod_id = m.id
LEFT JOIN mods_gallery mg ON mg.mod_id = m.id
WHERE m.id = ANY($1)
GROUP BY pt.id, s.id, cs.id, ss.id, l.id, m.id;
",
&project_ids_parsed
)
.fetch_many(exec)
.try_filter_map(|e| async {
Ok(e.right().map(|m| {
let id = m.id;
QueryProject {
inner: Project {
id: ProjectId(id),
project_type: ProjectTypeId(m.project_type),
team_id: TeamId(m.team_id),
title: m.title.clone(),
description: m.description.clone(),
downloads: m.downloads,
body_url: m.body_url.clone(),
icon_url: m.icon_url.clone(),
published: m.published,
updated: m.updated,
issues_url: m.issues_url.clone(),
source_url: m.source_url.clone(),
wiki_url: m.wiki_url.clone(),
license_url: m.license_url.clone(),
discord_url: m.discord_url.clone(),
client_side: SideTypeId(m.client_side),
status: StatusId(m.status),
server_side: SideTypeId(m.server_side),
license: LicenseId(m.license),
slug: m.slug.clone(),
body: m.body.clone(),
follows: m.follows,
moderation_message: m.moderation_message,
moderation_message_body: m.moderation_message_body,
},
project_type: m.project_type_name,
categories: m.categories.map(|x| x.iter().map(|x| x.to_string()).collect()).unwrap_or_default(),
versions: m.versions.map(|x| x.iter().map(|x| VersionId(x.parse().unwrap_or_default())).collect()).unwrap_or_default(),
gallery_items: m
.gallery
.unwrap_or_default()
.iter()
.map(|d| {
let strings: Vec<&str> = d.split(", ").collect();
if strings.len() >= 5 {
Some(GalleryItem {
project_id: ProjectId(id),
image_url: strings[0].to_string(),
featured: strings[1].parse().unwrap_or(false),
title: if strings[2] == " " { None } else { Some(strings[2].to_string()) },
description: if strings[3] == " " { None } else { Some(strings[3].to_string()) },
created: chrono::DateTime::parse_from_rfc3339(strings[4]).map(|x| x.with_timezone(&chrono::Utc)).unwrap_or_else(|_| chrono::Utc::now())
})
} else {
None
}
})
.flatten()
.collect(),
donation_urls: m
.donations
.unwrap_or_default()
.iter()
.map(|d| {
let strings: Vec<&str> = d.split(", ").collect();
if strings.len() >= 3 {
Some(DonationUrl {
project_id: ProjectId(id),
platform_id: DonationPlatformId(strings[0].parse().unwrap_or(0)),
platform_short: strings[2].to_string(),
platform_name: strings[3].to_string(),
url: strings[1].to_string(),
})
} else {
None
}
})
.flatten()
.collect(),
status: crate::models::projects::ProjectStatus::from_str(&m.status_name),
license_id: m.short,
license_name: m.license_name,
client_side: crate::models::projects::SideType::from_str(&m.client_side_type),
server_side: crate::models::projects::SideType::from_str(&m.server_side_type),
}}))
})
.try_collect::<Vec<QueryProject>>()
futures::future::try_join_all(project_ids.into_iter().map(|id| Self::get_full(id, exec)))
.await
.map(|x| x.into_iter().flatten().collect())
}
}
#[derive(Clone, Debug)]


@@ -596,53 +596,78 @@ impl Version {
executor: E,
) -> Result<Option<QueryVersion>, sqlx::error::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let result = sqlx::query!(
"
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured,
ARRAY_AGG(DISTINCT gv.version) game_versions, ARRAY_AGG(DISTINCT l.loader) loaders,
ARRAY_AGG(DISTINCT f.id || ', ' || f.filename || ', ' || f.is_primary || ', ' || f.url) files,
ARRAY_AGG(DISTINCT h.algorithm || ', ' || encode(h.hash, 'escape') || ', ' || h.file_id) hashes,
ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ', ' || COALESCE(d.mod_dependency_id, 0) || ', ' || d.dependency_type) dependencies
FROM versions v
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
LEFT OUTER JOIN loaders l on lv.loader_id = l.id
LEFT OUTER JOIN files f on v.id = f.version_id
LEFT OUTER JOIN hashes h on f.id = h.file_id
LEFT OUTER JOIN dependencies d on v.id = d.dependent_id
WHERE v.id = $1
GROUP BY v.id;
",
id as VersionId,
)
.fetch_optional(executor)
.await?;
let (version, game_versions, loaders, files, hashes, dependencies) = futures::join!(
sqlx::query!(
"
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured
FROM versions v
WHERE v.id = $1
GROUP BY v.id;
",
id as VersionId,
)
.fetch_optional(executor),
sqlx::query!(
"
SELECT gv.version game_version
FROM game_versions_versions gvv
INNER JOIN game_versions gv on gvv.game_version_id = gv.id
WHERE gvv.joining_version_id = $1
",
id as VersionId,
).fetch_all(executor),
sqlx::query!(
"
SELECT l.loader loader
FROM loaders_versions lv
INNER JOIN loaders l on lv.loader_id = l.id
WHERE lv.version_id = $1
",
id as VersionId,
).fetch_all(executor),
sqlx::query!(
"
SELECT id, filename, is_primary, url
FROM files
WHERE version_id = $1
",
id as VersionId,
).fetch_all(executor),
sqlx::query!(
"
SELECT h.algorithm algorithm, encode(h.hash, 'escape') hash, h.file_id file_id
FROM files f
INNER JOIN hashes h ON h.file_id = f.id
WHERE f.version_id = $1
",
id as VersionId,
).fetch_all(executor),
sqlx::query!(
"
SELECT dependency_id, mod_dependency_id, dependency_type
FROM dependencies
WHERE dependent_id = $1
",
id as VersionId,
).fetch_all(executor),
);
if let Some(v) = result {
let hashes: Vec<(FileId, String, Vec<u8>)> = v
.hashes
.unwrap_or_default()
.iter()
.map(|f| {
let hash: Vec<&str> = f.split(", ").collect();
if let Some(v) = version? {
let mut hashes_map: HashMap<FileId, HashMap<String, Vec<u8>>> = HashMap::new();
if hash.len() >= 3 {
Some((
FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
hash[1].to_string().into_bytes(),
))
} else {
None
}
})
.flatten()
.collect();
for hash in hashes? {
let entry = hashes_map
.entry(FileId(hash.file_id))
.or_insert(HashMap::new());
if let Some(raw_hash) = hash.hash {
entry.insert(hash.algorithm, raw_hash.into_bytes());
}
}
Ok(Some(QueryVersion {
id: VersionId(v.id),
@@ -654,79 +679,26 @@ impl Version {
changelog_url: v.changelog_url,
date_published: v.date_published,
downloads: v.downloads,
files: v
.files
.unwrap_or_default()
.iter()
.map(|f| {
let file: Vec<&str> = f.split(", ").collect();
if file.len() >= 4 {
let file_id = FileId(file[0].parse().unwrap_or(0));
let mut file_hashes = HashMap::new();
for hash in &hashes {
if (hash.0).0 == file_id.0 {
file_hashes.insert(hash.1.clone(), hash.2.clone());
}
}
Some(QueryFile {
id: file_id,
url: file[3].to_string(),
filename: file[1].to_string(),
hashes: file_hashes,
primary: file[2].parse().unwrap_or(false),
})
} else {
None
}
files: files?
.into_iter()
.map(|x| QueryFile {
id: FileId(x.id),
url: x.url,
filename: x.filename,
hashes: hashes_map.entry(FileId(x.id)).or_default().clone(),
primary: x.is_primary,
})
.flatten()
.collect(),
game_versions: v
.game_versions
.unwrap_or_default()
.iter()
.map(|x| x.to_string())
.collect(),
loaders: v
.loaders
.unwrap_or_default()
.iter()
.map(|x| x.to_string())
.collect(),
game_versions: game_versions?.into_iter().map(|x| x.game_version).collect(),
loaders: loaders?.into_iter().map(|x| x.loader).collect(),
featured: v.featured,
dependencies: v
.dependencies
.unwrap_or_default()
.iter()
.map(|f| {
let dependency: Vec<&str> = f.split(", ").collect();
if dependency.len() >= 3 {
Some(QueryDependency {
project_id: match &*dependency[1] {
"0" => None,
_ => match dependency[1].parse() {
Ok(x) => Some(ProjectId(x)),
Err(_) => None,
},
},
version_id: match &*dependency[0] {
"0" => None,
_ => match dependency[0].parse() {
Ok(x) => Some(VersionId(x)),
Err(_) => None,
},
},
dependency_type: dependency[2].to_string(),
})
} else {
None
}
dependencies: dependencies?
.into_iter()
.map(|x| QueryDependency {
project_id: x.mod_dependency_id.map(|x| ProjectId(x)),
version_id: x.dependency_id.map(|x| VersionId(x)),
dependency_type: x.dependency_type,
})
.flatten()
.collect(),
version_type: v.version_type,
}))
@@ -742,120 +714,9 @@ impl Version {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::stream::TryStreamExt;
let version_ids_parsed: Vec<i64> = version_ids.into_iter().map(|x| x.0).collect();
sqlx::query!(
"
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured,
ARRAY_AGG(DISTINCT gv.version) game_versions, ARRAY_AGG(DISTINCT l.loader) loaders,
ARRAY_AGG(DISTINCT f.id || ', ' || f.filename || ', ' || f.is_primary || ', ' || f.url) files,
ARRAY_AGG(DISTINCT h.algorithm || ', ' || encode(h.hash, 'escape') || ', ' || h.file_id) hashes,
ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ', ' || COALESCE(d.mod_dependency_id, 0) || ', ' || d.dependency_type) dependencies
FROM versions v
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
LEFT OUTER JOIN loaders l on lv.loader_id = l.id
LEFT OUTER JOIN files f on v.id = f.version_id
LEFT OUTER JOIN hashes h on f.id = h.file_id
LEFT OUTER JOIN dependencies d on v.id = d.dependent_id
WHERE v.id = ANY($1)
GROUP BY v.id
ORDER BY v.date_published ASC;
",
&version_ids_parsed
)
.fetch_many(exec)
.try_filter_map(|e| async {
Ok(e.right().map(|v| {
let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().iter().map(|f| {
let hash: Vec<&str> = f.split(", ").collect();
if hash.len() >= 3 {
Some((
FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
hash[1].to_string().into_bytes(),
))
} else {
None
}
}).flatten().collect();
QueryVersion {
id: VersionId(v.id),
project_id: ProjectId(v.mod_id),
author_id: UserId(v.author_id),
name: v.version_name,
version_number: v.version_number,
changelog: v.changelog,
changelog_url: v.changelog_url,
date_published: v.date_published,
downloads: v.downloads,
files: v.files.unwrap_or_default().iter().map(|f| {
let file: Vec<&str> = f.split(", ").collect();
if file.len() >= 4 {
let file_id = FileId(file[0].parse().unwrap_or(0));
let mut file_hashes = HashMap::new();
for hash in &hashes {
if (hash.0).0 == file_id.0 {
file_hashes.insert(hash.1.clone(), hash.2.clone());
}
}
Some(QueryFile {
id: file_id,
url: file[3].to_string(),
filename: file[1].to_string(),
hashes: file_hashes,
primary: file[2].parse().unwrap_or(false),
})
} else {
None
}
}).flatten().collect(),
game_versions: v.game_versions.unwrap_or_default().iter().map(|x| x.to_string()).collect(),
loaders: v.loaders.unwrap_or_default().iter().map(|x| x.to_string()).collect(),
featured: v.featured,
dependencies: v.dependencies
.unwrap_or_default()
.iter()
.map(|f| {
let dependency: Vec<&str> = f.split(", ").collect();
if dependency.len() >= 3 {
Some(QueryDependency {
project_id: match &*dependency[1] {
"0" => None,
_ => match dependency[1].parse() {
Ok(x) => Some(ProjectId(x)),
Err(_) => None,
},
},
version_id: match &*dependency[0] {
"0" => None,
_ => match dependency[0].parse() {
Ok(x) => Some(VersionId(x)),
Err(_) => None,
},
},
dependency_type: dependency[2].to_string(),
})
} else {
None
}
}).flatten().collect(),
version_type: v.version_type
}
}))
})
.try_collect::<Vec<QueryVersion>>()
futures::future::try_join_all(version_ids.into_iter().map(|id| Self::get_full(id, exec)))
.await
.map(|x| x.into_iter().flatten().collect())
}
}


@@ -413,6 +413,7 @@ pub async fn upload_file_to_version(
let result = upload_file_to_version_inner(
req,
payload,
client,
&mut transaction,
&***file_host,
&mut uploaded_files,
@@ -441,6 +442,7 @@ pub async fn upload_file_to_version(
async fn upload_file_to_version_inner(
req: HttpRequest,
mut payload: Multipart,
client: Data<PgPool>,
mut transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>,
@@ -453,7 +455,7 @@ async fn upload_file_to_version_inner(
let user = get_user_from_headers(req.headers(), &mut *transaction).await?;
let result = models::Version::get_full(version_id, &mut *transaction).await?;
let result = models::Version::get_full(version_id, &**client).await?;
let version = match result {
Some(v) => v,


@@ -7,7 +7,7 @@ use crate::models::projects::ProjectStatus;
use crate::search::UploadSearchProject;
use sqlx::postgres::PgPool;
// TODO: only loaders for recent versions? For projects that have moved from forge to fabric
// TODO: Move this away from STRING_AGG to multiple queries - however this may be more efficient?
pub async fn index_local(pool: PgPool) -> Result<Vec<UploadSearchProject>, IndexingError> {
info!("Indexing local projects!");
Ok(
@@ -18,8 +18,8 @@ pub async fn index_local(pool: PgPool) -> Result<Vec<UploadSearchProject>, Index
m.updated updated,
m.team_id team_id, m.license license, m.slug slug,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,
ARRAY_AGG(DISTINCT c.category) categories, ARRAY_AGG(DISTINCT lo.loader) loaders, ARRAY_AGG(DISTINCT gv.version) versions,
ARRAY_AGG(DISTINCT mg.image_url) gallery
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,
STRING_AGG(DISTINCT mg.image_url, ',') gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
@@ -45,10 +45,10 @@ pub async fn index_local(pool: PgPool) -> Result<Vec<UploadSearchProject>, Index
.fetch_many(&pool)
.try_filter_map(|e| async {
Ok(e.right().map(|m| {
let mut categories = m.categories.map(|x| x.iter().map(|x| x.to_string()).collect::<Vec<String>>()).unwrap_or_default();
categories.append(&mut m.loaders.map(|x| x.iter().map(|x| x.to_string()).collect::<Vec<String>>()).unwrap_or_default());
let mut categories = m.categories.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>()).unwrap_or_default();
categories.append(&mut m.loaders.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>()).unwrap_or_default());
let versions : Vec<String> = m.versions.map(|x| x.iter().map(|x| x.to_string()).collect()).unwrap_or_default();
let versions : Vec<String> = m.versions.map(|x| x.split(',').map(|x| x.to_string()).collect()).unwrap_or_default();
let project_id : crate::models::projects::ProjectId = ProjectId(m.id).into();
@@ -72,12 +72,12 @@ pub async fn index_local(pool: PgPool) -> Result<Vec<UploadSearchProject>, Index
server_side: m.server_side_type,
slug: m.slug,
project_type: m.project_type_name,
gallery: m.gallery.map(|x| x.iter().map(|x| x.to_string()).collect()).unwrap_or_default()
gallery: m.gallery.map(|x| x.split(',').map(|x| x.to_string()).collect()).unwrap_or_default()
}
}))
})
.try_collect::<Vec<UploadSearchProject>>()
.await?
.try_collect::<Vec<UploadSearchProject>>()
.await?
)
}
@@ -92,8 +92,8 @@ pub async fn query_one(
m.updated updated,
m.team_id team_id, m.license license, m.slug slug,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,
ARRAY_AGG(DISTINCT c.category) categories, ARRAY_AGG(DISTINCT lo.loader) loaders, ARRAY_AGG(DISTINCT gv.version) versions,
ARRAY_AGG(DISTINCT mg.image_url) gallery
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,
STRING_AGG(DISTINCT mg.image_url, ',') gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
@@ -116,23 +116,23 @@ pub async fn query_one(
id as ProjectId,
crate::models::teams::OWNER_ROLE,
)
.fetch_one(exec)
.await?;
.fetch_one(exec)
.await?;
let mut categories = m
.categories
.map(|x| x.iter().map(|x| x.to_string()).collect::<Vec<String>>())
.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>())
.unwrap_or_default();
categories.append(
&mut m
.loaders
.map(|x| x.iter().map(|x| x.to_string()).collect::<Vec<String>>())
.map(|x| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>())
.unwrap_or_default(),
);
let versions: Vec<String> = m
.versions
.map(|x| x.iter().map(|x| x.to_string()).collect())
.map(|x| x.split(',').map(|x| x.to_string()).collect())
.unwrap_or_default();
let project_id: crate::models::projects::ProjectId = ProjectId(m.id).into();
@@ -162,7 +162,7 @@ pub async fn query_one(
project_type: m.project_type_name,
gallery: m
.gallery
.map(|x| x.iter().map(|x| x.to_string()).collect())
.map(|x| x.split(',').map(|x| x.to_string()).collect())
.unwrap_or_default(),
})
}
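
For the search indexer, by contrast, the commit keeps a single aggregate query but swaps ARRAY_AGG (Postgres arrays) for STRING_AGG with a ',' delimiter, splitting the result in Rust. A minimal sketch of that half of the pattern; the helper name is hypothetical, while the table and column names come from the diff above:

use sqlx::PgPool;

// STRING_AGG yields NULL when there are no child rows, hence the Option
// and the unwrap_or_default after splitting.
async fn category_names(pool: &PgPool, mod_id: i64) -> Result<Vec<String>, sqlx::Error> {
    let joined: Option<String> = sqlx::query_scalar(
        "SELECT STRING_AGG(DISTINCT c.category, ',')
         FROM mods_categories mc
         INNER JOIN categories c ON mc.joining_category_id = c.id
         WHERE mc.joining_mod_id = $1",
    )
    .bind(mod_id)
    .fetch_one(pool)
    .await?;
    Ok(joined
        .map(|x| x.split(',').map(String::from).collect())
        .unwrap_or_default())
}

The delimiter-splitting fragility (a category containing ',' would be mangled) is exactly what the TODO in this file flags and what the multi-query rewrite removes elsewhere.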