Reimplement old database code for better performance (#365)

parent f0ab40d748
commit eaeff891d6

Cargo.lock (generated): 55 lines changed
@@ -340,6 +340,12 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "async_once"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ce4f10ea3abcd6617873bae9f91d1c5332b4a778bd9ce34d0cd517474c1de82"
+
 [[package]]
 name = "atoi"
 version = "0.4.0"

@@ -524,6 +530,42 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c"
 
+[[package]]
+name = "cached"
+version = "0.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aadf76ddea74bab35ebeb8f1eb115b9bc04eaee42d8acc0d5f477dee6b176c9a"
+dependencies = [
+ "async-trait",
+ "async_once",
+ "cached_proc_macro",
+ "cached_proc_macro_types",
+ "futures",
+ "hashbrown 0.12.1",
+ "lazy_static",
+ "once_cell",
+ "thiserror",
+ "tokio",
+]
+
+[[package]]
+name = "cached_proc_macro"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bce0f37f9b77c6b93cdf3f060c89adca303d2ab052cacb3c3d1ab543e8cecd2f"
+dependencies = [
+ "cached_proc_macro_types",
+ "darling",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "cached_proc_macro_types"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a4f925191b4367301851c6d99b09890311d74b0d43f274c0b34c86d308a3663"
+
 [[package]]
 name = "castaway"
 version = "0.1.2"

@@ -1410,6 +1452,7 @@ dependencies = [
  "base64",
  "bitflags",
  "bytes",
+ "cached",
  "dashmap",
  "dotenv",
  "env_logger",

@@ -2776,9 +2819,21 @@ dependencies = [
  "pin-project-lite",
  "signal-hook-registry",
  "socket2",
+ "tokio-macros",
  "winapi",
 ]
 
+[[package]]
+name = "tokio-macros"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "tokio-native-tls"
 version = "0.3.0"
@@ -62,3 +62,5 @@ sqlx = { version = "0.5.11", features = ["runtime-actix-rustls", "postgres", "ti
 bytes = "1.1.0"
 
 dashmap = "5.2.0"
+
+cached = "0.34.0"
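The manifest change above adds the cached crate (whose cached_proc_macro companions show up in Cargo.lock). A minimal sketch of the kind of memoization that dependency enables; the function below is hypothetical and not taken from this commit:

use cached::proc_macro::cached;

// Hypothetical helper: results are memoized in an in-memory timed cache for 60
// seconds; `result = true` caches only successful (Ok) return values.
#[cached(time = 60, result = true)]
fn project_slug(project_id: i64) -> Result<String, String> {
    // Stand-in for a real database lookup.
    Ok(format!("project-{}", project_id))
}

fn main() {
    // The second call with the same key is served from the cache.
    assert_eq!(project_slug(1), project_slug(1));
}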
sqlx-data.json: 1152 lines changed (file diff suppressed because it is too large)
@@ -601,69 +601,43 @@ impl Project {
         executor: E,
     ) -> Result<Option<QueryProject>, sqlx::error::Error>
     where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let (project, versions, categories, gallery, donations) = futures::join!(
-            sqlx::query!(
+        let result = sqlx::query!(
             "
             SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
             m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
             m.updated updated, m.status status,
             m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
             m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
-            s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name
+            s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
+            STRING_AGG(DISTINCT c.category, ' ~~~~ ') categories, STRING_AGG(DISTINCT v.id::text, ' ~~~~ ') versions,
+            STRING_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ') || ' |||| ' || mg.created, ' ~~~~ ') gallery,
+            STRING_AGG(DISTINCT md.joining_platform_id || ' |||| ' || md.url || ' |||| ' || dp.short || ' |||| ' || dp.name, ' ~~~~ ') donations
             FROM mods m
             INNER JOIN project_types pt ON pt.id = m.project_type
             INNER JOIN statuses s ON s.id = m.status
             INNER JOIN side_types cs ON m.client_side = cs.id
             INNER JOIN side_types ss ON m.server_side = ss.id
             INNER JOIN licenses l ON m.license = l.id
+            LEFT JOIN mods_donations md ON md.joining_mod_id = m.id
+            LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id
+            LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
+            LEFT JOIN categories c ON mc.joining_category_id = c.id
+            LEFT JOIN versions v ON v.mod_id = m.id
+            LEFT JOIN mods_gallery mg ON mg.mod_id = m.id
             WHERE m.id = $1
+            GROUP BY pt.id, s.id, cs.id, ss.id, l.id, m.id;
             ",
             id as ProjectId,
-        ).fetch_optional(executor),
-            sqlx::query!(
-                "
-                SELECT id
-                FROM versions
-                WHERE mod_id = $1
-                ",
-                id as ProjectId,
-            ).fetch_all(executor),
-            sqlx::query!(
-                "
-                SELECT c.category category
-                FROM mods_categories mc
-                INNER JOIN categories c ON mc.joining_category_id = c.id
-                WHERE mc.joining_mod_id = $1
-                ",
-                id as ProjectId,
-            ).fetch_all(executor),
-            sqlx::query!(
-                "
-                SELECT image_url, featured, title, description, created
-                FROM mods_gallery
-                WHERE mod_id = $1
-                ",
-                id as ProjectId,
-            ).fetch_all(executor),
-            sqlx::query!(
-                "
-                SELECT md.url url, dp.id platform_id, dp.name dp_name, dp.short short
-                FROM mods_donations md
-                INNER JOIN donation_platforms dp ON md.joining_platform_id = dp.id
-                WHERE md.joining_mod_id = $1
-                ",
-                id as ProjectId,
-            ).fetch_all(executor)
-        );
-
-        if let Some(m) = project? {
-            let project_id = ProjectId(m.id);
+        )
+        .fetch_optional(executor)
+        .await?;
+
+        if let Some(m) = result {
             Ok(Some(QueryProject {
                 inner: Project {
-                    id: project_id,
+                    id: ProjectId(m.id),
                     project_type: ProjectTypeId(m.project_type),
                     team_id: TeamId(m.team_id),
                     title: m.title.clone(),
@@ -689,46 +663,74 @@ impl Project {
                     moderation_message_body: m.moderation_message_body,
                 },
                 project_type: m.project_type_name,
-                categories: categories?
-                    .into_iter()
-                    .map(|x| x.category)
-                    .collect(),
-                versions: versions?
-                    .into_iter()
-                    .map(|x| VersionId(x.id))
-                    .collect(),
-                donation_urls: donations?
-                    .into_iter()
-                    .map(|x| DonationUrl {
-                        project_id,
-                        platform_id: DonationPlatformId(x.platform_id),
-                        platform_short: x.short,
-                        platform_name: x.dp_name,
-                        url: x.url,
-                    })
-                    .collect(),
-                gallery_items: gallery?
-                    .into_iter()
-                    .map(|x| GalleryItem {
-                        project_id,
-                        image_url: x.image_url,
-                        featured: x.featured.unwrap_or(false),
-                        title: x.title,
-                        description: x.description,
-                        created: x.created,
-                    })
-                    .collect(),
-                status: crate::models::projects::ProjectStatus::from_str(
-                    &m.status_name,
-                ),
+                categories: m
+                    .categories
+                    .map(|x| x.split(" ~~~~ ").map(|x| x.to_string()).collect())
+                    .unwrap_or_default(),
+                versions: m
+                    .versions
+                    .map(|x| {
+                        x.split(" ~~~~ ")
+                            .map(|x| VersionId(x.parse().unwrap_or_default()))
+                            .collect()
+                    })
+                    .unwrap_or_default(),
+                donation_urls: m
+                    .donations
+                    .unwrap_or_default()
+                    .split(" ~~~~ ")
+                    .map(|d| {
+                        let strings: Vec<&str> = d.split(" |||| ").collect();
+
+                        if strings.len() >= 3 {
+                            Some(DonationUrl {
+                                project_id: id,
+                                platform_id: DonationPlatformId(strings[0].parse().unwrap_or(0)),
+                                platform_short: strings[2].to_string(),
+                                platform_name: strings[3].to_string(),
+                                url: strings[1].to_string(),
+                            })
+                        } else {
+                            None
+                        }
+                    })
+                    .flatten()
+                    .collect(),
+                gallery_items: m
+                    .gallery
+                    .unwrap_or_default()
+                    .split(" ~~~~ ")
+                    .map(|d| {
+                        let strings: Vec<&str> = d.split(" |||| ").collect();
+
+                        if strings.len() >= 5 {
+                            Some(GalleryItem {
+                                project_id: id,
+                                image_url: strings[0].to_string(),
+                                featured: strings[1].parse().unwrap_or(false),
+                                title: if strings[2] == " " {
+                                    None
+                                } else {
+                                    Some(strings[2].to_string())
+                                },
+                                description: if strings[3] == " " {
+                                    None
+                                } else {
+                                    Some(strings[3].to_string())
+                                },
+                                created: OffsetDateTime::parse(strings[4], time::Format::Rfc3339).unwrap_or_else(|_| OffsetDateTime::now_utc())
+                            })
+                        } else {
+                            None
+                        }
+                    })
+                    .flatten()
+                    .collect(),
+                status: crate::models::projects::ProjectStatus::from_str(&m.status_name),
                 license_id: m.short,
                 license_name: m.license_name,
-                client_side: crate::models::projects::SideType::from_str(
-                    &m.client_side_type,
-                ),
-                server_side: crate::models::projects::SideType::from_str(
-                    &m.server_side_type,
-                ),
+                client_side: crate::models::projects::SideType::from_str(&m.client_side_type),
+                server_side: crate::models::projects::SideType::from_str(&m.server_side_type),
             }))
         } else {
             Ok(None)
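Note on the decoding pattern above: the rewritten get_full collapses what used to be five separate queries into one by having Postgres STRING_AGG the related rows into a single column per relation (' ~~~~ ' between rows, ' |||| ' between fields) and re-splitting those strings in Rust. A self-contained sketch of that decoding step, with an illustrative struct rather than the crate's own DonationUrl type:

// Illustrative mirror of the decoding done in the diff: aggregated rows are
// `' ~~~~ '`-separated, and each row's fields are `' |||| '`-separated.
#[derive(Debug, PartialEq)]
struct DonationRow {
    platform_id: i64,
    url: String,
    platform_short: String,
    platform_name: String,
}

fn parse_donations(aggregated: &str) -> Vec<DonationRow> {
    aggregated
        .split(" ~~~~ ")
        .filter_map(|row| {
            let fields: Vec<&str> = row.split(" |||| ").collect();
            // Field order matches the SQL: platform_id, url, short, name.
            if fields.len() >= 4 {
                Some(DonationRow {
                    platform_id: fields[0].parse().ok()?,
                    url: fields[1].to_string(),
                    platform_short: fields[2].to_string(),
                    platform_name: fields[3].to_string(),
                })
            } else {
                None
            }
        })
        .collect()
}

fn main() {
    let agg = "1 |||| https://example.com/donate |||| ex |||| Example ~~~~ 2 |||| https://other.test |||| ot |||| Other";
    let rows = parse_donations(agg);
    assert_eq!(rows.len(), 2);
    assert_eq!(rows[1].platform_short, "ot");
}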
@@ -740,13 +742,125 @@ impl Project {
         exec: E,
     ) -> Result<Vec<QueryProject>, sqlx::Error>
     where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        futures::future::try_join_all(
-            project_ids.into_iter().map(|id| Self::get_full(id, exec)),
+        use futures::TryStreamExt;
+
+        let project_ids_parsed: Vec<i64> = project_ids.into_iter().map(|x| x.0).collect();
+        sqlx::query!(
+            "
+            SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
+            m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
+            m.updated updated, m.status status,
+            m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
+            m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
+            s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
+            STRING_AGG(DISTINCT c.category, ' ~~~~ ') categories, STRING_AGG(DISTINCT v.id::text, ' ~~~~ ') versions,
+            STRING_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ') || ' |||| ' || mg.created, ' ~~~~ ') gallery,
+            STRING_AGG(DISTINCT md.joining_platform_id || ' |||| ' || md.url || ' |||| ' || dp.short || ' |||| ' || dp.name, ' ~~~~ ') donations
+            FROM mods m
+            INNER JOIN project_types pt ON pt.id = m.project_type
+            INNER JOIN statuses s ON s.id = m.status
+            INNER JOIN side_types cs ON m.client_side = cs.id
+            INNER JOIN side_types ss ON m.server_side = ss.id
+            INNER JOIN licenses l ON m.license = l.id
+            LEFT JOIN mods_donations md ON md.joining_mod_id = m.id
+            LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id
+            LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
+            LEFT JOIN categories c ON mc.joining_category_id = c.id
+            LEFT JOIN versions v ON v.mod_id = m.id
+            LEFT JOIN mods_gallery mg ON mg.mod_id = m.id
+            WHERE m.id = ANY($1)
+            GROUP BY pt.id, s.id, cs.id, ss.id, l.id, m.id;
+            ",
+            &project_ids_parsed
         )
+        .fetch_many(exec)
+        .try_filter_map(|e| async {
+            Ok(e.right().map(|m| {
+                let id = m.id;
+                QueryProject {
+                    inner: Project {
+                        id: ProjectId(id),
+                        project_type: ProjectTypeId(m.project_type),
+                        team_id: TeamId(m.team_id),
+                        title: m.title.clone(),
+                        description: m.description.clone(),
+                        downloads: m.downloads,
+                        body_url: m.body_url.clone(),
+                        icon_url: m.icon_url.clone(),
+                        published: m.published,
+                        updated: m.updated,
+                        issues_url: m.issues_url.clone(),
+                        source_url: m.source_url.clone(),
+                        wiki_url: m.wiki_url.clone(),
+                        license_url: m.license_url.clone(),
+                        discord_url: m.discord_url.clone(),
+                        client_side: SideTypeId(m.client_side),
+                        status: StatusId(m.status),
+                        server_side: SideTypeId(m.server_side),
+                        license: LicenseId(m.license),
+                        slug: m.slug.clone(),
+                        body: m.body.clone(),
+                        follows: m.follows,
+                        moderation_message: m.moderation_message,
+                        moderation_message_body: m.moderation_message_body,
+                    },
+                    project_type: m.project_type_name,
+                    categories: m.categories.map(|x| x.split(" ~~~~ ").map(|x| x.to_string()).collect()).unwrap_or_default(),
+                    versions: m.versions.map(|x| x.split(" ~~~~ ").map(|x| VersionId(x.parse().unwrap_or_default())).collect()).unwrap_or_default(),
+                    gallery_items: m
+                        .gallery
+                        .unwrap_or_default()
+                        .split(" ~~~~ ")
+                        .map(|d| {
+                            let strings: Vec<&str> = d.split(" |||| ").collect();
+
+                            if strings.len() >= 5 {
+                                Some(GalleryItem {
+                                    project_id: ProjectId(id),
+                                    image_url: strings[0].to_string(),
+                                    featured: strings[1].parse().unwrap_or(false),
+                                    title: if strings[2] == " " { None } else { Some(strings[2].to_string()) },
+                                    description: if strings[3] == " " { None } else { Some(strings[3].to_string()) },
+                                    created: OffsetDateTime::parse(strings[4], time::Format::Rfc3339).unwrap_or_else(|_| OffsetDateTime::now_utc())
+                                })
+                            } else {
+                                None
+                            }
+                        })
+                        .flatten()
+                        .collect(),
+                    donation_urls: m
+                        .donations
+                        .unwrap_or_default()
+                        .split(" ~~~~ ")
+                        .map(|d| {
+                            let strings: Vec<&str> = d.split(" |||| ").collect();
+
+                            if strings.len() >= 3 {
+                                Some(DonationUrl {
+                                    project_id: ProjectId(id),
+                                    platform_id: DonationPlatformId(strings[0].parse().unwrap_or(0)),
+                                    platform_short: strings[2].to_string(),
+                                    platform_name: strings[3].to_string(),
+                                    url: strings[1].to_string(),
+                                })
+                            } else {
+                                None
+                            }
+                        })
+                        .flatten()
+                        .collect(),
+                    status: crate::models::projects::ProjectStatus::from_str(&m.status_name),
+                    license_id: m.short,
+                    license_name: m.license_name,
+                    client_side: crate::models::projects::SideType::from_str(&m.client_side_type),
+                    server_side: crate::models::projects::SideType::from_str(&m.server_side_type),
+                }}))
+        })
+        .try_collect::<Vec<QueryProject>>()
         .await
-        .map(|x| x.into_iter().flatten().collect())
     }
 }
 #[derive(Clone, Debug)]
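get_many above replaces N calls to Self::get_full with one WHERE m.id = ANY($1) query whose rows are streamed through sqlx's fetch_many and futures' try_filter_map. A reduced sketch of the same streaming pattern, assuming only a mods table with id and title columns and using the unchecked sqlx::query instead of the compile-time-checked query! macro:

use futures::TryStreamExt;
use sqlx::{PgPool, Row};

// Sketch: fetch many rows in one round trip with `= ANY($1)` and stream them.
// Table/column names (`mods`, `id`, `title`) follow the schema seen in the diff,
// but this helper itself is illustrative, not part of the commit.
async fn titles_for(pool: &PgPool, ids: &[i64]) -> Result<Vec<(i64, String)>, sqlx::Error> {
    sqlx::query("SELECT id, title FROM mods WHERE id = ANY($1)")
        .bind(ids)
        .fetch_many(pool)
        .try_filter_map(|step| async move {
            // `fetch_many` yields Either<PgQueryResult, PgRow>; keep only the rows.
            Ok(step
                .right()
                .map(|row| (row.get::<i64, _>("id"), row.get::<String, _>("title"))))
        })
        .try_collect()
        .await
}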
@@ -608,80 +608,53 @@ impl Version {
         executor: E,
     ) -> Result<Option<QueryVersion>, sqlx::error::Error>
     where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let (version, game_versions, loaders, files, hashes, dependencies) = futures::join!(
-            sqlx::query!(
+        let result = sqlx::query!(
             "
             SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
             v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
-            v.version_type version_type, v.featured featured
+            v.version_type version_type, v.featured featured,
+            STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
+            STRING_AGG(DISTINCT f.id || ' |||| ' || f.filename || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url, ' ~~~~ ') files,
+            STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
+            STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || COALESCE(d.dependency_file_name, ' ') || ' |||| ' || d.dependency_type, ' ~~~~ ') dependencies
             FROM versions v
+            LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
+            LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
+            LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
+            LEFT OUTER JOIN loaders l on lv.loader_id = l.id
+            LEFT OUTER JOIN files f on v.id = f.version_id
+            LEFT OUTER JOIN hashes h on f.id = h.file_id
+            LEFT OUTER JOIN dependencies d on v.id = d.dependent_id
             WHERE v.id = $1
             GROUP BY v.id;
             ",
             id as VersionId,
         )
-        .fetch_optional(executor),
-            sqlx::query!(
-                "
-                SELECT gv.version game_version
-                FROM game_versions_versions gvv
-                INNER JOIN game_versions gv on gvv.game_version_id = gv.id
-                WHERE gvv.joining_version_id = $1
-                ORDER BY gv.created
-                ",
-                id as VersionId,
-            ).fetch_all(executor),
-            sqlx::query!(
-                "
-                SELECT l.loader loader
-                FROM loaders_versions lv
-                INNER JOIN loaders l on lv.loader_id = l.id
-                WHERE lv.version_id = $1
-                ",
-                id as VersionId,
-            ).fetch_all(executor),
-            sqlx::query!(
-                "
-                SELECT id, filename, is_primary, url, size
-                FROM files
-                WHERE version_id = $1
-                ",
-                id as VersionId,
-            ).fetch_all(executor),
-            sqlx::query!(
-                "
-                SELECT h.algorithm algorithm, encode(h.hash, 'escape') hash, h.file_id file_id
-                FROM files f
-                INNER JOIN hashes h ON h.file_id = f.id
-                WHERE f.version_id = $1
-                ",
-                id as VersionId,
-            ).fetch_all(executor),
-            sqlx::query!(
-                "
-                SELECT dependency_id, mod_dependency_id, dependency_file_name, dependency_type
-                FROM dependencies
-                WHERE dependent_id = $1
-                ",
-                id as VersionId,
-            ).fetch_all(executor),
-        );
-
-        if let Some(v) = version? {
-            let mut hashes_map: HashMap<FileId, HashMap<String, Vec<u8>>> =
-                HashMap::new();
-
-            for hash in hashes? {
-                let entry = hashes_map
-                    .entry(FileId(hash.file_id))
-                    .or_insert_with(HashMap::new);
-
-                if let Some(raw_hash) = hash.hash {
-                    entry.insert(hash.algorithm, raw_hash.into_bytes());
-                }
-            }
+        .fetch_optional(executor)
+        .await?;
+
+        if let Some(v) = result {
+            let hashes: Vec<(FileId, String, Vec<u8>)> = v
+                .hashes
+                .unwrap_or_default()
+                .split(" ~~~~ ")
+                .map(|f| {
+                    let hash: Vec<&str> = f.split(" |||| ").collect();
+
+                    if hash.len() >= 3 {
+                        Some((
+                            FileId(hash[2].parse().unwrap_or(0)),
+                            hash[0].to_string(),
+                            hash[1].to_string().into_bytes(),
+                        ))
+                    } else {
+                        None
+                    }
+                })
+                .flatten()
+                .collect();
 
             Ok(Some(QueryVersion {
                 id: VersionId(v.id),
@@ -693,34 +666,81 @@ impl Version {
                 changelog_url: v.changelog_url,
                 date_published: v.date_published,
                 downloads: v.downloads,
-                files: files?
-                    .into_iter()
-                    .map(|x| QueryFile {
-                        id: FileId(x.id),
-                        url: x.url,
-                        filename: x.filename,
-                        hashes: hashes_map
-                            .entry(FileId(x.id))
-                            .or_default()
-                            .clone(),
-                        primary: x.is_primary,
-                        size: x.size as u32,
-                    })
-                    .collect(),
+                files: v
+                    .files
+                    .unwrap_or_default()
+                    .split(" ~~~~ ")
+                    .map(|f| {
+                        let file: Vec<&str> = f.split(" |||| ").collect();
+
+                        if file.len() >= 5 {
+                            let file_id = FileId(file[0].parse().unwrap_or(0));
+                            let mut file_hashes = HashMap::new();
+
+                            for hash in &hashes {
+                                if (hash.0).0 == file_id.0 {
+                                    file_hashes.insert(hash.1.clone(), hash.2.clone());
+                                }
+                            }
+
+                            Some(QueryFile {
+                                id: file_id,
+                                url: file[4].to_string(),
+                                filename: file[1].to_string(),
+                                hashes: file_hashes,
+                                primary: file[2].parse().unwrap_or(false),
+                                size: file[3].parse().unwrap_or(0)
+                            })
+                        } else {
+                            None
+                        }
+                    })
+                    .flatten()
+                    .collect(),
-                game_versions: game_versions?
-                    .into_iter()
-                    .map(|x| x.game_version)
-                    .collect(),
-                loaders: loaders?.into_iter().map(|x| x.loader).collect(),
+                game_versions: v
+                    .game_versions
+                    .unwrap_or_default()
+                    .split(" ~~~~ ")
+                    .map(|x| x.to_string())
+                    .collect(),
+                loaders: v
+                    .loaders
+                    .unwrap_or_default()
+                    .split(" ~~~~ ")
+                    .map(|x| x.to_string())
+                    .collect(),
                 featured: v.featured,
-                dependencies: dependencies?
-                    .into_iter()
-                    .map(|x| QueryDependency {
-                        project_id: x.mod_dependency_id.map(ProjectId),
-                        version_id: x.dependency_id.map(VersionId),
-                        file_name: x.dependency_file_name,
-                        dependency_type: x.dependency_type,
-                    })
-                    .collect(),
+                dependencies: v
+                    .dependencies
+                    .unwrap_or_default()
+                    .split(" ~~~~ ")
+                    .map(|f| {
+                        let dependency: Vec<&str> = f.split(" |||| ").collect();
+
+                        if dependency.len() >= 4 {
+                            Some(QueryDependency {
+                                project_id: match &*dependency[1] {
+                                    "0" => None,
+                                    _ => match dependency[1].parse() {
+                                        Ok(x) => Some(ProjectId(x)),
+                                        Err(_) => None,
+                                    },
+                                },
+                                version_id: match &*dependency[0] {
+                                    "0" => None,
+                                    _ => match dependency[0].parse() {
+                                        Ok(x) => Some(VersionId(x)),
+                                        Err(_) => None,
+                                    },
+                                },
+                                file_name: if dependency[2] == " " { None } else { Some(dependency[4].to_string())},
+                                dependency_type: dependency[3].to_string(),
+                            })
+                        } else {
+                            None
+                        }
+                    })
+                    .flatten()
+                    .collect(),
                 version_type: v.version_type,
             }))
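The hashes handling in Version::get_full above first decodes the aggregated string into (FileId, algorithm, bytes) triples and then, per file, scans that list to build each QueryFile's hash map. A compact sketch of the same grouping done in one pass over plain types (i64 stands in for the crate's FileId newtype):

use std::collections::HashMap;

// Group decoded (file_id, algorithm, hash_bytes) triples into a per-file map,
// mirroring the per-QueryFile `hashes` map built in the diff.
fn group_hashes(triples: Vec<(i64, String, Vec<u8>)>) -> HashMap<i64, HashMap<String, Vec<u8>>> {
    let mut grouped: HashMap<i64, HashMap<String, Vec<u8>>> = HashMap::new();
    for (file_id, algorithm, bytes) in triples {
        grouped.entry(file_id).or_default().insert(algorithm, bytes);
    }
    grouped
}

fn main() {
    let triples = vec![
        (1, "sha1".to_string(), vec![0xab]),
        (1, "sha512".to_string(), vec![0xcd]),
        (2, "sha1".to_string(), vec![0xef]),
    ];
    let grouped = group_hashes(triples);
    assert_eq!(grouped[&1].len(), 2);
    assert_eq!(grouped[&2]["sha1"], vec![0xef]);
}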
@@ -736,11 +756,122 @@ impl Version {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
-        futures::future::try_join_all(
-            version_ids.into_iter().map(|id| Self::get_full(id, exec)),
+        use futures::stream::TryStreamExt;
+
+        let version_ids_parsed: Vec<i64> = version_ids.into_iter().map(|x| x.0).collect();
+        sqlx::query!(
+            "
+            SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
+            v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
+            v.version_type version_type, v.featured featured,
+            STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
+            STRING_AGG(DISTINCT f.id || ' |||| ' || f.filename || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url, ' ~~~~ ') files,
+            STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
+            STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || COALESCE(d.dependency_file_name, ' ') || ' |||| ' || d.dependency_type, ' ~~~~ ') dependencies
+            FROM versions v
+            LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
+            LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
+            LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
+            LEFT OUTER JOIN loaders l on lv.loader_id = l.id
+            LEFT OUTER JOIN files f on v.id = f.version_id
+            LEFT OUTER JOIN hashes h on f.id = h.file_id
+            LEFT OUTER JOIN dependencies d on v.id = d.dependent_id
+            WHERE v.id = ANY($1)
+            GROUP BY v.id
+            ORDER BY v.date_published ASC;
+            ",
+            &version_ids_parsed
         )
+        .fetch_many(exec)
+        .try_filter_map(|e| async {
+            Ok(e.right().map(|v| {
+                let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").map(|f| {
+                    let hash: Vec<&str> = f.split(" |||| ").collect();
+
+                    if hash.len() >= 3 {
+                        Some((
+                            FileId(hash[2].parse().unwrap_or(0)),
+                            hash[0].to_string(),
+                            hash[1].to_string().into_bytes(),
+                        ))
+                    } else {
+                        None
+                    }
+                }).flatten().collect();
+
+                QueryVersion {
+                    id: VersionId(v.id),
+                    project_id: ProjectId(v.mod_id),
+                    author_id: UserId(v.author_id),
+                    name: v.version_name,
+                    version_number: v.version_number,
+                    changelog: v.changelog,
+                    changelog_url: v.changelog_url,
+                    date_published: v.date_published,
+                    downloads: v.downloads,
+                    files: v.files.unwrap_or_default().split(" ~~~~ ").map(|f| {
+                        let file: Vec<&str> = f.split(" |||| ").collect();
+
+                        if file.len() >= 5 {
+                            let file_id = FileId(file[0].parse().unwrap_or(0));
+                            let mut file_hashes = HashMap::new();
+
+                            for hash in &hashes {
+                                if (hash.0).0 == file_id.0 {
+                                    file_hashes.insert(hash.1.clone(), hash.2.clone());
+                                }
+                            }
+
+                            Some(QueryFile {
+                                id: file_id,
+                                url: file[4].to_string(),
+                                filename: file[1].to_string(),
+                                hashes: file_hashes,
+                                primary: file[2].parse().unwrap_or(false),
+                                size: file[3].parse().unwrap_or(0)
+                            })
+                        } else {
+                            None
+                        }
+                    }).flatten().collect(),
+                    game_versions: v.game_versions.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
+                    loaders: v.loaders.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
+                    featured: v.featured,
+                    dependencies: v.dependencies
+                        .unwrap_or_default()
+                        .split(" ~~~~ ")
+                        .map(|f| {
+                            let dependency: Vec<&str> = f.split(" |||| ").collect();
+
+                            if dependency.len() >= 4 {
+                                Some(QueryDependency {
+                                    project_id: match &*dependency[1] {
+                                        "0" => None,
+                                        _ => match dependency[1].parse() {
+                                            Ok(x) => Some(ProjectId(x)),
+                                            Err(_) => None,
+                                        },
+                                    },
+                                    version_id: match &*dependency[0] {
+                                        "0" => None,
+                                        _ => match dependency[0].parse() {
+                                            Ok(x) => Some(VersionId(x)),
+                                            Err(_) => None,
+                                        },
+                                    },
+                                    file_name: if dependency[2] == " " { None } else { Some(dependency[4].to_string())},
+                                    dependency_type: dependency[3].to_string(),
+                                })
+                            } else {
+                                None
+                            }
+                        }).flatten().collect(),
+                    version_type: v.version_type
+                }
+            }))
+        })
+        .try_collect::<Vec<QueryVersion>>()
         .await
-        .map(|x| x.into_iter().flatten().collect())
     }
 }
@@ -39,7 +39,7 @@ pub async fn projects_get(
     pool: web::Data<PgPool>,
 ) -> Result<HttpResponse, ApiError> {
     let project_ids =
-        serde_json::from_str::<Vec<models::ids::ProjectId>>(&*ids.ids)?
+        serde_json::from_str::<Vec<ProjectId>>(&*ids.ids)?
             .into_iter()
             .map(|x| x.into())
             .collect();