More project data (#406)

* More project data

* Array_agg fixes + cleanup

* fix prepare

* Add approval dates to search

* Update migrations/20220725204351_more-project-data.sql

Co-authored-by: wafflecoffee <emmaffle@modrinth.com>

* Add category labels + display categories

Co-authored-by: wafflecoffee <emmaffle@modrinth.com>
Authored by Geometrically on 2022-07-31 13:29:20 -07:00; committed by GitHub
parent 13335cadc6
commit b04bced37f
38 changed files with 3673 additions and 3517 deletions

Cargo.lock (generated)
View File

@ -351,9 +351,9 @@ checksum = "2ce4f10ea3abcd6617873bae9f91d1c5332b4a778bd9ce34d0cd517474c1de82"
[[package]]
name = "atoi"
version = "0.4.0"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5"
checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e"
dependencies = [
"num-traits",
]
@ -416,12 +416,6 @@ dependencies = [
"anyhow",
]
[[package]]
name = "base-x"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc19a4937b4fbd3fe3379793130e42060d10627a360f2127802b10b87e7baf74"
[[package]]
name = "base64"
version = "0.13.0"
@ -599,6 +593,20 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
dependencies = [
"libc",
"num-integer",
"num-traits",
"serde",
"time 0.1.44",
"winapi",
]
[[package]]
name = "cipher"
version = "0.3.0"
@ -617,12 +625,6 @@ dependencies = [
"cache-padded",
]
[[package]]
name = "const_fn"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbdcdcb6d86f71c5e97409ad45898af11cbc995b4ee8112d59095a28d376c935"
[[package]]
name = "constant_time_eq"
version = "0.1.5"
@ -673,18 +675,18 @@ dependencies = [
[[package]]
name = "crc"
version = "2.1.0"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23"
checksum = "53757d12b596c16c78b83458d732a5d1a17ab3f53f2f7412f6fb57cc8a140ab3"
dependencies = [
"crc-catalog",
]
[[package]]
name = "crc-catalog"
version = "1.1.1"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403"
checksum = "2d0165d2900ae6778e36e80bbc4da3b5eefccee9ba939761f9c2882a5d9af3ff"
[[package]]
name = "crc32fast"
@ -831,7 +833,7 @@ dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version 0.4.0",
"rustc_version",
"syn",
]
@ -875,12 +877,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "discard"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0"
[[package]]
name = "dlv-list"
version = "0.2.3"
@ -1130,7 +1126,7 @@ checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
dependencies = [
"cfg-if",
"libc",
"wasi 0.10.2+wasi-snapshot-preview1",
"wasi 0.10.0+wasi-snapshot-preview1",
]
[[package]]
@ -1186,23 +1182,23 @@ name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
"ahash 0.7.6",
]
[[package]]
name = "hashbrown"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3"
dependencies = [
"ahash 0.7.6",
]
[[package]]
name = "hashlink"
version = "0.7.0"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf"
checksum = "d452c155cb93fecdfb02a73dd57b5d8e442c2063bd7aac72f1bc5e4263a43086"
dependencies = [
"hashbrown 0.11.2",
"hashbrown 0.12.1",
]
[[package]]
@ -1453,7 +1449,7 @@ dependencies = [
[[package]]
name = "labrinth"
version = "2.4.4"
version = "2.5.0"
dependencies = [
"actix",
"actix-cors",
@ -1466,6 +1462,7 @@ dependencies = [
"bytes",
"cached",
"censor",
"chrono",
"dashmap",
"dotenv",
"env_logger",
@ -1487,7 +1484,6 @@ dependencies = [
"sha2 0.9.9",
"sqlx",
"thiserror",
"time 0.2.27",
"tokio",
"tokio-stream",
"url",
@ -1728,6 +1724,16 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "num-integer"
version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.15"
@ -1993,12 +1999,6 @@ dependencies = [
"version_check",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
version = "1.0.39"
@ -2195,37 +2195,36 @@ dependencies = [
"url",
]
[[package]]
name = "rustc_version"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
dependencies = [
"semver 0.9.0",
]
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver 1.0.9",
"semver",
]
[[package]]
name = "rustls"
version = "0.19.1"
version = "0.20.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7"
checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033"
dependencies = [
"base64",
"log",
"ring",
"sct",
"webpki",
]
[[package]]
name = "rustls-pemfile"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7522c9de787ff061458fe9a829dc790a3f5b22dc571694fc5883f448b94d9a9"
dependencies = [
"base64",
]
[[package]]
name = "rustversion"
version = "1.0.6"
@ -2256,9 +2255,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "sct"
version = "0.6.1"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce"
checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
dependencies = [
"ring",
"untrusted",
@ -2287,27 +2286,12 @@ dependencies = [
"libc",
]
[[package]]
name = "semver"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
dependencies = [
"semver-parser",
]
[[package]]
name = "semver"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cb243bdfdb5936c8dc3c45762a19d12ab4550cdc753bc247637d4ec35a040fd"
[[package]]
name = "semver-parser"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "serde"
version = "1.0.137"
@ -2508,9 +2492,9 @@ dependencies = [
[[package]]
name = "sqlx"
version = "0.5.13"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "551873805652ba0d912fec5bbb0f8b4cdd96baf8e2ebf5970e5671092966019b"
checksum = "1f82cbe94f41641d6c410ded25bbf5097c240cefdf8e3b06d04198d0a96af6a4"
dependencies = [
"sqlx-core",
"sqlx-macros",
@ -2518,9 +2502,9 @@ dependencies = [
[[package]]
name = "sqlx-core"
version = "0.5.13"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e48c61941ccf5ddcada342cd59e3e5173b007c509e1e8e990dafc830294d9dc5"
checksum = "6b69bf218860335ddda60d6ce85ee39f6cf6e5630e300e19757d1de15886a093"
dependencies = [
"ahash 0.7.6",
"atoi",
@ -2528,6 +2512,7 @@ dependencies = [
"bitflags",
"byteorder",
"bytes",
"chrono",
"crc",
"crossbeam-queue",
"dirs",
@ -2552,6 +2537,7 @@ dependencies = [
"percent-encoding",
"rand",
"rustls",
"rustls-pemfile",
"serde",
"serde_json",
"sha-1",
@ -2561,19 +2547,17 @@ dependencies = [
"sqlx-rt",
"stringprep",
"thiserror",
"time 0.2.27",
"tokio-stream",
"url",
"webpki",
"webpki-roots",
"whoami",
]
[[package]]
name = "sqlx-macros"
version = "0.5.13"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc0fba2b0cae21fc00fe6046f8baa4c7fcb49e379f0f592b04696607f69ed2e1"
checksum = "f40c63177cf23d356b159b60acd27c54af7423f1736988502e36bae9a712118f"
dependencies = [
"dotenv",
"either",
@ -2593,9 +2577,9 @@ dependencies = [
[[package]]
name = "sqlx-rt"
version = "0.5.13"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4db708cd3e459078f85f39f96a00960bd841f66ee2a669e90bf36907f5a79aae"
checksum = "874e93a365a598dc3dadb197565952cb143ae4aa716f7bcc933a8d836f6bf89f"
dependencies = [
"actix-rt",
"once_cell",
@ -2603,70 +2587,12 @@ dependencies = [
"tokio-rustls",
]
[[package]]
name = "standback"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e113fb6f3de07a243d434a56ec6f186dfd51cb08448239fe7bcae73f87ff28ff"
dependencies = [
"version_check",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "stdweb"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d022496b16281348b52d0e30ae99e01a73d737b2f45d38fed4edf79f9325a1d5"
dependencies = [
"discard",
"rustc_version 0.2.3",
"stdweb-derive",
"stdweb-internal-macros",
"stdweb-internal-runtime",
"wasm-bindgen",
]
[[package]]
name = "stdweb-derive"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef"
dependencies = [
"proc-macro2",
"quote",
"serde",
"serde_derive",
"syn",
]
[[package]]
name = "stdweb-internal-macros"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11"
dependencies = [
"base-x",
"proc-macro2",
"quote",
"serde",
"serde_derive",
"serde_json",
"sha1 0.6.1",
"syn",
]
[[package]]
name = "stdweb-internal-runtime"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0"
[[package]]
name = "stringprep"
version = "0.1.2"
@ -2745,17 +2671,12 @@ dependencies = [
[[package]]
name = "time"
version = "0.2.27"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4752a97f8eebd6854ff91f1c1824cd6160626ac4bd44287f7f4ea2035a02a242"
checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
dependencies = [
"const_fn",
"libc",
"serde",
"standback",
"stdweb",
"time-macros 0.1.1",
"version_check",
"wasi 0.10.0+wasi-snapshot-preview1",
"winapi",
]
@ -2769,17 +2690,7 @@ dependencies = [
"libc",
"num_threads",
"serde",
"time-macros 0.2.4",
]
[[package]]
name = "time-macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "957e9c6e26f12cb6d0dd7fc776bb67a706312e7299aed74c8dd5b17ebb27e2f1"
dependencies = [
"proc-macro-hack",
"time-macros-impl",
"time-macros",
]
[[package]]
@ -2788,19 +2699,6 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792"
[[package]]
name = "time-macros-impl"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd3c141a1b43194f3f56a1411225df8646c55781d5f26db825b3d98507eb482f"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"standback",
"syn",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
@ -2859,9 +2757,9 @@ dependencies = [
[[package]]
name = "tokio-rustls"
version = "0.22.0"
version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6"
checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
dependencies = [
"rustls",
"tokio",
@ -3114,9 +3012,9 @@ dependencies = [
[[package]]
name = "wasi"
version = "0.10.2+wasi-snapshot-preview1"
version = "0.10.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]]
name = "wasi"
@ -3202,9 +3100,9 @@ dependencies = [
[[package]]
name = "webpki"
version = "0.21.4"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea"
checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd"
dependencies = [
"ring",
"untrusted",
@ -3212,9 +3110,9 @@ dependencies = [
[[package]]
name = "webpki-roots"
version = "0.21.1"
version = "0.22.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aabe153544e473b775453675851ecc86863d2a81d786d741f6b76778f2a48940"
checksum = "f1c760f0d366a6c24a02ed7816e23e691f5d92291f94d15e836006fd11b04daf"
dependencies = [
"webpki",
]

View File

@ -1,8 +1,7 @@
[package]
name = "labrinth"
version = "2.4.4"
#Team members, please add your emails and usernames
authors = ["geometrically <jai.a@tuta.io>", "Redblueflame <contact@redblueflame.com>", "Aeledfyr <aeledfyr@gmail.com>", "Charalampos Fanoulis <yo@fanoulis.dev>", "AppleTheGolden <scotsbox@protonmail.com>"]
version = "2.5.0"
authors = ["geometrically <jai@modrinth.com>"]
edition = "2018"
license = "AGPL-3.0"
@ -31,7 +30,7 @@ xml-rs = "0.8.4"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_with = "1.12.0"
time = { version = "0.2.27", features = ["serde"] }
chrono = { version = "0.4.19", features = ["serde"]}
rand = "0.8.5"
base64 = "0.13.0"
sha1 = { version = "0.6.1", features = ["std"] }
@ -57,7 +56,7 @@ futures-timer = "3.0.2"
rust-s3 = "0.30.0"
async-trait = "0.1.53"
sqlx = { version = "0.5.11", features = ["runtime-actix-rustls", "postgres", "time", "offline", "macros", "migrate"] }
sqlx = { version = "0.6.0", features = ["runtime-actix-rustls", "postgres", "chrono", "offline", "macros", "migrate"] }
bytes = "1.1.0"

View File

@ -0,0 +1,37 @@
-- Add migration script here
ALTER TABLE mods_categories
ADD COLUMN is_additional BOOLEAN NOT NULL DEFAULT FALSE;
ALTER TABLE mods
ADD COLUMN approved timestamptz NULL;
ALTER TABLE categories
ADD COLUMN header varchar(256) NOT NULL DEFAULT 'Categories';
UPDATE mods
SET approved = published
WHERE status = 1 OR status = 4;
CREATE INDEX mods_slug
ON mods (slug);
CREATE INDEX versions_mod_id
ON versions (mod_id);
CREATE INDEX files_version_id
ON files (version_id);
CREATE INDEX dependencies_dependent_id
ON dependencies (dependent_id);
CREATE INDEX mods_gallery_mod_id
ON mods_gallery(mod_id);
CREATE INDEX game_versions_versions_joining_version_id
ON game_versions_versions(joining_version_id);
CREATE INDEX loaders_versions_version_id
ON loaders_versions(version_id);
CREATE INDEX notifications_user_id
ON notifications(user_id);
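
The migration introduces the `approved` timestamp on mods, the `is_additional` flag on mods_categories, a `header` label on categories, and several indexes. Below is a hedged sketch of reading a project's categories back split by that flag, using the same ARRAY_AGG ... FILTER idea the commit applies in project_item.rs; the connection URL, tokio runtime, project id, and column aliases are illustrative assumptions, not part of the diff.

use sqlx::postgres::PgPoolOptions;
use sqlx::Row;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    // Hypothetical local database URL; labrinth reads its own from the environment.
    let pool = PgPoolOptions::new()
        .connect("postgres://localhost/labrinth")
        .await?;

    // Split one project's categories into display vs. additional using the new flag.
    let row = sqlx::query(
        "
        SELECT
            ARRAY_AGG(DISTINCT c.category) FILTER (WHERE NOT mc.is_additional) display_categories,
            ARRAY_AGG(DISTINCT c.category) FILTER (WHERE mc.is_additional) additional_categories
        FROM mods_categories mc
        INNER JOIN categories c ON mc.joining_category_id = c.id
        WHERE mc.joining_mod_id = $1
        ",
    )
    .bind(1_i64)
    .fetch_one(&pool)
    .await?;

    // FILTER leaves an aggregate NULL when nothing matches, hence the Options.
    let display: Option<Vec<String>> = row.get("display_categories");
    let additional: Option<Vec<String>> = row.get("additional_categories");
    println!("{:?} / {:?}", display, additional);
    Ok(())
}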

File diff suppressed because it is too large.

View File

@ -1,7 +1,8 @@
use super::ids::*;
use super::DatabaseError;
use chrono::DateTime;
use chrono::Utc;
use futures::TryStreamExt;
use time::OffsetDateTime;
pub struct ProjectType {
pub id: ProjectTypeId,
@ -20,7 +21,7 @@ pub struct GameVersion {
pub id: GameVersionId,
pub version: String,
pub version_type: String,
pub date: OffsetDateTime,
pub date: DateTime<Utc>,
pub major: bool,
}
@ -29,6 +30,7 @@ pub struct Category {
pub category: String,
pub project_type: String,
pub icon: String,
pub header: String,
}
pub struct ReportType {
@ -52,6 +54,7 @@ pub struct CategoryBuilder<'a> {
pub name: Option<&'a str>,
pub project_type: Option<&'a ProjectTypeId>,
pub icon: Option<&'a str>,
pub header: Option<&'a str>,
}
impl Category {
@ -60,6 +63,7 @@ impl Category {
name: None,
project_type: None,
icon: None,
header: None,
}
}
@ -145,7 +149,7 @@ impl Category {
{
let result = sqlx::query!(
"
SELECT c.id id, c.category category, c.icon icon, pt.name project_type
SELECT c.id id, c.category category, c.icon icon, c.header header, pt.name project_type
FROM categories c
INNER JOIN project_types pt ON c.project_type = pt.id
ORDER BY c.id
@ -158,6 +162,7 @@ impl Category {
category: c.category,
project_type: c.project_type,
icon: c.icon,
header: c.header
}))
})
.try_collect::<Vec<Category>>()
@ -211,6 +216,16 @@ impl<'a> CategoryBuilder<'a> {
}
}
pub fn header(
self,
header: &'a str,
) -> Result<CategoryBuilder<'a>, DatabaseError> {
Ok(Self {
header: Some(header),
..self
})
}
pub fn project_type(
self,
project_type: &'a ProjectTypeId,
@ -243,13 +258,14 @@ impl<'a> CategoryBuilder<'a> {
})?;
let result = sqlx::query!(
"
INSERT INTO categories (category, project_type, icon)
VALUES ($1, $2, $3)
INSERT INTO categories (category, project_type, icon, header)
VALUES ($1, $2, $3, $4)
RETURNING id
",
self.name,
id as ProjectTypeId,
self.icon
self.icon,
self.header
)
.fetch_one(exec)
.await?;
@ -327,7 +343,7 @@ impl Loader {
let result = sqlx::query!(
"
SELECT l.id id, l.loader loader, l.icon icon,
ARRAY_AGG(DISTINCT pt.name) project_types
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types
FROM loaders l
LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id
LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id
@ -470,7 +486,7 @@ impl<'a> LoaderBuilder<'a> {
pub struct GameVersionBuilder<'a> {
pub version: Option<&'a str>,
pub version_type: Option<&'a str>,
pub date: Option<&'a OffsetDateTime>,
pub date: Option<&'a DateTime<Utc>>,
}
impl GameVersion {
@ -688,10 +704,7 @@ impl<'a> GameVersionBuilder<'a> {
}
}
pub fn created(
self,
created: &'a OffsetDateTime,
) -> GameVersionBuilder<'a> {
pub fn created(self, created: &'a DateTime<Utc>) -> GameVersionBuilder<'a> {
Self {
date: Some(created),
..self
@ -719,7 +732,7 @@ impl<'a> GameVersionBuilder<'a> {
",
self.version,
self.version_type,
self.date.map(|x| time::PrimitiveDateTime::new(x.date(), x.time())),
self.date.map(chrono::DateTime::naive_utc),
)
.fetch_one(exec)
.await?;

View File

@ -1,8 +1,8 @@
#![allow(dead_code)]
// TODO: remove attr once routes are created
use chrono::{DateTime, Utc};
use thiserror::Error;
use time::OffsetDateTime;
pub mod categories;
pub mod ids;
@ -123,14 +123,12 @@ impl ids::ProjectTypeId {
.fetch_optional(exec)
.await?;
Ok(result.map(|r| ids::ProjectTypeId(r.id)))
Ok(result.map(|r| ProjectTypeId(r.id)))
}
}
pub fn convert_postgres_date(input: &str) -> OffsetDateTime {
OffsetDateTime::parse(
format!("{}:00Z", input.replace(' ', "T")),
time::Format::Rfc3339,
)
.unwrap_or_else(|_| OffsetDateTime::now_utc())
pub fn convert_postgres_date(input: &str) -> DateTime<Utc> {
DateTime::parse_from_rfc3339(&*format!("{}:00Z", input.replace(' ', "T")))
.map(|x| x.with_timezone(&Utc))
.unwrap_or_else(|_| Utc::now())
}

View File

@ -1,6 +1,6 @@
use super::ids::*;
use crate::database::models::DatabaseError;
use time::OffsetDateTime;
use chrono::{DateTime, Utc};
pub struct NotificationBuilder {
pub notification_type: Option<String>,
@ -23,7 +23,7 @@ pub struct Notification {
pub text: String,
pub link: String,
pub read: bool,
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
pub actions: Vec<NotificationAction>,
}
@ -72,7 +72,7 @@ impl NotificationBuilder {
text: self.text.clone(),
link: self.link.clone(),
read: false,
created: OffsetDateTime::now_utc(),
created: Utc::now(),
actions,
}
.insert(&mut *transaction)
@ -124,7 +124,7 @@ impl Notification {
let result = sqlx::query!(
"
SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = $1
@ -138,24 +138,21 @@ impl Notification {
if let Some(row) = result {
let mut actions: Vec<NotificationAction> = Vec::new();
row.actions
.unwrap_or_default()
.split(" ~~~~ ")
.for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();
row.actions.unwrap_or_default().into_iter().for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(
action[0].parse().unwrap_or(0),
),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(
action[0].parse().unwrap_or(0),
),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
Ok(Some(Notification {
id,
@ -187,7 +184,7 @@ impl Notification {
sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = ANY($1)
@ -202,7 +199,7 @@ impl Notification {
let id = NotificationId(row.id);
let mut actions: Vec<NotificationAction> = Vec::new();
row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
row.actions.unwrap_or_default().into_iter().for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();
if action.len() >= 3 {
@ -245,7 +242,7 @@ impl Notification {
sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
ARRAY_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method) filter (where na.id is not null) actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.user_id = $1
@ -259,7 +256,7 @@ impl Notification {
let id = NotificationId(row.id);
let mut actions: Vec<NotificationAction> = Vec::new();
row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
row.actions.unwrap_or_default().into_iter().for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();
if action.len() >= 3 {

View File

@ -1,6 +1,6 @@
use super::ids::*;
use crate::database::models::convert_postgres_date;
use time::OffsetDateTime;
use chrono::{DateTime, Utc};
#[derive(Clone, Debug)]
pub struct DonationUrl {
@ -43,7 +43,7 @@ pub struct GalleryItem {
pub featured: bool,
pub title: Option<String>,
pub description: Option<String>,
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
}
impl GalleryItem {
@ -87,6 +87,7 @@ pub struct ProjectBuilder {
pub license_url: Option<String>,
pub discord_url: Option<String>,
pub categories: Vec<CategoryId>,
pub additional_categories: Vec<CategoryId>,
pub initial_versions: Vec<super::version_item::VersionBuilder>,
pub status: StatusId,
pub client_side: SideTypeId,
@ -110,8 +111,9 @@ impl ProjectBuilder {
description: self.description,
body: self.body,
body_url: None,
published: time::OffsetDateTime::now_utc(),
updated: time::OffsetDateTime::now_utc(),
published: Utc::now(),
updated: Utc::now(),
approved: None,
status: self.status,
downloads: 0,
follows: 0,
@ -148,8 +150,8 @@ impl ProjectBuilder {
for category in self.categories {
sqlx::query!(
"
INSERT INTO mods_categories (joining_mod_id, joining_category_id)
VALUES ($1, $2)
INSERT INTO mods_categories (joining_mod_id, joining_category_id, is_additional)
VALUES ($1, $2, FALSE)
",
self.project_id as ProjectId,
category as CategoryId,
@ -158,6 +160,19 @@ impl ProjectBuilder {
.await?;
}
for category in self.additional_categories {
sqlx::query!(
"
INSERT INTO mods_categories (joining_mod_id, joining_category_id, is_additional)
VALUES ($1, $2, TRUE)
",
self.project_id as ProjectId,
category as CategoryId,
)
.execute(&mut *transaction)
.await?;
}
Ok(self.project_id)
}
}
@ -170,8 +185,9 @@ pub struct Project {
pub description: String,
pub body: String,
pub body_url: Option<String>,
pub published: OffsetDateTime,
pub updated: OffsetDateTime,
pub published: DateTime<Utc>,
pub updated: DateTime<Utc>,
pub approved: Option<DateTime<Utc>>,
pub status: StatusId,
pub downloads: i32,
pub follows: i32,
@ -248,7 +264,7 @@ impl Project {
"
SELECT project_type, title, description, downloads, follows,
icon_url, body, body_url, published,
updated, status,
updated, approved, status,
issues_url, source_url, wiki_url, discord_url, license_url,
team_id, client_side, server_side, license, slug,
moderation_message, moderation_message_body
@ -286,6 +302,7 @@ impl Project {
follows: row.follows,
moderation_message: row.moderation_message,
moderation_message_body: row.moderation_message_body,
approved: row.approved,
}))
} else {
Ok(None)
@ -307,7 +324,7 @@ impl Project {
"
SELECT id, project_type, title, description, downloads, follows,
icon_url, body, body_url, published,
updated, status,
updated, approved, status,
issues_url, source_url, wiki_url, discord_url, license_url,
team_id, client_side, server_side, license, slug,
moderation_message, moderation_message_body
@ -343,6 +360,7 @@ impl Project {
follows: m.follows,
moderation_message: m.moderation_message,
moderation_message_body: m.moderation_message_body,
approved: m.approved,
}))
})
.try_collect::<Vec<Project>>()
@ -499,7 +517,7 @@ impl Project {
let id = sqlx::query!(
"
SELECT id FROM mods
WHERE LOWER(slug) = LOWER($1)
WHERE slug = LOWER($1)
",
slug
)
@ -523,7 +541,7 @@ impl Project {
let id = sqlx::query!(
"
SELECT id FROM mods
WHERE LOWER(slug) = LOWER($1)
WHERE slug = LOWER($1)
",
slug
)
@ -607,13 +625,15 @@ impl Project {
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
m.updated updated, m.status status,
m.updated updated, m.approved approved, m.status status,
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
STRING_AGG(DISTINCT c.category, ' ~~~~ ') categories, STRING_AGG(DISTINCT v.id::text, ' ~~~~ ') versions,
STRING_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' '), ' ~~~~ ') gallery,
STRING_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url, ' ~~~~ ') donations
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,
ARRAY_AGG(DISTINCT ca.category) filter (where ca.category is not null) additional_categories,
ARRAY_AGG(DISTINCT v.id || ' |||| ' || v.date_published) filter (where v.id is not null) versions,
ARRAY_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ')) filter (where mg.image_url is not null) gallery,
ARRAY_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url) filter (where md.joining_platform_id is not null) donations
FROM mods m
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN statuses s ON s.id = m.status
@ -624,6 +644,7 @@ impl Project {
LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id
LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
LEFT JOIN categories c ON mc.joining_category_id = c.id
LEFT JOIN categories ca ON mc.joining_category_id = c.id AND mc.is_additional = TRUE
LEFT JOIN versions v ON v.mod_id = m.id
LEFT JOIN mods_gallery mg ON mg.mod_id = m.id
WHERE m.id = $1
@ -661,24 +682,44 @@ impl Project {
follows: m.follows,
moderation_message: m.moderation_message,
moderation_message_body: m.moderation_message_body,
approved: m.approved,
},
project_type: m.project_type_name,
categories: m
.categories
.map(|x| x.split(" ~~~~ ").map(|x| x.to_string()).collect())
.unwrap_or_default(),
versions: m
.versions
.map(|x| {
x.split(" ~~~~ ")
.map(|x| VersionId(x.parse().unwrap_or_default()))
.collect()
})
categories: m.categories.unwrap_or_default(),
additional_categories: m
.additional_categories
.unwrap_or_default(),
versions: {
let versions = m.versions.unwrap_or_default();
let mut v = versions
.into_iter()
.flat_map(|x| {
let version: Vec<&str> =
x.split(" |||| ").collect();
if version.len() >= 2 {
Some((
VersionId(
version[0].parse().unwrap_or_default(),
),
convert_postgres_date(version[1])
.timestamp(),
))
} else {
None
}
})
.collect::<Vec<(VersionId, i64)>>();
v.sort_by(|a, b| a.1.cmp(&b.1));
v.into_iter().map(|x| x.0).collect()
},
donation_urls: m
.donations
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();
@ -700,7 +741,7 @@ impl Project {
gallery_items: m
.gallery
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();
@ -758,13 +799,15 @@ impl Project {
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
m.updated updated, m.status status,
m.updated updated, m.approved approved, m.status status,
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
STRING_AGG(DISTINCT c.category, ' ~~~~ ') categories, STRING_AGG(DISTINCT v.id::text, ' ~~~~ ') versions,
STRING_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' '), ' ~~~~ ') gallery,
STRING_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url, ' ~~~~ ') donations
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,
ARRAY_AGG(DISTINCT ca.category) filter (where ca.category is not null) additional_categories,
ARRAY_AGG(DISTINCT v.id || ' |||| ' || v.date_published) filter (where v.id is not null) versions,
ARRAY_AGG(DISTINCT mg.image_url || ' |||| ' || mg.featured || ' |||| ' || mg.created || ' |||| ' || COALESCE(mg.title, ' ') || ' |||| ' || COALESCE(mg.description, ' ')) filter (where mg.image_url is not null) gallery,
ARRAY_AGG(DISTINCT md.joining_platform_id || ' |||| ' || dp.short || ' |||| ' || dp.name || ' |||| ' || md.url) filter (where md.joining_platform_id is not null) donations
FROM mods m
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN statuses s ON s.id = m.status
@ -774,7 +817,8 @@ impl Project {
LEFT JOIN mods_donations md ON md.joining_mod_id = m.id
LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id
LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
LEFT JOIN categories c ON mc.joining_category_id = c.id
LEFT JOIN categories c ON mc.joining_category_id = c.id AND mc.is_additional = FALSE
LEFT JOIN categories ca ON mc.joining_category_id = c.id AND mc.is_additional = TRUE
LEFT JOIN versions v ON v.mod_id = m.id
LEFT JOIN mods_gallery mg ON mg.mod_id = m.id
WHERE m.id = ANY($1)
@ -812,14 +856,40 @@ impl Project {
follows: m.follows,
moderation_message: m.moderation_message,
moderation_message_body: m.moderation_message_body,
approved: m.approved
},
project_type: m.project_type_name,
categories: m.categories.map(|x| x.split(" ~~~~ ").map(|x| x.to_string()).collect()).unwrap_or_default(),
versions: m.versions.map(|x| x.split(" ~~~~ ").map(|x| VersionId(x.parse().unwrap_or_default())).collect()).unwrap_or_default(),
categories: m.categories.unwrap_or_default(),
additional_categories: m.additional_categories.unwrap_or_default(),
versions: {
let versions = m.versions.unwrap_or_default();
let mut v = versions
.into_iter()
.flat_map(|x| {
let version: Vec<&str> =
x.split(" |||| ").collect();
if version.len() >= 2 {
Some((
VersionId(version[0].parse().unwrap_or_default()),
convert_postgres_date(version[1])
.timestamp(),
))
} else {
None
}
})
.collect::<Vec<(VersionId, i64)>>();
v.sort_by(|a, b| a.1.cmp(&b.1));
v.into_iter().map(|x| x.0).collect()
},
gallery_items: m
.gallery
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();
@ -840,7 +910,7 @@ impl Project {
donation_urls: m
.donations
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|d| {
let strings: Vec<&str> = d.split(" |||| ").collect();
@ -873,6 +943,7 @@ pub struct QueryProject {
pub inner: Project,
pub project_type: String,
pub categories: Vec<String>,
pub additional_categories: Vec<String>,
pub versions: Vec<VersionId>,
pub donation_urls: Vec<DonationUrl>,
pub gallery_items: Vec<GalleryItem>,

View File

@ -1,5 +1,5 @@
use super::ids::*;
use time::OffsetDateTime;
use chrono::{DateTime, Utc};
pub struct Report {
pub id: ReportId,
@ -9,7 +9,7 @@ pub struct Report {
pub user_id: Option<UserId>,
pub body: String,
pub reporter: UserId,
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
}
pub struct QueryReport {
@ -20,7 +20,7 @@ pub struct QueryReport {
pub user_id: Option<UserId>,
pub body: String,
pub reporter: UserId,
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
}
impl Report {

View File

@ -1,5 +1,5 @@
use super::ids::{ProjectId, UserId};
use time::OffsetDateTime;
use chrono::{DateTime, Utc};
pub struct User {
pub id: UserId,
@ -9,7 +9,7 @@ pub struct User {
pub email: Option<String>,
pub avatar_url: Option<String>,
pub bio: Option<String>,
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
pub role: String,
}

View File

@ -1,8 +1,8 @@
use super::ids::*;
use super::DatabaseError;
use crate::database::models::convert_postgres_date;
use chrono::{DateTime, Utc};
use std::collections::HashMap;
use time::OffsetDateTime;
pub struct VersionBuilder {
pub version_id: VersionId,
@ -144,7 +144,7 @@ impl VersionBuilder {
version_number: self.version_number,
changelog: self.changelog,
changelog_url: None,
date_published: OffsetDateTime::now_utc(),
date_published: Utc::now(),
downloads: 0,
featured: self.featured,
version_type: self.version_type,
@ -244,7 +244,7 @@ pub struct Version {
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: OffsetDateTime,
pub date_published: DateTime<Utc>,
pub downloads: i32,
pub version_type: String,
pub featured: bool,
@ -614,10 +614,10 @@ impl Version {
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured,
STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
ARRAY_AGG(DISTINCT gv.version || ' |||| ' || gv.created) filter (where gv.version is not null) game_versions, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
ARRAY_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename) filter (where f.id is not null) files,
ARRAY_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id) filter (where h.hash is not null) hashes,
ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' ')) filter (where d.dependency_type is not null) dependencies
FROM versions v
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
@ -649,7 +649,7 @@ impl Version {
let hashes: Vec<(FileId, String, Vec<u8>)> = v
.hashes
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();
@ -667,7 +667,7 @@ impl Version {
v.files
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|f| {
let file: Vec<&str> = f.split(" |||| ").collect();
@ -703,38 +703,33 @@ impl Version {
let game_versions = v.game_versions.unwrap_or_default();
let mut gv = game_versions
.split(" ~~~~ ")
.into_iter()
.flat_map(|x| {
let version: Vec<&str> =
x.split(" |||| ").collect();
if version.len() >= 2 {
Some((
version[0],
version[0].to_string(),
convert_postgres_date(version[1])
.unix_timestamp(),
.timestamp(),
))
} else {
None
}
})
.collect::<Vec<(&str, i64)>>();
.collect::<Vec<(String, i64)>>();
gv.sort_by(|a, b| a.1.cmp(&b.1));
gv.into_iter().map(|x| x.0.to_string()).collect()
gv.into_iter().map(|x| x.0).collect()
},
loaders: v
.loaders
.unwrap_or_default()
.split(" ~~~~ ")
.map(|x| x.to_string())
.collect(),
loaders: v.loaders.unwrap_or_default(),
featured: v.featured,
dependencies: v
.dependencies
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|f| {
let dependency: Vec<&str> = f.split(" |||| ").collect();
@ -789,10 +784,10 @@ impl Version {
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured,
STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
ARRAY_AGG(DISTINCT gv.version || ' |||| ' || gv.created) filter (where gv.version is not null) game_versions, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
ARRAY_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename) filter (where f.id is not null) files,
ARRAY_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id) filter (where h.hash is not null) hashes,
ARRAY_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' ')) filter (where d.dependency_type is not null) dependencies
FROM versions v
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
@ -821,7 +816,9 @@ impl Version {
date_published: v.date_published,
downloads: v.downloads,
files: {
let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").flat_map(|f| {
let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default()
.into_iter()
.flat_map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();
if hash.len() >= 3 {
@ -835,7 +832,9 @@ impl Version {
}
}).collect();
v.files.unwrap_or_default().split(" ~~~~ ").flat_map(|f| {
v.files.unwrap_or_default()
.into_iter()
.flat_map(|f| {
let file: Vec<&str> = f.split(" |||| ").collect();
if file.len() >= 5 {
@ -867,29 +866,31 @@ impl Version {
.unwrap_or_default();
let mut gv = game_versions
.split(" ~~~~ ")
.into_iter()
.flat_map(|x| {
let version: Vec<&str> = x.split(" |||| ").collect();
if version.len() >= 2 {
Some((version[0], convert_postgres_date(version[1]).unix_timestamp()))
Some((version[0].to_string(), convert_postgres_date(version[1]).timestamp()))
} else {
None
}
})
.collect::<Vec<(&str, i64)>>();
.collect::<Vec<(String, i64)>>();
gv.sort_by(|a, b| a.1.cmp(&b.1));
gv.into_iter()
.map(|x| x.0.to_string())
.map(|x| x.0)
.collect()
},
loaders: v.loaders.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
loaders: v.loaders.unwrap_or_default(),
featured: v.featured,
dependencies: v.dependencies
.unwrap_or_default()
.split(" ~~~~ ")
.into_iter()
.flat_map(|f| {
let dependency: Vec<&str> = f.split(" |||| ").collect();
@ -952,7 +953,7 @@ pub struct QueryVersion {
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: OffsetDateTime,
pub date_published: DateTime<Utc>,
pub downloads: i32,
pub version_type: String,

View File

@ -1,8 +1,8 @@
use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
use bytes::Bytes;
use chrono::Utc;
use sha2::Digest;
use time::OffsetDateTime;
pub struct MockHost(());
@ -39,7 +39,7 @@ impl FileHost for MockHost {
content_sha1,
content_md5: None,
content_type: content_type.to_string(),
upload_timestamp: OffsetDateTime::now_utc().unix_timestamp() as u64,
upload_timestamp: Utc::now().timestamp() as u64,
})
}

View File

@ -3,11 +3,11 @@ use crate::file_hosting::{
};
use async_trait::async_trait;
use bytes::Bytes;
use chrono::Utc;
use s3::bucket::Bucket;
use s3::creds::Credentials;
use s3::region::Region;
use sha2::Digest;
use time::OffsetDateTime;
pub struct S3Host {
bucket: Bucket,
@ -85,7 +85,7 @@ impl FileHost for S3Host {
content_sha1,
content_md5: None,
content_type: content_type.to_string(),
upload_timestamp: OffsetDateTime::now_utc().unix_timestamp() as u64,
upload_timestamp: Utc::now().timestamp() as u64,
})
}

View File

@ -1,7 +1,7 @@
use super::ids::Base62Id;
use super::users::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
@ -18,8 +18,7 @@ pub struct Notification {
pub text: String,
pub link: String,
pub read: bool,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
pub actions: Vec<NotificationAction>,
}

View File

@ -3,8 +3,8 @@ use super::teams::TeamId;
use super::users::UserId;
use crate::database::models::project_item::QueryProject;
use crate::database::models::version_item::QueryVersion;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use validator::Validate;
/// The ID of a specific project, encoded as base62 for usage in the API
@ -40,12 +40,14 @@ pub struct Project {
pub body_url: Option<String>,
/// The date at which the project was first published.
#[serde(with = "crate::util::time_ser")]
pub published: OffsetDateTime,
pub published: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
/// The date at which the project was first published.
pub updated: OffsetDateTime,
pub updated: DateTime<Utc>,
/// The date at which the project was first approved.
//pub approved: Option<DateTime<Utc>>,
pub approved: Option<DateTime<Utc>>,
/// The status of the project
pub status: ProjectStatus,
@ -67,6 +69,9 @@ pub struct Project {
/// A list of the categories that the project is in.
pub categories: Vec<String>,
/// A list of the categories that the project is in.
pub additional_categories: Vec<String>,
/// A list of ids for versions of the project.
pub versions: Vec<VersionId>,
/// The URL of the icon of the project
@ -100,6 +105,7 @@ impl From<QueryProject> for Project {
body_url: m.body_url,
published: m.published,
updated: m.updated,
approved: m.approved,
status: data.status,
moderator_message: if let Some(message) = m.moderation_message {
Some(ModeratorMessage {
@ -119,6 +125,7 @@ impl From<QueryProject> for Project {
downloads: m.downloads as u32,
followers: m.follows as u32,
categories: data.categories,
additional_categories: data.additional_categories,
versions: data.versions.into_iter().map(|v| v.into()).collect(),
icon_url: m.icon_url,
issues_url: m.issues_url,
@ -156,8 +163,7 @@ pub struct GalleryItem {
pub featured: bool,
pub title: Option<String>,
pub description: Option<String>,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
@ -303,9 +309,8 @@ pub struct Version {
/// A link to the changelog for this version of the project. (Deprecated), being replaced by `changelog`
pub changelog_url: Option<String>,
#[serde(with = "crate::util::time_ser")]
/// The date that this version was published.
pub date_published: OffsetDateTime,
pub date_published: DateTime<Utc>,
/// The number of downloads this specific version has had.
pub downloads: u32,
/// The type of the release - `Alpha`, `Beta`, or `Release`.

View File

@ -1,7 +1,7 @@
use super::ids::Base62Id;
use crate::models::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
@ -16,8 +16,7 @@ pub struct Report {
pub item_type: ItemType,
pub reporter: UserId,
pub body: String,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
}
#[derive(Serialize, Deserialize, Clone)]

View File

@ -1,6 +1,6 @@
use super::ids::Base62Id;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
@ -18,8 +18,7 @@ pub struct User {
pub email: Option<String>,
pub avatar_url: Option<String>,
pub bio: Option<String>,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
pub role: Role,
}

View File

@ -21,10 +21,10 @@ use crate::util::auth::get_github_user_from_token;
use actix_web::http::StatusCode;
use actix_web::web::{scope, Data, Query, ServiceConfig};
use actix_web::{get, HttpResponse};
use chrono::Utc;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use thiserror::Error;
use time::OffsetDateTime;
pub fn config(cfg: &mut ServiceConfig) {
cfg.service(scope("auth").service(auth_callback).service(init));
@ -176,9 +176,9 @@ pub async fn auth_callback(
.await?;
if let Some(result) = result_option {
let duration = result.expires - OffsetDateTime::now_utc();
let duration: chrono::Duration = result.expires - Utc::now();
if duration.whole_seconds() < 0 {
if duration.num_seconds() < 0 {
return Err(AuthorizationError::InvalidCredentials);
}
@ -255,7 +255,7 @@ pub async fn auth_callback(
email: user.email,
avatar_url: Some(user.avatar_url),
bio: user.bio,
created: OffsetDateTime::now_utc(),
created: Utc::now(),
role: Role::Developer.to_string(),
}
.insert(&mut transaction)

View File

@ -108,7 +108,7 @@ pub async fn maven_metadata(
.map(|x| x.version_number.clone())
.collect::<Vec<_>>(),
},
last_updated: data.inner.updated.format("%Y%m%d%H%M%S"),
last_updated: data.inner.updated.format("%Y%m%d%H%M%S").to_string(),
},
};
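
The added `.to_string()` above is needed because chrono's `format()` is lazy: it returns a `DelayedFormat` that implements `Display` rather than producing an owned `String` the way the old `time` 0.2 call did. A small sketch, assuming chrono 0.4:

use chrono::Utc;

fn main() {
    // format() returns chrono::format::DelayedFormat, which is Display but not String,
    // so an owned field like the maven metadata's last_updated needs an explicit to_string().
    let last_updated: String = Utc::now().format("%Y%m%d%H%M%S").to_string();
    println!("{}", last_updated);
}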

View File

@ -14,13 +14,13 @@ use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
use actix_web::web::Data;
use actix_web::{post, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::collections::HashSet;
use std::sync::Arc;
use thiserror::Error;
use time::OffsetDateTime;
use validator::Validate;
#[derive(Error, Debug)]
@ -166,6 +166,9 @@ struct ProjectCreateData {
#[validate(length(max = 3))]
/// A list of the categories that the project is in.
pub categories: Vec<String>,
#[validate(length(max = 256))]
/// A list of the categories that the project is in.
pub additional_categories: Vec<String>,
#[validate(url, length(max = 2048))]
/// An optional link to where to submit bugs or issues with the project.
@ -388,7 +391,7 @@ pub async fn project_create_inner(
{
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mods WHERE slug = $1)
SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1))
",
create_data.slug
)
@ -522,7 +525,7 @@ pub async fn project_create_inner(
featured: item.featured,
title: item.title.clone(),
description: item.description.clone(),
created: OffsetDateTime::now_utc(),
created: Utc::now(),
});
continue;
@ -593,6 +596,19 @@ pub async fn project_create_inner(
categories.push(id);
}
let mut additional_categories =
Vec::with_capacity(project_create_data.additional_categories.len());
for category in &project_create_data.additional_categories {
let id = models::categories::Category::get_id_project(
category,
project_type_id,
&mut *transaction,
)
.await?
.ok_or_else(|| CreateError::InvalidCategory(category.clone()))?;
additional_categories.push(id);
}
let team = models::team_item::TeamBuilder {
members: vec![models::team_item::TeamMemberBuilder {
user_id: current_user.id.into(),
@ -698,6 +714,7 @@ pub async fn project_create_inner(
license_url: project_create_data.license_url,
discord_url: project_create_data.discord_url,
categories,
additional_categories,
initial_versions: versions,
status: status_id,
client_side: client_side_id,
@ -718,7 +735,7 @@ pub async fn project_create_inner(
.collect(),
};
let now = OffsetDateTime::now_utc();
let now = Utc::now();
let response = crate::models::projects::Project {
id: project_id,
@ -731,6 +748,7 @@ pub async fn project_create_inner(
body_url: None,
published: now,
updated: now,
approved: None,
status: status.clone(),
moderator_message: None,
license: License {
@ -743,6 +761,7 @@ pub async fn project_create_inner(
downloads: 0,
followers: 0,
categories: project_create_data.categories,
additional_categories: project_create_data.additional_categories,
versions: project_builder
.initial_versions
.iter()

View File

@ -11,12 +11,12 @@ use crate::util::auth::{get_user_from_headers, is_authorized};
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::{PgPool, Row};
use std::sync::Arc;
use time::OffsetDateTime;
use validator::Validate;
#[get("search")]
@ -112,7 +112,7 @@ pub async fn project_get_check(
sqlx::query!(
"
SELECT id FROM mods
WHERE LOWER(slug) = LOWER($1)
WHERE slug = LOWER($1)
",
&slug
)
@ -126,7 +126,7 @@ pub async fn project_get_check(
sqlx::query!(
"
SELECT id FROM mods
WHERE LOWER(slug) = LOWER($1)
WHERE slug = LOWER($1)
",
&slug
)
@ -259,6 +259,8 @@ pub struct EditProject {
pub body: Option<String>,
#[validate(length(max = 3))]
pub categories: Option<Vec<String>>,
#[validate(length(max = 256))]
pub additional_categories: Option<Vec<String>>,
#[serde(
default,
skip_serializing_if = "Option::is_none",
@ -483,6 +485,21 @@ pub async fn project_edit(
)
})?;
if status == &ProjectStatus::Approved
|| status == &ProjectStatus::Unlisted
{
sqlx::query!(
"
UPDATE mods
SET published = NOW()
WHERE id = $1 AND approved = NULL
",
id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;
}
sqlx::query!(
"
UPDATE mods
@ -513,7 +530,7 @@ pub async fn project_edit(
sqlx::query!(
"
DELETE FROM mods_categories
WHERE joining_mod_id = $1
WHERE joining_mod_id = $1 AND is_additional = FALSE
",
id as database::models::ids::ProjectId,
)
@ -536,8 +553,8 @@ pub async fn project_edit(
sqlx::query!(
"
INSERT INTO mods_categories (joining_mod_id, joining_category_id)
VALUES ($1, $2)
INSERT INTO mods_categories (joining_mod_id, joining_category_id, is_additional)
VALUES ($1, $2, FALSE)
",
id as database::models::ids::ProjectId,
category_id as database::models::ids::CategoryId,
@ -547,6 +564,51 @@ pub async fn project_edit(
}
}
if let Some(categories) = &new_project.additional_categories {
if !perms.contains(Permissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You do not have the permissions to edit the additional categories of this project!"
.to_string(),
));
}
sqlx::query!(
"
DELETE FROM mods_categories
WHERE joining_mod_id = $1 AND is_additional = TRUE
",
id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;
for category in categories {
let category_id =
database::models::categories::Category::get_id(
category,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"Category {} does not exist.",
category.clone()
))
})?;
sqlx::query!(
"
INSERT INTO mods_categories (joining_mod_id, joining_category_id, is_additional)
VALUES ($1, $2, TRUE)
",
id as database::models::ids::ProjectId,
category_id as database::models::ids::CategoryId,
)
.execute(&mut *transaction)
.await?;
}
}
if let Some(issues_url) = &new_project.issues_url {
if !perms.contains(Permissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
@ -1178,7 +1240,7 @@ pub async fn add_gallery_item(
featured: item.featured,
title: item.title,
description: item.description,
created: OffsetDateTime::now_utc(),
created: Utc::now(),
}
.insert(&mut transaction)
.await?;

View File

@ -5,10 +5,10 @@ use crate::util::auth::{
check_is_moderator_from_headers, get_user_from_headers,
};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::StreamExt;
use serde::Deserialize;
use sqlx::PgPool;
use time::OffsetDateTime;
#[derive(Deserialize)]
pub struct CreateReport {
@ -60,7 +60,7 @@ pub async fn report_create(
user_id: None,
body: new_report.body.clone(),
reporter: current_user.id.into(),
created: OffsetDateTime::now_utc(),
created: Utc::now(),
};
match new_report.item_type {
@ -109,7 +109,7 @@ pub async fn report_create(
item_type: new_report.item_type.clone(),
reporter: current_user.id,
body: new_report.body.clone(),
created: OffsetDateTime::now_utc(),
created: Utc::now(),
}))
}

View File

@ -5,9 +5,9 @@ use crate::database::models::categories::{
};
use crate::util::auth::check_is_admin_from_headers;
use actix_web::{delete, get, put, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use models::categories::{Category, GameVersion, Loader};
use sqlx::PgPool;
use time::OffsetDateTime;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
@ -38,6 +38,7 @@ pub struct CategoryData {
icon: String,
name: String,
project_type: String,
header: String,
}
// TODO: searching / filtering? Could be used to implement a live
@ -53,6 +54,7 @@ pub async fn category_list(
icon: x.icon,
name: x.category,
project_type: x.project_type,
header: x.header,
})
.collect::<Vec<_>>();
@ -84,6 +86,7 @@ pub async fn category_create(
.name(&new_category.name)?
.project_type(&project_type)?
.icon(&new_category.icon)?
.header(&new_category.header)?
.insert(&**pool)
.await?;
@ -202,8 +205,7 @@ pub async fn loader_delete(
pub struct GameVersionQueryData {
pub version: String,
pub version_type: String,
#[serde(with = "crate::util::time_ser")]
pub date: OffsetDateTime,
pub date: DateTime<Utc>,
pub major: bool,
}
@ -243,7 +245,7 @@ pub async fn game_version_list(
pub struct GameVersionData {
#[serde(rename = "type")]
type_: String,
date: Option<OffsetDateTime>,
date: Option<DateTime<Utc>>,
}
#[put("game_version/{name}")]
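
The category routes above now return a header grouping alongside icon, name, and project_type. A rough sketch of the serialized shape of a single entry, assuming CategoryData derives Serialize like the other tag responses (the struct mirror and the field values here are invented for illustration):

use serde::Serialize;

// Hypothetical mirror of CategoryData, for illustration only.
#[derive(Serialize)]
struct CategoryEntry {
    icon: String,
    name: String,
    project_type: String,
    header: String,
}

fn main() {
    let entry = CategoryEntry {
        icon: "<svg>...</svg>".to_string(),
        name: "adventure".to_string(),
        project_type: "mod".to_string(),
        header: "categories".to_string(),
    };
    // Each tag entry now serializes with the extra `header` grouping field.
    println!("{}", serde_json::to_string(&entry).unwrap());
}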

View File

@ -7,10 +7,10 @@ use crate::util::auth::{
};
use actix_web::web;
use actix_web::{get, post, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use time::OffsetDateTime;
#[derive(Serialize, Deserialize)]
pub struct Report {
@ -20,8 +20,7 @@ pub struct Report {
pub item_type: ItemType,
pub reporter: UserId,
pub body: String,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
pub created: DateTime<Utc>,
}
#[derive(Serialize, Deserialize, Clone)]
@ -93,7 +92,7 @@ pub async fn report_create(
user_id: None,
body: new_report.body.clone(),
reporter: current_user.id.into(),
created: OffsetDateTime::now_utc(),
created: Utc::now(),
};
match new_report.item_type {
@ -142,7 +141,7 @@ pub async fn report_create(
item_type: new_report.item_type.clone(),
reporter: current_user.id,
body: new_report.body.clone(),
created: OffsetDateTime::now_utc(),
created: Utc::now(),
}))
}

View File

@ -9,10 +9,10 @@ use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use crate::{database, models};
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
use time::OffsetDateTime;
/// A specific version of a mod
#[derive(Serialize, Deserialize)]
@ -25,8 +25,7 @@ pub struct LegacyVersion {
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
#[serde(with = "crate::util::time_ser")]
pub date_published: OffsetDateTime,
pub date_published: DateTime<Utc>,
pub downloads: u32,
pub version_type: VersionType,
pub files: Vec<VersionFile>,

View File

@ -18,10 +18,10 @@ use crate::validate::{validate_file, ValidationResult};
use actix_multipart::{Field, Multipart};
use actix_web::web::Data;
use actix_web::{post, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use time::OffsetDateTime;
use validator::Validate;
#[derive(Serialize, Deserialize, Validate, Clone)]
@ -402,7 +402,7 @@ async fn version_create_inner(
version_number: builder.version_number.clone(),
changelog: builder.changelog.clone(),
changelog_url: None,
date_published: OffsetDateTime::now_utc(),
date_published: Utc::now(),
downloads: 0,
version_type: version_data.release_channel,
files: builder
@ -722,23 +722,18 @@ pub async fn upload_file(
for file in &format.files {
if let Some(dep) = res.iter().find(|x| {
x.hash.as_deref()
Some(&*x.hash)
== file
.hashes
.get(&PackFileHash::Sha1)
.map(|x| x.as_bytes())
}) {
if let Some(project_id) = dep.project_id {
if let Some(version_id) = dep.version_id {
dependencies.push(DependencyBuilder {
project_id: Some(models::ProjectId(project_id)),
version_id: Some(models::VersionId(version_id)),
file_name: None,
dependency_type: DependencyType::Embedded
.to_string(),
});
}
}
dependencies.push(DependencyBuilder {
project_id: Some(models::ProjectId(dep.project_id)),
version_id: Some(models::VersionId(dep.version_id)),
file_name: None,
dependency_type: DependencyType::Embedded.to_string(),
});
} else if let Some(first_download) = file.downloads.first() {
dependencies.push(DependencyBuilder {
project_id: None,

View File

@ -73,9 +73,8 @@ pub enum VersionIndexingError {
}
use crate::util::env::parse_var;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use time::Format::Rfc3339;
use time::OffsetDateTime;
use tokio_stream::wrappers::IntervalStream;
#[derive(Deserialize)]
@ -88,8 +87,8 @@ struct VersionFormat<'a> {
id: String,
#[serde(rename = "type")]
type_: std::borrow::Cow<'a, str>,
#[serde(rename = "releaseTime", with = "crate::util::time_ser")]
release_time: OffsetDateTime,
#[serde(rename = "releaseTime")]
release_time: DateTime<Utc>,
}
async fn update_versions(
@ -128,26 +127,30 @@ async fn update_versions(
lazy_static::lazy_static! {
/// Mojank for some reason has versions released at the same DateTime. This hardcodes distinct times for them,
/// since most of our ordering logic relies on DateTime
static ref HALL_OF_SHAME_2: [(&'static str, OffsetDateTime); 4] = [
static ref HALL_OF_SHAME_2: [(&'static str, chrono::DateTime<chrono::Utc>); 4] = [
(
"1.4.5",
OffsetDateTime::parse("2012-12-19T22:00:00+00:00", Rfc3339)
chrono::DateTime::parse_from_rfc3339("2012-12-19T22:00:00+00:00")
.unwrap()
.into(),
),
(
"1.4.6",
OffsetDateTime::parse("2012-12-19T22:00:01+00:00", Rfc3339)
chrono::DateTime::parse_from_rfc3339("2012-12-19T22:00:01+00:00")
.unwrap()
.into(),
),
(
"1.6.3",
OffsetDateTime::parse("2013-09-13T10:54:41+00:00", Rfc3339)
chrono::DateTime::parse_from_rfc3339("2013-09-13T10:54:41+00:00")
.unwrap()
.into(),
),
(
"13w37b",
OffsetDateTime::parse("2013-09-13T10:54:42+00:00", Rfc3339)
chrono::DateTime::parse_from_rfc3339("2013-09-13T10:54:42+00:00")
.unwrap()
.into(),
),
];
}
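
The hard-coded release times above are now built with chrono: parse_from_rfc3339 yields a DateTime<FixedOffset>, and the trailing .into() normalizes it to DateTime<Utc>. A minimal standalone sketch of that conversion:

use chrono::{DateTime, FixedOffset, TimeZone, Utc};

fn main() {
    // parse_from_rfc3339 keeps the offset that was written in the string...
    let fixed: DateTime<FixedOffset> =
        DateTime::parse_from_rfc3339("2012-12-19T22:00:00+00:00").unwrap();
    // ...and .into() uses From<DateTime<FixedOffset>> for DateTime<Utc> to normalize it.
    let utc: DateTime<Utc> = fixed.into();
    assert_eq!(utc, Utc.ymd(2012, 12, 19).and_hms(22, 0, 0));
}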

View File

@ -6,26 +6,23 @@ use crate::database::models::ProjectId;
use crate::search::UploadSearchProject;
use sqlx::postgres::PgPool;
// TODO: Move this away from ARRAY_AGG to multiple queries - though the single aggregate query may be more efficient?
pub async fn index_local(
pool: PgPool,
) -> Result<Vec<UploadSearchProject>, IndexingError> {
info!("Indexing local projects!");
Ok(
sqlx::query!(
//FIXME: there must be a way to reduce the duplicate lines between this query and the one in `query_one` here...
//region query
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.published published,
m.updated updated,
m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,
m.team_id team_id, m.license license, m.slug slug,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,
STRING_AGG(DISTINCT mg.image_url, ',') gallery
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories, ARRAY_AGG(DISTINCT cp.category) filter (where cp.category is not null) primary_categories, ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders, ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null) versions,
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null) gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
LEFT OUTER JOIN categories cp ON mc.joining_category_id = cp.id AND mc.is_additional = FALSE
LEFT OUTER JOIN versions v ON v.mod_id = m.id
LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id
@ -42,7 +39,6 @@ pub async fn index_local(
WHERE s.status = $1 OR s.status = $2
GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;
",
//endregion query
crate::models::projects::ProjectStatus::Approved.as_str(),
crate::models::projects::ProjectStatus::Archived.as_str(),
crate::models::teams::OWNER_ROLE,
@ -50,15 +46,12 @@ pub async fn index_local(
.fetch_many(&pool)
.try_filter_map(|e| async {
Ok(e.right().map(|m| {
let mut categories = split_to_strings(m.categories);
categories.append(&mut split_to_strings(m.loaders));
let versions = split_to_strings(m.versions);
let mut categories = m.categories.unwrap_or_default();
categories.append(&mut m.loaders.unwrap_or_default());
let versions = m.versions.unwrap_or_default();
let project_id: crate::models::projects::ProjectId = ProjectId(m.id).into();
// TODO: Cleanup - This method has a lot of code in common with the method below.
// But, since the macro returns an (de facto) unnamed struct,
// We cannot reuse the code easily. Ugh.
UploadSearchProject {
project_id: format!("{}", project_id),
title: m.title,
@ -69,9 +62,9 @@ pub async fn index_local(
icon_url: m.icon_url.unwrap_or_default(),
author: m.username,
date_created: m.published,
created_timestamp: m.published.unix_timestamp(),
created_timestamp: m.approved.unwrap_or(m.published).timestamp(),
date_modified: m.updated,
modified_timestamp: m.updated.unix_timestamp(),
modified_timestamp: m.updated.timestamp(),
latest_version: versions.last().cloned().unwrap_or_else(|| "None".to_string()),
versions,
license: m.short,
@ -79,7 +72,8 @@ pub async fn index_local(
server_side: m.server_side_type,
slug: m.slug,
project_type: m.project_type_name,
gallery: m.gallery.map(|x| x.split(',').map(|x| x.to_string()).collect()).unwrap_or_default()
gallery: m.gallery.unwrap_or_default(),
display_categories: m.primary_categories.unwrap_or_default()
}
}))
})
@ -87,8 +81,3 @@ pub async fn index_local(
.await?
)
}
fn split_to_strings(s: Option<String>) -> Vec<String> {
s.map(|x| x.split(',').map(ToString::to_string).collect())
.unwrap_or_default()
}
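
Switching from STRING_AGG to ARRAY_AGG(...) FILTER (WHERE ... IS NOT NULL) means sqlx decodes each aggregate as a real Postgres array, surfaced as Option<Vec<String>> (NULL when the group is empty), so the comma-splitting helper can go. A rough sketch of the same mapping with a runtime query, reduced to one aggregate so it stands alone (the helper itself is hypothetical; table and column names are taken from the query above):

use sqlx::{PgPool, Row};

// Hypothetical helper showing only the aggregate handling; the real indexer keeps
// everything in the single query above.
async fn categories_for(pool: &PgPool, project_id: i64) -> Result<Vec<String>, sqlx::Error> {
    let row = sqlx::query(
        "
        SELECT ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories
        FROM mods_categories mc
        LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
        WHERE mc.joining_mod_id = $1
        ",
    )
    .bind(project_id)
    .fetch_one(pool)
    .await?;

    // A Postgres text[] decodes as Option<Vec<String>>; an empty group aggregates to NULL,
    // hence unwrap_or_default() instead of the old comma splitting.
    let categories: Option<Vec<String>> = row.try_get("categories")?;
    Ok(categories.unwrap_or_default())
}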

View File

@ -15,8 +15,6 @@ pub enum IndexingError {
Indexing(#[from] meilisearch_sdk::errors::Error),
#[error("Error while serializing or deserializing JSON: {0}")]
Serde(#[from] serde_json::Error),
#[error("Error while parsing a timestamp: {0}")]
ParseDate(#[from] time::error::Error),
#[error("Database Error: {0}")]
Sqlx(#[from] sqlx::error::Error),
#[error("Database Error: {0}")]

View File

@ -2,13 +2,13 @@ use crate::models::error::ApiError;
use crate::models::projects::SearchRequest;
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use chrono::{DateTime, Utc};
use meilisearch_sdk::client::Client;
use meilisearch_sdk::document::Document;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::cmp::min;
use thiserror::Error;
use time::OffsetDateTime;
pub mod indexing;
@ -74,6 +74,7 @@ pub struct UploadSearchProject {
pub title: String,
pub description: String,
pub categories: Vec<String>,
pub display_categories: Vec<String>,
pub versions: Vec<String>,
pub follows: i32,
pub downloads: i32,
@ -83,16 +84,12 @@ pub struct UploadSearchProject {
pub client_side: String,
pub server_side: String,
pub gallery: Vec<String>,
#[serde(with = "crate::util::time_ser")]
/// RFC 3339 formatted creation date of the project
pub date_created: OffsetDateTime,
pub date_created: DateTime<Utc>,
/// Unix timestamp of the creation date of the project
pub created_timestamp: i64,
#[serde(with = "crate::util::time_ser")]
/// RFC 3339 formatted date/time of last major modification (update)
pub date_modified: OffsetDateTime,
pub date_modified: DateTime<Utc>,
/// Unix timestamp of the last major modification
pub modified_timestamp: i64,
}
@ -114,6 +111,7 @@ pub struct ResultSearchProject {
pub title: String,
pub description: String,
pub categories: Vec<String>,
pub display_categories: Vec<String>,
// TODO: more efficient format for listing versions, without many repetitions
pub versions: Vec<String>,
pub downloads: i32,

View File

@ -3,6 +3,5 @@ pub mod env;
pub mod ext;
pub mod guards;
pub mod routes;
pub mod time_ser;
pub mod validate;
pub mod webhook;

View File

@ -1,42 +0,0 @@
//! Use the well-known [RFC3339 format] when serializing and deserializing an [`OffsetDateTime`].
//!
//! Use this module in combination with serde's [`#[with]`][with] attribute.
//!
//! [RFC3339 format]: https://tools.ietf.org/html/rfc3339#section-5.6
//! [with]: https://serde.rs/field-attrs.html#with
use core::fmt;
use core::marker::PhantomData;
use serde::{de, Deserializer, Serialize, Serializer};
use time::Format::Rfc3339;
use time::OffsetDateTime;
/// Serialize an [`OffsetDateTime`] using the well-known RFC3339 format.
pub fn serialize<S: Serializer>(
datetime: &OffsetDateTime,
serializer: S,
) -> Result<S::Ok, S::Error> {
datetime.format(Rfc3339).serialize(serializer)
}
/// Deserialize an [`OffsetDateTime`] from its RFC3339 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
) -> Result<OffsetDateTime, D::Error> {
deserializer.deserialize_any(Visitor(PhantomData))
}
pub(super) struct Visitor<T: ?Sized>(pub(super) PhantomData<T>);
impl<'a> de::Visitor<'a> for Visitor<OffsetDateTime> {
type Value = OffsetDateTime;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("an `OffsetDateTime`")
}
fn visit_str<E: de::Error>(self, value: &str) -> Result<OffsetDateTime, E> {
OffsetDateTime::parse(value, Rfc3339).map_err(E::custom)
}
}
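
This module existed only to (de)serialize OffsetDateTime via RFC 3339; with chrono's serde feature, DateTime<Utc> round-trips through an RFC 3339 string out of the box, which is why the #[serde(with = "crate::util::time_ser")] attributes disappear along with it. A small sketch (hypothetical struct, assuming chrono is built with its serde feature):

use chrono::{DateTime, TimeZone, Utc};
use serde::{Deserialize, Serialize};

// Hypothetical struct standing in for the fields that previously needed the attribute.
#[derive(Serialize, Deserialize)]
struct Stamped {
    created: DateTime<Utc>,
}

fn main() {
    let value = Stamped {
        created: Utc.ymd(2022, 1, 1).and_hms(0, 0, 0),
    };
    // With the serde feature, chrono writes DateTime<Utc> as an RFC 3339 timestamp string
    // and parses it back, so no custom (de)serializer module is needed.
    let json = serde_json::to_string(&value).unwrap();
    let back: Stamped = serde_json::from_str(&json).unwrap();
    assert_eq!(back.created, value.created);
}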

View File

@ -1,14 +1,13 @@
use crate::models::projects::Project;
use chrono::{DateTime, Utc};
use serde::Serialize;
use time::OffsetDateTime;
#[derive(Serialize)]
struct DiscordEmbed {
pub title: String,
pub description: String,
pub url: String,
#[serde(with = "crate::util::time_ser")]
pub timestamp: OffsetDateTime,
pub timestamp: DateTime<Utc>,
pub color: u32,
pub fields: Vec<DiscordEmbedField>,
pub image: DiscordEmbedImage,

View File

@ -1,8 +1,8 @@
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use time::OffsetDateTime;
use zip::ZipArchive;
pub struct FabricValidator;
@ -22,8 +22,9 @@ impl super::Validator for FabricValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 18w49a, the first fabric version
SupportedGameVersions::PastDate(OffsetDateTime::from_unix_timestamp(
1543969469,
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1543969469, 0),
Utc,
))
}
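
Each validator replaces OffsetDateTime::from_unix_timestamp(secs) with a chrono DateTime<Utc> built from a naive timestamp. A minimal sketch of the conversion, plus the equivalent TimeZone-trait shorthand:

use chrono::{DateTime, NaiveDateTime, TimeZone, Utc};

fn main() {
    // Release time of 18w49a, as used by the Fabric validator above.
    let explicit: DateTime<Utc> =
        DateTime::from_utc(NaiveDateTime::from_timestamp(1543969469, 0), Utc);
    // The TimeZone trait offers the same construction more compactly.
    let shorthand = Utc.timestamp(1543969469, 0);
    assert_eq!(explicit, shorthand);
}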

View File

@ -1,8 +1,8 @@
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use time::OffsetDateTime;
use zip::ZipArchive;
pub struct ForgeValidator;
@ -22,8 +22,9 @@ impl super::Validator for ForgeValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 1.13, the first forge version which uses the new TOML system
SupportedGameVersions::PastDate(OffsetDateTime::from_unix_timestamp(
1540122067,
SupportedGameVersions::PastDate(DateTime::<Utc>::from_utc(
NaiveDateTime::from_timestamp(1540122067, 0),
Utc,
))
}
@ -67,8 +68,14 @@ impl super::Validator for LegacyForgeValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time range from version 1.5.2 to 1.12.2, all of which use the legacy way of defining mods
SupportedGameVersions::Range(
OffsetDateTime::from_unix_timestamp(1366818300),
OffsetDateTime::from_unix_timestamp(1505810340),
DateTime::from_utc(
NaiveDateTime::from_timestamp(1366818300, 0),
Utc,
),
DateTime::from_utc(
NaiveDateTime::from_timestamp(1505810340, 0),
Utc,
),
)
}

View File

@ -7,9 +7,9 @@ use crate::validate::modpack::ModpackValidator;
use crate::validate::plugin::*;
use crate::validate::quilt::QuiltValidator;
use crate::validate::resourcepack::{PackValidator, TexturePackValidator};
use chrono::{DateTime, Utc};
use std::io::Cursor;
use thiserror::Error;
use time::OffsetDateTime;
use zip::ZipArchive;
mod fabric;
@ -59,8 +59,8 @@ impl ValidationResult {
pub enum SupportedGameVersions {
All,
PastDate(OffsetDateTime),
Range(OffsetDateTime, OffsetDateTime),
PastDate(DateTime<Utc>),
Range(DateTime<Utc>, DateTime<Utc>),
#[allow(dead_code)]
Custom(Vec<GameVersion>),
}
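
With SupportedGameVersions now holding chrono datetimes, the date comparisons elsewhere in the validator framework operate on DateTime<Utc>. A hedged sketch of how a caller might test a release date against the enum (a simplified copy without the Custom variant; the is_supported helper is hypothetical and the real matching logic, which is not part of this diff, may differ):

use chrono::{DateTime, Utc};

// Simplified copy of the enum (Custom variant omitted), purely for the sketch.
enum SupportedGameVersions {
    All,
    PastDate(DateTime<Utc>),
    Range(DateTime<Utc>, DateTime<Utc>),
}

// Hypothetical check; the real logic lives elsewhere in the validator framework.
fn is_supported(release: DateTime<Utc>, supported: &SupportedGameVersions) -> bool {
    match supported {
        SupportedGameVersions::All => true,
        SupportedGameVersions::PastDate(after) => release > *after,
        SupportedGameVersions::Range(start, end) => release > *start && release < *end,
    }
}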

View File

@ -1,8 +1,8 @@
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use time::OffsetDateTime;
use zip::ZipArchive;
pub struct QuiltValidator;
@ -21,9 +21,9 @@ impl super::Validator for QuiltValidator {
}
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of the first public Quilt version
SupportedGameVersions::PastDate(OffsetDateTime::from_unix_timestamp(
1646070100,
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1646070100, 0),
Utc,
))
}

View File

@ -1,8 +1,8 @@
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use time::OffsetDateTime;
use zip::ZipArchive;
pub struct PackValidator;
@ -22,8 +22,9 @@ impl super::Validator for PackValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 13w24a which replaced texture packs with resource packs
SupportedGameVersions::PastDate(OffsetDateTime::from_unix_timestamp(
1371137542,
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1371137542, 0),
Utc,
))
}
@ -59,8 +60,14 @@ impl super::Validator for TexturePackValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// a1.2.2a to 13w23b
SupportedGameVersions::Range(
OffsetDateTime::from_unix_timestamp(1289339999),
OffsetDateTime::from_unix_timestamp(1370651522),
DateTime::from_utc(
NaiveDateTime::from_timestamp(1289339999, 0),
Utc,
),
DateTime::from_utc(
NaiveDateTime::from_timestamp(1370651522, 0),
Utc,
),
)
}