Compare commits

..

7 Commits

Author     SHA1        Message                                              Date
Calum H.   475ee69cfb  feat: "no servers found" impl                        2025-08-06 16:58:36 +01:00
IMB11      ab48d4b144  Merge branch 'main' into cal/server-panel-refactor   2025-08-05 11:26:41 +01:00
IMB11      3480448351  Merge branch 'main' into cal/server-panel-refactor   2025-08-04 17:26:00 +01:00
IMB11      daae37806c  fix: update button size for new server action        2025-07-28 19:14:23 +01:00
IMB11      6a4c140420  refactor: remove mobile pull refresh logic           2025-07-28 18:47:53 +01:00
IMB11      5d98b16270  feat: manage page                                    2025-07-28 18:43:00 +01:00
IMB11      6c452f86b6  feat: hide globe behind flag                         2025-07-28 17:30:22 +01:00
85 changed files with 2358 additions and 1964 deletions

Cargo.lock (generated): 1476 changed lines
File diff suppressed because it is too large

View File

@@ -25,31 +25,31 @@ actix-ws = "0.3.0"
argon2 = { version = "0.5.3", features = ["std"] }
ariadne = { path = "packages/ariadne" }
async_zip = "0.0.17"
async-compression = { version = "0.4.27", default-features = false }
async-compression = { version = "0.4.25", default-features = false }
async-recursion = "1.1.1"
async-stripe = { version = "0.41.0", default-features = false, features = [
"runtime-tokio-hyper-rustls",
] }
async-trait = "0.1.88"
async-tungstenite = { version = "0.30.0", default-features = false, features = [
async-tungstenite = { version = "0.29.1", default-features = false, features = [
"futures-03-sink",
] }
async-walkdir = "2.1.0"
base64 = "0.22.1"
bitflags = "2.9.1"
bytemuck = "1.23.1"
bytemuck = "1.23.0"
bytes = "1.10.1"
censor = "0.3.0"
chardetng = "0.1.17"
chrono = "0.4.41"
clap = "4.5.43"
clap = "4.5.40"
clickhouse = "0.13.3"
color-thief = "0.2.2"
console-subscriber = "0.4.1"
daedalus = { path = "packages/daedalus" }
dashmap = "6.1.0"
data-url = "0.3.1"
deadpool-redis = "0.22.0"
deadpool-redis = "0.21.1"
dirs = "6.0.0"
discord-rich-presence = "0.2.5"
dotenv-build = "0.1.1"
@@ -57,7 +57,7 @@ dotenvy = "0.15.7"
dunce = "1.0.5"
either = "1.15.0"
encoding_rs = "0.8.35"
enumset = "1.1.7"
enumset = "1.1.6"
flate2 = "1.1.2"
fs4 = { version = "0.13.1", default-features = false }
futures = { version = "0.3.31", default-features = false }
@@ -74,15 +74,15 @@ hyper-rustls = { version = "0.27.7", default-features = false, features = [
"ring",
"tls12",
] }
hyper-util = "0.1.16"
hyper-util = "0.1.14"
iana-time-zone = "0.1.63"
image = { version = "0.25.6", default-features = false, features = ["rayon"] }
indexmap = "2.10.0"
indicatif = "0.18.0"
indexmap = "2.9.0"
indicatif = "0.17.11"
itertools = "0.14.0"
jemalloc_pprof = "0.8.1"
jemalloc_pprof = "0.7.0"
json-patch = { version = "4.0.0", default-features = false }
lettre = { version = "0.11.18", default-features = false, features = [
lettre = { version = "0.11.17", default-features = false, features = [
"builder",
"hostname",
"pool",
@@ -92,24 +92,24 @@ lettre = { version = "0.11.18", default-features = false, features = [
"smtp-transport",
] }
maxminddb = "0.26.0"
meilisearch-sdk = { version = "0.29.1", default-features = false }
meilisearch-sdk = { version = "0.28.0", default-features = false }
murmur2 = "0.1.0"
native-dialog = "0.9.0"
notify = { version = "8.2.0", default-features = false }
notify-debouncer-mini = { version = "0.7.0", default-features = false }
notify = { version = "8.0.0", default-features = false }
notify-debouncer-mini = { version = "0.6.0", default-features = false }
p256 = "0.13.2"
paste = "1.0.15"
phf = { version = "0.12.1", features = ["macros"] }
png = "0.17.16"
prometheus = "0.14.0"
quartz_nbt = "0.2.9"
quick-xml = "0.38.1"
quick-xml = "0.37.5"
rand = "=0.8.5" # Locked on 0.8 until argon2 and p256 update to 0.9
rand_chacha = "=0.3.1" # Locked on 0.3 until we can update rand to 0.9
redis = "0.32.4"
redis = "=0.31.0" # Locked on 0.31 until deadpool-redis updates to 0.32
regex = "1.11.1"
reqwest = { version = "0.12.22", default-features = false }
rgb = "0.8.52"
reqwest = { version = "0.12.20", default-features = false }
rgb = "0.8.50"
rust_decimal = { version = "1.37.2", features = [
"serde-with-float",
"serde-with-str",
@@ -121,7 +121,7 @@ rust-s3 = { version = "0.35.1", default-features = false, features = [
"tokio-rustls-tls",
] }
rusty-money = "0.4.1"
sentry = { version = "0.42.0", default-features = false, features = [
sentry = { version = "0.41.0", default-features = false, features = [
"backtrace",
"contexts",
"debug-images",
@@ -129,45 +129,45 @@ sentry = { version = "0.42.0", default-features = false, features = [
"reqwest",
"rustls",
] }
sentry-actix = "0.42.0"
sentry-actix = "0.41.0"
serde = "1.0.219"
serde_bytes = "0.11.17"
serde_cbor = "0.11.2"
serde_ini = "0.2.0"
serde_json = "1.0.142"
serde_with = "3.14.0"
serde_json = "1.0.140"
serde_with = "3.13.0"
serde-xml-rs = "0.8.1" # Also an XML (de)serializer, consider dropping yaserde in favor of this
sha1 = "0.10.6"
sha1_smol = { version = "1.0.1", features = ["std"] }
sha2 = "0.10.9"
spdx = "0.10.9"
spdx = "0.10.8"
sqlx = { version = "0.8.6", default-features = false }
sysinfo = { version = "0.36.1", default-features = false }
sysinfo = { version = "0.35.2", default-features = false }
tar = "0.4.44"
tauri = "2.7.0"
tauri-build = "2.3.1"
tauri-plugin-deep-link = "2.4.1"
tauri-plugin-dialog = "2.3.2"
tauri-plugin-http = "2.5.1"
tauri = "2.6.1"
tauri-build = "2.3.0"
tauri-plugin-deep-link = "2.4.0"
tauri-plugin-dialog = "2.3.0"
tauri-plugin-http = "2.5.0"
tauri-plugin-opener = "2.4.0"
tauri-plugin-os = "2.3.0"
tauri-plugin-single-instance = "2.3.2"
tauri-plugin-single-instance = "2.3.0"
tauri-plugin-updater = { version = "2.9.0", default-features = false, features = [
"rustls-tls",
"zip",
] }
tauri-plugin-window-state = "2.4.0"
tauri-plugin-window-state = "2.3.0"
tempfile = "3.20.0"
theseus = { path = "packages/app-lib" }
thiserror = "2.0.12"
tikv-jemalloc-ctl = "0.6.0"
tikv-jemallocator = "0.6.0"
tokio = "1.47.1"
tokio = "1.45.1"
tokio-stream = "0.1.17"
tokio-util = "0.7.16"
tokio-util = "0.7.15"
totp-rs = "5.7.0"
tracing = "0.1.41"
tracing-actix-web = "0.7.19"
tracing-actix-web = "0.7.18"
tracing-error = "0.2.1"
tracing-subscriber = "0.3.19"
url = "2.5.4"
@@ -179,7 +179,7 @@ whoami = "1.6.0"
winreg = "0.55.0"
woothee = "0.13.0"
yaserde = "0.12.0"
zip = { version = "4.3.0", default-features = false, features = [
zip = { version = "4.2.0", default-features = false, features = [
"bzip2",
"deflate",
"deflate64",
@@ -226,7 +226,7 @@ wildcard_dependencies = "warn"
warnings = "deny"
[patch.crates-io]
wry = { git = "https://github.com/modrinth/wry", rev = "f2ce0b0" }
wry = { git = "https://github.com/modrinth/wry", rev = "21db186" }
# Optimize for speed and reduce size on release builds
[profile.release]

View File

@@ -76,10 +76,10 @@ const installing = ref(false)
const onInstall = ref(() => {})
defineExpose({
show: (instanceVal, projectVal, projectVersions, selected, callback) => {
show: (instanceVal, projectVal, projectVersions, callback) => {
instance.value = instanceVal
versions.value = projectVersions
selectedVersion.value = selected ?? projectVersions[0]
selectedVersion.value = projectVersions[0]
project.value = projectVal

View File

@@ -29,8 +29,8 @@ export const useInstall = defineStore('installStore', {
setIncompatibilityWarningModal(ref) {
this.incompatibilityWarningModal = ref
},
showIncompatibilityWarningModal(instance, project, versions, selected, onInstall) {
this.incompatibilityWarningModal.show(instance, project, versions, selected, onInstall)
showIncompatibilityWarningModal(instance, project, versions, onInstall) {
this.incompatibilityWarningModal.show(instance, project, versions, onInstall)
},
setModInstallModal(ref) {
this.modInstallModal = ref
@@ -133,13 +133,7 @@ export const install = async (
callback(version.id)
} else {
const install = useInstall()
install.showIncompatibilityWarningModal(
instance,
project,
projectVersions,
version,
callback,
)
install.showIncompatibilityWarningModal(instance, project, projectVersions, callback)
}
} else {
const versions = (await get_version_many(project.versions).catch(handleError)).sort(

View File

@@ -197,13 +197,15 @@ pub async fn open_link<R: Runtime>(
if url::Url::parse(&path).is_ok()
&& !state.malicious_origins.contains(&origin)
&& let Some(last_click) = state.last_click
&& last_click.elapsed() < Duration::from_millis(100)
{
let _ = app.opener().open_url(&path, None::<String>);
state.last_click = None;
if let Some(last_click) = state.last_click {
if last_click.elapsed() < Duration::from_millis(100) {
let _ = app.opener().open_url(&path, None::<String>);
state.last_click = None;
return Ok(());
return Ok(());
}
}
}
tracing::info!("Malicious click: {path} origin {origin}");
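
Most of the Rust hunks in this comparison, including the one above, are the same mechanical rewrite: one side expresses its conditions as let-chains (if let ... && ..., stabilized for the 2024 edition), the other as nested if let blocks. The two forms are behaviorally equivalent. A minimal standalone sketch of both shapes, with made-up names not taken from this repository:

// Let-chain form (needs the 2024 edition): the pattern binding and the extra condition share one `if`.
fn describe_chain(value: Option<u32>, threshold: u32) -> &'static str {
    if let Some(v) = value
        && v >= threshold
    {
        return "large";
    }
    "small or absent"
}

// Equivalent nested form, as seen on the other side of these hunks.
fn describe_nested(value: Option<u32>, threshold: u32) -> &'static str {
    if let Some(v) = value {
        if v >= threshold {
            return "large";
        }
    }
    "small or absent"
}

fn main() {
    assert_eq!(describe_chain(Some(7), 5), describe_nested(Some(7), 5));
    assert_eq!(describe_chain(None, 5), describe_nested(None, 5));
}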

View File

@@ -59,13 +59,16 @@ pub async fn login<R: Runtime>(
.url()?
.as_str()
.starts_with("https://login.live.com/oauth20_desktop.srf")
&& let Some((_, code)) =
window.url()?.query_pairs().find(|x| x.0 == "code")
{
window.close()?;
let val = minecraft_auth::finish_login(&code.clone(), flow).await?;
if let Some((_, code)) =
window.url()?.query_pairs().find(|x| x.0 == "code")
{
window.close()?;
let val =
minecraft_auth::finish_login(&code.clone(), flow).await?;
return Ok(Some(val));
return Ok(Some(val));
}
}
tokio::time::sleep(std::time::Duration::from_millis(50)).await;

View File

@@ -63,11 +63,11 @@ pub async fn should_disable_mouseover() -> bool {
// We try to match version to 12.2 or higher. If unrecognizable to pattern or lower, we default to the css with disabled mouseover for safety
if let tauri_plugin_os::Version::Semantic(major, minor, _) =
tauri_plugin_os::version()
&& major >= 12
&& minor >= 3
{
// Mac os version is 12.3 or higher, we allow mouseover
return false;
if major >= 12 && minor >= 3 {
// Mac os version is 12.3 or higher, we allow mouseover
return false;
}
}
true
} else {
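
One quirk in the hunk above, present on both sides of the change: the check major >= 12 && minor >= 3 also fails for versions such as 13.0 or 14.1, whose minor component is below 3, even though those are newer than 12.3. If the intent is simply "at least 12.3", a lexicographic tuple comparison expresses that directly. A minimal sketch, not taken from this repository:

// Hypothetical helper, not from this repo: true when (major, minor) is at least 12.3.
fn at_least_12_3(major: u64, minor: u64) -> bool {
    (major, minor) >= (12, 3) // tuple comparison is lexicographic
}

fn main() {
    assert!(at_least_12_3(12, 3));
    assert!(at_least_12_3(13, 0)); // rejected by `major >= 12 && minor >= 3`
    assert!(!at_least_12_3(12, 2));
}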

View File

@@ -233,10 +233,10 @@ fn main() {
});
#[cfg(not(target_os = "linux"))]
if let Some(window) = app.get_window("main")
&& let Err(e) = window.set_shadow(true)
{
tracing::warn!("Failed to set window shadow: {e}");
if let Some(window) = app.get_window("main") {
if let Err(e) = window.set_shadow(true) {
tracing::warn!("Failed to set window shadow: {e}");
}
}
Ok(())

View File

@@ -506,25 +506,27 @@ async fn fetch(
return Ok(lib);
}
} else if let Some(url) = &lib.url
&& !url.is_empty()
{
insert_mirrored_artifact(
&lib.name,
None,
vec![
url.clone(),
"https://libraries.minecraft.net/".to_string(),
"https://maven.creeperhost.net/".to_string(),
maven_url.to_string(),
],
false,
mirror_artifacts,
)?;
} else if let Some(url) = &lib.url {
if !url.is_empty() {
insert_mirrored_artifact(
&lib.name,
None,
vec![
url.clone(),
"https://libraries.minecraft.net/"
.to_string(),
"https://maven.creeperhost.net/"
.to_string(),
maven_url.to_string(),
],
false,
mirror_artifacts,
)?;
lib.url = Some(format_url("maven/"));
lib.url = Some(format_url("maven/"));
return Ok(lib);
return Ok(lib);
}
}
// Other libraries are generally available in the "maven" directory of the installer. If they are

View File

@@ -93,22 +93,22 @@ async fn main() -> Result<()> {
.ok()
.and_then(|x| x.parse::<bool>().ok())
.unwrap_or(false)
&& let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN")
&& let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID")
{
let cache_clears = upload_files
.into_iter()
.map(|x| format_url(&x.0))
.chain(
mirror_artifacts
if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
let cache_clears = upload_files
.into_iter()
.map(|x| format_url(&format!("maven/{}", x.0))),
)
.collect::<Vec<_>>();
.map(|x| format_url(&x.0))
.chain(
mirror_artifacts
.into_iter()
.map(|x| format_url(&format!("maven/{}", x.0))),
)
.collect::<Vec<_>>();
// Cloudflare ratelimits cache clears to 500 files per request
for chunk in cache_clears.chunks(500) {
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
// Cloudflare ratelimits cache clears to 500 files per request
for chunk in cache_clears.chunks(500) {
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
.bearer_auth(&token)
.json(&serde_json::json!({
"files": chunk
@@ -128,6 +128,8 @@ async fn main() -> Result<()> {
item: "cloudflare clear cache".to_string(),
}
})?;
}
}
}
}
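
The comment in the hunk above explains the batching: Cloudflare limits each purge_cache request to 500 files, so the list of freshly uploaded URLs is split with chunks(500) and one {"files": [...]} body is sent per batch. A minimal sketch of that batching which only prints the bodies instead of POSTing them, assuming serde_json (already a workspace dependency) and made-up URLs:

use serde_json::json;

fn main() {
    // Stand-ins for the freshly uploaded CDN paths that need purging.
    let cache_clears: Vec<String> =
        (0..1200).map(|i| format!("https://cdn.example.invalid/file-{i}")).collect();

    // Cloudflare rate-limits cache purges to 500 files per request, so batch accordingly.
    for (batch, chunk) in cache_clears.chunks(500).enumerate() {
        let body = json!({ "files": chunk });
        println!("purge request {batch}: {} files", body["files"].as_array().unwrap().len());
    }
}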

View File

@@ -167,18 +167,20 @@ pub async fn download_file(
let bytes = x.bytes().await;
if let Ok(bytes) = bytes {
if let Some(sha1) = sha1
&& &*sha1_async(bytes.clone()).await? != sha1
{
if attempt <= 3 {
continue;
} else {
return Err(crate::ErrorKind::ChecksumFailure {
hash: sha1.to_string(),
url: url.to_string(),
tries: attempt,
if let Some(sha1) = sha1 {
if &*sha1_async(bytes.clone()).await? != sha1 {
if attempt <= 3 {
continue;
} else {
return Err(
crate::ErrorKind::ChecksumFailure {
hash: sha1.to_string(),
url: url.to_string(),
tries: attempt,
}
.into(),
);
}
.into());
}
}
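
Both sides of the hunk above implement the same retry-with-checksum path in download_file: when an expected SHA-1 is supplied and the downloaded bytes do not match it, the attempt is retried while the attempt counter is still at most three, after which a ChecksumFailure carrying the hash, URL, and try count is returned. A minimal sketch of that control flow with stubbed fetch and hash helpers (fetch_once and checksum are hypothetical, not this crate's API):

// Hypothetical stand-ins for the real download and SHA-1 helpers.
fn fetch_once(url: &str) -> Vec<u8> {
    url.as_bytes().to_vec()
}
fn checksum(bytes: &[u8]) -> String {
    format!("{:x}", bytes.iter().map(|b| *b as u64).sum::<u64>()) // toy hash, not SHA-1
}

fn download_verified(url: &str, expected: Option<&str>) -> Result<Vec<u8>, String> {
    for attempt in 1..=3 {
        let bytes = fetch_once(url);
        match expected {
            // Mismatch: retry until the attempt budget is spent, then fail loudly.
            Some(sha1) if checksum(&bytes) != sha1 => {
                if attempt < 3 {
                    continue;
                }
                return Err(format!("checksum failure for {url} after {attempt} tries"));
            }
            // No expected hash, or it matched: accept the bytes.
            _ => return Ok(bytes),
        }
    }
    unreachable!("the loop always returns")
}

fn main() {
    let ok = download_verified("https://example.invalid/file", None);
    assert!(ok.is_ok());
}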

View File

@@ -143,13 +143,8 @@ export default defineNuxtConfig({
state.lastGenerated &&
new Date(state.lastGenerated).getTime() + TTL > new Date().getTime() &&
// ...but only if the API URL is the same
state.apiUrl === API_URL &&
// ...and if no errors were caught during the last generation
(state.errors ?? []).length === 0
state.apiUrl === API_URL
) {
console.log(
"Tags already recently generated. Delete apps/frontend/generated/state.json to force regeneration.",
);
return;
}

View File

@@ -50,22 +50,27 @@ const container = ref(null);
const showLabels = ref(false);
const locations = ref([
// Active locations
{ name: "New York", lat: 40.7128, lng: -74.006, active: true, clicked: false },
{ name: "Los Angeles", lat: 34.0522, lng: -118.2437, active: true, clicked: false },
{ name: "Miami", lat: 25.7617, lng: -80.1918, active: true, clicked: false },
{ name: "Spokane", lat: 47.667309, lng: -117.411922, active: true, clicked: false },
{ name: "Dallas", lat: 32.78372, lng: -96.7947, active: true, clicked: false },
// Future Locations
// { name: "London", lat: 51.5074, lng: -0.1278, active: false, clicked: false },
// { name: "Frankfurt", lat: 50.1109, lng: 8.6821, active: false, clicked: false },
// { name: "Amsterdam", lat: 52.3676, lng: 4.9041, active: false, clicked: false },
// { name: "Paris", lat: 48.8566, lng: 2.3522, active: false, clicked: false },
// { name: "Singapore", lat: 1.3521, lng: 103.8198, active: false, clicked: false },
// { name: "Tokyo", lat: 35.6762, lng: 139.6503, active: false, clicked: false },
// { name: "Sydney", lat: -33.8688, lng: 151.2093, active: false, clicked: false },
// { name: "São Paulo", lat: -23.5505, lng: -46.6333, active: false, clicked: false },
// { name: "Toronto", lat: 43.6532, lng: -79.3832, active: false, clicked: false },
{
name: "Vint Hill",
lat: 38.74724876915715,
lng: -77.67436507922152,
active: true,
clicked: false,
},
{
name: "Coventry",
lat: 52.39751276904742,
lng: -1.5777183894453757,
active: true,
clicked: false,
},
{
name: "Limburg",
lat: 50.40863558430334,
lng: 8.062427315007714,
active: true,
clicked: false,
},
]);
const isLocationVisible = (location) => {

View File

@@ -34,6 +34,7 @@ export const DEFAULT_FEATURE_FLAGS = validateValues({
showProjectPageDownloadModalServersPromo: false,
showProjectPageCreateServersTooltip: true,
showProjectPageQuickServerButton: false,
showModrinthServersGlobe: false,
// advancedRendering: true,
// externalLinksNewTab: true,
// notUsingBlockers: false,

View File

@@ -515,6 +515,98 @@
</div>
</section>
<section
v-if="flags.showModrinthServersGlobe"
class="relative mt-24 flex flex-col bg-[radial-gradient(65%_50%_at_50%_-10%,var(--color-brand-highlight)_0%,var(--color-accent-contrast)_100%)] px-3 pt-24 md:mt-48 md:pt-48"
>
<div class="faded-brand-line absolute left-0 top-0 h-[1px] w-full"></div>
<div class="mx-auto flex w-full max-w-7xl flex-col gap-8">
<div class="grid grid-cols-1 items-center gap-12 lg:grid-cols-2">
<div class="flex flex-col gap-8">
<div class="flex flex-col gap-4">
<div
class="relative w-fit rounded-full bg-highlight-green px-3 py-1 text-sm font-bold text-brand backdrop-blur-lg"
>
Server Locations
</div>
<h1 class="relative m-0 max-w-2xl text-4xl leading-[120%] md:text-7xl">
Global Coverage
</h1>
</div>
<div class="flex flex-col gap-8">
<div class="flex flex-col gap-4">
<div class="flex items-center gap-3">
<div class="grid size-8 place-content-center rounded-full bg-highlight-green">
<svg
xmlns="http://www.w3.org/2000/svg"
width="16"
height="16"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="text-brand"
>
<path d="M20 10c0 6-8 12-8 12s-8-6-8-12a8 8 0 0 1 16 0Z" />
<circle cx="12" cy="10" r="3" />
</svg>
</div>
<h2 class="relative m-0 text-xl font-medium leading-[155%] md:text-2xl">
Strategic Locations
</h2>
</div>
<p
class="relative m-0 max-w-xl text-base font-normal leading-[155%] text-secondary md:text-[18px]"
>
With servers strategically placed in Vint Hill (USA), Coventry (UK), and Limburg
(Germany), we provide excellent coverage across North America and Europe. Each
location features high-performance hardware and comprehensive DDoS protection.
</p>
</div>
<div class="flex flex-col gap-4">
<div class="flex items-center gap-3">
<div class="grid size-8 place-content-center rounded-full bg-highlight-blue">
<svg
xmlns="http://www.w3.org/2000/svg"
width="16"
height="16"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="text-blue"
>
<path d="M12 2a10 10 0 1 0 10 10" />
<path d="M18 13a6 6 0 0 0-6-6" />
<path d="M13 2.05a10 10 0 0 1 2 2" />
<path d="M19.5 8.5a10 10 0 0 1 2 2" />
</svg>
</div>
<h2 class="relative m-0 text-xl font-medium leading-[155%] md:text-2xl">
Low Latency Connectivity
</h2>
</div>
<p
class="relative m-0 max-w-xl text-base font-normal leading-[155%] text-secondary md:text-[18px]"
>
Our three carefully chosen locations ensure optimal ping times and reliable
connections for players across multiple continents. Choose the region closest to
you for the best gaming experience.
</p>
</div>
</div>
</div>
<Globe />
</div>
</div>
</section>
<section
id="plan"
pyro-hash="plan"
@@ -651,6 +743,7 @@ import { useServersFetch } from "~/composables/servers/servers-fetch.ts";
import LoaderIcon from "~/components/ui/servers/icons/LoaderIcon.vue";
import ServerPlanSelector from "~/components/ui/servers/marketing/ServerPlanSelector.vue";
import OptionGroup from "~/components/ui/OptionGroup.vue";
import Globe from "~/components/ui/servers/Globe.vue";
const { locale } = useVIntl();
@@ -842,6 +935,7 @@ async function fetchPaymentData() {
const selectedProjectId = ref();
const route = useRoute();
const flags = useFeatureFlags();
const isAtCapacity = computed(
() => isSmallAtCapacity.value && isMediumAtCapacity.value && isLargeAtCapacity.value,
);

View File

@@ -0,0 +1,11 @@
<template>
<div
class="experimental-styles-within relative mx-auto mb-6 flex min-h-screen w-full max-w-[1280px] flex-col px-6"
>
<ServersManagePage />
</div>
</template>
<script lang="ts" setup>
import { ServersManagePage } from "@modrinth/ui";
</script>

View File

@@ -322,11 +322,12 @@ pub async fn is_visible_collection(
} else {
!collection_data.status.is_hidden()
}) && !collection_data.projects.is_empty();
if let Some(user) = &user_option
&& !authorized
&& (user.role.is_mod() || user.id == collection_data.user_id.into())
{
authorized = true;
if let Some(user) = &user_option {
if !authorized
&& (user.role.is_mod() || user.id == collection_data.user_id.into())
{
authorized = true;
}
}
Ok(authorized)
}
@@ -355,10 +356,10 @@ pub async fn filter_visible_collections(
for collection in check_collections {
// Collections are simple- if we are the owner or a mod, we can see it
if let Some(user) = user_option
&& (user.role.is_mod() || user.id == collection.user_id.into())
{
return_collections.push(collection.into());
if let Some(user) = user_option {
if user.role.is_mod() || user.id == collection.user_id.into() {
return_collections.push(collection.into());
}
}
}

View File

@@ -95,10 +95,10 @@ impl DBFlow {
redis: &RedisPool,
) -> Result<Option<DBFlow>, DatabaseError> {
let flow = Self::get(id, redis).await?;
if let Some(flow) = flow.as_ref()
&& predicate(flow)
{
Self::remove(id, redis).await?;
if let Some(flow) = flow.as_ref() {
if predicate(flow) {
Self::remove(id, redis).await?;
}
}
Ok(flow)
}

View File

@@ -801,24 +801,24 @@ impl VersionField {
};
if let Some(count) = countable {
if let Some(min) = loader_field.min_val
&& count < min
{
return Err(format!(
"Provided value '{v}' for {field_name} is less than the minimum of {min}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
if let Some(min) = loader_field.min_val {
if count < min {
return Err(format!(
"Provided value '{v}' for {field_name} is less than the minimum of {min}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
}
}
if let Some(max) = loader_field.max_val
&& count > max
{
return Err(format!(
"Provided value '{v}' for {field_name} is greater than the maximum of {max}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
if let Some(max) = loader_field.max_val {
if count > max {
return Err(format!(
"Provided value '{v}' for {field_name} is greater than the maximum of {max}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
}
}
}
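
Both sides of the hunk above perform the same bounds check: when a loader field value has a countable size, that count is compared against the field's optional min_val and max_val, and a descriptive error is returned when it falls outside the range. A minimal standalone sketch of that shape, with a hypothetical Bounds struct standing in for the real loader-field type:

// Hypothetical stand-in for a loader field's optional bounds.
struct Bounds {
    min: Option<i32>,
    max: Option<i32>,
}

fn validate_count(field: &str, count: i32, bounds: &Bounds) -> Result<(), String> {
    if let Some(min) = bounds.min {
        if count < min {
            return Err(format!("Provided value '{count}' for {field} is less than the minimum of {min}"));
        }
    }
    if let Some(max) = bounds.max {
        if count > max {
            return Err(format!("Provided value '{count}' for {field} is greater than the maximum of {max}"));
        }
    }
    Ok(())
}

fn main() {
    let bounds = Bounds { min: Some(1), max: Some(10) };
    assert!(validate_count("game_versions", 5, &bounds).is_ok());
    assert!(validate_count("game_versions", 0, &bounds).is_err());
}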

View File

@@ -483,20 +483,20 @@ impl DBTeamMember {
.await?;
}
if let Some(accepted) = new_accepted
&& accepted
{
sqlx::query!(
"
if let Some(accepted) = new_accepted {
if accepted {
sqlx::query!(
"
UPDATE team_members
SET accepted = TRUE
WHERE (team_id = $1 AND user_id = $2)
",
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
}
}
if let Some(payouts_split) = new_payouts_split {

View File

@@ -353,10 +353,10 @@ impl RedisPool {
};
for (idx, key) in fetch_ids.into_iter().enumerate() {
if let Some(locked) = results.get(idx)
&& locked.is_none()
{
continue;
if let Some(locked) = results.get(idx) {
if locked.is_none() {
continue;
}
}
if let Some((key, raw_key)) = ids.remove(&key) {

View File

@@ -334,14 +334,18 @@ impl From<Version> for LegacyVersion {
// the v2 loaders are whatever the corresponding loader fields are
let mut loaders =
data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
if loaders.contains(&"mrpack".to_string())
&& let Some((_, mrpack_loaders)) = data
if loaders.contains(&"mrpack".to_string()) {
if let Some((_, mrpack_loaders)) = data
.fields
.into_iter()
.find(|(key, _)| key == "mrpack_loaders")
&& let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders)
{
loaders = mrpack_loaders;
{
if let Ok(mrpack_loaders) =
serde_json::from_value(mrpack_loaders)
{
loaders = mrpack_loaders;
}
}
}
let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>();

View File

@@ -43,33 +43,35 @@ impl LegacyResultSearchProject {
pub fn from(result_search_project: ResultSearchProject) -> Self {
let mut categories = result_search_project.categories;
categories.extend(result_search_project.loaders.clone());
if categories.contains(&"mrpack".to_string())
&& let Some(mrpack_loaders) = result_search_project
if categories.contains(&"mrpack".to_string()) {
if let Some(mrpack_loaders) = result_search_project
.project_loader_fields
.get("mrpack_loaders")
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
categories.retain(|c| c != "mrpack");
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
categories.retain(|c| c != "mrpack");
}
}
let mut display_categories = result_search_project.display_categories;
display_categories.extend(result_search_project.loaders);
if display_categories.contains(&"mrpack".to_string())
&& let Some(mrpack_loaders) = result_search_project
if display_categories.contains(&"mrpack".to_string()) {
if let Some(mrpack_loaders) = result_search_project
.project_loader_fields
.get("mrpack_loaders")
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
display_categories.retain(|c| c != "mrpack");
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
display_categories.retain(|c| c != "mrpack");
}
}
// Sort then remove duplicates

View File

@@ -166,10 +166,10 @@ impl From<ProjectQueryResult> for Project {
Ok(spdx_expr) => {
let mut vec: Vec<&str> = Vec::new();
for node in spdx_expr.iter() {
if let spdx::expression::ExprNode::Req(req) = node
&& let Some(id) = req.req.license.id()
{
vec.push(id.full_name);
if let spdx::expression::ExprNode::Req(req) = node {
if let Some(id) = req.req.license.id() {
vec.push(id.full_name);
}
}
}
// spdx crate returns AND/OR operations in postfix order

View File

@@ -51,16 +51,16 @@ impl ProjectPermissions {
return Some(ProjectPermissions::all());
}
if let Some(member) = project_team_member
&& member.accepted
{
return Some(member.permissions);
if let Some(member) = project_team_member {
if member.accepted {
return Some(member.permissions);
}
}
if let Some(member) = organization_team_member
&& member.accepted
{
return Some(member.permissions);
if let Some(member) = organization_team_member {
if member.accepted {
return Some(member.permissions);
}
}
if role.is_mod() {
@@ -107,10 +107,10 @@ impl OrganizationPermissions {
return Some(OrganizationPermissions::all());
}
if let Some(member) = team_member
&& member.accepted
{
return member.organization_permissions;
if let Some(member) = team_member {
if member.accepted {
return member.organization_permissions;
}
}
if role.is_mod() {
return Some(

View File

@@ -45,15 +45,17 @@ impl MaxMindIndexer {
if let Ok(entries) = archive.entries() {
for mut file in entries.flatten() {
if let Ok(path) = file.header().path()
&& path.extension().and_then(|x| x.to_str()) == Some("mmdb")
{
let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap();
if let Ok(path) = file.header().path() {
if path.extension().and_then(|x| x.to_str()) == Some("mmdb")
{
let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap();
let reader = maxminddb::Reader::from_source(buf).unwrap();
let reader =
maxminddb::Reader::from_source(buf).unwrap();
return Ok(Some(reader));
return Ok(Some(reader));
}
}
}
}

View File

@@ -371,8 +371,8 @@ impl AutomatedModerationQueue {
for file in
files.iter().filter(|x| x.version_id == version.id.into())
{
if let Some(hash) = file.hashes.get("sha1")
&& let Some((index, (sha1, _, file_name, _))) = hashes
if let Some(hash) = file.hashes.get("sha1") {
if let Some((index, (sha1, _, file_name, _))) = hashes
.iter()
.enumerate()
.find(|(_, (value, _, _, _))| value == hash)
@@ -382,6 +382,7 @@ impl AutomatedModerationQueue {
hashes.remove(index);
}
}
}
}
@@ -419,11 +420,12 @@ impl AutomatedModerationQueue {
.await?;
for row in rows {
if let Some(sha1) = row.sha1
&& let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
if let Some(sha1) = row.sha1 {
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) });
hashes.remove(index);
}
}
}
if hashes.is_empty() {
@@ -497,8 +499,8 @@ impl AutomatedModerationQueue {
let mut insert_ids = Vec::new();
for row in rows {
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id)
&& let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id) {
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
final_hashes.insert(sha1.clone(), IdentifiedFile {
file_name: file_name.clone(),
status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified),
@@ -510,6 +512,7 @@ impl AutomatedModerationQueue {
hashes.remove(index);
flame_files.remove(curse_index);
}
}
}
if !insert_ids.is_empty() && !insert_hashes.is_empty() {
@@ -578,8 +581,8 @@ impl AutomatedModerationQueue {
for (sha1, _pack_file, file_name, _mumur2) in hashes {
let flame_file = flame_files.iter().find(|x| x.0 == sha1);
if let Some((_, flame_project_id)) = flame_file
&& let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
if let Some((_, flame_project_id)) = flame_file {
if let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
missing_metadata.flame_files.insert(sha1, MissingMetadataFlame {
title: project.name.clone(),
file_name,
@@ -589,6 +592,7 @@ impl AutomatedModerationQueue {
continue;
}
}
missing_metadata.unknown_files.insert(sha1, file_name);
}

View File

@@ -257,30 +257,31 @@ impl PayoutsQueue {
)
})?;
if !status.is_success()
&& let Some(obj) = value.as_object()
{
if let Some(array) = obj.get("errors") {
#[derive(Deserialize)]
struct TremendousError {
message: String,
if !status.is_success() {
if let Some(obj) = value.as_object() {
if let Some(array) = obj.get("errors") {
#[derive(Deserialize)]
struct TremendousError {
message: String,
}
let err = serde_json::from_value::<TremendousError>(
array.clone(),
)
.map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body"
.to_string(),
)
})?;
return Err(ApiError::Payments(err.message));
}
let err =
serde_json::from_value::<TremendousError>(array.clone())
.map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body"
.to_string(),
)
})?;
return Err(ApiError::Payments(err.message));
return Err(ApiError::Payments(
"could not retrieve Tremendous error body".to_string(),
));
}
return Err(ApiError::Payments(
"could not retrieve Tremendous error body".to_string(),
));
}
Ok(serde_json::from_value(value)?)
@@ -448,10 +449,10 @@ impl PayoutsQueue {
};
// we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly
if let PayoutInterval::Fixed { .. } = method.interval
&& !product.currency_codes.contains(&"USD".to_string())
{
continue;
if let PayoutInterval::Fixed { .. } = method.interval {
if !product.currency_codes.contains(&"USD".to_string()) {
continue;
}
}
methods.push(method);

View File

@@ -286,17 +286,17 @@ pub async fn refund_charge(
.upsert(&mut transaction)
.await?;
if body.0.unprovision.unwrap_or(false)
&& let Some(subscription_id) = charge.subscription_id
{
let open_charge =
DBCharge::get_open_subscription(subscription_id, &**pool)
.await?;
if let Some(mut open_charge) = open_charge {
open_charge.status = ChargeStatus::Cancelled;
open_charge.due = Utc::now();
if body.0.unprovision.unwrap_or(false) {
if let Some(subscription_id) = charge.subscription_id {
let open_charge =
DBCharge::get_open_subscription(subscription_id, &**pool)
.await?;
if let Some(mut open_charge) = open_charge {
open_charge.status = ChargeStatus::Cancelled;
open_charge.due = Utc::now();
open_charge.upsert(&mut transaction).await?;
open_charge.upsert(&mut transaction).await?;
}
}
}
@@ -392,16 +392,17 @@ pub async fn edit_subscription(
}
}
if let Some(interval) = &edit_subscription.interval
&& let Price::Recurring { intervals } = &current_price.prices
{
if let Some(price) = intervals.get(interval) {
open_charge.subscription_interval = Some(*interval);
open_charge.amount = *price as i64;
} else {
return Err(ApiError::InvalidInput(
"Interval is not valid for this subscription!".to_string(),
));
if let Some(interval) = &edit_subscription.interval {
if let Price::Recurring { intervals } = &current_price.prices {
if let Some(price) = intervals.get(interval) {
open_charge.subscription_interval = Some(*interval);
open_charge.amount = *price as i64;
} else {
return Err(ApiError::InvalidInput(
"Interval is not valid for this subscription!"
.to_string(),
));
}
}
}
@@ -1224,36 +1225,38 @@ pub async fn initiate_payment(
}
};
if let Price::Recurring { .. } = price_item.prices
&& product.unitary
{
let user_subscriptions =
if let Price::Recurring { .. } = price_item.prices {
if product.unitary {
let user_subscriptions =
user_subscription_item::DBUserSubscription::get_all_user(
user.id.into(),
&**pool,
)
.await?;
let user_products = product_item::DBProductPrice::get_many(
&user_subscriptions
.iter()
.filter(|x| {
x.status == SubscriptionStatus::Provisioned
})
.map(|x| x.price_id)
.collect::<Vec<_>>(),
&**pool,
)
.await?;
let user_products =
product_item::DBProductPrice::get_many(
&user_subscriptions
.iter()
.filter(|x| {
x.status
== SubscriptionStatus::Provisioned
})
.map(|x| x.price_id)
.collect::<Vec<_>>(),
&**pool,
)
.await?;
if user_products
.into_iter()
.any(|x| x.product_id == product.id)
{
return Err(ApiError::InvalidInput(
"You are already subscribed to this product!"
.to_string(),
));
if user_products
.into_iter()
.any(|x| x.product_id == product.id)
{
return Err(ApiError::InvalidInput(
"You are already subscribed to this product!"
.to_string(),
));
}
}
}
@@ -2001,36 +2004,38 @@ pub async fn stripe_webhook(
EventType::PaymentMethodAttached => {
if let EventObject::PaymentMethod(payment_method) =
event.data.object
&& let Some(customer_id) =
payment_method.customer.map(|x| x.id())
{
let customer = stripe::Customer::retrieve(
&stripe_client,
&customer_id,
&[],
)
.await?;
if customer
.invoice_settings
.is_none_or(|x| x.default_payment_method.is_none())
if let Some(customer_id) =
payment_method.customer.map(|x| x.id())
{
stripe::Customer::update(
let customer = stripe::Customer::retrieve(
&stripe_client,
&customer_id,
UpdateCustomer {
invoice_settings: Some(
CustomerInvoiceSettings {
default_payment_method: Some(
payment_method.id.to_string(),
),
..Default::default()
},
),
..Default::default()
},
&[],
)
.await?;
if customer
.invoice_settings
.is_none_or(|x| x.default_payment_method.is_none())
{
stripe::Customer::update(
&stripe_client,
&customer_id,
UpdateCustomer {
invoice_settings: Some(
CustomerInvoiceSettings {
default_payment_method: Some(
payment_method.id.to_string(),
),
..Default::default()
},
),
..Default::default()
},
)
.await?;
}
}
}
}

View File

@@ -79,12 +79,13 @@ impl TempUser {
file_host: &Arc<dyn FileHost + Send + Sync>,
redis: &RedisPool,
) -> Result<crate::database::models::DBUserId, AuthenticationError> {
if let Some(email) = &self.email
&& crate::database::models::DBUser::get_by_email(email, client)
if let Some(email) = &self.email {
if crate::database::models::DBUser::get_by_email(email, client)
.await?
.is_some()
{
return Err(AuthenticationError::DuplicateUser);
{
return Err(AuthenticationError::DuplicateUser);
}
}
let user_id =
@@ -1268,19 +1269,19 @@ pub async fn delete_auth_provider(
.update_user_id(user.id.into(), None, &mut transaction)
.await?;
if delete_provider.provider != AuthProvider::PayPal
&& let Some(email) = user.email
{
send_email(
email,
"Authentication method removed",
&format!(
"When logging into Modrinth, you can no longer log in using the {} authentication provider.",
delete_provider.provider.as_str()
),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
if delete_provider.provider != AuthProvider::PayPal {
if let Some(email) = user.email {
send_email(
email,
"Authentication method removed",
&format!(
"When logging into Modrinth, you can no longer log in using the {} authentication provider.",
delete_provider.provider.as_str()
),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
}
}
transaction.commit().await?;

View File

@@ -189,16 +189,17 @@ pub async fn get_project_meta(
.iter()
.find(|x| Some(x.1.id as i32) == row.flame_project_id)
.map(|x| x.0.clone())
&& let Some(val) = merged.flame_files.remove(&sha1)
{
merged.identified.insert(
sha1,
IdentifiedFile {
file_name: val.file_name.clone(),
status: ApprovalType::from_string(&row.status)
.unwrap_or(ApprovalType::Unidentified),
},
);
if let Some(val) = merged.flame_files.remove(&sha1) {
merged.identified.insert(
sha1,
IdentifiedFile {
file_name: val.file_name.clone(),
status: ApprovalType::from_string(&row.status)
.unwrap_or(ApprovalType::Unidentified),
},
);
}
}
}

View File

@@ -185,69 +185,69 @@ pub async fn edit_pat(
)
.await?;
if let Some(pat) = pat
&& pat.user_id == user.id.into()
{
let mut transaction = pool.begin().await?;
if let Some(pat) = pat {
if pat.user_id == user.id.into() {
let mut transaction = pool.begin().await?;
if let Some(scopes) = &info.scopes {
if scopes.is_restricted() {
return Err(ApiError::InvalidInput(
"Invalid scopes requested!".to_string(),
));
}
if let Some(scopes) = &info.scopes {
if scopes.is_restricted() {
return Err(ApiError::InvalidInput(
"Invalid scopes requested!".to_string(),
));
}
sqlx::query!(
"
sqlx::query!(
"
UPDATE pats
SET scopes = $1
WHERE id = $2
",
scopes.bits() as i64,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
if let Some(name) = &info.name {
sqlx::query!(
"
scopes.bits() as i64,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
if let Some(name) = &info.name {
sqlx::query!(
"
UPDATE pats
SET name = $1
WHERE id = $2
",
name,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
if let Some(expires) = &info.expires {
if expires < &Utc::now() {
return Err(ApiError::InvalidInput(
"Expire date must be in the future!".to_string(),
));
name,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
if let Some(expires) = &info.expires {
if expires < &Utc::now() {
return Err(ApiError::InvalidInput(
"Expire date must be in the future!".to_string(),
));
}
sqlx::query!(
"
sqlx::query!(
"
UPDATE pats
SET expires = $1
WHERE id = $2
",
expires,
pat.id.0
expires,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
}
Ok(HttpResponse::NoContent().finish())
@@ -276,21 +276,21 @@ pub async fn delete_pat(
)
.await?;
if let Some(pat) = pat
&& pat.user_id == user.id.into()
{
let mut transaction = pool.begin().await?;
database::models::pat_item::DBPersonalAccessToken::remove(
pat.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
if let Some(pat) = pat {
if pat.user_id == user.id.into() {
let mut transaction = pool.begin().await?;
database::models::pat_item::DBPersonalAccessToken::remove(
pat.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
}
}
Ok(HttpResponse::NoContent().finish())

View File

@@ -185,21 +185,21 @@ pub async fn delete(
let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;
if let Some(session) = session
&& session.user_id == current_user.id.into()
{
let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?;
transaction.commit().await?;
DBSession::clear_cache(
vec![(
Some(session.id),
Some(session.session),
Some(session.user_id),
)],
&redis,
)
.await?;
if let Some(session) = session {
if session.user_id == current_user.id.into() {
let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?;
transaction.commit().await?;
DBSession::clear_cache(
vec![(
Some(session.id),
Some(session.session),
Some(session.user_id),
)],
&redis,
)
.await?;
}
}
Ok(HttpResponse::NoContent().body(""))

View File

@@ -401,13 +401,14 @@ async fn broadcast_to_known_local_friends(
friend.user_id
};
if friend.accepted
&& let Some(socket_ids) =
if friend.accepted {
if let Some(socket_ids) =
sockets.sockets_by_user_id.get(&friend_id.into())
{
for socket_id in socket_ids.iter() {
if let Some(socket) = sockets.sockets.get(&socket_id) {
let _ = send_message(socket.value(), &message).await;
{
for socket_id in socket_ids.iter() {
if let Some(socket) = sockets.sockets.get(&socket_id) {
let _ = send_message(socket.value(), &message).await;
}
}
}
}

View File

@@ -387,16 +387,17 @@ pub async fn revenue_get(
.map(|x| (x.to_string(), HashMap::new()))
.collect::<HashMap<_, _>>();
for value in payouts_values {
if let Some(mod_id) = value.mod_id
&& let Some(amount) = value.amount_sum
&& let Some(interval_start) = value.interval_start
{
let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new());
}
if let Some(hm) = hm.get_mut(&id_string) {
hm.insert(interval_start.timestamp(), amount);
if let Some(mod_id) = value.mod_id {
if let Some(amount) = value.amount_sum {
if let Some(interval_start) = value.interval_start {
let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new());
}
if let Some(hm) = hm.get_mut(&id_string) {
hm.insert(interval_start.timestamp(), amount);
}
}
}
}
}

View File

@@ -192,10 +192,10 @@ pub async fn collection_get(
.map(|x| x.1)
.ok();
if let Some(data) = collection_data
&& is_visible_collection(&data, &user_option, false).await?
{
return Ok(HttpResponse::Ok().json(Collection::from(data)));
if let Some(data) = collection_data {
if is_visible_collection(&data, &user_option, false).await? {
return Ok(HttpResponse::Ok().json(Collection::from(data)));
}
}
Err(ApiError::NotFound)
}

View File

@@ -536,9 +536,11 @@ pub async fn create_payout(
Some(true),
)
.await
&& let Some(data) = res.items.first()
{
payout_item.platform_id = Some(data.payout_item_id.clone());
if let Some(data) = res.items.first() {
payout_item.platform_id =
Some(data.payout_item_id.clone());
}
}
}

View File

@@ -182,10 +182,10 @@ pub async fn project_get(
.map(|x| x.1)
.ok();
if let Some(data) = project_data
&& is_visible_project(&data.inner, &user_option, &pool, false).await?
{
return Ok(HttpResponse::Ok().json(Project::from(data)));
if let Some(data) = project_data {
if is_visible_project(&data.inner, &user_option, &pool, false).await? {
return Ok(HttpResponse::Ok().json(Project::from(data)));
}
}
Err(ApiError::NotFound)
}
@@ -403,36 +403,34 @@ pub async fn project_edit(
.await?;
}
if status.is_searchable()
&& !project_item.inner.webhook_sent
&& let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK")
{
crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(),
&pool,
&redis,
webhook_url,
None,
)
.await
.ok();
if status.is_searchable() && !project_item.inner.webhook_sent {
if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") {
crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(),
&pool,
&redis,
webhook_url,
None,
)
.await
.ok();
sqlx::query!(
"
sqlx::query!(
"
UPDATE mods
SET webhook_sent = TRUE
WHERE id = $1
",
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
}
}
if user.role.is_mod()
&& let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK")
{
crate::util::webhook::send_slack_webhook(
if user.role.is_mod() {
if let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK") {
crate::util::webhook::send_slack_webhook(
project_item.inner.id.into(),
&pool,
&redis,
@@ -451,6 +449,7 @@ pub async fn project_edit(
)
.await
.ok();
}
}
if team_member.is_none_or(|x| !x.accepted) {
@@ -693,45 +692,45 @@ pub async fn project_edit(
.await?;
}
if let Some(links) = &new_project.link_urls
&& !links.is_empty()
{
if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
if let Some(links) = &new_project.link_urls {
if !links.is_empty() {
if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You do not have the permissions to edit the links of this project!"
.to_string(),
));
}
}
let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
// Deletes all links from hashmap- either will be deleted or be replaced
sqlx::query!(
"
let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
// Deletes all links from hashmap- either will be deleted or be replaced
sqlx::query!(
"
DELETE FROM mods_links
WHERE joining_mod_id = $1 AND joining_platform_id IN (
SELECT id FROM link_platforms WHERE name = ANY($2)
)
",
id as db_ids::DBProjectId,
&ids_to_delete
)
.execute(&mut *transaction)
.await?;
id as db_ids::DBProjectId,
&ids_to_delete
)
.execute(&mut *transaction)
.await?;
for (platform, url) in links {
if let Some(url) = url {
let platform_id = db_models::categories::LinkPlatform::get_id(
platform,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"Platform {} does not exist.",
platform.clone()
))
})?;
sqlx::query!(
for (platform, url) in links {
if let Some(url) = url {
let platform_id =
db_models::categories::LinkPlatform::get_id(
platform,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"Platform {} does not exist.",
platform.clone()
))
})?;
sqlx::query!(
"
INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
VALUES ($1, $2, $3)
@@ -742,6 +741,7 @@ pub async fn project_edit(
)
.execute(&mut *transaction)
.await?;
}
}
}
}
@@ -2430,7 +2430,7 @@ pub async fn project_get_organization(
organization,
team_members,
);
Ok(HttpResponse::Ok().json(organization))
return Ok(HttpResponse::Ok().json(organization));
} else {
Err(ApiError::NotFound)
}

View File

@@ -767,13 +767,12 @@ pub async fn edit_team_member(
));
}
if let Some(new_permissions) = edit_member.permissions
&& !permissions.contains(new_permissions)
{
return Err(ApiError::InvalidInput(
"The new permissions have permissions that you don't have"
.to_string(),
));
if let Some(new_permissions) = edit_member.permissions {
if !permissions.contains(new_permissions) {
return Err(ApiError::InvalidInput(
"The new permissions have permissions that you don't have".to_string(),
));
}
}
if edit_member.organization_permissions.is_some() {
@@ -801,12 +800,13 @@ pub async fn edit_team_member(
}
if let Some(new_permissions) = edit_member.organization_permissions
&& !organization_permissions.contains(new_permissions)
{
return Err(ApiError::InvalidInput(
if !organization_permissions.contains(new_permissions) {
return Err(ApiError::InvalidInput(
"The new organization permissions have permissions that you don't have"
.to_string(),
));
}
}
if edit_member.permissions.is_some()
@@ -822,13 +822,13 @@ pub async fn edit_team_member(
}
}
if let Some(payouts_split) = edit_member.payouts_split
&& (payouts_split < Decimal::ZERO
|| payouts_split > Decimal::from(5000))
{
return Err(ApiError::InvalidInput(
"Payouts split must be between 0 and 5000!".to_string(),
));
if let Some(payouts_split) = edit_member.payouts_split {
if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000)
{
return Err(ApiError::InvalidInput(
"Payouts split must be between 0 and 5000!".to_string(),
));
}
}
DBTeamMember::edit_team_member(
@@ -883,13 +883,13 @@ pub async fn transfer_ownership(
DBTeam::get_association(id.into(), &**pool).await?;
if let Some(TeamAssociationId::Project(pid)) = team_association_id {
let result = DBProject::get_id(pid, &**pool, &redis).await?;
if let Some(project_item) = result
&& project_item.inner.organization_id.is_some()
{
return Err(ApiError::InvalidInput(
if let Some(project_item) = result {
if project_item.inner.organization_id.is_some() {
return Err(ApiError::InvalidInput(
"You cannot transfer ownership of a project team that is owend by an organization"
.to_string(),
));
}
}
}

View File

@@ -289,33 +289,36 @@ pub async fn thread_get(
.await?
.1;
if let Some(mut data) = thread_data
&& is_authorized_thread(&data, &user, &pool).await?
{
let authors = &mut data.members;
if let Some(mut data) = thread_data {
if is_authorized_thread(&data, &user, &pool).await? {
let authors = &mut data.members;
authors.append(
&mut data
.messages
.iter()
.filter_map(|x| {
if x.hide_identity && !user.role.is_mod() {
None
} else {
x.author_id
}
})
.collect::<Vec<_>>(),
);
authors.append(
&mut data
.messages
.iter()
.filter_map(|x| {
if x.hide_identity && !user.role.is_mod() {
None
} else {
x.author_id
}
})
.collect::<Vec<_>>(),
);
let users: Vec<User> =
database::models::DBUser::get_many_ids(authors, &**pool, &redis)
.await?
.into_iter()
.map(From::from)
.collect();
let users: Vec<User> = database::models::DBUser::get_many_ids(
authors, &**pool, &redis,
)
.await?
.into_iter()
.map(From::from)
.collect();
return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
return Ok(
HttpResponse::Ok().json(Thread::from(data, users, &user))
);
}
}
Err(ApiError::NotFound)
}
@@ -451,32 +454,33 @@ pub async fn thread_send_message(
)
.await?;
if let Some(project) = project
&& project.inner.status != ProjectStatus::Processing
&& user.role.is_mod()
{
let members =
database::models::DBTeamMember::get_from_team_full(
project.inner.team_id,
&**pool,
if let Some(project) = project {
if project.inner.status != ProjectStatus::Processing
&& user.role.is_mod()
{
let members =
database::models::DBTeamMember::get_from_team_full(
project.inner.team_id,
&**pool,
&redis,
)
.await?;
NotificationBuilder {
body: NotificationBody::ModeratorMessage {
thread_id: thread.id.into(),
message_id: id.into(),
project_id: Some(project.inner.id.into()),
report_id: None,
},
}
.insert_many(
members.into_iter().map(|x| x.user_id).collect(),
&mut transaction,
&redis,
)
.await?;
NotificationBuilder {
body: NotificationBody::ModeratorMessage {
thread_id: thread.id.into(),
message_id: id.into(),
project_id: Some(project.inner.id.into()),
report_id: None,
},
}
.insert_many(
members.into_iter().map(|x| x.user_id).collect(),
&mut transaction,
&redis,
)
.await?;
}
} else if let Some(report_id) = thread.report_id {
let report = database::models::report_item::DBReport::get(

View File

@@ -522,10 +522,10 @@ async fn version_create_inner(
.fetch_optional(pool)
.await?;
if let Some(project_status) = project_status
&& project_status.status == ProjectStatus::Processing.as_str()
{
moderation_queue.projects.insert(project_id.into());
if let Some(project_status) = project_status {
if project_status.status == ProjectStatus::Processing.as_str() {
moderation_queue.projects.insert(project_id.into());
}
}
Ok(HttpResponse::Ok().json(response))
@@ -871,16 +871,16 @@ pub async fn upload_file(
ref format,
ref files,
} = validation_result
&& dependencies.is_empty()
{
let hashes: Vec<Vec<u8>> = format
.files
.iter()
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
.map(|x| x.as_bytes().to_vec())
.collect();
if dependencies.is_empty() {
let hashes: Vec<Vec<u8>> = format
.files
.iter()
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
.map(|x| x.as_bytes().to_vec())
.collect();
let res = sqlx::query!(
let res = sqlx::query!(
"
SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
INNER JOIN files f on h.file_id = f.id
@@ -892,44 +892,45 @@ pub async fn upload_file(
.fetch_all(&mut **transaction)
.await?;
for file in &format.files {
if let Some(dep) = res.iter().find(|x| {
Some(&*x.hash)
== file
.hashes
.get(&PackFileHash::Sha1)
.map(|x| x.as_bytes())
}) {
dependencies.push(DependencyBuilder {
project_id: Some(models::DBProjectId(dep.project_id)),
version_id: Some(models::DBVersionId(dep.version_id)),
file_name: None,
dependency_type: DependencyType::Embedded.to_string(),
});
} else if let Some(first_download) = file.downloads.first() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(
first_download
.rsplit('/')
.next()
.unwrap_or(first_download)
.to_string(),
),
dependency_type: DependencyType::Embedded.to_string(),
});
for file in &format.files {
if let Some(dep) = res.iter().find(|x| {
Some(&*x.hash)
== file
.hashes
.get(&PackFileHash::Sha1)
.map(|x| x.as_bytes())
}) {
dependencies.push(DependencyBuilder {
project_id: Some(models::DBProjectId(dep.project_id)),
version_id: Some(models::DBVersionId(dep.version_id)),
file_name: None,
dependency_type: DependencyType::Embedded.to_string(),
});
} else if let Some(first_download) = file.downloads.first() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(
first_download
.rsplit('/')
.next()
.unwrap_or(first_download)
.to_string(),
),
dependency_type: DependencyType::Embedded.to_string(),
});
}
}
}
for file in files {
if !file.is_empty() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(file.to_string()),
dependency_type: DependencyType::Embedded.to_string(),
});
for file in files {
if !file.is_empty() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(file.to_string()),
dependency_type: DependencyType::Embedded.to_string(),
});
}
}
}
}
@@ -973,10 +974,10 @@ pub async fn upload_file(
));
}
if let ValidationResult::Warning(msg) = validation_result
&& primary
{
return Err(CreateError::InvalidInput(msg.to_string()));
if let ValidationResult::Warning(msg) = validation_result {
if primary {
return Err(CreateError::InvalidInput(msg.to_string()));
}
}
let url = format!("{cdn_url}/{file_path_encode}");

View File

@@ -148,55 +148,65 @@ pub async fn get_update_from_hash(
&redis,
)
.await?
&& let Some(project) = database::models::DBProject::get_id(
{
if let Some(project) = database::models::DBProject::get_id(
file.project_id,
&**pool,
&redis,
)
.await?
{
let mut versions = database::models::DBVersion::get_many(
&project.versions,
&**pool,
&redis,
)
.await?
.into_iter()
.filter(|x| {
let mut bool = true;
if let Some(version_types) = &update_data.version_types {
bool &= version_types
.iter()
.any(|y| y.as_str() == x.inner.version_type);
}
if let Some(loaders) = &update_data.loaders {
bool &= x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &update_data.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) =
x.version_fields.iter().find(|y| y.field_name == *key)
{
values.iter().any(|v| x_vf.value.contains_json_value(v))
} else {
true
};
{
let mut versions = database::models::DBVersion::get_many(
&project.versions,
&**pool,
&redis,
)
.await?
.into_iter()
.filter(|x| {
let mut bool = true;
if let Some(version_types) = &update_data.version_types {
bool &= version_types
.iter()
.any(|y| y.as_str() == x.inner.version_type);
}
}
bool
})
.sorted();
if let Some(loaders) = &update_data.loaders {
bool &= x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &update_data.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values
.iter()
.any(|v| x_vf.value.contains_json_value(v))
} else {
true
};
}
}
bool
})
.sorted();
if let Some(first) = versions.next_back() {
if !is_visible_version(&first.inner, &user_option, &pool, &redis)
if let Some(first) = versions.next_back() {
if !is_visible_version(
&first.inner,
&user_option,
&pool,
&redis,
)
.await?
{
return Err(ApiError::NotFound);
}
{
return Err(ApiError::NotFound);
}
return Ok(
HttpResponse::Ok().json(models::projects::Version::from(first))
);
return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(first)));
}
}
}
Err(ApiError::NotFound)
@@ -388,12 +398,13 @@ pub async fn update_files(
if let Some(version) = versions
.iter()
.find(|x| x.inner.project_id == file.project_id)
&& let Some(hash) = file.hashes.get(&algorithm)
{
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
if let Some(hash) = file.hashes.get(&algorithm) {
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
}
}
}
@@ -473,59 +484,69 @@ pub async fn update_individual_files(
for project in projects {
for file in files.iter().filter(|x| x.project_id == project.inner.id) {
if let Some(hash) = file.hashes.get(&algorithm)
&& let Some(query_file) =
if let Some(hash) = file.hashes.get(&algorithm) {
if let Some(query_file) =
update_data.hashes.iter().find(|x| &x.hash == hash)
{
let version = all_versions
.iter()
.filter(|x| x.inner.project_id == file.project_id)
.filter(|x| {
let mut bool = true;
if let Some(version_types) = &query_file.version_types {
bool &= version_types
.iter()
.any(|y| y.as_str() == x.inner.version_type);
}
if let Some(loaders) = &query_file.loaders {
bool &=
x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &query_file.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values.iter().any(|v| {
x_vf.value.contains_json_value(v)
})
} else {
true
};
}
}
bool
})
.sorted()
.next_back();
if let Some(version) = version
&& is_visible_version(
&version.inner,
&user_option,
&pool,
&redis,
)
.await?
{
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
let version = all_versions
.iter()
.filter(|x| x.inner.project_id == file.project_id)
.filter(|x| {
let mut bool = true;
if let Some(version_types) =
&query_file.version_types
{
bool &= version_types.iter().any(|y| {
y.as_str() == x.inner.version_type
});
}
if let Some(loaders) = &query_file.loaders {
bool &= x
.loaders
.iter()
.any(|y| loaders.contains(y));
}
if let Some(loader_fields) =
&query_file.loader_fields
{
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values.iter().any(|v| {
x_vf.value.contains_json_value(v)
})
} else {
true
};
}
}
bool
})
.sorted()
.next_back();
if let Some(version) = version {
if is_visible_version(
&version.inner,
&user_option,
&pool,
&redis,
)
.await?
{
response.insert(
hash.clone(),
models::projects::Version::from(
version.clone(),
),
);
}
}
}
}
}

View File

@@ -106,12 +106,13 @@ pub async fn version_project_get_helper(
|| x.inner.version_number == id.1
});
if let Some(version) = version
&& is_visible_version(&version.inner, &user_option, &pool, &redis)
if let Some(version) = version {
if is_visible_version(&version.inner, &user_option, &pool, &redis)
.await?
{
return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(version)));
{
return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(version)));
}
}
}
@@ -189,12 +190,12 @@ pub async fn version_get_helper(
.map(|x| x.1)
.ok();
if let Some(data) = version_data
&& is_visible_version(&data.inner, &user_option, &pool, &redis).await?
{
return Ok(
HttpResponse::Ok().json(models::projects::Version::from(data))
);
if let Some(data) = version_data {
if is_visible_version(&data.inner, &user_option, &pool, &redis).await? {
return Ok(
HttpResponse::Ok().json(models::projects::Version::from(data))
);
}
}
Err(ApiError::NotFound)

View File

@@ -15,12 +15,14 @@ pub async fn get_user_status(
return Some(friend_status);
}
if let Ok(mut conn) = redis.pool.get().await
&& let Ok(mut statuses) =
if let Ok(mut conn) = redis.pool.get().await {
if let Ok(mut statuses) =
conn.sscan::<_, String>(get_field_name(user)).await
&& let Some(status_json) = statuses.next_item().await
{
return serde_json::from_str::<UserStatus>(&status_json).ok();
{
if let Some(status_json) = statuses.next_item().await {
return serde_json::from_str::<UserStatus>(&status_json).ok();
}
}
}
None

View File

@@ -138,11 +138,12 @@ fn process_image(
let (orig_width, orig_height) = img.dimensions();
let aspect_ratio = orig_width as f32 / orig_height as f32;
if let Some(target_width) = target_width
&& img.width() > target_width
{
let new_height = (target_width as f32 / aspect_ratio).round() as u32;
img = img.resize(target_width, new_height, FilterType::Lanczos3);
if let Some(target_width) = target_width {
if img.width() > target_width {
let new_height =
(target_width as f32 / aspect_ratio).round() as u32;
img = img.resize(target_width, new_height, FilterType::Lanczos3);
}
}
if let Some(min_aspect_ratio) = min_aspect_ratio {

View File

@@ -133,11 +133,12 @@ pub async fn rate_limit_middleware(
.expect("Rate limiter not configured properly")
.clone();
if let Some(key) = req.headers().get("x-ratelimit-key")
&& key.to_str().ok()
if let Some(key) = req.headers().get("x-ratelimit-key") {
if key.to_str().ok()
== dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
{
return Ok(next.call(req).await?.map_into_left_body());
{
return Ok(next.call(req).await?.map_into_left_body());
}
}
let conn_info = req.connection_info().clone();

View File

@@ -22,47 +22,46 @@ pub fn validation_errors_to_string(
let key_option = map.keys().next();
if let Some(field) = key_option
&& let Some(error) = map.get(field)
{
return match error {
ValidationErrorsKind::Struct(errors) => {
validation_errors_to_string(
*errors.clone(),
Some(format!("of item {field}")),
)
}
ValidationErrorsKind::List(list) => {
if let Some((index, errors)) = list.iter().next() {
output.push_str(&validation_errors_to_string(
if let Some(field) = key_option {
if let Some(error) = map.get(field) {
return match error {
ValidationErrorsKind::Struct(errors) => {
validation_errors_to_string(
*errors.clone(),
Some(format!("of list {field} with index {index}")),
));
Some(format!("of item {field}")),
)
}
ValidationErrorsKind::List(list) => {
if let Some((index, errors)) = list.iter().next() {
output.push_str(&validation_errors_to_string(
*errors.clone(),
Some(format!("of list {field} with index {index}")),
));
}
output
}
ValidationErrorsKind::Field(errors) => {
if let Some(error) = errors.first() {
if let Some(adder) = adder {
write!(
output
}
ValidationErrorsKind::Field(errors) => {
if let Some(error) = errors.first() {
if let Some(adder) = adder {
write!(
&mut output,
"Field {field} {adder} failed validation with error: {}",
error.code
).unwrap();
} else {
write!(
&mut output,
"Field {field} failed validation with error: {}",
error.code
)
.unwrap();
} else {
write!(
&mut output,
"Field {field} failed validation with error: {}",
error.code
).unwrap();
}
}
}
output
}
};
output
}
};
}
}
String::new()

View File

@@ -238,17 +238,17 @@ pub async fn send_slack_webhook(
}
});
if let Some(icon_url) = metadata.project_icon_url
&& let Some(project_block) = project_block.as_object_mut()
{
project_block.insert(
"accessory".to_string(),
serde_json::json!({
"type": "image",
"image_url": icon_url,
"alt_text": metadata.project_title
}),
);
if let Some(icon_url) = metadata.project_icon_url {
if let Some(project_block) = project_block.as_object_mut() {
project_block.insert(
"accessory".to_string(),
serde_json::json!({
"type": "image",
"image_url": icon_url,
"alt_text": metadata.project_title
}),
);
}
}
blocks.push(project_block);

View File

@@ -222,10 +222,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
}
// Failure test- logged in on a non-team user
@@ -246,10 +246,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
}
// Failure test- logged in with EVERY non-relevant permission
@@ -270,10 +270,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
}
// Patch user's permissions to success permissions
@@ -300,10 +300,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK
&& let Some(success_json_check) = &self.success_json_check
{
success_json_check(&test::read_body_json(resp).await);
if resp.status() == StatusCode::OK {
if let Some(success_json_check) = &self.success_json_check {
success_json_check(&test::read_body_json(resp).await);
}
}
// If the remove_user flag is set, remove the user from the project

View File

@@ -1,2 +1,2 @@
allow-dbg-in-tests = true
msrv = "1.89.0"
msrv = "1.88.0"

View File

@@ -50,10 +50,10 @@ pub async fn parse_command(
// We assume anything else is a filepath to an .mrpack file
let path = PathBuf::from(command_string);
let path = io::canonicalize(path)?;
if let Some(ext) = path.extension()
&& ext == "mrpack"
{
return Ok(CommandPayload::RunMRPack { path });
if let Some(ext) = path.extension() {
if ext == "mrpack" {
return Ok(CommandPayload::RunMRPack { path });
}
}
emit_warning(&format!(
"Invalid command, unrecognized filetype: {}",

View File

@@ -106,13 +106,13 @@ pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
})?;
// removes the old installation of java
if let Some(file) = archive.file_names().next()
&& let Some(dir) = file.split('/').next()
{
let path = path.join(dir);
if let Some(file) = archive.file_names().next() {
if let Some(dir) = file.split('/').next() {
let path = path.join(dir);
if path.exists() {
io::remove_dir_all(path).await?;
if path.exists() {
io::remove_dir_all(path).await?;
}
}
}

View File

@@ -54,11 +54,11 @@ pub async fn remove_user(uuid: uuid::Uuid) -> crate::Result<()> {
if let Some((uuid, user)) = users.remove(&uuid) {
Credentials::remove(uuid, &state.pool).await?;
if user.active
&& let Some((_, mut user)) = users.into_iter().next()
{
user.active = true;
user.upsert(&state.pool).await?;
if user.active {
if let Some((_, mut user)) = users.into_iter().next() {
user.active = true;
user.upsert(&state.pool).await?;
}
}
}

View File

@@ -221,14 +221,14 @@ async fn import_atlauncher_unmanaged(
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id
&& let Some(ref version_id) = description.version_id
{
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: true,
})
if let Some(ref project_id) = description.project_id {
if let Some(ref version_id) = description.version_id {
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: true,
})
}
}
prof.icon_path = description

View File

@@ -383,18 +383,18 @@ pub async fn set_profile_information(
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id
&& let Some(ref version_id) = description.version_id
{
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: if !ignore_lock {
true
} else {
prof.linked_data.as_ref().is_none_or(|x| x.locked)
},
})
if let Some(ref project_id) = description.project_id {
if let Some(ref version_id) = description.version_id {
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: if !ignore_lock {
true
} else {
prof.linked_data.as_ref().is_none_or(|x| x.locked)
},
})
}
}
prof.icon_path = description

View File

@@ -149,12 +149,13 @@ pub async fn install_zipped_mrpack_files(
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env
&& env
if let Some(env) = project.env {
if env
.get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported)
{
return Ok(());
{
return Ok(());
}
}
let file = fetch_mirrors(
@@ -374,12 +375,12 @@ pub async fn remove_all_related_files(
)
.await?
{
if let Some(metadata) = &project.metadata
&& to_remove.contains(&metadata.project_id)
{
let path = profile_full_path.join(file_path);
if path.exists() {
io::remove_file(&path).await?;
if let Some(metadata) = &project.metadata {
if to_remove.contains(&metadata.project_id) {
let path = profile_full_path.join(file_path);
if path.exists() {
io::remove_file(&path).await?;
}
}
}
}

View File

@@ -337,26 +337,28 @@ pub async fn update_project(
)
.await?
.remove(project_path)
&& let Some(update_version) = &file.update_version_id
{
let path = Profile::add_project_version(
profile_path,
update_version,
&state.pool,
&state.fetch_semaphore,
&state.io_semaphore,
)
.await?;
if let Some(update_version) = &file.update_version_id {
let path = Profile::add_project_version(
profile_path,
update_version,
&state.pool,
&state.fetch_semaphore,
&state.io_semaphore,
)
.await?;
if path != project_path {
Profile::remove_project(profile_path, project_path).await?;
if path != project_path {
Profile::remove_project(profile_path, project_path).await?;
}
if !skip_send_event.unwrap_or(false) {
emit_profile(profile_path, ProfilePayloadType::Edited)
.await?;
}
return Ok(path);
}
if !skip_send_event.unwrap_or(false) {
emit_profile(profile_path, ProfilePayloadType::Edited).await?;
}
return Ok(path);
}
Err(crate::ErrorKind::InputError(
@@ -477,10 +479,10 @@ pub async fn export_mrpack(
let included_export_candidates = included_export_candidates
.into_iter()
.filter(|x| {
if let Some(f) = PathBuf::from(x).file_name()
&& f.to_string_lossy().starts_with(".DS_Store")
{
return false;
if let Some(f) = PathBuf::from(x).file_name() {
if f.to_string_lossy().starts_with(".DS_Store") {
return false;
}
}
true
})

View File

@@ -184,7 +184,6 @@ pub enum LoadingBarType {
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct LoadingPayload {
pub event: LoadingBarType,
pub loader_uuid: Uuid,
@@ -193,7 +192,11 @@ pub struct LoadingPayload {
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct OfflinePayload {
pub offline: bool,
}
#[derive(Serialize, Clone)]
pub struct WarningPayload {
pub message: String,
}
@@ -217,14 +220,12 @@ pub enum CommandPayload {
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct ProcessPayload {
pub profile_path_id: String,
pub uuid: Uuid,
pub event: ProcessPayloadType,
pub message: String,
}
#[derive(Serialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum ProcessPayloadType {
@@ -233,13 +234,11 @@ pub enum ProcessPayloadType {
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct ProfilePayload {
pub profile_path_id: String,
#[serde(flatten)]
pub event: ProfilePayloadType,
}
#[derive(Serialize, Clone)]
#[serde(tag = "event", rename_all = "snake_case")]
pub enum ProfilePayloadType {
@@ -258,16 +257,6 @@ pub enum ProfilePayloadType {
Removed,
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "event")]
pub enum FriendPayload {
FriendRequest { from: UserId },
UserOffline { id: UserId },
StatusUpdate { user_status: UserStatus },
StatusSync,
}
#[derive(Debug, thiserror::Error)]
pub enum EventError {
#[error("Event state was not properly initialized")]
@@ -280,3 +269,13 @@ pub enum EventError {
#[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error),
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "event")]
pub enum FriendPayload {
FriendRequest { from: UserId },
UserOffline { id: UserId },
StatusUpdate { user_status: UserStatus },
StatusSync,
}

View File

@@ -32,15 +32,15 @@ pub fn get_class_paths(
let mut cps = libraries
.iter()
.filter_map(|library| {
if let Some(rules) = &library.rules
&& !parse_rules(
if let Some(rules) = &library.rules {
if !parse_rules(
rules,
java_arch,
&QuickPlayType::None,
minecraft_updated,
)
{
return None;
) {
return None;
}
}
if !library.include_in_classpath {
@@ -504,10 +504,10 @@ pub async fn get_processor_main_class(
let mut line = line.map_err(IOError::from)?;
line.retain(|c| !c.is_whitespace());
if line.starts_with("Main-Class:")
&& let Some(class) = line.split(':').nth(1)
{
return Ok(Some(class.to_string()));
if line.starts_with("Main-Class:") {
if let Some(class) = line.split(':').nth(1) {
return Ok(Some(class.to_string()));
}
}
}

View File

@@ -290,11 +290,12 @@ pub async fn download_libraries(
loading_try_for_each_concurrent(
stream::iter(libraries.iter())
.map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
if let Some(rules) = &library.rules
&& !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
if let Some(rules) = &library.rules {
if !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
tracing::trace!("Skipped library {}", &library.name);
return Ok(());
}
}
if !library.downloadable {
tracing::trace!("Skipped non-downloadable library {}", &library.name);
@@ -310,14 +311,15 @@ pub async fn download_libraries(
return Ok(());
}
if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads
&& !artifact.url.is_empty(){
if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads {
if !artifact.url.is_empty(){
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
return Ok::<_, crate::Error>(());
}
}
let url = [
library

View File

@@ -341,10 +341,10 @@ pub async fn install_minecraft(
// Forge processors (90-100)
for (index, processor) in processors.iter().enumerate() {
if let Some(sides) = &processor.sides
&& !sides.contains(&String::from("client"))
{
continue;
if let Some(sides) = &processor.sides {
if !sides.contains(&String::from("client")) {
continue;
}
}
let cp = {

View File

@@ -385,10 +385,10 @@ impl DirectoryInfo {
return Err(e);
}
} else {
if let Some(disk_usage) = get_disk_usage(&move_dir)?
&& total_size > disk_usage
{
return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
if let Some(disk_usage) = get_disk_usage(&move_dir)? {
if total_size > disk_usage {
return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
}
}
let loader_bar_id = Arc::new(&loader_bar_id);

View File

@@ -9,7 +9,7 @@ use ariadne::networking::message::{
ClientToServerMessage, ServerToClientMessage,
};
use ariadne::users::UserStatus;
use async_tungstenite::WebSocketSender;
use async_tungstenite::WebSocketStream;
use async_tungstenite::tokio::{ConnectStream, connect_async};
use async_tungstenite::tungstenite::Message;
use async_tungstenite::tungstenite::client::IntoClientRequest;
@@ -17,6 +17,7 @@ use bytes::Bytes;
use chrono::{DateTime, Utc};
use dashmap::DashMap;
use either::Either;
use futures::stream::SplitSink;
use futures::{SinkExt, StreamExt};
use reqwest::Method;
use reqwest::header::HeaderValue;
@@ -31,7 +32,7 @@ use tokio::sync::{Mutex, RwLock};
use uuid::Uuid;
pub(super) type WriteSocket =
Arc<RwLock<Option<WebSocketSender<ConnectStream>>>>;
Arc<RwLock<Option<SplitSink<WebSocketStream<ConnectStream>, Message>>>>;
pub(super) type TunnelSockets = Arc<DashMap<Uuid, Arc<InternalTunnelSocket>>>;
pub struct FriendsSocket {
@@ -179,24 +180,27 @@ impl FriendsSocket {
ServerToClientMessage::FriendSocketStoppedListening { .. } => {}, // TODO
ServerToClientMessage::SocketConnected { to_socket, new_socket } => {
if let Some(connected_to) = sockets.get(&to_socket)
&& let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone()
&& let Ok(new_stream) = TcpStream::connect(local_addr).await {
if let Some(connected_to) = sockets.get(&to_socket) {
if let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone() {
if let Ok(new_stream) = TcpStream::connect(local_addr).await {
let (read, write) = new_stream.into_split();
sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write))));
Self::socket_read_loop(write_handle.clone(), read, new_socket);
continue;
}
}
}
let _ = Self::send_message(&write_handle, ClientToServerMessage::SocketClose { socket: new_socket }).await;
},
ServerToClientMessage::SocketClosed { socket } => {
sockets.remove_if(&socket, |_, x| matches!(*x.clone(), InternalTunnelSocket::Connected(_)));
},
ServerToClientMessage::SocketData { socket, data } => {
if let Some(mut socket) = sockets.get_mut(&socket)
&& let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
if let Some(mut socket) = sockets.get_mut(&socket) {
if let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
let _ = stream.lock().await.write_all(&data).await;
}
}
},
}
}

View File

@@ -100,8 +100,8 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
let profile_path_str = profile_path_str.clone();
let world = world.clone();
tokio::spawn(async move {
if let Ok(state) = State::get().await
&& let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
if let Ok(state) = State::get().await {
if let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
&profile_path_str,
WorldType::Singleplayer,
&world,
@@ -109,6 +109,7 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
).await {
tracing::warn!("Failed to remove AttachedWorldData for '{world}': {e}")
}
}
});
}
Some(ProfilePayloadType::WorldUpdated { world })
@@ -149,14 +150,14 @@ pub(crate) async fn watch_profiles_init(
) {
if let Ok(profiles_dir) = std::fs::read_dir(dirs.profiles_dir()) {
for profile_dir in profiles_dir {
if let Ok(file_name) = profile_dir.map(|x| x.file_name())
&& let Some(file_name) = file_name.to_str()
{
if file_name.starts_with(".DS_Store") {
continue;
};
if let Ok(file_name) = profile_dir.map(|x| x.file_name()) {
if let Some(file_name) = file_name.to_str() {
if file_name.starts_with(".DS_Store") {
continue;
};
watch_profile(file_name, watcher, dirs).await;
watch_profile(file_name, watcher, dirs).await;
}
}
}
}

View File

@@ -76,9 +76,10 @@ where
.loaded_config_dir
.clone()
.and_then(|x| x.to_str().map(|x| x.to_string()))
&& path != old_launcher_root_str
{
settings.custom_dir = Some(path);
if path != old_launcher_root_str {
settings.custom_dir = Some(path);
}
}
settings.prev_custom_dir = Some(old_launcher_root_str.clone());
@@ -135,27 +136,31 @@ where
.await?;
}
if let Some(device_token) = minecraft_auth.token
&& let Ok(private_key) =
if let Some(device_token) = minecraft_auth.token {
if let Ok(private_key) =
SigningKey::from_pkcs8_pem(&device_token.private_key)
&& let Ok(uuid) = Uuid::parse_str(&device_token.id)
{
DeviceTokenPair {
token: DeviceToken {
issue_instant: device_token.token.issue_instant,
not_after: device_token.token.not_after,
token: device_token.token.token,
display_claims: device_token.token.display_claims,
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: device_token.x,
y: device_token.y,
},
{
if let Ok(uuid) = Uuid::parse_str(&device_token.id) {
DeviceTokenPair {
token: DeviceToken {
issue_instant: device_token.token.issue_instant,
not_after: device_token.token.not_after,
token: device_token.token.token,
display_claims: device_token
.token
.display_claims,
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: device_token.x,
y: device_token.y,
},
}
.upsert(exec)
.await?;
}
}
.upsert(exec)
.await?;
}
}
@@ -202,93 +207,100 @@ where
update_version,
..
} = project.metadata
&& let Some(file) = version
{
if let Some(file) = version
.files
.iter()
.find(|x| x.hashes.get("sha512") == Some(&sha512))
&& let Some(sha1) = file.hashes.get("sha1")
{
if let Ok(metadata) = full_path.metadata() {
let file_name = format!(
"{}/{}",
profile.path,
path.replace('\\', "/")
.replace(".disabled", "")
);
{
if let Some(sha1) = file.hashes.get("sha1") {
if let Ok(metadata) = full_path.metadata() {
let file_name = format!(
"{}/{}",
profile.path,
path.replace('\\', "/")
.replace(".disabled", "")
);
cached_entries.push(CacheValue::FileHash(
CachedFileHash {
path: file_name,
size: metadata.len(),
hash: sha1.clone(),
project_type:
ProjectType::get_from_parent_folder(
&full_path,
),
},
));
}
cached_entries.push(CacheValue::File(CachedFile {
hash: sha1.clone(),
project_id: version.project_id.clone(),
version_id: version.id.clone(),
}));
if let Some(update_version) = update_version {
let mod_loader: ModLoader =
profile.metadata.loader.into();
cached_entries.push(CacheValue::FileUpdate(
CachedFileUpdate {
hash: sha1.clone(),
game_version: profile
.metadata
.game_version
.clone(),
loaders: vec![
mod_loader.as_str().to_string(),
],
update_version_id: update_version
.id
.clone(),
},
));
cached_entries.push(CacheValue::Version(
(*update_version).into(),
));
}
let members = members
.into_iter()
.map(|x| {
let user = User {
id: x.user.id,
username: x.user.username,
avatar_url: x.user.avatar_url,
bio: x.user.bio,
created: x.user.created,
role: x.user.role,
badges: 0,
};
cached_entries
.push(CacheValue::User(user.clone()));
TeamMember {
team_id: x.team_id,
user,
is_owner: x.role == "Owner",
role: x.role,
ordering: x.ordering,
cached_entries.push(CacheValue::FileHash(
CachedFileHash {
path: file_name,
size: metadata.len(),
hash: sha1.clone(),
project_type: ProjectType::get_from_parent_folder(&full_path),
},
));
}
})
.collect::<Vec<_>>();
cached_entries.push(CacheValue::Team(members));
cached_entries.push(CacheValue::File(
CachedFile {
hash: sha1.clone(),
project_id: version.project_id.clone(),
version_id: version.id.clone(),
},
));
cached_entries
.push(CacheValue::Version((*version).into()));
if let Some(update_version) = update_version {
let mod_loader: ModLoader =
profile.metadata.loader.into();
cached_entries.push(
CacheValue::FileUpdate(
CachedFileUpdate {
hash: sha1.clone(),
game_version: profile
.metadata
.game_version
.clone(),
loaders: vec![
mod_loader
.as_str()
.to_string(),
],
update_version_id:
update_version.id.clone(),
},
),
);
cached_entries.push(CacheValue::Version(
(*update_version).into(),
));
}
let members = members
.into_iter()
.map(|x| {
let user = User {
id: x.user.id,
username: x.user.username,
avatar_url: x.user.avatar_url,
bio: x.user.bio,
created: x.user.created,
role: x.user.role,
badges: 0,
};
cached_entries.push(CacheValue::User(
user.clone(),
));
TeamMember {
team_id: x.team_id,
user,
is_owner: x.role == "Owner",
role: x.role,
ordering: x.ordering,
}
})
.collect::<Vec<_>>();
cached_entries.push(CacheValue::Team(members));
cached_entries.push(CacheValue::Version(
(*version).into(),
));
}
}
}
}
@@ -320,15 +332,16 @@ where
.map(|x| x.id),
groups: profile.metadata.groups,
linked_data: profile.metadata.linked_data.and_then(|x| {
if let Some(project_id) = x.project_id
&& let Some(version_id) = x.version_id
&& let Some(locked) = x.locked
{
return Some(LinkedData {
project_id,
version_id,
locked,
});
if let Some(project_id) = x.project_id {
if let Some(version_id) = x.version_id {
if let Some(locked) = x.locked {
return Some(LinkedData {
project_id,
version_id,
locked,
});
}
}
}
None

View File

@@ -393,9 +393,10 @@ impl Credentials {
..
},
) = *err.raw
&& (source.is_connect() || source.is_timeout())
{
return Ok(Some(creds));
if source.is_connect() || source.is_timeout() {
return Ok(Some(creds));
}
}
Err(err)
@@ -639,31 +640,36 @@ impl DeviceTokenPair {
.fetch_optional(exec)
.await?;
if let Some(x) = res
&& let Ok(uuid) = Uuid::parse_str(&x.uuid)
&& let Ok(private_key) = SigningKey::from_pkcs8_pem(&x.private_key)
{
return Ok(Some(Self {
token: DeviceToken {
issue_instant: Utc
.timestamp_opt(x.issue_instant, 0)
.single()
.unwrap_or_else(Utc::now),
not_after: Utc
.timestamp_opt(x.not_after, 0)
.single()
.unwrap_or_else(Utc::now),
token: x.token,
display_claims: serde_json::from_value(x.display_claims)
.unwrap_or_default(),
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: x.x,
y: x.y,
},
}));
if let Some(x) = res {
if let Ok(uuid) = Uuid::parse_str(&x.uuid) {
if let Ok(private_key) =
SigningKey::from_pkcs8_pem(&x.private_key)
{
return Ok(Some(Self {
token: DeviceToken {
issue_instant: Utc
.timestamp_opt(x.issue_instant, 0)
.single()
.unwrap_or_else(Utc::now),
not_after: Utc
.timestamp_opt(x.not_after, 0)
.single()
.unwrap_or_else(Utc::now),
token: x.token,
display_claims: serde_json::from_value(
x.display_claims,
)
.unwrap_or_default(),
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: x.x,
y: x.y,
},
}));
}
}
}
Ok(None)
@@ -718,7 +724,7 @@ const MICROSOFT_CLIENT_ID: &str = "00000000402b5328";
const AUTH_REPLY_URL: &str = "https://login.live.com/oauth20_desktop.srf";
const REQUESTED_SCOPE: &str = "service::user.auth.xboxlive.com::MBI_SSL";
pub struct RequestWithDate<T> {
struct RequestWithDate<T> {
pub date: DateTime<Utc>,
pub value: T,
}

View File

@@ -360,17 +360,18 @@ impl Process {
}
// Write the throwable if present
if !current_content.is_empty()
&& let Err(e) =
if !current_content.is_empty() {
if let Err(e) =
Process::append_to_log_file(
&log_path,
&current_content,
)
{
tracing::error!(
"Failed to write throwable to log file: {}",
e
);
{
tracing::error!(
"Failed to write throwable to log file: {}",
e
);
}
}
}
}
@@ -428,13 +429,15 @@ impl Process {
if let Some(timestamp) =
current_event.timestamp.as_deref()
&& let Err(e) = Self::maybe_handle_server_join_logging(
{
if let Err(e) = Self::maybe_handle_server_join_logging(
profile_path,
timestamp,
message
).await {
tracing::error!("Failed to handle server join logging: {e}");
}
}
}
}
_ => {}
@@ -442,29 +445,35 @@ impl Process {
}
Ok(Event::Text(mut e)) => {
if in_message || in_throwable {
if let Ok(text) = e.xml_content() {
if let Ok(text) = e.unescape() {
current_content.push_str(&text);
}
} else if !in_event
&& !e.inplace_trim_end()
&& !e.inplace_trim_start()
&& let Ok(text) = e.xml_content()
&& let Err(e) = Process::append_to_log_file(
&log_path,
&format!("{text}\n"),
)
{
tracing::error!(
"Failed to write to log file: {}",
e
);
if let Ok(text) = e.unescape() {
if let Err(e) = Process::append_to_log_file(
&log_path,
&format!("{text}\n"),
) {
tracing::error!(
"Failed to write to log file: {}",
e
);
}
}
}
}
Ok(Event::CData(e)) => {
if (in_message || in_throwable)
&& let Ok(text) = e.xml_content()
{
current_content.push_str(&text);
if in_message || in_throwable {
if let Ok(text) = e
.escape()
.map_err(|x| x.into())
.and_then(|x| x.unescape())
{
current_content.push_str(&text);
}
}
}
_ => (),
@@ -711,13 +720,16 @@ impl Process {
let logs_folder = state.directories.profile_logs_dir(&profile_path);
let log_path = logs_folder.join(LAUNCHER_LOG_PATH);
if log_path.exists()
&& let Err(e) = Process::append_to_log_file(
if log_path.exists() {
if let Err(e) = Process::append_to_log_file(
&log_path,
&format!("\n# Process exited with status: {mc_exit_status}\n"),
)
{
tracing::warn!("Failed to write exit status to log file: {}", e);
) {
tracing::warn!(
"Failed to write exit status to log file: {}",
e
);
}
}
let _ = state.discord_rpc.clear_to_default(true).await;

View File

@@ -595,8 +595,8 @@ impl Profile {
}
#[tracing::instrument(skip(self, semaphore, icon))]
pub async fn set_icon(
&mut self,
pub async fn set_icon<'a>(
&'a mut self,
cache_dir: &Path,
semaphore: &IoSemaphore,
icon: bytes::Bytes,
@@ -629,20 +629,21 @@ impl Profile {
{
let subdirectory =
subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file()
&& let Some(file_name) = subdirectory
if subdirectory.is_file() {
if let Some(file_name) = subdirectory
.file_name()
.and_then(|x| x.to_str())
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
keys.push(format!(
"{file_size}-{}/{folder}/{file_name}",
profile.path
));
keys.push(format!(
"{file_size}-{}/{folder}/{file_name}",
profile.path
));
}
}
}
}
@@ -900,29 +901,30 @@ impl Profile {
{
let subdirectory =
subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file()
&& let Some(file_name) =
if subdirectory.is_file() {
if let Some(file_name) =
subdirectory.file_name().and_then(|x| x.to_str())
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
keys.push(InitialScanFile {
path: format!(
"{}/{folder}/{}",
self.path,
file_name.trim_end_matches(".disabled")
),
file_name: file_name.to_string(),
project_type,
size: file_size,
cache_key: format!(
"{file_size}-{}/{folder}/{file_name}",
self.path
),
});
keys.push(InitialScanFile {
path: format!(
"{}/{folder}/{}",
self.path,
file_name.trim_end_matches(".disabled")
),
file_name: file_name.to_string(),
project_type,
size: file_size,
cache_key: format!(
"{file_size}-{}/{folder}/{file_name}",
self.path
),
});
}
}
}
}

View File

@@ -254,7 +254,7 @@ where
}
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn write(
pub async fn write<'a>(
path: &Path,
bytes: &[u8],
semaphore: &IoSemaphore,

View File

@@ -191,21 +191,22 @@ async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
let mut jre_paths = HashSet::new();
let base_path = state.directories.java_versions_dir();
if base_path.is_dir()
&& let Ok(dir) = std::fs::read_dir(base_path)
{
for entry in dir.flatten() {
let file_path = entry.path().join("bin");
if base_path.is_dir() {
if let Ok(dir) = std::fs::read_dir(base_path) {
for entry in dir.flatten() {
let file_path = entry.path().join("bin");
if let Ok(contents) = std::fs::read_to_string(file_path.clone())
{
let entry = entry.path().join(contents);
jre_paths.insert(entry);
} else {
#[cfg(not(target_os = "macos"))]
if let Ok(contents) =
std::fs::read_to_string(file_path.clone())
{
let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(file_path);
let entry = entry.path().join(contents);
jre_paths.insert(entry);
} else {
#[cfg(not(target_os = "macos"))]
{
let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(file_path);
}
}
}
}
@@ -299,20 +300,20 @@ pub async fn check_java_at_filepath(path: &Path) -> crate::Result<JavaVersion> {
}
// Extract version info from it
if let Some(arch) = java_arch
&& let Some(version) = java_version
{
if let Ok(version) = extract_java_version(version) {
let path = java.to_string_lossy().to_string();
return Ok(JavaVersion {
parsed_version: version,
path,
version: version.to_string(),
architecture: arch.to_string(),
});
}
if let Some(arch) = java_arch {
if let Some(version) = java_version {
if let Ok(version) = extract_java_version(version) {
let path = java.to_string_lossy().to_string();
return Ok(JavaVersion {
parsed_version: version,
path,
version: version.to_string(),
architecture: arch.to_string(),
});
}
return Err(JREError::InvalidJREVersion(version.to_owned()).into());
return Err(JREError::InvalidJREVersion(version.to_owned()).into());
}
}
Err(JREError::FailedJavaCheck(java).into())

View File

@@ -33,11 +33,12 @@ pub fn is_feature_supported_in(
if part_version == part_first_release {
continue;
}
if let Ok(part_version) = part_version.parse::<u32>()
&& let Ok(part_first_release) = part_first_release.parse::<u32>()
&& part_version > part_first_release
{
return true;
if let Ok(part_version) = part_version.parse::<u32>() {
if let Ok(part_first_release) = part_first_release.parse::<u32>() {
if part_version > part_first_release {
return true;
}
}
}
}
false

View File

@@ -0,0 +1,56 @@
<svg viewBox="0 0 592 384" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M0 3C0 1.34315 1.34315 0 3 0H93C94.6569 0 96 1.34315 96 3V93C96 94.6569 94.6569 96 93 96H3C1.34315 96 0 94.6569 0 93V3Z"
fill="#F3F4F6" />
<path
d="M120 18C120 16.8954 120.895 16 122 16H590C591.105 16 592 16.8954 592 18V38C592 39.1046 591.105 40 590 40H122C120.895 40 120 39.1046 120 38V18Z"
fill="#F3F4F6" />
<path
d="M120 62C120 60.8954 120.895 60 122 60H142C143.105 60 144 60.8954 144 62V82C144 83.1046 143.105 84 142 84H122C120.895 84 120 83.1046 120 82V62Z"
fill="#F3F4F6" />
<path
d="M160 62C160 60.8954 160.895 60 162 60H298C299.105 60 300 60.8954 300 62V82C300 83.1046 299.105 84 298 84H162C160.895 84 160 83.1046 160 82V62Z"
fill="#F3F4F6" />
<path
d="M324 62C324 60.8954 324.895 60 326 60H346C347.105 60 348 60.8954 348 62V82C348 83.1046 347.105 84 346 84H326C324.895 84 324 83.1046 324 82V62Z"
fill="#F3F4F6" />
<path
d="M364 62C364 60.8954 364.895 60 366 60H466C467.105 60 468 60.8954 468 62V82C468 83.1046 467.105 84 466 84H366C364.895 84 364 83.1046 364 82V62Z"
fill="#F3F4F6" />
<path
d="M0 147C0 145.343 1.34315 144 3 144H93C94.6569 144 96 145.343 96 147V237C96 238.657 94.6569 240 93 240H3C1.34315 240 0 238.657 0 237V147Z"
fill="#F3F4F6" />
<path
d="M120 162C120 160.895 120.895 160 122 160H590C591.105 160 592 160.895 592 162V182C592 183.105 591.105 184 590 184H122C120.895 184 120 183.105 120 182V162Z"
fill="#F3F4F6" />
<path
d="M120 206C120 204.895 120.895 204 122 204H142C143.105 204 144 204.895 144 206V226C144 227.105 143.105 228 142 228H122C120.895 228 120 227.105 120 226V206Z"
fill="#F3F4F6" />
<path
d="M160 206C160 204.895 160.895 204 162 204H298C299.105 204 300 204.895 300 206V226C300 227.105 299.105 228 298 228H162C160.895 228 160 227.105 160 226V206Z"
fill="#F3F4F6" />
<path
d="M324 206C324 204.895 324.895 204 326 204H346C347.105 204 348 204.895 348 206V226C348 227.105 347.105 228 346 228H326C324.895 228 324 227.105 324 226V206Z"
fill="#F3F4F6" />
<path
d="M364 206C364 204.895 364.895 204 366 204H466C467.105 204 468 204.895 468 206V226C468 227.105 467.105 228 466 228H366C364.895 228 364 227.105 364 226V206Z"
fill="#F3F4F6" />
<path
d="M0 291C0 289.343 1.34315 288 3 288H93C94.6569 288 96 289.343 96 291V381C96 382.657 94.6569 384 93 384H3C1.34315 384 0 382.657 0 381V291Z"
fill="#F3F4F6" />
<path
d="M120 306C120 304.895 120.895 304 122 304H590C591.105 304 592 304.895 592 306V326C592 327.105 591.105 328 590 328H122C120.895 328 120 327.105 120 326V306Z"
fill="#F3F4F6" />
<path
d="M120 350C120 348.895 120.895 348 122 348H142C143.105 348 144 348.895 144 350V370C144 371.105 143.105 372 142 372H122C120.895 372 120 371.105 120 370V350Z"
fill="#F3F4F6" />
<path
d="M160 350C160 348.895 160.895 348 162 348H298C299.105 348 300 348.895 300 350V370C300 371.105 299.105 372 298 372H162C160.895 372 160 371.105 160 370V350Z"
fill="#F3F4F6" />
<path
d="M324 350C324 348.895 324.895 348 326 348H346C347.105 348 348 348.895 348 350V370C348 371.105 347.105 372 346 372H326C324.895 372 324 371.105 324 370V350Z"
fill="#F3F4F6" />
<path
d="M364 350C364 348.895 364.895 348 366 348H466C467.105 348 468 348.895 468 350V370C468 371.105 467.105 372 466 372H366C364.895 372 364 371.105 364 370V350Z"
fill="#F3F4F6" />
</svg>


View File

@@ -50,6 +50,7 @@ import _CubeIcon from './icons/cube.svg?component'
import _CurrencyIcon from './icons/currency.svg?component'
import _DashboardIcon from './icons/dashboard.svg?component'
import _DatabaseIcon from './icons/database.svg?component'
import _DotIcon from './icons/dot.svg?component'
import _DownloadIcon from './icons/download.svg?component'
import _DropdownIcon from './icons/dropdown.svg?component'
import _EditIcon from './icons/edit.svg?component'
@@ -243,6 +244,7 @@ export const CubeIcon = _CubeIcon
export const CurrencyIcon = _CurrencyIcon
export const DashboardIcon = _DashboardIcon
export const DatabaseIcon = _DatabaseIcon
export const DotIcon = _DotIcon
export const DownloadIcon = _DownloadIcon
export const DropdownIcon = _DropdownIcon
export const EditIcon = _EditIcon

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-dot-icon lucide-dot"><circle cx="12.1" cy="12.1" r="1"/></svg>


View File

@@ -12,6 +12,7 @@ import './omorphia.scss'
import _ModrinthIcon from './branding/logo.svg?component'
import _FourOhFourNotFound from './branding/404.svg?component'
import _ModrinthPlusIcon from './branding/modrinth-plus.svg?component'
import _ServersManageIllustration from './branding/illustrations/servers-background.svg?component'
import _AngryRinthbot from './branding/rinthbot/angry.webp'
import _AnnoyedRinthbot from './branding/rinthbot/annoyed.webp'
import _ConfusedRinthbot from './branding/rinthbot/confused.webp'
@@ -50,6 +51,7 @@ import _YouTubeIcon from './external/youtube.svg?component'
export const ModrinthIcon = _ModrinthIcon
export const FourOhFourNotFound = _FourOhFourNotFound
export const ModrinthPlusIcon = _ModrinthPlusIcon
export const ServersManageIllustration = _ServersManageIllustration
export const AngryRinthbot = _AngryRinthbot
export const AnnoyedRinthbot = _AnnoyedRinthbot
export const ConfusedRinthbot = _ConfusedRinthbot

View File

@@ -1,3 +1,4 @@
export * from './src/components'
export * from './src/utils'
export * from './src/composables'
export * from './src/servers'

View File

@@ -0,0 +1,5 @@
<template>
<div class="flex items-center gap-2 w-fit px-3 py-1 bg-button-bg rounded-full text-sm">
<slot />
</div>
</template>

View File

@@ -36,6 +36,7 @@ export { default as ProgressBar } from './base/ProgressBar.vue'
export { default as ProjectCard } from './base/ProjectCard.vue'
export { default as RadialHeader } from './base/RadialHeader.vue'
export { default as RadioButtons } from './base/RadioButtons.vue'
export { default as RaisedBadge } from './base/RaisedBadge.vue'
export { default as ScrollablePanel } from './base/ScrollablePanel.vue'
export { default as ServerNotice } from './base/ServerNotice.vue'
export { default as SimpleBadge } from './base/SimpleBadge.vue'

View File

@@ -0,0 +1,141 @@
<template>
<Card>
<div class="server-card-grid">
<div class="header-section flex gap-4 items-center mb-4">
<Avatar size="4rem" />
<div class="flex flex-col gap-2">
<span class="text-xl text-contrast font-bold">{{ server_name }}</span>
<span class="text-md text-secondary" v-tooltip="server_created.toLocaleString()">
Created {{ formatRelativeTime(server_created) }}
</span>
</div>
</div>
<div class="badges-section flex gap-2 items-center mb-4">
<RaisedBadge>{{ server_plan }}</RaisedBadge>
<RaisedBadge class="text-lg" :color="serverStatusColor">
&bull; {{ formattedServerStatus }}
</RaisedBadge>
</div>
<div class="content-section flex flex-col gap-2 mb-4">
<div class="flex flex-row gap-2">
<UsersIcon class="size-4 my-auto" />
<span class="text-secondary">
{{ players_online }} / {{ max_players_online }} players
</span>
</div>
<div class="flex flex-row gap-2">
<GlobeIcon class="size-4 my-auto" />
<span class="text-secondary">{{ world_name }}</span>
</div>
<div class="flex flex-row gap-2">
<LinkIcon class="size-4 my-auto" />
<CopyCode :text="ip" />
</div>
</div>
<div class="actions-section flex gap-2">
<ButtonStyled color="brand">
<RouterLink :to="`/servers/manage/${id}`">
<EditIcon class="size-4" />
Manage
</RouterLink>
</ButtonStyled>
<ButtonStyled>
<RouterLink :to="`/servers/manage/${id}`">
<CurrencyIcon class="size-4" />
Billing
</RouterLink>
</ButtonStyled>
</div>
</div>
</Card>
</template>
<script setup lang="ts">
import { CurrencyIcon, EditIcon, GlobeIcon, LinkIcon, UsersIcon } from '@modrinth/assets'
import { Avatar, Card, RaisedBadge, useRelativeTime, CopyCode, ButtonStyled } from '@modrinth/ui'
import { computed } from 'vue'
import { RouterLink } from 'vue-router'
const props = defineProps<{
server_name: string
server_created: Date
server_plan: string
server_status: string
players_online: number
max_players_online: number
world_name: string
ip: string
id: string
}>()
const formatRelativeTime = useRelativeTime()
const serverStatusColor = computed(() => {
switch (props.server_status) {
case 'online':
return 'green'
case 'restarting':
return 'orange'
case 'offline':
return undefined
default:
return undefined
}
})
const formattedServerStatus = computed(() => {
return props.server_status.slice(0, 1).toUpperCase() + props.server_status.slice(1)
})
</script>
<style scoped>
.server-card-grid {
display: grid;
grid-template-areas:
'header badges'
'content content'
'actions actions';
grid-template-columns: 1fr auto;
align-items: start;
}
@media (max-width: 768px) {
.server-card-grid {
grid-template-areas:
'header'
'badges'
'content'
'actions';
grid-template-columns: 1fr;
}
.badges-section {
justify-self: start;
}
}
@media (min-width: 769px) {
.badges-section {
justify-self: end;
}
}
.header-section {
grid-area: header;
}
.badges-section {
grid-area: badges;
}
.content-section {
grid-area: content;
}
.actions-section {
grid-area: actions;
}
</style>

View File

@@ -0,0 +1 @@
export { default as ServersManagePage } from './pages/manage.vue'

View File

@@ -0,0 +1,130 @@
<template>
<div v-if="servers.length + sharedServers.length === 0" class="text-center py-24 relative">
<ServersManageIllustration class="servers-manage-illustration" />
<ServerIcon class="size-24 mx-auto text-contrast mb-4" />
<h3 class="text-3xl font-medium text-contrast mb-2">No servers found</h3>
<p class="text-gray-500 mb-6 px-4">Get started by creating your first server</p>
<ButtonStyled color="green" size="large" type="outlined">
<button class="flex items-center justify-center gap-2 mx-auto">
<PlusIcon class="size-4" /> New server
</button>
</ButtonStyled>
</div>
<div v-else class="flex flex-col sm:flex-row gap-4 sm:gap-0 my-4">
<div class="flex flex-col gap-2">
<span class="text-3xl text-contrast font-bold">Servers</span>
<span class="text-sm sm:text-base text-secondary">View and manage all your servers</span>
</div>
<div class="sm:ml-auto">
<ButtonStyled color="green" size="large" class="w-full sm:w-auto">
<button class="flex items-center justify-center gap-2">
<PlusIcon class="size-4" />
<span>New server</span>
</button>
</ButtonStyled>
</div>
</div>
<template v-if="servers.length > 0">
<span class="text-xl text-contrast font-bold mb-4 flex flex-row gap-2"> Your servers </span>
<div class="grid grid-cols-1 lg:grid-cols-2 gap-4 gap-y-2">
<ServerCard
v-for="server in servers"
:id="server.id"
:key="server.id"
:server_name="server.server_name"
:server_created="server.server_created"
:server_plan="server.server_plan"
:server_status="server.server_status"
:players_online="server.players_online"
:max_players_online="server.max_players_online"
:world_name="server.world_name"
:ip="server.ip"
/>
</div>
</template>
<template v-if="sharedServers.length > 0">
<span class="text-xl text-contrast font-bold mb-4 flex flex-row gap-2"> Shared servers </span>
<div class="grid grid-cols-1 lg:grid-cols-2 gap-4 gap-y-2">
<ServerCard
v-for="server in sharedServers"
:id="server.id"
:key="server.id"
:server_name="server.server_name"
:server_created="server.server_created"
:server_plan="server.server_plan"
:server_status="server.server_status"
:players_online="server.players_online"
:max_players_online="server.max_players_online"
:world_name="server.world_name"
:ip="server.ip"
/>
</div>
</template>
</template>
<script lang="ts" setup>
import { ref } from 'vue'
import { ButtonStyled } from '@modrinth/ui'
import ServerCard from '../components/management/ServerCard.vue'
import { PlusIcon, ServerIcon, ServersManageIllustration } from '@modrinth/assets'
const sharedServers = ref([
{
id: 'server-1',
server_name: 'Rinth SMP',
server_created: new Date('2023-10-01T12:00:00Z'),
server_plan: 'Large',
server_status: 'online',
players_online: 5,
max_players_online: 20,
world_name: 'Example World',
ip: 'valiant-apple.modrinth.gg',
},
])
// const sharedServers = ref([])
// const servers = ref([])
const servers = ref([
{
id: 'server-1',
server_name: 'Rinth SMP',
server_created: new Date('2023-10-01T12:00:00Z'),
server_plan: 'Large',
server_status: 'online',
players_online: 5,
max_players_online: 20,
world_name: 'Example World',
ip: 'valiant-apple.modrinth.gg',
},
{
    id: 'server-2',
server_name: 'Rinth SMP',
server_created: new Date('2023-10-01T12:00:00Z'),
server_plan: 'Large',
server_status: 'online',
players_online: 5,
max_players_online: 20,
world_name: 'Example World',
ip: 'valiant-apple.modrinth.gg',
},
])
</script>
<style scoped lang="scss">
.servers-manage-illustration {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
z-index: -1;
width: 500px;
height: 500px;
opacity: 0.05;
mask: linear-gradient(to bottom, rgba(0, 0, 0, 1) 0%, rgba(0, 0, 0, 0) 100%);
-webkit-mask: linear-gradient(to bottom, rgba(0, 0, 0, 1) 0%, rgba(0, 0, 0, 0) 100%);
}
</style>

View File

@@ -1,2 +1,2 @@
[toolchain]
channel = "1.89.0"
channel = "1.88.0"