Update Rust version

Josiah Glosson 2025-08-07 09:27:26 -05:00
parent d22c9e24f4
commit 8d16834e39
62 changed files with 1059 additions and 1171 deletions


@@ -197,15 +197,13 @@ pub async fn open_link<R: Runtime>(
     if url::Url::parse(&path).is_ok()
         && !state.malicious_origins.contains(&origin)
+        && let Some(last_click) = state.last_click
+        && last_click.elapsed() < Duration::from_millis(100)
     {
-        if let Some(last_click) = state.last_click {
-            if last_click.elapsed() < Duration::from_millis(100) {
-                let _ = app.opener().open_url(&path, None::<String>);
-                state.last_click = None;
-                return Ok(());
-            }
-        }
+        let _ = app.opener().open_url(&path, None::<String>);
+        state.last_click = None;
+        return Ok(());
     }
     tracing::info!("Malicious click: {path} origin {origin}");
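The pattern above repeats through most of this commit: two or three nested if let / if blocks are flattened into a single let-chain, which compiles on the 2024 edition now that let-chains are stable (Rust 1.88). A minimal, self-contained sketch of the before-and-after shape; the State type and the timing here are invented rather than taken from the codebase:

use std::time::{Duration, Instant};

struct State {
    last_click: Option<Instant>,
}

// Old shape: the Option binding and the follow-up test need two nested blocks.
fn was_recent_click_nested(state: &State) -> bool {
    if let Some(last_click) = state.last_click {
        if last_click.elapsed() < Duration::from_millis(100) {
            return true;
        }
    }
    false
}

// New shape: the binding and the condition live in one let-chain.
fn was_recent_click_chained(state: &State) -> bool {
    if let Some(last_click) = state.last_click
        && last_click.elapsed() < Duration::from_millis(100)
    {
        return true;
    }
    false
}

fn main() {
    let state = State { last_click: Some(Instant::now()) };
    assert_eq!(was_recent_click_nested(&state), was_recent_click_chained(&state));
}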


@@ -59,16 +59,13 @@ pub async fn login<R: Runtime>(
             .url()?
             .as_str()
             .starts_with("https://login.live.com/oauth20_desktop.srf")
-        {
-            if let Some((_, code)) =
-                window.url()?.query_pairs().find(|x| x.0 == "code")
-            {
-                window.close()?;
-                let val =
-                    minecraft_auth::finish_login(&code.clone(), flow).await?;
-                return Ok(Some(val));
-            }
+            && let Some((_, code)) =
+                window.url()?.query_pairs().find(|x| x.0 == "code")
+        {
+            window.close()?;
+            let val = minecraft_auth::finish_login(&code.clone(), flow).await?;
+            return Ok(Some(val));
         }
         tokio::time::sleep(std::time::Duration::from_millis(50)).await;


@@ -63,11 +63,11 @@ pub async fn should_disable_mouseover() -> bool {
         // We try to match version to 12.2 or higher. If unrecognizable to pattern or lower, we default to the css with disabled mouseover for safety
         if let tauri_plugin_os::Version::Semantic(major, minor, _) =
             tauri_plugin_os::version()
+            && major >= 12
+            && minor >= 3
         {
-            if major >= 12 && minor >= 3 {
-                // Mac os version is 12.3 or higher, we allow mouseover
-                return false;
-            }
+            // Mac os version is 12.3 or higher, we allow mouseover
+            return false;
         }
         true
     } else {


@@ -233,10 +233,10 @@ fn main() {
             });
             #[cfg(not(target_os = "linux"))]
-            if let Some(window) = app.get_window("main") {
-                if let Err(e) = window.set_shadow(true) {
-                    tracing::warn!("Failed to set window shadow: {e}");
-                }
+            if let Some(window) = app.get_window("main")
+                && let Err(e) = window.set_shadow(true)
+            {
+                tracing::warn!("Failed to set window shadow: {e}");
             }
             Ok(())


@ -506,27 +506,25 @@ async fn fetch(
return Ok(lib); return Ok(lib);
} }
} else if let Some(url) = &lib.url { } else if let Some(url) = &lib.url
if !url.is_empty() { && !url.is_empty()
insert_mirrored_artifact( {
&lib.name, insert_mirrored_artifact(
None, &lib.name,
vec![ None,
url.clone(), vec![
"https://libraries.minecraft.net/" url.clone(),
.to_string(), "https://libraries.minecraft.net/".to_string(),
"https://maven.creeperhost.net/" "https://maven.creeperhost.net/".to_string(),
.to_string(), maven_url.to_string(),
maven_url.to_string(), ],
], false,
false, mirror_artifacts,
mirror_artifacts, )?;
)?;
lib.url = Some(format_url("maven/")); lib.url = Some(format_url("maven/"));
return Ok(lib); return Ok(lib);
}
} }
// Other libraries are generally available in the "maven" directory of the installer. If they are // Other libraries are generally available in the "maven" directory of the installer. If they are


@@ -93,22 +93,22 @@ async fn main() -> Result<()> {
         .ok()
         .and_then(|x| x.parse::<bool>().ok())
         .unwrap_or(false)
+        && let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN")
+        && let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID")
     {
-        if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
-            if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
-                let cache_clears = upload_files
-                    .into_iter()
-                    .map(|x| format_url(&x.0))
-                    .chain(
-                        mirror_artifacts
-                            .into_iter()
-                            .map(|x| format_url(&format!("maven/{}", x.0))),
-                    )
-                    .collect::<Vec<_>>();
+        let cache_clears = upload_files
+            .into_iter()
+            .map(|x| format_url(&x.0))
+            .chain(
+                mirror_artifacts
+                    .into_iter()
+                    .map(|x| format_url(&format!("maven/{}", x.0))),
+            )
+            .collect::<Vec<_>>();
         // Cloudflare ratelimits cache clears to 500 files per request
         for chunk in cache_clears.chunks(500) {
             REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
                 .bearer_auth(&token)
                 .json(&serde_json::json!({
                     "files": chunk
@@ -128,8 +128,6 @@ async fn main() -> Result<()> {
                     item: "cloudflare clear cache".to_string(),
                 }
             })?;
-            }
-        }
     }
 }
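The loop above sends the purge in slices because, as the comment notes, Cloudflare rate-limits cache clears to 500 files per request. A tiny sketch of just that batching step; the URLs are invented and nothing is actually sent:

// Batches a list of cache-purge URLs 500 at a time, mirroring the chunks(500)
// loop above. Each chunk would become the "files" array of one purge request.
fn main() {
    let cache_clears: Vec<String> =
        (0..1203).map(|i| format!("https://cdn.example.com/file-{i}")).collect();
    for (batch, chunk) in cache_clears.chunks(500).enumerate() {
        println!("batch {batch}: {} files", chunk.len());
    }
}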


@@ -167,20 +167,18 @@ pub async fn download_file(
                 let bytes = x.bytes().await;
                 if let Ok(bytes) = bytes {
-                    if let Some(sha1) = sha1 {
-                        if &*sha1_async(bytes.clone()).await? != sha1 {
-                            if attempt <= 3 {
-                                continue;
-                            } else {
-                                return Err(
-                                    crate::ErrorKind::ChecksumFailure {
-                                        hash: sha1.to_string(),
-                                        url: url.to_string(),
-                                        tries: attempt,
-                                    }
-                                    .into(),
-                                );
-                            }
-                        }
-                    }
+                    if let Some(sha1) = sha1
+                        && &*sha1_async(bytes.clone()).await? != sha1
+                    {
+                        if attempt <= 3 {
+                            continue;
+                        } else {
+                            return Err(crate::ErrorKind::ChecksumFailure {
+                                hash: sha1.to_string(),
+                                url: url.to_string(),
+                                tries: attempt,
+                            }
+                            .into());
+                        }
+                    }
                 }
             }
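The retry policy is unchanged by the refactor: a SHA-1 mismatch retries the download up to three times before surfacing ChecksumFailure. A self-contained sketch of that control flow, with the fetch and the checksum replaced by simple stand-ins rather than the crate's real helpers:

// Stand-in for the retry-then-fail loop above: "download", "verify", retry up
// to three times, then report a checksum failure with the attempt count.
fn download_with_retry(expected_sum: u32, payload: &[u8]) -> Result<Vec<u8>, String> {
    for attempt in 1..=4u32 {
        let bytes = payload.to_vec(); // stand-in for the HTTP fetch
        let sum: u32 = bytes.iter().map(|b| u32::from(*b)).sum(); // stand-in checksum
        if sum != expected_sum {
            if attempt <= 3 {
                continue; // treat it as transient corruption and try again
            }
            return Err(format!("checksum failure after {attempt} tries"));
        }
        return Ok(bytes);
    }
    unreachable!("the fourth attempt always returns")
}

fn main() {
    assert!(download_with_retry(294, b"abc").is_ok()); // 97 + 98 + 99 == 294
    assert!(download_with_retry(0, b"abc").is_err());
}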


@@ -322,12 +322,11 @@ pub async fn is_visible_collection(
         } else {
             !collection_data.status.is_hidden()
         }) && !collection_data.projects.is_empty();
-    if let Some(user) = &user_option {
-        if !authorized
-            && (user.role.is_mod() || user.id == collection_data.user_id.into())
-        {
-            authorized = true;
-        }
+    if let Some(user) = &user_option
+        && !authorized
+        && (user.role.is_mod() || user.id == collection_data.user_id.into())
+    {
+        authorized = true;
     }
     Ok(authorized)
 }
@@ -356,10 +355,10 @@ pub async fn filter_visible_collections(
     for collection in check_collections {
         // Collections are simple- if we are the owner or a mod, we can see it
-        if let Some(user) = user_option {
-            if user.role.is_mod() || user.id == collection.user_id.into() {
-                return_collections.push(collection.into());
-            }
+        if let Some(user) = user_option
+            && (user.role.is_mod() || user.id == collection.user_id.into())
+        {
+            return_collections.push(collection.into());
         }
     }


@@ -95,10 +95,10 @@ impl DBFlow {
         redis: &RedisPool,
     ) -> Result<Option<DBFlow>, DatabaseError> {
         let flow = Self::get(id, redis).await?;
-        if let Some(flow) = flow.as_ref() {
-            if predicate(flow) {
-                Self::remove(id, redis).await?;
-            }
+        if let Some(flow) = flow.as_ref()
+            && predicate(flow)
+        {
+            Self::remove(id, redis).await?;
         }
         Ok(flow)
     }


@ -801,24 +801,24 @@ impl VersionField {
}; };
if let Some(count) = countable { if let Some(count) = countable {
if let Some(min) = loader_field.min_val { if let Some(min) = loader_field.min_val
if count < min { && count < min
return Err(format!( {
"Provided value '{v}' for {field_name} is less than the minimum of {min}", return Err(format!(
v = serde_json::to_string(&value).unwrap_or_default(), "Provided value '{v}' for {field_name} is less than the minimum of {min}",
field_name = loader_field.field, v = serde_json::to_string(&value).unwrap_or_default(),
)); field_name = loader_field.field,
} ));
} }
if let Some(max) = loader_field.max_val { if let Some(max) = loader_field.max_val
if count > max { && count > max
return Err(format!( {
"Provided value '{v}' for {field_name} is greater than the maximum of {max}", return Err(format!(
v = serde_json::to_string(&value).unwrap_or_default(), "Provided value '{v}' for {field_name} is greater than the maximum of {max}",
field_name = loader_field.field, v = serde_json::to_string(&value).unwrap_or_default(),
)); field_name = loader_field.field,
} ));
} }
} }


@@ -483,20 +483,20 @@ impl DBTeamMember {
             .await?;
         }
-        if let Some(accepted) = new_accepted {
-            if accepted {
-                sqlx::query!(
-                    "
+        if let Some(accepted) = new_accepted
+            && accepted
+        {
+            sqlx::query!(
+                "
                 UPDATE team_members
                 SET accepted = TRUE
                 WHERE (team_id = $1 AND user_id = $2)
                 ",
                 id as DBTeamId,
                 user_id as DBUserId,
             )
             .execute(&mut **transaction)
             .await?;
-            }
         }
         if let Some(payouts_split) = new_payouts_split {


@@ -353,10 +353,10 @@ impl RedisPool {
                 };
                 for (idx, key) in fetch_ids.into_iter().enumerate() {
-                    if let Some(locked) = results.get(idx) {
-                        if locked.is_none() {
-                            continue;
-                        }
+                    if let Some(locked) = results.get(idx)
+                        && locked.is_none()
+                    {
+                        continue;
                     }
                     if let Some((key, raw_key)) = ids.remove(&key) {


@ -334,18 +334,14 @@ impl From<Version> for LegacyVersion {
// the v2 loaders are whatever the corresponding loader fields are // the v2 loaders are whatever the corresponding loader fields are
let mut loaders = let mut loaders =
data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>(); data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
if loaders.contains(&"mrpack".to_string()) { if loaders.contains(&"mrpack".to_string())
if let Some((_, mrpack_loaders)) = data && let Some((_, mrpack_loaders)) = data
.fields .fields
.into_iter() .into_iter()
.find(|(key, _)| key == "mrpack_loaders") .find(|(key, _)| key == "mrpack_loaders")
{ && let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders)
if let Ok(mrpack_loaders) = {
serde_json::from_value(mrpack_loaders) loaders = mrpack_loaders;
{
loaders = mrpack_loaders;
}
}
} }
let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>(); let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>();


@ -43,35 +43,33 @@ impl LegacyResultSearchProject {
pub fn from(result_search_project: ResultSearchProject) -> Self { pub fn from(result_search_project: ResultSearchProject) -> Self {
let mut categories = result_search_project.categories; let mut categories = result_search_project.categories;
categories.extend(result_search_project.loaders.clone()); categories.extend(result_search_project.loaders.clone());
if categories.contains(&"mrpack".to_string()) { if categories.contains(&"mrpack".to_string())
if let Some(mrpack_loaders) = result_search_project && let Some(mrpack_loaders) = result_search_project
.project_loader_fields .project_loader_fields
.get("mrpack_loaders") .get("mrpack_loaders")
{ {
categories.extend( categories.extend(
mrpack_loaders mrpack_loaders
.iter() .iter()
.filter_map(|c| c.as_str()) .filter_map(|c| c.as_str())
.map(String::from), .map(String::from),
); );
categories.retain(|c| c != "mrpack"); categories.retain(|c| c != "mrpack");
}
} }
let mut display_categories = result_search_project.display_categories; let mut display_categories = result_search_project.display_categories;
display_categories.extend(result_search_project.loaders); display_categories.extend(result_search_project.loaders);
if display_categories.contains(&"mrpack".to_string()) { if display_categories.contains(&"mrpack".to_string())
if let Some(mrpack_loaders) = result_search_project && let Some(mrpack_loaders) = result_search_project
.project_loader_fields .project_loader_fields
.get("mrpack_loaders") .get("mrpack_loaders")
{ {
categories.extend( categories.extend(
mrpack_loaders mrpack_loaders
.iter() .iter()
.filter_map(|c| c.as_str()) .filter_map(|c| c.as_str())
.map(String::from), .map(String::from),
); );
display_categories.retain(|c| c != "mrpack"); display_categories.retain(|c| c != "mrpack");
}
} }
// Sort then remove duplicates // Sort then remove duplicates


@@ -166,10 +166,10 @@ impl From<ProjectQueryResult> for Project {
                 Ok(spdx_expr) => {
                     let mut vec: Vec<&str> = Vec::new();
                     for node in spdx_expr.iter() {
-                        if let spdx::expression::ExprNode::Req(req) = node {
-                            if let Some(id) = req.req.license.id() {
-                                vec.push(id.full_name);
-                            }
+                        if let spdx::expression::ExprNode::Req(req) = node
+                            && let Some(id) = req.req.license.id()
+                        {
+                            vec.push(id.full_name);
                         }
                     }
                     // spdx crate returns AND/OR operations in postfix order


@ -51,16 +51,16 @@ impl ProjectPermissions {
return Some(ProjectPermissions::all()); return Some(ProjectPermissions::all());
} }
if let Some(member) = project_team_member { if let Some(member) = project_team_member
if member.accepted { && member.accepted
return Some(member.permissions); {
} return Some(member.permissions);
} }
if let Some(member) = organization_team_member { if let Some(member) = organization_team_member
if member.accepted { && member.accepted
return Some(member.permissions); {
} return Some(member.permissions);
} }
if role.is_mod() { if role.is_mod() {
@ -107,10 +107,10 @@ impl OrganizationPermissions {
return Some(OrganizationPermissions::all()); return Some(OrganizationPermissions::all());
} }
if let Some(member) = team_member { if let Some(member) = team_member
if member.accepted { && member.accepted
return member.organization_permissions; {
} return member.organization_permissions;
} }
if role.is_mod() { if role.is_mod() {
return Some( return Some(


@@ -45,17 +45,15 @@ impl MaxMindIndexer {
             if let Ok(entries) = archive.entries() {
                 for mut file in entries.flatten() {
-                    if let Ok(path) = file.header().path() {
-                        if path.extension().and_then(|x| x.to_str()) == Some("mmdb")
-                        {
-                            let mut buf = Vec::new();
-                            file.read_to_end(&mut buf).unwrap();
-                            let reader =
-                                maxminddb::Reader::from_source(buf).unwrap();
-                            return Ok(Some(reader));
-                        }
-                    }
+                    if let Ok(path) = file.header().path()
+                        && path.extension().and_then(|x| x.to_str()) == Some("mmdb")
+                    {
+                        let mut buf = Vec::new();
+                        file.read_to_end(&mut buf).unwrap();
+                        let reader = maxminddb::Reader::from_source(buf).unwrap();
+                        return Ok(Some(reader));
+                    }
                 }
             }


@ -371,8 +371,8 @@ impl AutomatedModerationQueue {
for file in for file in
files.iter().filter(|x| x.version_id == version.id.into()) files.iter().filter(|x| x.version_id == version.id.into())
{ {
if let Some(hash) = file.hashes.get("sha1") { if let Some(hash) = file.hashes.get("sha1")
if let Some((index, (sha1, _, file_name, _))) = hashes && let Some((index, (sha1, _, file_name, _))) = hashes
.iter() .iter()
.enumerate() .enumerate()
.find(|(_, (value, _, _, _))| value == hash) .find(|(_, (value, _, _, _))| value == hash)
@ -382,7 +382,6 @@ impl AutomatedModerationQueue {
hashes.remove(index); hashes.remove(index);
} }
}
} }
} }
@ -420,12 +419,11 @@ impl AutomatedModerationQueue {
.await?; .await?;
for row in rows { for row in rows {
if let Some(sha1) = row.sha1 { if let Some(sha1) = row.sha1
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) { && let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) }); final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) });
hashes.remove(index); hashes.remove(index);
} }
}
} }
if hashes.is_empty() { if hashes.is_empty() {
@ -499,8 +497,8 @@ impl AutomatedModerationQueue {
let mut insert_ids = Vec::new(); let mut insert_ids = Vec::new();
for row in rows { for row in rows {
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id) { if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id)
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) { && let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
final_hashes.insert(sha1.clone(), IdentifiedFile { final_hashes.insert(sha1.clone(), IdentifiedFile {
file_name: file_name.clone(), file_name: file_name.clone(),
status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified),
@ -512,7 +510,6 @@ impl AutomatedModerationQueue {
hashes.remove(index); hashes.remove(index);
flame_files.remove(curse_index); flame_files.remove(curse_index);
} }
}
} }
if !insert_ids.is_empty() && !insert_hashes.is_empty() { if !insert_ids.is_empty() && !insert_hashes.is_empty() {
@ -581,8 +578,8 @@ impl AutomatedModerationQueue {
for (sha1, _pack_file, file_name, _mumur2) in hashes { for (sha1, _pack_file, file_name, _mumur2) in hashes {
let flame_file = flame_files.iter().find(|x| x.0 == sha1); let flame_file = flame_files.iter().find(|x| x.0 == sha1);
if let Some((_, flame_project_id)) = flame_file { if let Some((_, flame_project_id)) = flame_file
if let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) { && let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
missing_metadata.flame_files.insert(sha1, MissingMetadataFlame { missing_metadata.flame_files.insert(sha1, MissingMetadataFlame {
title: project.name.clone(), title: project.name.clone(),
file_name, file_name,
@ -592,7 +589,6 @@ impl AutomatedModerationQueue {
continue; continue;
} }
}
missing_metadata.unknown_files.insert(sha1, file_name); missing_metadata.unknown_files.insert(sha1, file_name);
} }


@ -257,31 +257,30 @@ impl PayoutsQueue {
) )
})?; })?;
if !status.is_success() { if !status.is_success()
if let Some(obj) = value.as_object() { && let Some(obj) = value.as_object()
if let Some(array) = obj.get("errors") { {
#[derive(Deserialize)] if let Some(array) = obj.get("errors") {
struct TremendousError { #[derive(Deserialize)]
message: String, struct TremendousError {
} message: String,
let err = serde_json::from_value::<TremendousError>(
array.clone(),
)
.map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body"
.to_string(),
)
})?;
return Err(ApiError::Payments(err.message));
} }
return Err(ApiError::Payments( let err =
"could not retrieve Tremendous error body".to_string(), serde_json::from_value::<TremendousError>(array.clone())
)); .map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body"
.to_string(),
)
})?;
return Err(ApiError::Payments(err.message));
} }
return Err(ApiError::Payments(
"could not retrieve Tremendous error body".to_string(),
));
} }
Ok(serde_json::from_value(value)?) Ok(serde_json::from_value(value)?)
@ -449,10 +448,10 @@ impl PayoutsQueue {
}; };
// we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly // we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly
if let PayoutInterval::Fixed { .. } = method.interval { if let PayoutInterval::Fixed { .. } = method.interval
if !product.currency_codes.contains(&"USD".to_string()) { && !product.currency_codes.contains(&"USD".to_string())
continue; {
} continue;
} }
methods.push(method); methods.push(method);


@ -286,17 +286,17 @@ pub async fn refund_charge(
.upsert(&mut transaction) .upsert(&mut transaction)
.await?; .await?;
if body.0.unprovision.unwrap_or(false) { if body.0.unprovision.unwrap_or(false)
if let Some(subscription_id) = charge.subscription_id { && let Some(subscription_id) = charge.subscription_id
let open_charge = {
DBCharge::get_open_subscription(subscription_id, &**pool) let open_charge =
.await?; DBCharge::get_open_subscription(subscription_id, &**pool)
if let Some(mut open_charge) = open_charge { .await?;
open_charge.status = ChargeStatus::Cancelled; if let Some(mut open_charge) = open_charge {
open_charge.due = Utc::now(); open_charge.status = ChargeStatus::Cancelled;
open_charge.due = Utc::now();
open_charge.upsert(&mut transaction).await?; open_charge.upsert(&mut transaction).await?;
}
} }
} }
@ -392,17 +392,16 @@ pub async fn edit_subscription(
} }
} }
if let Some(interval) = &edit_subscription.interval { if let Some(interval) = &edit_subscription.interval
if let Price::Recurring { intervals } = &current_price.prices { && let Price::Recurring { intervals } = &current_price.prices
if let Some(price) = intervals.get(interval) { {
open_charge.subscription_interval = Some(*interval); if let Some(price) = intervals.get(interval) {
open_charge.amount = *price as i64; open_charge.subscription_interval = Some(*interval);
} else { open_charge.amount = *price as i64;
return Err(ApiError::InvalidInput( } else {
"Interval is not valid for this subscription!" return Err(ApiError::InvalidInput(
.to_string(), "Interval is not valid for this subscription!".to_string(),
)); ));
}
} }
} }
@ -1225,38 +1224,36 @@ pub async fn initiate_payment(
} }
}; };
if let Price::Recurring { .. } = price_item.prices { if let Price::Recurring { .. } = price_item.prices
if product.unitary { && product.unitary
let user_subscriptions = {
let user_subscriptions =
user_subscription_item::DBUserSubscription::get_all_user( user_subscription_item::DBUserSubscription::get_all_user(
user.id.into(), user.id.into(),
&**pool, &**pool,
) )
.await?; .await?;
let user_products = let user_products = product_item::DBProductPrice::get_many(
product_item::DBProductPrice::get_many( &user_subscriptions
&user_subscriptions .iter()
.iter() .filter(|x| {
.filter(|x| { x.status == SubscriptionStatus::Provisioned
x.status })
== SubscriptionStatus::Provisioned .map(|x| x.price_id)
}) .collect::<Vec<_>>(),
.map(|x| x.price_id) &**pool,
.collect::<Vec<_>>(), )
&**pool, .await?;
)
.await?;
if user_products if user_products
.into_iter() .into_iter()
.any(|x| x.product_id == product.id) .any(|x| x.product_id == product.id)
{ {
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"You are already subscribed to this product!" "You are already subscribed to this product!"
.to_string(), .to_string(),
)); ));
}
} }
} }
@ -2004,38 +2001,36 @@ pub async fn stripe_webhook(
EventType::PaymentMethodAttached => { EventType::PaymentMethodAttached => {
if let EventObject::PaymentMethod(payment_method) = if let EventObject::PaymentMethod(payment_method) =
event.data.object event.data.object
{ && let Some(customer_id) =
if let Some(customer_id) =
payment_method.customer.map(|x| x.id()) payment_method.customer.map(|x| x.id())
{
let customer = stripe::Customer::retrieve(
&stripe_client,
&customer_id,
&[],
)
.await?;
if customer
.invoice_settings
.is_none_or(|x| x.default_payment_method.is_none())
{ {
let customer = stripe::Customer::retrieve( stripe::Customer::update(
&stripe_client, &stripe_client,
&customer_id, &customer_id,
&[], UpdateCustomer {
invoice_settings: Some(
CustomerInvoiceSettings {
default_payment_method: Some(
payment_method.id.to_string(),
),
..Default::default()
},
),
..Default::default()
},
) )
.await?; .await?;
if customer
.invoice_settings
.is_none_or(|x| x.default_payment_method.is_none())
{
stripe::Customer::update(
&stripe_client,
&customer_id,
UpdateCustomer {
invoice_settings: Some(
CustomerInvoiceSettings {
default_payment_method: Some(
payment_method.id.to_string(),
),
..Default::default()
},
),
..Default::default()
},
)
.await?;
}
} }
} }
} }


@ -79,13 +79,12 @@ impl TempUser {
file_host: &Arc<dyn FileHost + Send + Sync>, file_host: &Arc<dyn FileHost + Send + Sync>,
redis: &RedisPool, redis: &RedisPool,
) -> Result<crate::database::models::DBUserId, AuthenticationError> { ) -> Result<crate::database::models::DBUserId, AuthenticationError> {
if let Some(email) = &self.email { if let Some(email) = &self.email
if crate::database::models::DBUser::get_by_email(email, client) && crate::database::models::DBUser::get_by_email(email, client)
.await? .await?
.is_some() .is_some()
{ {
return Err(AuthenticationError::DuplicateUser); return Err(AuthenticationError::DuplicateUser);
}
} }
let user_id = let user_id =
@ -1269,19 +1268,19 @@ pub async fn delete_auth_provider(
.update_user_id(user.id.into(), None, &mut transaction) .update_user_id(user.id.into(), None, &mut transaction)
.await?; .await?;
if delete_provider.provider != AuthProvider::PayPal { if delete_provider.provider != AuthProvider::PayPal
if let Some(email) = user.email { && let Some(email) = user.email
send_email( {
email, send_email(
"Authentication method removed", email,
&format!( "Authentication method removed",
"When logging into Modrinth, you can no longer log in using the {} authentication provider.", &format!(
delete_provider.provider.as_str() "When logging into Modrinth, you can no longer log in using the {} authentication provider.",
), delete_provider.provider.as_str()
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).", ),
None, "If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
)?; None,
} )?;
} }
transaction.commit().await?; transaction.commit().await?;


@ -189,17 +189,16 @@ pub async fn get_project_meta(
.iter() .iter()
.find(|x| Some(x.1.id as i32) == row.flame_project_id) .find(|x| Some(x.1.id as i32) == row.flame_project_id)
.map(|x| x.0.clone()) .map(|x| x.0.clone())
&& let Some(val) = merged.flame_files.remove(&sha1)
{ {
if let Some(val) = merged.flame_files.remove(&sha1) { merged.identified.insert(
merged.identified.insert( sha1,
sha1, IdentifiedFile {
IdentifiedFile { file_name: val.file_name.clone(),
file_name: val.file_name.clone(), status: ApprovalType::from_string(&row.status)
status: ApprovalType::from_string(&row.status) .unwrap_or(ApprovalType::Unidentified),
.unwrap_or(ApprovalType::Unidentified), },
}, );
);
}
} }
} }


@ -185,69 +185,69 @@ pub async fn edit_pat(
) )
.await?; .await?;
if let Some(pat) = pat { if let Some(pat) = pat
if pat.user_id == user.id.into() { && pat.user_id == user.id.into()
let mut transaction = pool.begin().await?; {
let mut transaction = pool.begin().await?;
if let Some(scopes) = &info.scopes { if let Some(scopes) = &info.scopes {
if scopes.is_restricted() { if scopes.is_restricted() {
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"Invalid scopes requested!".to_string(), "Invalid scopes requested!".to_string(),
)); ));
} }
sqlx::query!( sqlx::query!(
" "
UPDATE pats UPDATE pats
SET scopes = $1 SET scopes = $1
WHERE id = $2 WHERE id = $2
", ",
scopes.bits() as i64, scopes.bits() as i64,
pat.id.0 pat.id.0
) )
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
} }
if let Some(name) = &info.name { if let Some(name) = &info.name {
sqlx::query!( sqlx::query!(
" "
UPDATE pats UPDATE pats
SET name = $1 SET name = $1
WHERE id = $2 WHERE id = $2
", ",
name, name,
pat.id.0 pat.id.0
) )
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
}
if let Some(expires) = &info.expires {
if expires < &Utc::now() {
return Err(ApiError::InvalidInput(
"Expire date must be in the future!".to_string(),
));
} }
if let Some(expires) = &info.expires {
if expires < &Utc::now() {
return Err(ApiError::InvalidInput(
"Expire date must be in the future!".to_string(),
));
}
sqlx::query!( sqlx::query!(
" "
UPDATE pats UPDATE pats
SET expires = $1 SET expires = $1
WHERE id = $2 WHERE id = $2
", ",
expires, expires,
pat.id.0 pat.id.0
)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
) )
.execute(&mut *transaction)
.await?; .await?;
} }
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
} }
Ok(HttpResponse::NoContent().finish()) Ok(HttpResponse::NoContent().finish())
@ -276,21 +276,21 @@ pub async fn delete_pat(
) )
.await?; .await?;
if let Some(pat) = pat { if let Some(pat) = pat
if pat.user_id == user.id.into() { && pat.user_id == user.id.into()
let mut transaction = pool.begin().await?; {
database::models::pat_item::DBPersonalAccessToken::remove( let mut transaction = pool.begin().await?;
pat.id, database::models::pat_item::DBPersonalAccessToken::remove(
&mut transaction, pat.id,
) &mut transaction,
.await?; )
transaction.commit().await?; .await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache( transaction.commit().await?;
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))], database::models::pat_item::DBPersonalAccessToken::clear_cache(
&redis, vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
) &redis,
.await?; )
} .await?;
} }
Ok(HttpResponse::NoContent().finish()) Ok(HttpResponse::NoContent().finish())


@ -185,21 +185,21 @@ pub async fn delete(
let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?; let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;
if let Some(session) = session { if let Some(session) = session
if session.user_id == current_user.id.into() { && session.user_id == current_user.id.into()
let mut transaction = pool.begin().await?; {
DBSession::remove(session.id, &mut transaction).await?; let mut transaction = pool.begin().await?;
transaction.commit().await?; DBSession::remove(session.id, &mut transaction).await?;
DBSession::clear_cache( transaction.commit().await?;
vec![( DBSession::clear_cache(
Some(session.id), vec![(
Some(session.session), Some(session.id),
Some(session.user_id), Some(session.session),
)], Some(session.user_id),
&redis, )],
) &redis,
.await?; )
} .await?;
} }
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))


@@ -401,14 +401,13 @@ async fn broadcast_to_known_local_friends(
             friend.user_id
         };
-        if friend.accepted {
-            if let Some(socket_ids) =
-                sockets.sockets_by_user_id.get(&friend_id.into())
-            {
-                for socket_id in socket_ids.iter() {
-                    if let Some(socket) = sockets.sockets.get(&socket_id) {
-                        let _ = send_message(socket.value(), &message).await;
-                    }
-                }
-            }
-        }
+        if friend.accepted
+            && let Some(socket_ids) =
+                sockets.sockets_by_user_id.get(&friend_id.into())
+        {
+            for socket_id in socket_ids.iter() {
+                if let Some(socket) = sockets.sockets.get(&socket_id) {
+                    let _ = send_message(socket.value(), &message).await;
+                }
+            }
+        }


@ -387,17 +387,16 @@ pub async fn revenue_get(
.map(|x| (x.to_string(), HashMap::new())) .map(|x| (x.to_string(), HashMap::new()))
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
for value in payouts_values { for value in payouts_values {
if let Some(mod_id) = value.mod_id { if let Some(mod_id) = value.mod_id
if let Some(amount) = value.amount_sum { && let Some(amount) = value.amount_sum
if let Some(interval_start) = value.interval_start { && let Some(interval_start) = value.interval_start
let id_string = to_base62(mod_id as u64); {
if !hm.contains_key(&id_string) { let id_string = to_base62(mod_id as u64);
hm.insert(id_string.clone(), HashMap::new()); if !hm.contains_key(&id_string) {
} hm.insert(id_string.clone(), HashMap::new());
if let Some(hm) = hm.get_mut(&id_string) { }
hm.insert(interval_start.timestamp(), amount); if let Some(hm) = hm.get_mut(&id_string) {
} hm.insert(interval_start.timestamp(), amount);
}
} }
} }
} }


@@ -192,10 +192,10 @@ pub async fn collection_get(
         .map(|x| x.1)
         .ok();
-    if let Some(data) = collection_data {
-        if is_visible_collection(&data, &user_option, false).await? {
-            return Ok(HttpResponse::Ok().json(Collection::from(data)));
-        }
+    if let Some(data) = collection_data
+        && is_visible_collection(&data, &user_option, false).await?
+    {
+        return Ok(HttpResponse::Ok().json(Collection::from(data)));
     }
     Err(ApiError::NotFound)
 }


@@ -536,11 +536,9 @@ pub async fn create_payout(
                     Some(true),
                 )
                 .await
-            {
-                if let Some(data) = res.items.first() {
-                    payout_item.platform_id =
-                        Some(data.payout_item_id.clone());
-                }
+                && let Some(data) = res.items.first()
+            {
+                payout_item.platform_id = Some(data.payout_item_id.clone());
             }
         }


@ -182,10 +182,10 @@ pub async fn project_get(
.map(|x| x.1) .map(|x| x.1)
.ok(); .ok();
if let Some(data) = project_data { if let Some(data) = project_data
if is_visible_project(&data.inner, &user_option, &pool, false).await? { && is_visible_project(&data.inner, &user_option, &pool, false).await?
return Ok(HttpResponse::Ok().json(Project::from(data))); {
} return Ok(HttpResponse::Ok().json(Project::from(data)));
} }
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }
@ -403,34 +403,36 @@ pub async fn project_edit(
.await?; .await?;
} }
if status.is_searchable() && !project_item.inner.webhook_sent { if status.is_searchable()
if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") { && !project_item.inner.webhook_sent
crate::util::webhook::send_discord_webhook( && let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK")
project_item.inner.id.into(), {
&pool, crate::util::webhook::send_discord_webhook(
&redis, project_item.inner.id.into(),
webhook_url, &pool,
None, &redis,
) webhook_url,
.await None,
.ok(); )
.await
.ok();
sqlx::query!( sqlx::query!(
" "
UPDATE mods UPDATE mods
SET webhook_sent = TRUE SET webhook_sent = TRUE
WHERE id = $1 WHERE id = $1
", ",
id as db_ids::DBProjectId, id as db_ids::DBProjectId,
) )
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
}
} }
if user.role.is_mod() { if user.role.is_mod()
if let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK") { && let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK")
crate::util::webhook::send_slack_webhook( {
crate::util::webhook::send_slack_webhook(
project_item.inner.id.into(), project_item.inner.id.into(),
&pool, &pool,
&redis, &redis,
@ -449,7 +451,6 @@ pub async fn project_edit(
) )
.await .await
.ok(); .ok();
}
} }
if team_member.is_none_or(|x| !x.accepted) { if team_member.is_none_or(|x| !x.accepted) {
@ -692,45 +693,45 @@ pub async fn project_edit(
.await?; .await?;
} }
if let Some(links) = &new_project.link_urls { if let Some(links) = &new_project.link_urls
if !links.is_empty() { && !links.is_empty()
if !perms.contains(ProjectPermissions::EDIT_DETAILS) { {
return Err(ApiError::CustomAuthentication( if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You do not have the permissions to edit the links of this project!" "You do not have the permissions to edit the links of this project!"
.to_string(), .to_string(),
)); ));
} }
let ids_to_delete = links.keys().cloned().collect::<Vec<String>>(); let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
// Deletes all links from hashmap- either will be deleted or be replaced // Deletes all links from hashmap- either will be deleted or be replaced
sqlx::query!( sqlx::query!(
" "
DELETE FROM mods_links DELETE FROM mods_links
WHERE joining_mod_id = $1 AND joining_platform_id IN ( WHERE joining_mod_id = $1 AND joining_platform_id IN (
SELECT id FROM link_platforms WHERE name = ANY($2) SELECT id FROM link_platforms WHERE name = ANY($2)
) )
", ",
id as db_ids::DBProjectId, id as db_ids::DBProjectId,
&ids_to_delete &ids_to_delete
) )
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
for (platform, url) in links { for (platform, url) in links {
if let Some(url) = url { if let Some(url) = url {
let platform_id = let platform_id = db_models::categories::LinkPlatform::get_id(
db_models::categories::LinkPlatform::get_id( platform,
platform, &mut *transaction,
&mut *transaction, )
) .await?
.await? .ok_or_else(|| {
.ok_or_else(|| { ApiError::InvalidInput(format!(
ApiError::InvalidInput(format!( "Platform {} does not exist.",
"Platform {} does not exist.", platform.clone()
platform.clone() ))
)) })?;
})?; sqlx::query!(
sqlx::query!(
" "
INSERT INTO mods_links (joining_mod_id, joining_platform_id, url) INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
VALUES ($1, $2, $3) VALUES ($1, $2, $3)
@ -741,7 +742,6 @@ pub async fn project_edit(
) )
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
}
} }
} }
} }
@ -2430,7 +2430,7 @@ pub async fn project_get_organization(
organization, organization,
team_members, team_members,
); );
return Ok(HttpResponse::Ok().json(organization)); Ok(HttpResponse::Ok().json(organization))
} else { } else {
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }


@ -767,12 +767,13 @@ pub async fn edit_team_member(
)); ));
} }
if let Some(new_permissions) = edit_member.permissions { if let Some(new_permissions) = edit_member.permissions
if !permissions.contains(new_permissions) { && !permissions.contains(new_permissions)
return Err(ApiError::InvalidInput( {
"The new permissions have permissions that you don't have".to_string(), return Err(ApiError::InvalidInput(
)); "The new permissions have permissions that you don't have"
} .to_string(),
));
} }
if edit_member.organization_permissions.is_some() { if edit_member.organization_permissions.is_some() {
@ -800,13 +801,12 @@ pub async fn edit_team_member(
} }
if let Some(new_permissions) = edit_member.organization_permissions if let Some(new_permissions) = edit_member.organization_permissions
&& !organization_permissions.contains(new_permissions)
{ {
if !organization_permissions.contains(new_permissions) { return Err(ApiError::InvalidInput(
return Err(ApiError::InvalidInput(
"The new organization permissions have permissions that you don't have" "The new organization permissions have permissions that you don't have"
.to_string(), .to_string(),
)); ));
}
} }
if edit_member.permissions.is_some() if edit_member.permissions.is_some()
@ -822,13 +822,13 @@ pub async fn edit_team_member(
} }
} }
if let Some(payouts_split) = edit_member.payouts_split { if let Some(payouts_split) = edit_member.payouts_split
if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000) && (payouts_split < Decimal::ZERO
{ || payouts_split > Decimal::from(5000))
return Err(ApiError::InvalidInput( {
"Payouts split must be between 0 and 5000!".to_string(), return Err(ApiError::InvalidInput(
)); "Payouts split must be between 0 and 5000!".to_string(),
} ));
} }
DBTeamMember::edit_team_member( DBTeamMember::edit_team_member(
@ -883,13 +883,13 @@ pub async fn transfer_ownership(
DBTeam::get_association(id.into(), &**pool).await?; DBTeam::get_association(id.into(), &**pool).await?;
if let Some(TeamAssociationId::Project(pid)) = team_association_id { if let Some(TeamAssociationId::Project(pid)) = team_association_id {
let result = DBProject::get_id(pid, &**pool, &redis).await?; let result = DBProject::get_id(pid, &**pool, &redis).await?;
if let Some(project_item) = result { if let Some(project_item) = result
if project_item.inner.organization_id.is_some() { && project_item.inner.organization_id.is_some()
return Err(ApiError::InvalidInput( {
return Err(ApiError::InvalidInput(
"You cannot transfer ownership of a project team that is owend by an organization" "You cannot transfer ownership of a project team that is owend by an organization"
.to_string(), .to_string(),
)); ));
}
} }
} }


@ -289,36 +289,33 @@ pub async fn thread_get(
.await? .await?
.1; .1;
if let Some(mut data) = thread_data { if let Some(mut data) = thread_data
if is_authorized_thread(&data, &user, &pool).await? { && is_authorized_thread(&data, &user, &pool).await?
let authors = &mut data.members; {
let authors = &mut data.members;
authors.append( authors.append(
&mut data &mut data
.messages .messages
.iter() .iter()
.filter_map(|x| { .filter_map(|x| {
if x.hide_identity && !user.role.is_mod() { if x.hide_identity && !user.role.is_mod() {
None None
} else { } else {
x.author_id x.author_id
} }
}) })
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
); );
let users: Vec<User> = database::models::DBUser::get_many_ids( let users: Vec<User> =
authors, &**pool, &redis, database::models::DBUser::get_many_ids(authors, &**pool, &redis)
) .await?
.await? .into_iter()
.into_iter() .map(From::from)
.map(From::from) .collect();
.collect();
return Ok( return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
HttpResponse::Ok().json(Thread::from(data, users, &user))
);
}
} }
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }
@ -454,33 +451,32 @@ pub async fn thread_send_message(
) )
.await?; .await?;
if let Some(project) = project { if let Some(project) = project
if project.inner.status != ProjectStatus::Processing && project.inner.status != ProjectStatus::Processing
&& user.role.is_mod() && user.role.is_mod()
{ {
let members = let members =
database::models::DBTeamMember::get_from_team_full( database::models::DBTeamMember::get_from_team_full(
project.inner.team_id, project.inner.team_id,
&**pool, &**pool,
&redis,
)
.await?;
NotificationBuilder {
body: NotificationBody::ModeratorMessage {
thread_id: thread.id.into(),
message_id: id.into(),
project_id: Some(project.inner.id.into()),
report_id: None,
},
}
.insert_many(
members.into_iter().map(|x| x.user_id).collect(),
&mut transaction,
&redis, &redis,
) )
.await?; .await?;
NotificationBuilder {
body: NotificationBody::ModeratorMessage {
thread_id: thread.id.into(),
message_id: id.into(),
project_id: Some(project.inner.id.into()),
report_id: None,
},
} }
.insert_many(
members.into_iter().map(|x| x.user_id).collect(),
&mut transaction,
&redis,
)
.await?;
} }
} else if let Some(report_id) = thread.report_id { } else if let Some(report_id) = thread.report_id {
let report = database::models::report_item::DBReport::get( let report = database::models::report_item::DBReport::get(


@ -522,10 +522,10 @@ async fn version_create_inner(
.fetch_optional(pool) .fetch_optional(pool)
.await?; .await?;
if let Some(project_status) = project_status { if let Some(project_status) = project_status
if project_status.status == ProjectStatus::Processing.as_str() { && project_status.status == ProjectStatus::Processing.as_str()
moderation_queue.projects.insert(project_id.into()); {
} moderation_queue.projects.insert(project_id.into());
} }
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(response))
@ -871,16 +871,16 @@ pub async fn upload_file(
ref format, ref format,
ref files, ref files,
} = validation_result } = validation_result
&& dependencies.is_empty()
{ {
if dependencies.is_empty() { let hashes: Vec<Vec<u8>> = format
let hashes: Vec<Vec<u8>> = format .files
.files .iter()
.iter() .filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1)) .map(|x| x.as_bytes().to_vec())
.map(|x| x.as_bytes().to_vec()) .collect();
.collect();
let res = sqlx::query!( let res = sqlx::query!(
" "
SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
INNER JOIN files f on h.file_id = f.id INNER JOIN files f on h.file_id = f.id
@ -892,45 +892,44 @@ pub async fn upload_file(
.fetch_all(&mut **transaction) .fetch_all(&mut **transaction)
.await?; .await?;
for file in &format.files { for file in &format.files {
if let Some(dep) = res.iter().find(|x| { if let Some(dep) = res.iter().find(|x| {
Some(&*x.hash) Some(&*x.hash)
== file == file
.hashes .hashes
.get(&PackFileHash::Sha1) .get(&PackFileHash::Sha1)
.map(|x| x.as_bytes()) .map(|x| x.as_bytes())
}) { }) {
dependencies.push(DependencyBuilder { dependencies.push(DependencyBuilder {
project_id: Some(models::DBProjectId(dep.project_id)), project_id: Some(models::DBProjectId(dep.project_id)),
version_id: Some(models::DBVersionId(dep.version_id)), version_id: Some(models::DBVersionId(dep.version_id)),
file_name: None, file_name: None,
dependency_type: DependencyType::Embedded.to_string(), dependency_type: DependencyType::Embedded.to_string(),
}); });
} else if let Some(first_download) = file.downloads.first() { } else if let Some(first_download) = file.downloads.first() {
dependencies.push(DependencyBuilder { dependencies.push(DependencyBuilder {
project_id: None, project_id: None,
version_id: None, version_id: None,
file_name: Some( file_name: Some(
first_download first_download
.rsplit('/') .rsplit('/')
.next() .next()
.unwrap_or(first_download) .unwrap_or(first_download)
.to_string(), .to_string(),
), ),
dependency_type: DependencyType::Embedded.to_string(), dependency_type: DependencyType::Embedded.to_string(),
}); });
}
} }
}
for file in files { for file in files {
if !file.is_empty() { if !file.is_empty() {
dependencies.push(DependencyBuilder { dependencies.push(DependencyBuilder {
project_id: None, project_id: None,
version_id: None, version_id: None,
file_name: Some(file.to_string()), file_name: Some(file.to_string()),
dependency_type: DependencyType::Embedded.to_string(), dependency_type: DependencyType::Embedded.to_string(),
}); });
}
} }
} }
} }
@ -974,10 +973,10 @@ pub async fn upload_file(
)); ));
} }
if let ValidationResult::Warning(msg) = validation_result { if let ValidationResult::Warning(msg) = validation_result
if primary { && primary
return Err(CreateError::InvalidInput(msg.to_string())); {
} return Err(CreateError::InvalidInput(msg.to_string()));
} }
let url = format!("{cdn_url}/{file_path_encode}"); let url = format!("{cdn_url}/{file_path_encode}");


@ -148,65 +148,55 @@ pub async fn get_update_from_hash(
&redis, &redis,
) )
.await? .await?
{ && let Some(project) = database::models::DBProject::get_id(
if let Some(project) = database::models::DBProject::get_id(
file.project_id, file.project_id,
&**pool, &**pool,
&redis, &redis,
) )
.await? .await?
{ {
let mut versions = database::models::DBVersion::get_many( let mut versions = database::models::DBVersion::get_many(
&project.versions, &project.versions,
&**pool, &**pool,
&redis, &redis,
) )
.await? .await?
.into_iter() .into_iter()
.filter(|x| { .filter(|x| {
let mut bool = true; let mut bool = true;
if let Some(version_types) = &update_data.version_types { if let Some(version_types) = &update_data.version_types {
bool &= version_types bool &= version_types
.iter() .iter()
.any(|y| y.as_str() == x.inner.version_type); .any(|y| y.as_str() == x.inner.version_type);
}
if let Some(loaders) = &update_data.loaders {
bool &= x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &update_data.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values
.iter()
.any(|v| x_vf.value.contains_json_value(v))
} else {
true
};
}
}
bool
})
.sorted();
if let Some(first) = versions.next_back() {
if !is_visible_version(
&first.inner,
&user_option,
&pool,
&redis,
)
.await?
{
return Err(ApiError::NotFound);
}
return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(first)));
} }
if let Some(loaders) = &update_data.loaders {
bool &= x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &update_data.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) =
x.version_fields.iter().find(|y| y.field_name == *key)
{
values.iter().any(|v| x_vf.value.contains_json_value(v))
} else {
true
};
}
}
bool
})
.sorted();
if let Some(first) = versions.next_back() {
if !is_visible_version(&first.inner, &user_option, &pool, &redis)
.await?
{
return Err(ApiError::NotFound);
}
return Ok(
HttpResponse::Ok().json(models::projects::Version::from(first))
);
} }
} }
Err(ApiError::NotFound) Err(ApiError::NotFound)
@ -398,13 +388,12 @@ pub async fn update_files(
if let Some(version) = versions if let Some(version) = versions
.iter() .iter()
.find(|x| x.inner.project_id == file.project_id) .find(|x| x.inner.project_id == file.project_id)
&& let Some(hash) = file.hashes.get(&algorithm)
{ {
if let Some(hash) = file.hashes.get(&algorithm) { response.insert(
response.insert( hash.clone(),
hash.clone(), models::projects::Version::from(version.clone()),
models::projects::Version::from(version.clone()), );
);
}
} }
} }
@ -484,69 +473,59 @@ pub async fn update_individual_files(
for project in projects { for project in projects {
for file in files.iter().filter(|x| x.project_id == project.inner.id) { for file in files.iter().filter(|x| x.project_id == project.inner.id) {
if let Some(hash) = file.hashes.get(&algorithm) { if let Some(hash) = file.hashes.get(&algorithm)
if let Some(query_file) = && let Some(query_file) =
update_data.hashes.iter().find(|x| &x.hash == hash) update_data.hashes.iter().find(|x| &x.hash == hash)
{ {
let version = all_versions let version = all_versions
.iter() .iter()
.filter(|x| x.inner.project_id == file.project_id) .filter(|x| x.inner.project_id == file.project_id)
.filter(|x| { .filter(|x| {
let mut bool = true; let mut bool = true;
if let Some(version_types) = if let Some(version_types) = &query_file.version_types {
&query_file.version_types bool &= version_types
{ .iter()
bool &= version_types.iter().any(|y| { .any(|y| y.as_str() == x.inner.version_type);
y.as_str() == x.inner.version_type
});
}
if let Some(loaders) = &query_file.loaders {
bool &= x
.loaders
.iter()
.any(|y| loaders.contains(y));
}
if let Some(loader_fields) =
&query_file.loader_fields
{
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values.iter().any(|v| {
x_vf.value.contains_json_value(v)
})
} else {
true
};
}
}
bool
})
.sorted()
.next_back();
if let Some(version) = version {
if is_visible_version(
&version.inner,
&user_option,
&pool,
&redis,
)
.await?
{
response.insert(
hash.clone(),
models::projects::Version::from(
version.clone(),
),
);
} }
} if let Some(loaders) = &query_file.loaders {
bool &=
x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &query_file.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values.iter().any(|v| {
x_vf.value.contains_json_value(v)
})
} else {
true
};
}
}
bool
})
.sorted()
.next_back();
if let Some(version) = version
&& is_visible_version(
&version.inner,
&user_option,
&pool,
&redis,
)
.await?
{
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
} }
} }
} }


@ -106,13 +106,12 @@ pub async fn version_project_get_helper(
|| x.inner.version_number == id.1 || x.inner.version_number == id.1
}); });
if let Some(version) = version { if let Some(version) = version
if is_visible_version(&version.inner, &user_option, &pool, &redis) && is_visible_version(&version.inner, &user_option, &pool, &redis)
.await? .await?
{ {
return Ok(HttpResponse::Ok() return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(version))); .json(models::projects::Version::from(version)));
}
} }
} }
@ -190,12 +189,12 @@ pub async fn version_get_helper(
.map(|x| x.1) .map(|x| x.1)
.ok(); .ok();
if let Some(data) = version_data { if let Some(data) = version_data
if is_visible_version(&data.inner, &user_option, &pool, &redis).await? { && is_visible_version(&data.inner, &user_option, &pool, &redis).await?
return Ok( {
HttpResponse::Ok().json(models::projects::Version::from(data)) return Ok(
); HttpResponse::Ok().json(models::projects::Version::from(data))
} );
} }
Err(ApiError::NotFound) Err(ApiError::NotFound)


@@ -15,14 +15,12 @@ pub async fn get_user_status(
         return Some(friend_status);
     }
-    if let Ok(mut conn) = redis.pool.get().await {
-        if let Ok(mut statuses) =
-            conn.sscan::<_, String>(get_field_name(user)).await
-        {
-            if let Some(status_json) = statuses.next_item().await {
-                return serde_json::from_str::<UserStatus>(&status_json).ok();
-            }
-        }
-    }
+    if let Ok(mut conn) = redis.pool.get().await
+        && let Ok(mut statuses) =
+            conn.sscan::<_, String>(get_field_name(user)).await
+        && let Some(status_json) = statuses.next_item().await
+    {
+        return serde_json::from_str::<UserStatus>(&status_json).ok();
+    }
     None


@@ -138,12 +138,11 @@ fn process_image(
     let (orig_width, orig_height) = img.dimensions();
     let aspect_ratio = orig_width as f32 / orig_height as f32;
-    if let Some(target_width) = target_width {
-        if img.width() > target_width {
-            let new_height =
-                (target_width as f32 / aspect_ratio).round() as u32;
-            img = img.resize(target_width, new_height, FilterType::Lanczos3);
-        }
+    if let Some(target_width) = target_width
+        && img.width() > target_width
+    {
+        let new_height = (target_width as f32 / aspect_ratio).round() as u32;
+        img = img.resize(target_width, new_height, FilterType::Lanczos3);
     }
     if let Some(min_aspect_ratio) = min_aspect_ratio {
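The resize branch above caps the width and derives the new height from the original aspect ratio. The same arithmetic on its own, with arbitrary example dimensions:

// Width-capped resize math: keep the original aspect ratio when shrinking to
// a target width. Dimensions here are made-up examples.
fn main() {
    let (orig_width, orig_height) = (3840u32, 2160u32);
    let target_width = 1280u32;
    let aspect_ratio = orig_width as f32 / orig_height as f32;
    if orig_width > target_width {
        let new_height = (target_width as f32 / aspect_ratio).round() as u32;
        assert_eq!(new_height, 720);
        println!("resize to {target_width}x{new_height}");
    }
}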


@@ -133,12 +133,11 @@ pub async fn rate_limit_middleware(
         .expect("Rate limiter not configured properly")
         .clone();
-    if let Some(key) = req.headers().get("x-ratelimit-key") {
-        if key.to_str().ok()
-            == dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
-        {
-            return Ok(next.call(req).await?.map_into_left_body());
-        }
+    if let Some(key) = req.headers().get("x-ratelimit-key")
+        && key.to_str().ok()
+            == dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
+    {
+        return Ok(next.call(req).await?.map_into_left_body());
     }
     let conn_info = req.connection_info().clone();


@ -22,46 +22,47 @@ pub fn validation_errors_to_string(
let key_option = map.keys().next(); let key_option = map.keys().next();
if let Some(field) = key_option { if let Some(field) = key_option
if let Some(error) = map.get(field) { && let Some(error) = map.get(field)
return match error { {
ValidationErrorsKind::Struct(errors) => { return match error {
validation_errors_to_string( ValidationErrorsKind::Struct(errors) => {
validation_errors_to_string(
*errors.clone(),
Some(format!("of item {field}")),
)
}
ValidationErrorsKind::List(list) => {
if let Some((index, errors)) = list.iter().next() {
output.push_str(&validation_errors_to_string(
*errors.clone(), *errors.clone(),
Some(format!("of item {field}")), Some(format!("of list {field} with index {index}")),
) ));
} }
ValidationErrorsKind::List(list) => {
if let Some((index, errors)) = list.iter().next() {
output.push_str(&validation_errors_to_string(
*errors.clone(),
Some(format!("of list {field} with index {index}")),
));
}
output output
} }
ValidationErrorsKind::Field(errors) => { ValidationErrorsKind::Field(errors) => {
if let Some(error) = errors.first() { if let Some(error) = errors.first() {
if let Some(adder) = adder { if let Some(adder) = adder {
write!( write!(
&mut output, &mut output,
"Field {field} {adder} failed validation with error: {}", "Field {field} {adder} failed validation with error: {}",
error.code error.code
).unwrap(); ).unwrap();
} else { } else {
write!( write!(
&mut output, &mut output,
"Field {field} failed validation with error: {}", "Field {field} failed validation with error: {}",
error.code error.code
).unwrap(); )
} .unwrap();
} }
output
} }
};
} output
}
};
} }
String::new() String::new()


@@ -238,17 +238,17 @@ pub async fn send_slack_webhook(
             }
         });
-        if let Some(icon_url) = metadata.project_icon_url {
-            if let Some(project_block) = project_block.as_object_mut() {
-                project_block.insert(
-                    "accessory".to_string(),
-                    serde_json::json!({
-                        "type": "image",
-                        "image_url": icon_url,
-                        "alt_text": metadata.project_title
-                    }),
-                );
-            }
+        if let Some(icon_url) = metadata.project_icon_url
+            && let Some(project_block) = project_block.as_object_mut()
+        {
+            project_block.insert(
+                "accessory".to_string(),
+                serde_json::json!({
+                    "type": "image",
+                    "image_url": icon_url,
+                    "alt_text": metadata.project_title
+                }),
+            );
         }
         blocks.push(project_block);


@ -222,10 +222,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(failure_json_check) = &self.failure_json_check { && let Some(failure_json_check) = &self.failure_json_check
failure_json_check(&test::read_body_json(resp).await); {
} failure_json_check(&test::read_body_json(resp).await);
} }
// Failure test- logged in on a non-team user // Failure test- logged in on a non-team user
@ -246,10 +246,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(failure_json_check) = &self.failure_json_check { && let Some(failure_json_check) = &self.failure_json_check
failure_json_check(&test::read_body_json(resp).await); {
} failure_json_check(&test::read_body_json(resp).await);
} }
// Failure test- logged in with EVERY non-relevant permission // Failure test- logged in with EVERY non-relevant permission
@ -270,10 +270,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(failure_json_check) = &self.failure_json_check { && let Some(failure_json_check) = &self.failure_json_check
failure_json_check(&test::read_body_json(resp).await); {
} failure_json_check(&test::read_body_json(resp).await);
} }
// Patch user's permissions to success permissions // Patch user's permissions to success permissions
@ -300,10 +300,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(success_json_check) = &self.success_json_check { && let Some(success_json_check) = &self.success_json_check
success_json_check(&test::read_body_json(resp).await); {
} success_json_check(&test::read_body_json(resp).await);
} }
// If the remove_user flag is set, remove the user from the project // If the remove_user flag is set, remove the user from the project

View File

@ -1,2 +1,2 @@
allow-dbg-in-tests = true allow-dbg-in-tests = true
msrv = "1.88.0" msrv = "1.89.0"

View File

@ -50,10 +50,10 @@ pub async fn parse_command(
// We assume anything else is a filepath to an .mrpack file // We assume anything else is a filepath to an .mrpack file
let path = PathBuf::from(command_string); let path = PathBuf::from(command_string);
let path = io::canonicalize(path)?; let path = io::canonicalize(path)?;
if let Some(ext) = path.extension() { if let Some(ext) = path.extension()
if ext == "mrpack" { && ext == "mrpack"
return Ok(CommandPayload::RunMRPack { path }); {
} return Ok(CommandPayload::RunMRPack { path });
} }
emit_warning(&format!( emit_warning(&format!(
"Invalid command, unrecognized filetype: {}", "Invalid command, unrecognized filetype: {}",

View File

@ -106,13 +106,13 @@ pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
})?; })?;
// removes the old installation of java // removes the old installation of java
if let Some(file) = archive.file_names().next() { if let Some(file) = archive.file_names().next()
if let Some(dir) = file.split('/').next() { && let Some(dir) = file.split('/').next()
let path = path.join(dir); {
let path = path.join(dir);
if path.exists() { if path.exists() {
io::remove_dir_all(path).await?; io::remove_dir_all(path).await?;
}
} }
} }

View File

@ -54,11 +54,11 @@ pub async fn remove_user(uuid: uuid::Uuid) -> crate::Result<()> {
if let Some((uuid, user)) = users.remove(&uuid) { if let Some((uuid, user)) = users.remove(&uuid) {
Credentials::remove(uuid, &state.pool).await?; Credentials::remove(uuid, &state.pool).await?;
if user.active { if user.active
if let Some((_, mut user)) = users.into_iter().next() { && let Some((_, mut user)) = users.into_iter().next()
user.active = true; {
user.upsert(&state.pool).await?; user.active = true;
} user.upsert(&state.pool).await?;
} }
} }

View File

@ -221,14 +221,14 @@ async fn import_atlauncher_unmanaged(
.unwrap_or_else(|| backup_name.to_string()); .unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling; prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id { if let Some(ref project_id) = description.project_id
if let Some(ref version_id) = description.version_id { && let Some(ref version_id) = description.version_id
prof.linked_data = Some(LinkedData { {
project_id: project_id.clone(), prof.linked_data = Some(LinkedData {
version_id: version_id.clone(), project_id: project_id.clone(),
locked: true, version_id: version_id.clone(),
}) locked: true,
} })
} }
prof.icon_path = description prof.icon_path = description

View File

@ -383,18 +383,18 @@ pub async fn set_profile_information(
.unwrap_or_else(|| backup_name.to_string()); .unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling; prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id { if let Some(ref project_id) = description.project_id
if let Some(ref version_id) = description.version_id { && let Some(ref version_id) = description.version_id
prof.linked_data = Some(LinkedData { {
project_id: project_id.clone(), prof.linked_data = Some(LinkedData {
version_id: version_id.clone(), project_id: project_id.clone(),
locked: if !ignore_lock { version_id: version_id.clone(),
true locked: if !ignore_lock {
} else { true
prof.linked_data.as_ref().is_none_or(|x| x.locked) } else {
}, prof.linked_data.as_ref().is_none_or(|x| x.locked)
}) },
} })
} }
prof.icon_path = description prof.icon_path = description

View File

@ -149,13 +149,12 @@ pub async fn install_zipped_mrpack_files(
let profile_path = profile_path.clone(); let profile_path = profile_path.clone();
async move { async move {
//TODO: Future update: prompt user for optional files in a modpack //TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env { if let Some(env) = project.env
if env && env
.get(&EnvType::Client) .get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported) .is_some_and(|x| x == &SideType::Unsupported)
{ {
return Ok(()); return Ok(());
}
} }
let file = fetch_mirrors( let file = fetch_mirrors(
@ -375,12 +374,12 @@ pub async fn remove_all_related_files(
) )
.await? .await?
{ {
if let Some(metadata) = &project.metadata { if let Some(metadata) = &project.metadata
if to_remove.contains(&metadata.project_id) { && to_remove.contains(&metadata.project_id)
let path = profile_full_path.join(file_path); {
if path.exists() { let path = profile_full_path.join(file_path);
io::remove_file(&path).await?; if path.exists() {
} io::remove_file(&path).await?;
} }
} }
} }

View File

@ -337,28 +337,26 @@ pub async fn update_project(
) )
.await? .await?
.remove(project_path) .remove(project_path)
&& let Some(update_version) = &file.update_version_id
{ {
if let Some(update_version) = &file.update_version_id { let path = Profile::add_project_version(
let path = Profile::add_project_version( profile_path,
profile_path, update_version,
update_version, &state.pool,
&state.pool, &state.fetch_semaphore,
&state.fetch_semaphore, &state.io_semaphore,
&state.io_semaphore, )
) .await?;
.await?;
if path != project_path { if path != project_path {
Profile::remove_project(profile_path, project_path).await?; Profile::remove_project(profile_path, project_path).await?;
}
if !skip_send_event.unwrap_or(false) {
emit_profile(profile_path, ProfilePayloadType::Edited)
.await?;
}
return Ok(path);
} }
if !skip_send_event.unwrap_or(false) {
emit_profile(profile_path, ProfilePayloadType::Edited).await?;
}
return Ok(path);
} }
Err(crate::ErrorKind::InputError( Err(crate::ErrorKind::InputError(
@ -479,10 +477,10 @@ pub async fn export_mrpack(
let included_export_candidates = included_export_candidates let included_export_candidates = included_export_candidates
.into_iter() .into_iter()
.filter(|x| { .filter(|x| {
if let Some(f) = PathBuf::from(x).file_name() { if let Some(f) = PathBuf::from(x).file_name()
if f.to_string_lossy().starts_with(".DS_Store") { && f.to_string_lossy().starts_with(".DS_Store")
return false; {
} return false;
} }
true true
}) })

View File

@ -191,11 +191,6 @@ pub struct LoadingPayload {
pub message: String, pub message: String,
} }
#[derive(Serialize, Clone)]
pub struct OfflinePayload {
pub offline: bool,
}
#[derive(Serialize, Clone)] #[derive(Serialize, Clone)]
pub struct WarningPayload { pub struct WarningPayload {
pub message: String, pub message: String,

View File

@ -32,15 +32,15 @@ pub fn get_class_paths(
let mut cps = libraries let mut cps = libraries
.iter() .iter()
.filter_map(|library| { .filter_map(|library| {
if let Some(rules) = &library.rules { if let Some(rules) = &library.rules
if !parse_rules( && !parse_rules(
rules, rules,
java_arch, java_arch,
&QuickPlayType::None, &QuickPlayType::None,
minecraft_updated, minecraft_updated,
) { )
return None; {
} return None;
} }
if !library.include_in_classpath { if !library.include_in_classpath {
@ -504,10 +504,10 @@ pub async fn get_processor_main_class(
let mut line = line.map_err(IOError::from)?; let mut line = line.map_err(IOError::from)?;
line.retain(|c| !c.is_whitespace()); line.retain(|c| !c.is_whitespace());
if line.starts_with("Main-Class:") { if line.starts_with("Main-Class:")
if let Some(class) = line.split(':').nth(1) { && let Some(class) = line.split(':').nth(1)
return Ok(Some(class.to_string())); {
} return Ok(Some(class.to_string()));
} }
} }

View File

@ -290,12 +290,11 @@ pub async fn download_libraries(
loading_try_for_each_concurrent( loading_try_for_each_concurrent(
stream::iter(libraries.iter()) stream::iter(libraries.iter())
.map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move { .map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
if let Some(rules) = &library.rules { if let Some(rules) = &library.rules
if !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) { && !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
tracing::trace!("Skipped library {}", &library.name); tracing::trace!("Skipped library {}", &library.name);
return Ok(()); return Ok(());
} }
}
if !library.downloadable { if !library.downloadable {
tracing::trace!("Skipped non-downloadable library {}", &library.name); tracing::trace!("Skipped non-downloadable library {}", &library.name);
@ -311,15 +310,14 @@ pub async fn download_libraries(
return Ok(()); return Ok(());
} }
if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads { if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads
if !artifact.url.is_empty(){ && !artifact.url.is_empty(){
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool) let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
.await?; .await?;
write(&path, &bytes, &st.io_semaphore).await?; write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path); tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
return Ok::<_, crate::Error>(()); return Ok::<_, crate::Error>(());
} }
}
let url = [ let url = [
library library

View File

@ -341,10 +341,10 @@ pub async fn install_minecraft(
// Forge processors (90-100) // Forge processors (90-100)
for (index, processor) in processors.iter().enumerate() { for (index, processor) in processors.iter().enumerate() {
if let Some(sides) = &processor.sides { if let Some(sides) = &processor.sides
if !sides.contains(&String::from("client")) { && !sides.contains(&String::from("client"))
continue; {
} continue;
} }
let cp = { let cp = {

View File

@ -385,10 +385,10 @@ impl DirectoryInfo {
return Err(e); return Err(e);
} }
} else { } else {
if let Some(disk_usage) = get_disk_usage(&move_dir)? { if let Some(disk_usage) = get_disk_usage(&move_dir)?
if total_size > disk_usage { && total_size > disk_usage
return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into()); {
} return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
} }
let loader_bar_id = Arc::new(&loader_bar_id); let loader_bar_id = Arc::new(&loader_bar_id);

View File

@ -180,27 +180,24 @@ impl FriendsSocket {
ServerToClientMessage::FriendSocketStoppedListening { .. } => {}, // TODO ServerToClientMessage::FriendSocketStoppedListening { .. } => {}, // TODO
ServerToClientMessage::SocketConnected { to_socket, new_socket } => { ServerToClientMessage::SocketConnected { to_socket, new_socket } => {
if let Some(connected_to) = sockets.get(&to_socket) { if let Some(connected_to) = sockets.get(&to_socket)
if let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone() { && let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone()
if let Ok(new_stream) = TcpStream::connect(local_addr).await { && let Ok(new_stream) = TcpStream::connect(local_addr).await {
let (read, write) = new_stream.into_split(); let (read, write) = new_stream.into_split();
sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write)))); sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write))));
Self::socket_read_loop(write_handle.clone(), read, new_socket); Self::socket_read_loop(write_handle.clone(), read, new_socket);
continue; continue;
} }
}
}
let _ = Self::send_message(&write_handle, ClientToServerMessage::SocketClose { socket: new_socket }).await; let _ = Self::send_message(&write_handle, ClientToServerMessage::SocketClose { socket: new_socket }).await;
}, },
ServerToClientMessage::SocketClosed { socket } => { ServerToClientMessage::SocketClosed { socket } => {
sockets.remove_if(&socket, |_, x| matches!(*x.clone(), InternalTunnelSocket::Connected(_))); sockets.remove_if(&socket, |_, x| matches!(*x.clone(), InternalTunnelSocket::Connected(_)));
}, },
ServerToClientMessage::SocketData { socket, data } => { ServerToClientMessage::SocketData { socket, data } => {
if let Some(mut socket) = sockets.get_mut(&socket) { if let Some(mut socket) = sockets.get_mut(&socket)
if let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() { && let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
let _ = stream.lock().await.write_all(&data).await; let _ = stream.lock().await.write_all(&data).await;
} }
}
}, },
} }
} }

View File

@ -100,8 +100,8 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
let profile_path_str = profile_path_str.clone(); let profile_path_str = profile_path_str.clone();
let world = world.clone(); let world = world.clone();
tokio::spawn(async move { tokio::spawn(async move {
if let Ok(state) = State::get().await { if let Ok(state) = State::get().await
if let Err(e) = attached_world_data::AttachedWorldData::remove_for_world( && let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
&profile_path_str, &profile_path_str,
WorldType::Singleplayer, WorldType::Singleplayer,
&world, &world,
@ -109,7 +109,6 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
).await { ).await {
tracing::warn!("Failed to remove AttachedWorldData for '{world}': {e}") tracing::warn!("Failed to remove AttachedWorldData for '{world}': {e}")
} }
}
}); });
} }
Some(ProfilePayloadType::WorldUpdated { world }) Some(ProfilePayloadType::WorldUpdated { world })
@ -150,14 +149,14 @@ pub(crate) async fn watch_profiles_init(
) { ) {
if let Ok(profiles_dir) = std::fs::read_dir(dirs.profiles_dir()) { if let Ok(profiles_dir) = std::fs::read_dir(dirs.profiles_dir()) {
for profile_dir in profiles_dir { for profile_dir in profiles_dir {
if let Ok(file_name) = profile_dir.map(|x| x.file_name()) { if let Ok(file_name) = profile_dir.map(|x| x.file_name())
if let Some(file_name) = file_name.to_str() { && let Some(file_name) = file_name.to_str()
if file_name.starts_with(".DS_Store") { {
continue; if file_name.starts_with(".DS_Store") {
}; continue;
};
watch_profile(file_name, watcher, dirs).await; watch_profile(file_name, watcher, dirs).await;
}
} }
} }
} }

View File

@ -76,10 +76,9 @@ where
.loaded_config_dir .loaded_config_dir
.clone() .clone()
.and_then(|x| x.to_str().map(|x| x.to_string())) .and_then(|x| x.to_str().map(|x| x.to_string()))
&& path != old_launcher_root_str
{ {
if path != old_launcher_root_str { settings.custom_dir = Some(path);
settings.custom_dir = Some(path);
}
} }
settings.prev_custom_dir = Some(old_launcher_root_str.clone()); settings.prev_custom_dir = Some(old_launcher_root_str.clone());
@ -136,31 +135,27 @@ where
.await?; .await?;
} }
if let Some(device_token) = minecraft_auth.token { if let Some(device_token) = minecraft_auth.token
if let Ok(private_key) = && let Ok(private_key) =
SigningKey::from_pkcs8_pem(&device_token.private_key) SigningKey::from_pkcs8_pem(&device_token.private_key)
{ && let Ok(uuid) = Uuid::parse_str(&device_token.id)
if let Ok(uuid) = Uuid::parse_str(&device_token.id) { {
DeviceTokenPair { DeviceTokenPair {
token: DeviceToken { token: DeviceToken {
issue_instant: device_token.token.issue_instant, issue_instant: device_token.token.issue_instant,
not_after: device_token.token.not_after, not_after: device_token.token.not_after,
token: device_token.token.token, token: device_token.token.token,
display_claims: device_token display_claims: device_token.token.display_claims,
.token },
.display_claims, key: DeviceTokenKey {
}, id: uuid,
key: DeviceTokenKey { key: private_key,
id: uuid, x: device_token.x,
key: private_key, y: device_token.y,
x: device_token.x, },
y: device_token.y,
},
}
.upsert(exec)
.await?;
}
} }
.upsert(exec)
.await?;
} }
} }
@ -207,100 +202,93 @@ where
update_version, update_version,
.. ..
} = project.metadata } = project.metadata
{ && let Some(file) = version
if let Some(file) = version
.files .files
.iter() .iter()
.find(|x| x.hashes.get("sha512") == Some(&sha512)) .find(|x| x.hashes.get("sha512") == Some(&sha512))
{ && let Some(sha1) = file.hashes.get("sha1")
if let Some(sha1) = file.hashes.get("sha1") { {
if let Ok(metadata) = full_path.metadata() { if let Ok(metadata) = full_path.metadata() {
let file_name = format!( let file_name = format!(
"{}/{}", "{}/{}",
profile.path, profile.path,
path.replace('\\', "/") path.replace('\\', "/")
.replace(".disabled", "") .replace(".disabled", "")
); );
cached_entries.push(CacheValue::FileHash( cached_entries.push(CacheValue::FileHash(
CachedFileHash { CachedFileHash {
path: file_name, path: file_name,
size: metadata.len(), size: metadata.len(),
hash: sha1.clone(), hash: sha1.clone(),
project_type: ProjectType::get_from_parent_folder(&full_path), project_type:
}, ProjectType::get_from_parent_folder(
)); &full_path,
}
cached_entries.push(CacheValue::File(
CachedFile {
hash: sha1.clone(),
project_id: version.project_id.clone(),
version_id: version.id.clone(),
},
));
if let Some(update_version) = update_version {
let mod_loader: ModLoader =
profile.metadata.loader.into();
cached_entries.push(
CacheValue::FileUpdate(
CachedFileUpdate {
hash: sha1.clone(),
game_version: profile
.metadata
.game_version
.clone(),
loaders: vec![
mod_loader
.as_str()
.to_string(),
],
update_version_id:
update_version.id.clone(),
},
), ),
); },
));
cached_entries.push(CacheValue::Version(
(*update_version).into(),
));
}
let members = members
.into_iter()
.map(|x| {
let user = User {
id: x.user.id,
username: x.user.username,
avatar_url: x.user.avatar_url,
bio: x.user.bio,
created: x.user.created,
role: x.user.role,
badges: 0,
};
cached_entries.push(CacheValue::User(
user.clone(),
));
TeamMember {
team_id: x.team_id,
user,
is_owner: x.role == "Owner",
role: x.role,
ordering: x.ordering,
}
})
.collect::<Vec<_>>();
cached_entries.push(CacheValue::Team(members));
cached_entries.push(CacheValue::Version(
(*version).into(),
));
}
} }
cached_entries.push(CacheValue::File(CachedFile {
hash: sha1.clone(),
project_id: version.project_id.clone(),
version_id: version.id.clone(),
}));
if let Some(update_version) = update_version {
let mod_loader: ModLoader =
profile.metadata.loader.into();
cached_entries.push(CacheValue::FileUpdate(
CachedFileUpdate {
hash: sha1.clone(),
game_version: profile
.metadata
.game_version
.clone(),
loaders: vec![
mod_loader.as_str().to_string(),
],
update_version_id: update_version
.id
.clone(),
},
));
cached_entries.push(CacheValue::Version(
(*update_version).into(),
));
}
let members = members
.into_iter()
.map(|x| {
let user = User {
id: x.user.id,
username: x.user.username,
avatar_url: x.user.avatar_url,
bio: x.user.bio,
created: x.user.created,
role: x.user.role,
badges: 0,
};
cached_entries
.push(CacheValue::User(user.clone()));
TeamMember {
team_id: x.team_id,
user,
is_owner: x.role == "Owner",
role: x.role,
ordering: x.ordering,
}
})
.collect::<Vec<_>>();
cached_entries.push(CacheValue::Team(members));
cached_entries
.push(CacheValue::Version((*version).into()));
} }
} }
@ -332,16 +320,15 @@ where
.map(|x| x.id), .map(|x| x.id),
groups: profile.metadata.groups, groups: profile.metadata.groups,
linked_data: profile.metadata.linked_data.and_then(|x| { linked_data: profile.metadata.linked_data.and_then(|x| {
if let Some(project_id) = x.project_id { if let Some(project_id) = x.project_id
if let Some(version_id) = x.version_id { && let Some(version_id) = x.version_id
if let Some(locked) = x.locked { && let Some(locked) = x.locked
return Some(LinkedData { {
project_id, return Some(LinkedData {
version_id, project_id,
locked, version_id,
}); locked,
} });
}
} }
None None

View File

@ -393,10 +393,9 @@ impl Credentials {
.. ..
}, },
) = *err.raw ) = *err.raw
&& (source.is_connect() || source.is_timeout())
{ {
if source.is_connect() || source.is_timeout() { return Ok(Some(creds));
return Ok(Some(creds));
}
} }
Err(err) Err(err)
@ -640,36 +639,31 @@ impl DeviceTokenPair {
.fetch_optional(exec) .fetch_optional(exec)
.await?; .await?;
if let Some(x) = res { if let Some(x) = res
if let Ok(uuid) = Uuid::parse_str(&x.uuid) { && let Ok(uuid) = Uuid::parse_str(&x.uuid)
if let Ok(private_key) = && let Ok(private_key) = SigningKey::from_pkcs8_pem(&x.private_key)
SigningKey::from_pkcs8_pem(&x.private_key) {
{ return Ok(Some(Self {
return Ok(Some(Self { token: DeviceToken {
token: DeviceToken { issue_instant: Utc
issue_instant: Utc .timestamp_opt(x.issue_instant, 0)
.timestamp_opt(x.issue_instant, 0) .single()
.single() .unwrap_or_else(Utc::now),
.unwrap_or_else(Utc::now), not_after: Utc
not_after: Utc .timestamp_opt(x.not_after, 0)
.timestamp_opt(x.not_after, 0) .single()
.single() .unwrap_or_else(Utc::now),
.unwrap_or_else(Utc::now), token: x.token,
token: x.token, display_claims: serde_json::from_value(x.display_claims)
display_claims: serde_json::from_value( .unwrap_or_default(),
x.display_claims, },
) key: DeviceTokenKey {
.unwrap_or_default(), id: uuid,
}, key: private_key,
key: DeviceTokenKey { x: x.x,
id: uuid, y: x.y,
key: private_key, },
x: x.x, }));
y: x.y,
},
}));
}
}
} }
Ok(None) Ok(None)

View File

@ -360,18 +360,17 @@ impl Process {
} }
// Write the throwable if present // Write the throwable if present
if !current_content.is_empty() { if !current_content.is_empty()
if let Err(e) = && let Err(e) =
Process::append_to_log_file( Process::append_to_log_file(
&log_path, &log_path,
&current_content, &current_content,
) )
{ {
tracing::error!( tracing::error!(
"Failed to write throwable to log file: {}", "Failed to write throwable to log file: {}",
e e
); );
}
} }
} }
} }
@ -429,15 +428,13 @@ impl Process {
if let Some(timestamp) = if let Some(timestamp) =
current_event.timestamp.as_deref() current_event.timestamp.as_deref()
{ && let Err(e) = Self::maybe_handle_server_join_logging(
if let Err(e) = Self::maybe_handle_server_join_logging(
profile_path, profile_path,
timestamp, timestamp,
message message
).await { ).await {
tracing::error!("Failed to handle server join logging: {e}"); tracing::error!("Failed to handle server join logging: {e}");
} }
}
} }
} }
_ => {} _ => {}
@ -451,29 +448,26 @@ impl Process {
} else if !in_event } else if !in_event
&& !e.inplace_trim_end() && !e.inplace_trim_end()
&& !e.inplace_trim_start() && !e.inplace_trim_start()
&& let Ok(text) = e.unescape()
&& let Err(e) = Process::append_to_log_file(
&log_path,
&format!("{text}\n"),
)
{ {
if let Ok(text) = e.unescape() { tracing::error!(
if let Err(e) = Process::append_to_log_file( "Failed to write to log file: {}",
&log_path, e
&format!("{text}\n"), );
) {
tracing::error!(
"Failed to write to log file: {}",
e
);
}
}
} }
} }
Ok(Event::CData(e)) => { Ok(Event::CData(e)) => {
if in_message || in_throwable { if (in_message || in_throwable)
if let Ok(text) = e && let Ok(text) = e
.escape() .escape()
.map_err(|x| x.into()) .map_err(|x| x.into())
.and_then(|x| x.unescape()) .and_then(|x| x.unescape())
{ {
current_content.push_str(&text); current_content.push_str(&text);
}
} }
} }
_ => (), _ => (),
@ -720,16 +714,13 @@ impl Process {
let logs_folder = state.directories.profile_logs_dir(&profile_path); let logs_folder = state.directories.profile_logs_dir(&profile_path);
let log_path = logs_folder.join(LAUNCHER_LOG_PATH); let log_path = logs_folder.join(LAUNCHER_LOG_PATH);
if log_path.exists() { if log_path.exists()
if let Err(e) = Process::append_to_log_file( && let Err(e) = Process::append_to_log_file(
&log_path, &log_path,
&format!("\n# Process exited with status: {mc_exit_status}\n"), &format!("\n# Process exited with status: {mc_exit_status}\n"),
) { )
tracing::warn!( {
"Failed to write exit status to log file: {}", tracing::warn!("Failed to write exit status to log file: {}", e);
e
);
}
} }
let _ = state.discord_rpc.clear_to_default(true).await; let _ = state.discord_rpc.clear_to_default(true).await;
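One detail in the log-parsing changes above is worth calling out: in a let-chain, a let pattern may only appear as a direct operand of &&, and && binds tighter than ||, so an || condition has to be parenthesized before it can share a chain with a let; hence the new form if (in_message || in_throwable) && let Ok(text) = ... above. A minimal standalone sketch of that rule, using hypothetical names not taken from this codebase:

fn first_digit(in_message: bool, in_throwable: bool, raw: &str) -> Option<u32> {
    // The parentheses are required: without them && would bind tighter than ||,
    // pushing the let chain inside an || operand, which let-chains do not allow.
    if (in_message || in_throwable)
        && let Some(first) = raw.chars().next()
        && let Some(digit) = first.to_digit(10)
    {
        return Some(digit);
    }
    None
}

fn main() {
    assert_eq!(first_digit(true, false, "7abc"), Some(7));
    assert_eq!(first_digit(false, false, "7abc"), None);
}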

View File

@ -629,21 +629,20 @@ impl Profile {
{ {
let subdirectory = let subdirectory =
subdirectory.map_err(io::IOError::from)?.path(); subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file() { if subdirectory.is_file()
if let Some(file_name) = subdirectory && let Some(file_name) = subdirectory
.file_name() .file_name()
.and_then(|x| x.to_str()) .and_then(|x| x.to_str())
{ {
let file_size = subdirectory let file_size = subdirectory
.metadata() .metadata()
.map_err(io::IOError::from)? .map_err(io::IOError::from)?
.len(); .len();
keys.push(format!( keys.push(format!(
"{file_size}-{}/{folder}/{file_name}", "{file_size}-{}/{folder}/{file_name}",
profile.path profile.path
)); ));
}
} }
} }
} }
@ -901,30 +900,29 @@ impl Profile {
{ {
let subdirectory = let subdirectory =
subdirectory.map_err(io::IOError::from)?.path(); subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file() { if subdirectory.is_file()
if let Some(file_name) = && let Some(file_name) =
subdirectory.file_name().and_then(|x| x.to_str()) subdirectory.file_name().and_then(|x| x.to_str())
{ {
let file_size = subdirectory let file_size = subdirectory
.metadata() .metadata()
.map_err(io::IOError::from)? .map_err(io::IOError::from)?
.len(); .len();
keys.push(InitialScanFile { keys.push(InitialScanFile {
path: format!( path: format!(
"{}/{folder}/{}", "{}/{folder}/{}",
self.path, self.path,
file_name.trim_end_matches(".disabled") file_name.trim_end_matches(".disabled")
), ),
file_name: file_name.to_string(), file_name: file_name.to_string(),
project_type, project_type,
size: file_size, size: file_size,
cache_key: format!( cache_key: format!(
"{file_size}-{}/{folder}/{file_name}", "{file_size}-{}/{folder}/{file_name}",
self.path self.path
), ),
}); });
}
} }
} }
} }

View File

@ -191,22 +191,21 @@ async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
let mut jre_paths = HashSet::new(); let mut jre_paths = HashSet::new();
let base_path = state.directories.java_versions_dir(); let base_path = state.directories.java_versions_dir();
if base_path.is_dir() { if base_path.is_dir()
if let Ok(dir) = std::fs::read_dir(base_path) { && let Ok(dir) = std::fs::read_dir(base_path)
for entry in dir.flatten() { {
let file_path = entry.path().join("bin"); for entry in dir.flatten() {
let file_path = entry.path().join("bin");
if let Ok(contents) = if let Ok(contents) = std::fs::read_to_string(file_path.clone())
std::fs::read_to_string(file_path.clone()) {
let entry = entry.path().join(contents);
jre_paths.insert(entry);
} else {
#[cfg(not(target_os = "macos"))]
{ {
let entry = entry.path().join(contents); let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(entry); jre_paths.insert(file_path);
} else {
#[cfg(not(target_os = "macos"))]
{
let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(file_path);
}
} }
} }
} }
@ -300,20 +299,20 @@ pub async fn check_java_at_filepath(path: &Path) -> crate::Result<JavaVersion> {
} }
// Extract version info from it // Extract version info from it
if let Some(arch) = java_arch { if let Some(arch) = java_arch
if let Some(version) = java_version { && let Some(version) = java_version
if let Ok(version) = extract_java_version(version) { {
let path = java.to_string_lossy().to_string(); if let Ok(version) = extract_java_version(version) {
return Ok(JavaVersion { let path = java.to_string_lossy().to_string();
parsed_version: version, return Ok(JavaVersion {
path, parsed_version: version,
version: version.to_string(), path,
architecture: arch.to_string(), version: version.to_string(),
}); architecture: arch.to_string(),
} });
return Err(JREError::InvalidJREVersion(version.to_owned()).into());
} }
return Err(JREError::InvalidJREVersion(version.to_owned()).into());
} }
Err(JREError::FailedJavaCheck(java).into()) Err(JREError::FailedJavaCheck(java).into())

View File

@ -33,12 +33,11 @@ pub fn is_feature_supported_in(
if part_version == part_first_release { if part_version == part_first_release {
continue; continue;
} }
if let Ok(part_version) = part_version.parse::<u32>() { if let Ok(part_version) = part_version.parse::<u32>()
if let Ok(part_first_release) = part_first_release.parse::<u32>() { && let Ok(part_first_release) = part_first_release.parse::<u32>()
if part_version > part_first_release { && part_version > part_first_release
return true; {
} return true;
}
} }
} }
false false

View File

@ -1,2 +1,2 @@
[toolchain] [toolchain]
channel = "1.88.0" channel = "1.89.0"
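Alongside the MSRV bump in clippy.toml, the dominant change in this commit is a mechanical refactor supported by the pinned toolchain: nested if let / if blocks are collapsed into a single if using let-chains. A minimal sketch of the before/after shape (hypothetical function and names, assuming edition 2024 where let-chains are stable), not code from this repository:

// Before: each extra condition costs another level of nesting.
fn greet_old(enabled: bool, name: Option<&str>) -> Option<String> {
    if enabled {
        if let Some(name) = name {
            if !name.is_empty() {
                return Some(format!("Hello, {name}!"));
            }
        }
    }
    None
}

// After: boolean guards and let patterns are chained with && in one if.
fn greet_new(enabled: bool, name: Option<&str>) -> Option<String> {
    if enabled
        && let Some(name) = name
        && !name.is_empty()
    {
        return Some(format!("Hello, {name}!"));
    }
    None
}

fn main() {
    assert_eq!(greet_old(true, Some("world")), greet_new(true, Some("world")));
    assert_eq!(greet_new(true, Some("")), None);
    assert_eq!(greet_new(false, Some("world")), None);
}

The behaviour is unchanged; the hunks above are mostly this transformation plus the re-indentation it causes, which is why a toolchain version bump touches so many files.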