More staging fixes (#768)
* Fixes issues
* Staging fixes
* Passes tests
* Fixes fmt/clippy
* Drops datapack/plugin extras
* Fixed failing test

---------

Co-authored-by: Geometrically <18202329+Geometrically@users.noreply.github.com>
This commit is contained in:
parent 0efbbed5e2, commit fd18185ef0
.sqlx/query-846b66683e6abd40acd158195d8836a02ff5dc408c9fc233e8b5ad3b48125dc4.json (generated, new file, 56 lines)
@@ -0,0 +1,56 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT DISTINCT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type\n FROM loader_fields lf\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Int4"
      },
      {
        "ordinal": 1,
        "name": "field",
        "type_info": "Varchar"
      },
      {
        "ordinal": 2,
        "name": "field_type",
        "type_info": "Varchar"
      },
      {
        "ordinal": 3,
        "name": "optional",
        "type_info": "Bool"
      },
      {
        "ordinal": 4,
        "name": "min_val",
        "type_info": "Int4"
      },
      {
        "ordinal": 5,
        "name": "max_val",
        "type_info": "Int4"
      },
      {
        "ordinal": 6,
        "name": "enum_type",
        "type_info": "Int4"
      }
    ],
    "parameters": {
      "Left": []
    },
    "nullable": [
      false,
      false,
      false,
      false,
      true,
      true,
      true
    ]
  },
  "hash": "846b66683e6abd40acd158195d8836a02ff5dc408c9fc233e8b5ad3b48125dc4"
}
migrations/20231125080100_drops_mods_dp_plugins.sql (new file, 31 lines)
@@ -0,0 +1,31 @@
-- For every loader that has a loaders_project_types entry that connects it to the project_types 'plugin',
-- remove all non-plugin project_types entries for that loader.
-- This is to ensure that the plugin project_type is the only one that is used for the plugin loaders.

--plugin
DELETE FROM loaders_project_types
WHERE joining_loader_id IN (
    SELECT DISTINCT l.id
    FROM loaders l
    LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
    LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id
    WHERE pt.name = 'plugin'
)
AND joining_project_type_id NOT IN (
    SELECT id FROM project_types
    WHERE name = 'plugin'
);

--datapack
DELETE FROM loaders_project_types
WHERE joining_loader_id IN (
    SELECT DISTINCT l.id
    FROM loaders l
    LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
    LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id
    WHERE pt.name = 'datapack'
)
AND joining_project_type_id NOT IN (
    SELECT id FROM project_types
    WHERE name = 'datapack'
);
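Editor's note: as a rough sanity check (not part of this commit), the migration's effect can be verified with a query shaped like the DELETEs above: once it has run, a loader that is mapped to 'plugin' should no longer be mapped to any other project type, and likewise for 'datapack'. The helper below is a minimal sketch; the table and column names come from the migration itself, while the function and the use of sqlx::query_scalar are assumptions for illustration.

use sqlx::PgPool;

// Illustrative check, not in the PR: counts loaders_project_types rows that the
// 'plugin' half of the migration should have deleted. Expected result: 0.
async fn count_stray_plugin_mappings(pool: &PgPool) -> Result<i64, sqlx::Error> {
    let count: i64 = sqlx::query_scalar(
        "SELECT COUNT(*)
         FROM loaders_project_types lpt
         WHERE lpt.joining_loader_id IN (
             SELECT l.id
             FROM loaders l
             JOIN loaders_project_types x ON x.joining_loader_id = l.id
             JOIN project_types pt ON pt.id = x.joining_project_type_id
             WHERE pt.name = 'plugin'
         )
         AND lpt.joining_project_type_id NOT IN (
             SELECT id FROM project_types WHERE name = 'plugin'
         )",
    )
    .fetch_one(pool)
    .await?;
    Ok(count)
}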
@@ -13,6 +13,7 @@ const GAMES_LIST_NAMESPACE: &str = "games";
const LOADER_ID: &str = "loader_id";
const LOADERS_LIST_NAMESPACE: &str = "loaders";
const LOADER_FIELDS_NAMESPACE: &str = "loader_fields";
const LOADER_FIELDS_NAMESPACE_ALL: &str = "loader_fields_all";
const LOADER_FIELD_ENUMS_ID_NAMESPACE: &str = "loader_field_enums";
const LOADER_FIELD_ENUM_VALUES_NAMESPACE: &str = "loader_field_enum_values";

@@ -396,8 +397,57 @@ impl LoaderField {
            .collect();
        Ok(result)
    }
    }

    // Gets all fields for a given loader(s)
    // This is for tags, which need all fields for all loaders
    // We want to return them even in testing situations where we don't have loaders or loader_fields_loaders set up
    pub async fn get_fields_all<'a, E>(
        exec: E,
        redis: &RedisPool,
    ) -> Result<Vec<LoaderField>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        let mut redis = redis.connect().await?;

        let cached_fields: Option<Vec<LoaderField>> = redis
            .get(LOADER_FIELDS_NAMESPACE_ALL, "")
            .await?
            .and_then(|x| serde_json::from_str::<Vec<LoaderField>>(&x).ok());

        if let Some(cached_fields) = cached_fields {
            return Ok(cached_fields);
        }

        let result = sqlx::query!(
            "
            SELECT DISTINCT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type
            FROM loader_fields lf
            ",
        )
        .fetch_many(exec)
        .try_filter_map(|e| async {
            Ok(e.right().and_then(|r| {
                Some(LoaderField {
                    id: LoaderFieldId(r.id),
                    field_type: LoaderFieldType::build(&r.field_type, r.enum_type)?,
                    field: r.field,
                    optional: r.optional,
                    min_val: r.min_val,
                    max_val: r.max_val,
                })
            }))
        })
        .try_collect::<Vec<LoaderField>>()
        .await?;

        redis
            .set_serialized_to_json(LOADER_FIELDS_NAMESPACE_ALL, "", &result, None)
            .await?;

        Ok(result)
    }
}
impl LoaderFieldEnum {
    pub async fn get<'a, E>(
        enum_name: &str, // Note: NOT loader field name
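Editor's note: for orientation, a minimal sketch of how a caller inside the crate could use the new get_fields_all, mirroring what the v3 tags route below now does (fetch every field from the Redis-backed cache, then pick one out by name). The wrapper function itself is illustrative; get_fields_all, LoaderField, RedisPool, and DatabaseError are names taken from this diff, the LoaderField and RedisPool paths follow the `use` lines in the tags.rs hunk, and the DatabaseError import path is an assumption.

use crate::database::models::loader_fields::LoaderField;
use crate::database::models::DatabaseError;
use crate::database::redis::RedisPool;
use sqlx::PgPool;

// Illustrative helper, not in the PR: load every loader field (cached under the
// new "loader_fields_all" namespace) and look one up by its field name.
async fn find_loader_field(
    pool: &PgPool,
    redis: &RedisPool,
    name: &str,
) -> Result<Option<LoaderField>, DatabaseError> {
    let fields = LoaderField::get_fields_all(pool, redis).await?;
    Ok(fields.into_iter().find(|f| f.field == name))
}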
@@ -1,2 +1,3 @@
// Legacy models from V2, where it's useful to keep the struct for rerouting/conversion
pub mod projects;
pub mod search;
@@ -77,7 +77,11 @@ impl LegacyProject {

        // V2 versions only have one project type; v3 versions can rarely have multiple.
        // We'll just use the first one.
        let mut project_type = data.project_types.first().cloned().unwrap_or_default();
        let mut project_type = data
            .project_types
            .first()
            .cloned()
            .unwrap_or("unknown".to_string());
        let mut loaders = data.loaders;

        if let Some(versions_item) = versions_item {
src/models/v2/search.rs (new file, 124 lines)
@@ -0,0 +1,124 @@
use serde::{Deserialize, Serialize};

use crate::search::ResultSearchProject;

#[derive(Serialize, Deserialize, Debug)]
pub struct LegacySearchResults {
    pub hits: Vec<LegacyResultSearchProject>,
    pub offset: usize,
    pub limit: usize,
    pub total_hits: usize,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct LegacyResultSearchProject {
    pub project_id: String,
    pub project_type: String,
    pub slug: Option<String>,
    pub author: String,
    pub title: String,
    pub description: String,
    pub categories: Vec<String>,
    pub display_categories: Vec<String>,
    pub versions: Vec<String>,
    pub downloads: i32,
    pub follows: i32,
    pub icon_url: String,
    /// RFC 3339 formatted creation date of the project
    pub date_created: String,
    /// RFC 3339 formatted modification date of the project
    pub date_modified: String,
    pub latest_version: String,
    pub license: String,
    pub client_side: String,
    pub server_side: String,
    pub gallery: Vec<String>,
    pub featured_gallery: Option<String>,
    pub color: Option<u32>,
}

// TODO: In other PR, when these are merged, make sure the v2 search testing functions use these
impl LegacyResultSearchProject {
    pub fn from(result_search_project: ResultSearchProject) -> Self {
        let mut categories = result_search_project.categories;
        if categories.contains(&"mrpack".to_string()) {
            if let Some(mrpack_loaders) = result_search_project.loader_fields.get("mrpack_loaders")
            {
                categories.extend(mrpack_loaders.clone());
                categories.retain(|c| c != "mrpack");
            }
        }
        let mut display_categories = result_search_project.display_categories;
        if display_categories.contains(&"mrpack".to_string()) {
            if let Some(mrpack_loaders) = result_search_project.loader_fields.get("mrpack_loaders")
            {
                display_categories.extend(mrpack_loaders.clone());
                display_categories.retain(|c| c != "mrpack");
            }
        }

        // Sort then remove duplicates
        categories.sort();
        categories.dedup();
        display_categories.sort();
        display_categories.dedup();

        Self {
            project_type: result_search_project
                .project_types
                .first()
                .cloned()
                .unwrap_or_default(),
            client_side: result_search_project
                .loader_fields
                .get("client_side")
                .cloned()
                .unwrap_or_default()
                .join(","),
            server_side: result_search_project
                .loader_fields
                .get("server_side")
                .cloned()
                .unwrap_or_default()
                .join(","),
            versions: result_search_project
                .loader_fields
                .get("game_versions")
                .cloned()
                .unwrap_or_default(),
            latest_version: result_search_project.version_id,
            categories,

            project_id: result_search_project.project_id,
            slug: result_search_project.slug,
            author: result_search_project.author,
            title: result_search_project.title,
            description: result_search_project.description,
            display_categories,
            downloads: result_search_project.downloads,
            follows: result_search_project.follows,
            icon_url: result_search_project.icon_url,
            license: result_search_project.license,
            date_created: result_search_project.date_created,
            date_modified: result_search_project.date_modified,
            gallery: result_search_project.gallery,
            featured_gallery: result_search_project.featured_gallery,
            color: result_search_project.color,
        }
    }
}

impl LegacySearchResults {
    pub fn from(search_results: crate::search::SearchResults) -> Self {
        Self {
            hits: search_results
                .hits
                .into_iter()
                .map(LegacyResultSearchProject::from)
                .collect(),
            offset: search_results.offset,
            limit: search_results.limit,
            total_hits: search_results.total_hits,
        }
    }
}
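Editor's note: to make the mrpack fix-up above concrete, a v3 search hit for a modpack carries the synthetic "mrpack" category plus a mrpack_loaders loader field, and the v2 shape should instead list the pack's real loaders. Below is a self-contained sketch of just that step; the real code operates on ResultSearchProject, so the plain Vec/HashMap signature here is only for illustration.

use std::collections::HashMap;

// Distilled version of the category fix-up in LegacyResultSearchProject::from:
// replace the synthetic "mrpack" category with the pack's actual loaders,
// then sort and deduplicate.
fn remap_mrpack_categories(
    mut categories: Vec<String>,
    loader_fields: &HashMap<String, Vec<String>>,
) -> Vec<String> {
    if categories.contains(&"mrpack".to_string()) {
        if let Some(mrpack_loaders) = loader_fields.get("mrpack_loaders") {
            categories.extend(mrpack_loaders.clone());
            categories.retain(|c| c != "mrpack");
        }
    }
    categories.sort();
    categories.dedup();
    categories
}

fn main() {
    let mut loader_fields = HashMap::new();
    loader_fields.insert(
        "mrpack_loaders".to_string(),
        vec!["fabric".to_string(), "quilt".to_string()],
    );
    let out = remap_mrpack_categories(
        vec!["adventure".to_string(), "mrpack".to_string()],
        &loader_fields,
    );
    assert_eq!(out, vec!["adventure", "fabric", "quilt"]);
}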
@@ -6,6 +6,7 @@ use crate::models::projects::{
    DonationLink, MonetizationStatus, Project, ProjectStatus, SearchRequest, SideType,
};
use crate::models::v2::projects::LegacyProject;
use crate::models::v2::search::LegacySearchResults;
use crate::queue::session::AuthQueue;
use crate::routes::v3::projects::ProjectIds;
use crate::routes::{v2_reroute, v3, ApiError};

@@ -95,7 +96,7 @@ pub async fn project_search(

    let results = search_for_project(&info, &config).await?;

    // TODO: convert to v2 format; we may need a new v2 struct for this for 'original' format
    let results = LegacySearchResults::from(results);

    Ok(HttpResponse::Ok().json(results))
}
@@ -7,7 +7,7 @@ use crate::database::models::loader_fields::{
};
use crate::database::redis::RedisPool;
use actix_web::{web, HttpResponse};
use itertools::Itertools;

use serde_json::Value;
use sqlx::PgPool;

@@ -121,20 +121,16 @@ pub async fn loader_fields_list(
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let query = query.into_inner();
    let all_loader_ids = Loader::list(&**pool, &redis)
    let loader_field = LoaderField::get_fields_all(&**pool, &redis)
        .await?
        .into_iter()
        .map(|x| x.id)
        .collect_vec();
    let loader_field =
        LoaderField::get_field(&query.loader_field, &all_loader_ids, &**pool, &redis)
            .await?
            .ok_or_else(|| {
                ApiError::InvalidInput(format!(
                    "'{}' was not a valid loader field.",
                    query.loader_field
                ))
            })?;
        .find(|x| x.field == query.loader_field)
        .ok_or_else(|| {
            ApiError::InvalidInput(format!(
                "'{}' was not a valid loader field.",
                query.loader_field
            ))
        })?;

    let loader_field_enum_id = match loader_field.field_type {
        LoaderFieldType::Enum(enum_id) | LoaderFieldType::ArrayEnum(enum_id) => enum_id,
@@ -124,10 +124,16 @@ async fn create_and_add_to_index(
    let index = create_index(client, name, custom_rules).await?;

    let mut new_filterable_attributes = index.get_filterable_attributes().await?;
    let mut new_displayed_attributes = index.get_displayed_attributes().await?;

    new_filterable_attributes.extend(additional_fields.iter().map(|s| s.to_string()));
    new_displayed_attributes.extend(additional_fields.iter().map(|s| s.to_string()));
    index
        .set_filterable_attributes(new_filterable_attributes)
        .await?;
    index
        .set_displayed_attributes(new_displayed_attributes)
        .await?;

    add_to_index(client, index, projects).await?;
    Ok(())
@@ -2,12 +2,9 @@ use std::collections::HashMap

use actix_web::dev::ServiceResponse;
use async_trait::async_trait;
use labrinth::{
    models::{
        projects::{ProjectId, VersionType},
        teams::{OrganizationPermissions, ProjectPermissions},
    },
    search::SearchResults,
use labrinth::models::{
    projects::{ProjectId, VersionType},
    teams::{OrganizationPermissions, ProjectPermissions},
};

use crate::common::{api_v2::ApiV2, api_v3::ApiV3, dummy_data::TestFile};

@@ -76,7 +73,6 @@ delegate_api_variant!(
    [edit_project, ServiceResponse, id_or_slug: &str, patch: serde_json::Value, pat: &str],
    [edit_project_bulk, ServiceResponse, ids_or_slugs: &[&str], patch: serde_json::Value, pat: &str],
    [edit_project_icon, ServiceResponse, id_or_slug: &str, icon: Option<CommonImageData>, pat: &str],
    [search_deserialized_common, SearchResults, query: Option<&str>, facets: Option<serde_json::Value>, pat: &str],
    }
);
@@ -11,7 +11,6 @@ use labrinth::{
        projects::{ProjectId, VersionType},
        teams::{OrganizationPermissions, ProjectPermissions},
    },
    search::SearchResults,
    LabrinthConfig,
};

@@ -66,12 +65,6 @@ pub trait ApiProject {
        icon: Option<CommonImageData>,
        pat: &str,
    ) -> ServiceResponse;
    async fn search_deserialized_common(
        &self,
        query: Option<&str>,
        facets: Option<serde_json::Value>,
        pat: &str,
    ) -> SearchResults;
}

#[async_trait(?Send)]
@@ -13,7 +13,8 @@ use actix_web::{
use async_trait::async_trait;
use bytes::Bytes;
use labrinth::{
    models::v2::projects::LegacyProject, search::SearchResults, util::actix::AppendsMultipart,
    models::v2::{projects::LegacyProject, search::LegacySearchResults},
    util::actix::AppendsMultipart,
};
use serde_json::json;

@@ -37,6 +38,34 @@ impl ApiV2 {
        assert_eq!(resp.status(), 200);
        test::read_body_json(resp).await
    }

    pub async fn search_deserialized(
        &self,
        query: Option<&str>,
        facets: Option<serde_json::Value>,
        pat: &str,
    ) -> LegacySearchResults {
        let query_field = if let Some(query) = query {
            format!("&query={}", urlencoding::encode(query))
        } else {
            "".to_string()
        };

        let facets_field = if let Some(facets) = facets {
            format!("&facets={}", urlencoding::encode(&facets.to_string()))
        } else {
            "".to_string()
        };

        let req = test::TestRequest::get()
            .uri(&format!("/v2/search?{}{}", query_field, facets_field))
            .append_header(("Authorization", pat))
            .to_request();
        let resp = self.call(req).await;
        let status = resp.status();
        assert_eq!(status, 200);
        test::read_body_json(resp).await
    }
}

#[async_trait(?Send)]

@@ -195,32 +224,4 @@ impl ApiProject for ApiV2 {
        self.call(req).await
    }
}

    async fn search_deserialized_common(
        &self,
        query: Option<&str>,
        facets: Option<serde_json::Value>,
        pat: &str,
    ) -> SearchResults {
        let query_field = if let Some(query) = query {
            format!("&query={}", urlencoding::encode(query))
        } else {
            "".to_string()
        };

        let facets_field = if let Some(facets) = facets {
            format!("&facets={}", urlencoding::encode(&facets.to_string()))
        } else {
            "".to_string()
        };

        let req = test::TestRequest::get()
            .uri(&format!("/v2/search?{}{}", query_field, facets_field))
            .append_header(("Authorization", pat))
            .to_request();
        let resp = self.call(req).await;
        let status = resp.status();
        assert_eq!(status, 200);
        test::read_body_json(resp).await
    }
}
@@ -179,8 +179,10 @@ impl ApiProject for ApiV3 {
        self.call(req).await
    }
}
}

    async fn search_deserialized_common(
impl ApiV3 {
    pub async fn search_deserialized(
        &self,
        query: Option<&str>,
        facets: Option<serde_json::Value>,

@@ -207,9 +209,7 @@ impl ApiProject for ApiV3 {
        assert_eq!(status, 200);
        test::read_body_json(resp).await
    }
}

impl ApiV3 {
    pub async fn get_analytics_revenue(
        &self,
        id_or_slugs: Vec<&str>,
@@ -289,11 +289,7 @@ async fn search_projects() {
        let test_name = test_name.clone();
        async move {
            let projects = api
                .search_deserialized_common(
                    Some(&test_name),
                    Some(facets.clone()),
                    USER_USER_PAT,
                )
                .search_deserialized(Some(&test_name), Some(facets.clone()), USER_USER_PAT)
                .await;
            let mut found_project_ids: Vec<u64> = projects
                .hits

@@ -281,11 +281,7 @@ async fn search_projects() {
        let test_name = test_name.clone();
        async move {
            let projects = api
                .search_deserialized_common(
                    Some(&test_name),
                    Some(facets.clone()),
                    USER_USER_PAT,
                )
                .search_deserialized(Some(&test_name), Some(facets.clone()), USER_USER_PAT)
                .await;
            let mut found_project_ids: Vec<u64> = projects
                .hits
@@ -427,7 +427,7 @@ async fn add_version_project_types_v2() {
    let test_project = api
        .get_project_deserialized(&test_project.slug.unwrap(), USER_USER_PAT)
        .await;
    assert_eq!(test_project.project_type, ""); // No project_type set, as no versions are set
    assert_eq!(test_project.project_type, "unknown"); // No project_type set, as no versions are set
    // This is a known difference from older v2, but it is acceptable.
    // This would be the appropriate test on older v2:
    // assert_eq!(test_project.project_type, "modpack");