From c886e7949edbf0f18a9867d4cea72fd16dd2b9a0 Mon Sep 17 00:00:00 2001 From: Geometrically <18202329+Geometrically@users.noreply.github.com> Date: Mon, 19 Oct 2020 14:23:05 -0700 Subject: [PATCH] Support for using a master key (#83) * Support for using a master key * Expand scope of PR, add wrapper struct, add files to intitial versions/mods * Change changelog path, run formatter * Split file changes into different PR * Formatting, rename main variable Co-authored-by: Aeledfyr --- .env | 2 ++ docker-compose.yml | 2 ++ src/main.rs | 27 ++++++++++++++++++++++----- src/routes/mod.rs | 4 ++++ src/routes/mods.rs | 15 ++++++++++++--- src/search/indexing/mod.rs | 28 ++++++++++++++++------------ src/search/indexing/queue.rs | 8 ++++++-- src/search/mod.rs | 14 +++++++++++--- 8 files changed, 75 insertions(+), 25 deletions(-) diff --git a/.env b/.env index 16e159205..1fa036094 100644 --- a/.env +++ b/.env @@ -5,7 +5,9 @@ CORS_ORIGINS='["http://localhost:3000","https://modrinth.com"]' CDN_URL=https://cdn.modrinth.com DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth + MEILISEARCH_ADDR=http://localhost:7700 +MEILISEARCH_KEY=modrinth BIND_ADDR=127.0.0.1:8000 MOCK_FILE_PATH=/tmp/modrinth diff --git a/docker-compose.yml b/docker-compose.yml index 643c47c64..86a414d76 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,6 +18,8 @@ services: - 7700:7700 volumes: - meilisearch-data:/data.ms + environment: + MEILI_MASTER_KEY: modrinth pgadmin: image: dpage/pgadmin4:latest environment: diff --git a/src/main.rs b/src/main.rs index e365fca32..48bbff298 100644 --- a/src/main.rs +++ b/src/main.rs @@ -39,13 +39,22 @@ async fn main() -> std::io::Result<()> { check_env_vars(); + let search_config = search::SearchConfig { + address: dotenv::var("MEILISEARCH_ADDR").unwrap(), + key: dotenv::var("MEILISEARCH_KEY").unwrap(), + }; + if config.reset_indices { info!("Resetting indices"); - search::indexing::reset_indices().await.unwrap(); + 
search::indexing::reset_indices(&search_config) + .await + .unwrap(); return Ok(()); } else if config.reconfigure_indices { info!("Reconfiguring indices"); - search::indexing::reconfigure_indices().await.unwrap(); + search::indexing::reconfigure_indices(&search_config) + .await + .unwrap(); return Ok(()); } @@ -106,9 +115,11 @@ async fn main() -> std::io::Result<()> { ); let pool_ref = pool.clone(); + let thread_search_config = search_config.clone(); let mut skip = skip_initial; scheduler.run(local_index_interval, move || { let pool_ref = pool_ref.clone(); + let thread_search_config = thread_search_config.clone(); let local_skip = skip; if skip { skip = false; @@ -122,7 +133,7 @@ async fn main() -> std::io::Result<()> { index_local: true, index_external: false, }; - let result = index_mods(pool_ref, settings).await; + let result = index_mods(pool_ref, settings, &thread_search_config).await; if let Err(e) = result { warn!("Local mod indexing failed: {:?}", e); } @@ -133,9 +144,11 @@ async fn main() -> std::io::Result<()> { let indexing_queue = Arc::new(search::indexing::queue::CreationQueue::new()); let queue_ref = indexing_queue.clone(); + let thread_search_config = search_config.clone(); let mut skip = skip_initial; scheduler.run(std::time::Duration::from_secs(15 * 60), move || { let queue = queue_ref.clone(); + let thread_search_config = thread_search_config.clone(); let local_skip = skip; if skip { skip = false; @@ -145,7 +158,7 @@ async fn main() -> std::io::Result<()> { return; } info!("Indexing created mod queue"); - let result = search::indexing::queue::index_queue(&*queue).await; + let result = search::indexing::queue::index_queue(&*queue, &thread_search_config).await; if let Err(e) = result { warn!("Indexing created mods failed: {:?}", e); } @@ -168,15 +181,17 @@ async fn main() -> std::io::Result<()> { ); let pool_ref = pool.clone(); + let thread_search_config = search_config.clone(); scheduler.run(external_index_interval, move || { info!("Indexing 
curseforge"); let pool_ref = pool_ref.clone(); + let thread_search_config = thread_search_config.clone(); async move { let settings = IndexingSettings { index_local: false, index_external: true, }; - let result = index_mods(pool_ref, settings).await; + let result = index_mods(pool_ref, settings, &thread_search_config).await; if let Err(e) = result { warn!("External mod indexing failed: {:?}", e); } @@ -210,6 +225,7 @@ async fn main() -> std::io::Result<()> { .data(pool.clone()) .data(file_host.clone()) .data(indexing_queue.clone()) + .data(search_config.clone()) .service(routes::index_get) .service( web::scope("/api/v1/") @@ -253,6 +269,7 @@ fn check_env_vars() { check_var::<String>("CDN_URL"); check_var::<String>("DATABASE_URL"); check_var::<String>("MEILISEARCH_ADDR"); + check_var::<String>("MEILISEARCH_KEY"); check_var::<String>("BIND_ADDR"); check_var::<String>("STORAGE_BACKEND"); diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 6aa8d6531..bbf215643 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -60,6 +60,8 @@ pub enum ApiError { JsonError(#[from] serde_json::Error), #[error("Authentication Error")] AuthenticationError, + #[error("Search Error: {0}")] + SearchError(#[from] meilisearch_sdk::errors::Error), } impl actix_web::ResponseError for ApiError { @@ -68,6 +70,7 @@ impl actix_web::ResponseError for ApiError { ApiError::DatabaseError(..) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR, ApiError::AuthenticationError => actix_web::http::StatusCode::UNAUTHORIZED, ApiError::JsonError(..) => actix_web::http::StatusCode::BAD_REQUEST, + ApiError::SearchError(..) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR, } } @@ -78,6 +81,7 @@ impl actix_web::ResponseError for ApiError { ApiError::DatabaseError(..) => "database_error", ApiError::AuthenticationError => "unauthorized", ApiError::JsonError(..) => "json_error", + ApiError::SearchError(..)
=> "search_error", }, description: &self.to_string(), }, diff --git a/src/routes/mods.rs b/src/routes/mods.rs index 01ece52d2..e72622ac6 100644 --- a/src/routes/mods.rs +++ b/src/routes/mods.rs @@ -3,7 +3,7 @@ use crate::auth::check_is_moderator_from_headers; use crate::database; use crate::models; use crate::models::mods::SearchRequest; -use crate::search::{search_for_mod, SearchError}; +use crate::search::{search_for_mod, SearchConfig, SearchError}; use actix_web::{delete, get, web, HttpRequest, HttpResponse}; use serde::{Deserialize, Serialize}; use sqlx::PgPool; @@ -11,8 +11,9 @@ use sqlx::PgPool; #[get("mod")] pub async fn mod_search( web::Query(info): web::Query<SearchRequest>, + config: web::Data<SearchConfig>, ) -> Result<HttpResponse, SearchError> { - let results = search_for_mod(&info).await?; + let results = search_for_mod(&info, &**config).await?; Ok(HttpResponse::Ok().json(results)) } @@ -121,12 +122,13 @@ pub async fn mod_get( Ok(HttpResponse::NotFound().body("")) } } -// TODO: The mod remains in meilisearch's index until the index is deleted + #[delete("{id}")] pub async fn mod_delete( req: HttpRequest, info: web::Path<(models::ids::ModId,)>, pool: web::Data<PgPool>, + config: web::Data<SearchConfig>, ) -> Result<HttpResponse, ApiError> { check_is_moderator_from_headers( req.headers(), @@ -143,6 +145,13 @@ pub async fn mod_delete( .await .map_err(|e| ApiError::DatabaseError(e.into()))?; + let client = meilisearch_sdk::client::Client::new(&*config.address, &*config.key); + + let indexes: Vec<meilisearch_sdk::indexes::Index> = client.get_indexes().await?; + for index in indexes { + index.delete_document(format!("local-{}", id)).await?; + } + if result.is_some() { Ok(HttpResponse::Ok().body("")) } else { diff --git a/src/search/indexing/mod.rs b/src/search/indexing/mod.rs index c8930a0f7..3dd70619c 100644 --- a/src/search/indexing/mod.rs +++ b/src/search/indexing/mod.rs @@ -3,7 +3,7 @@ pub mod curseforge_import; pub mod local_import; pub mod queue; -use crate::search::UploadSearchMod; +use crate::search::{SearchConfig, UploadSearchMod}; use curseforge_import::index_curseforge; use 
local_import::index_local; use meilisearch_sdk::client::Client; @@ -56,7 +56,11 @@ impl IndexingSettings { } } -pub async fn index_mods(pool: PgPool, settings: IndexingSettings) -> Result<(), IndexingError> { +pub async fn index_mods( + pool: PgPool, + settings: IndexingSettings, + config: &SearchConfig, +) -> Result<(), IndexingError> { let mut docs_to_add: Vec<UploadSearchMod> = vec![]; if settings.index_local { @@ -73,14 +77,13 @@ // Write Indices - add_mods(docs_to_add).await?; + add_mods(docs_to_add, config).await?; Ok(()) } -pub async fn reset_indices() -> Result<(), IndexingError> { - let address = &*dotenv::var("MEILISEARCH_ADDR")?; - let client = Client::new(address, ""); +pub async fn reset_indices(config: &SearchConfig) -> Result<(), IndexingError> { + let client = Client::new(&*config.address, &*config.key); client.delete_index("relevance_mods").await?; client.delete_index("downloads_mods").await?; @@ -89,9 +92,8 @@ Ok(()) } -pub async fn reconfigure_indices() -> Result<(), IndexingError> { - let address = &*dotenv::var("MEILISEARCH_ADDR")?; - let client = Client::new(address, ""); +pub async fn reconfigure_indices(config: &SearchConfig) -> Result<(), IndexingError> { + let client = Client::new(&*config.address, &*config.key); // Relevance Index update_index(&client, "relevance_mods", { @@ -184,9 +186,11 @@ async fn add_to_index(index: Index<'_>, mods: &[UploadSearchMod]) -> Result<(), Ok(()) } -pub async fn add_mods(mods: Vec<UploadSearchMod>) -> Result<(), IndexingError> { - let address = &*dotenv::var("MEILISEARCH_ADDR")?; - let client = Client::new(address, ""); +pub async fn add_mods( + mods: Vec<UploadSearchMod>, + config: &SearchConfig, +) -> Result<(), IndexingError> { + let client = Client::new(&*config.address, &*config.key); // Relevance Index let relevance_index = create_index(&client, "relevance_mods", || { diff --git a/src/search/indexing/queue.rs 
b/src/search/indexing/queue.rs index f72d90deb..809e39c3f 100644 --- a/src/search/indexing/queue.rs +++ b/src/search/indexing/queue.rs @@ -1,4 +1,5 @@ use super::{add_mods, IndexingError, UploadSearchMod}; +use crate::search::SearchConfig; use std::sync::Mutex; pub struct CreationQueue { @@ -25,7 +26,10 @@ impl CreationQueue { } } -pub async fn index_queue(queue: &CreationQueue) -> Result<(), IndexingError> { +pub async fn index_queue( + queue: &CreationQueue, + config: &SearchConfig, +) -> Result<(), IndexingError> { let queue = queue.take(); - add_mods(queue).await + add_mods(queue, config).await } diff --git a/src/search/mod.rs b/src/search/mod.rs index 9e67aab93..c3f59c159 100644 --- a/src/search/mod.rs +++ b/src/search/mod.rs @@ -52,6 +52,12 @@ impl actix_web::ResponseError for SearchError { } } +#[derive(Clone)] +pub struct SearchConfig { + pub address: String, + pub key: String, +} + /// A mod document used for uploading mods to meilisearch's indices. /// This contains some extra data that is not returned by search results. #[derive(Serialize, Deserialize, Debug, Clone)] @@ -133,9 +139,11 @@ impl Document for ResultSearchMod { } } -pub async fn search_for_mod(info: &SearchRequest) -> Result<SearchResults, SearchError> { - let address = &*dotenv::var("MEILISEARCH_ADDR")?; - let client = Client::new(address, ""); +pub async fn search_for_mod( + info: &SearchRequest, + config: &SearchConfig, +) -> Result<SearchResults, SearchError> { + let client = Client::new(&*config.address, &*config.key); let filters: Cow<_> = match (info.filters.as_deref(), info.version.as_deref()) { (Some(f), Some(v)) => format!("({}) AND ({})", f, v).into(),