Support for using a master key (#83)

* Support for using a master key

* Expand scope of PR, add wrapper struct, add files to initial versions/mods

* Change changelog path, run formatter

* Split file changes into different PR

* Formatting, rename main variable

Co-authored-by: Aeledfyr <aeledfyr@gmail.com>
This commit is contained in:
Geometrically 2020-10-19 14:23:05 -07:00 committed by GitHub
parent e0b972f6d6
commit c886e7949e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 75 additions and 25 deletions

2
.env
View File

@ -5,7 +5,9 @@ CORS_ORIGINS='["http://localhost:3000","https://modrinth.com"]'
CDN_URL=https://cdn.modrinth.com
DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth
MEILISEARCH_ADDR=http://localhost:7700
MEILISEARCH_KEY=modrinth
BIND_ADDR=127.0.0.1:8000
MOCK_FILE_PATH=/tmp/modrinth

View File

@ -18,6 +18,8 @@ services:
- 7700:7700
volumes:
- meilisearch-data:/data.ms
environment:
MEILI_MASTER_KEY: modrinth
pgadmin:
image: dpage/pgadmin4:latest
environment:

View File

@ -39,13 +39,22 @@ async fn main() -> std::io::Result<()> {
check_env_vars();
let search_config = search::SearchConfig {
address: dotenv::var("MEILISEARCH_ADDR").unwrap(),
key: dotenv::var("MEILISEARCH_KEY").unwrap(),
};
if config.reset_indices {
info!("Resetting indices");
search::indexing::reset_indices().await.unwrap();
search::indexing::reset_indices(&search_config)
.await
.unwrap();
return Ok(());
} else if config.reconfigure_indices {
info!("Reconfiguring indices");
search::indexing::reconfigure_indices().await.unwrap();
search::indexing::reconfigure_indices(&search_config)
.await
.unwrap();
return Ok(());
}
@ -106,9 +115,11 @@ async fn main() -> std::io::Result<()> {
);
let pool_ref = pool.clone();
let thread_search_config = search_config.clone();
let mut skip = skip_initial;
scheduler.run(local_index_interval, move || {
let pool_ref = pool_ref.clone();
let thread_search_config = thread_search_config.clone();
let local_skip = skip;
if skip {
skip = false;
@ -122,7 +133,7 @@ async fn main() -> std::io::Result<()> {
index_local: true,
index_external: false,
};
let result = index_mods(pool_ref, settings).await;
let result = index_mods(pool_ref, settings, &thread_search_config).await;
if let Err(e) = result {
warn!("Local mod indexing failed: {:?}", e);
}
@ -133,9 +144,11 @@ async fn main() -> std::io::Result<()> {
let indexing_queue = Arc::new(search::indexing::queue::CreationQueue::new());
let queue_ref = indexing_queue.clone();
let thread_search_config = search_config.clone();
let mut skip = skip_initial;
scheduler.run(std::time::Duration::from_secs(15 * 60), move || {
let queue = queue_ref.clone();
let thread_search_config = thread_search_config.clone();
let local_skip = skip;
if skip {
skip = false;
@ -145,7 +158,7 @@ async fn main() -> std::io::Result<()> {
return;
}
info!("Indexing created mod queue");
let result = search::indexing::queue::index_queue(&*queue).await;
let result = search::indexing::queue::index_queue(&*queue, &thread_search_config).await;
if let Err(e) = result {
warn!("Indexing created mods failed: {:?}", e);
}
@ -168,15 +181,17 @@ async fn main() -> std::io::Result<()> {
);
let pool_ref = pool.clone();
let thread_search_config = search_config.clone();
scheduler.run(external_index_interval, move || {
info!("Indexing curseforge");
let pool_ref = pool_ref.clone();
let thread_search_config = thread_search_config.clone();
async move {
let settings = IndexingSettings {
index_local: false,
index_external: true,
};
let result = index_mods(pool_ref, settings).await;
let result = index_mods(pool_ref, settings, &thread_search_config).await;
if let Err(e) = result {
warn!("External mod indexing failed: {:?}", e);
}
@ -210,6 +225,7 @@ async fn main() -> std::io::Result<()> {
.data(pool.clone())
.data(file_host.clone())
.data(indexing_queue.clone())
.data(search_config.clone())
.service(routes::index_get)
.service(
web::scope("/api/v1/")
@ -253,6 +269,7 @@ fn check_env_vars() {
check_var::<String>("CDN_URL");
check_var::<String>("DATABASE_URL");
check_var::<String>("MEILISEARCH_ADDR");
check_var::<String>("MEILISEARCH_KEY");
check_var::<String>("BIND_ADDR");
check_var::<String>("STORAGE_BACKEND");

View File

@ -60,6 +60,8 @@ pub enum ApiError {
JsonError(#[from] serde_json::Error),
#[error("Authentication Error")]
AuthenticationError,
#[error("Search Error: {0}")]
SearchError(#[from] meilisearch_sdk::errors::Error),
}
impl actix_web::ResponseError for ApiError {
@ -68,6 +70,7 @@ impl actix_web::ResponseError for ApiError {
ApiError::DatabaseError(..) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
ApiError::AuthenticationError => actix_web::http::StatusCode::UNAUTHORIZED,
ApiError::JsonError(..) => actix_web::http::StatusCode::BAD_REQUEST,
ApiError::SearchError(..) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
}
}
@ -78,6 +81,7 @@ impl actix_web::ResponseError for ApiError {
ApiError::DatabaseError(..) => "database_error",
ApiError::AuthenticationError => "unauthorized",
ApiError::JsonError(..) => "json_error",
ApiError::SearchError(..) => "search_error",
},
description: &self.to_string(),
},

View File

@ -3,7 +3,7 @@ use crate::auth::check_is_moderator_from_headers;
use crate::database;
use crate::models;
use crate::models::mods::SearchRequest;
use crate::search::{search_for_mod, SearchError};
use crate::search::{search_for_mod, SearchConfig, SearchError};
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@ -11,8 +11,9 @@ use sqlx::PgPool;
#[get("mod")]
pub async fn mod_search(
web::Query(info): web::Query<SearchRequest>,
config: web::Data<SearchConfig>,
) -> Result<HttpResponse, SearchError> {
let results = search_for_mod(&info).await?;
let results = search_for_mod(&info, &**config).await?;
Ok(HttpResponse::Ok().json(results))
}
@ -121,12 +122,13 @@ pub async fn mod_get(
Ok(HttpResponse::NotFound().body(""))
}
}
// TODO: The mod remains in meilisearch's index until the index is deleted
#[delete("{id}")]
pub async fn mod_delete(
req: HttpRequest,
info: web::Path<(models::ids::ModId,)>,
pool: web::Data<PgPool>,
config: web::Data<SearchConfig>,
) -> Result<HttpResponse, ApiError> {
check_is_moderator_from_headers(
req.headers(),
@ -143,6 +145,13 @@ pub async fn mod_delete(
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?;
let client = meilisearch_sdk::client::Client::new(&*config.key, &*config.address);
let indexes: Vec<meilisearch_sdk::indexes::Index> = client.get_indexes().await?;
for index in indexes {
index.delete_document(format!("local-{}", id)).await?;
}
if result.is_some() {
Ok(HttpResponse::Ok().body(""))
} else {

View File

@ -3,7 +3,7 @@ pub mod curseforge_import;
pub mod local_import;
pub mod queue;
use crate::search::UploadSearchMod;
use crate::search::{SearchConfig, UploadSearchMod};
use curseforge_import::index_curseforge;
use local_import::index_local;
use meilisearch_sdk::client::Client;
@ -56,7 +56,11 @@ impl IndexingSettings {
}
}
pub async fn index_mods(pool: PgPool, settings: IndexingSettings) -> Result<(), IndexingError> {
pub async fn index_mods(
pool: PgPool,
settings: IndexingSettings,
config: &SearchConfig,
) -> Result<(), IndexingError> {
let mut docs_to_add: Vec<UploadSearchMod> = vec![];
if settings.index_local {
@ -73,14 +77,13 @@ pub async fn index_mods(pool: PgPool, settings: IndexingSettings) -> Result<(),
// Write Indices
add_mods(docs_to_add).await?;
add_mods(docs_to_add, config).await?;
Ok(())
}
pub async fn reset_indices() -> Result<(), IndexingError> {
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
pub async fn reset_indices(config: &SearchConfig) -> Result<(), IndexingError> {
let client = Client::new(&*config.address, &*config.key);
client.delete_index("relevance_mods").await?;
client.delete_index("downloads_mods").await?;
@ -89,9 +92,8 @@ pub async fn reset_indices() -> Result<(), IndexingError> {
Ok(())
}
pub async fn reconfigure_indices() -> Result<(), IndexingError> {
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
pub async fn reconfigure_indices(config: &SearchConfig) -> Result<(), IndexingError> {
let client = Client::new(&*config.address, &*config.key);
// Relevance Index
update_index(&client, "relevance_mods", {
@ -184,9 +186,11 @@ async fn add_to_index(index: Index<'_>, mods: &[UploadSearchMod]) -> Result<(),
Ok(())
}
pub async fn add_mods(mods: Vec<UploadSearchMod>) -> Result<(), IndexingError> {
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
pub async fn add_mods(
mods: Vec<UploadSearchMod>,
config: &SearchConfig,
) -> Result<(), IndexingError> {
let client = Client::new(&*config.address, &*config.key);
// Relevance Index
let relevance_index = create_index(&client, "relevance_mods", || {

View File

@ -1,4 +1,5 @@
use super::{add_mods, IndexingError, UploadSearchMod};
use crate::search::SearchConfig;
use std::sync::Mutex;
pub struct CreationQueue {
@ -25,7 +26,10 @@ impl CreationQueue {
}
}
pub async fn index_queue(queue: &CreationQueue) -> Result<(), IndexingError> {
pub async fn index_queue(
queue: &CreationQueue,
config: &SearchConfig,
) -> Result<(), IndexingError> {
let queue = queue.take();
add_mods(queue).await
add_mods(queue, config).await
}

View File

@ -52,6 +52,12 @@ impl actix_web::ResponseError for SearchError {
}
}
/// Connection settings for the MeiliSearch backend.
///
/// Built in `main` from the `MEILISEARCH_ADDR` and `MEILISEARCH_KEY`
/// environment variables, then cloned into background indexing tasks and
/// registered as actix `web::Data` so request handlers can construct a
/// `meilisearch_sdk` client on demand.
#[derive(Clone)]
pub struct SearchConfig {
// Base URL of the MeiliSearch server (from MEILISEARCH_ADDR).
pub address: String,
// Master/API key used to authenticate requests (from MEILISEARCH_KEY).
pub key: String,
}
/// A mod document used for uploading mods to meilisearch's indices.
/// This contains some extra data that is not returned by search results.
#[derive(Serialize, Deserialize, Debug, Clone)]
@ -133,9 +139,11 @@ impl Document for ResultSearchMod {
}
}
pub async fn search_for_mod(info: &SearchRequest) -> Result<SearchResults, SearchError> {
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
pub async fn search_for_mod(
info: &SearchRequest,
config: &SearchConfig,
) -> Result<SearchResults, SearchError> {
let client = Client::new(&*config.key, &*config.address);
let filters: Cow<_> = match (info.filters.as_deref(), info.version.as_deref()) {
(Some(f), Some(v)) => format!("({}) AND ({})", f, v).into(),