Improve error handling (#33)

* refactor: improve error handling

* fix: specify bind address instead of port

* fix: remove temporary testing file

* fix(errors): change error names to snake_case

* refactor(errors): split indexing error types, remove unused errors

* feat: add env variable checking at program start

This just checks whether the environment variables exist and can
parse to the given type and gives a warning if they can't. This
should prevent cases where the program fails at runtime due to
checking an environment variable that doesn't exist.
This commit is contained in:
Aeledfyr 2020-07-03 12:44:39 -05:00 committed by GitHub
parent 91305262f1
commit 6ff7fa74e2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 171 additions and 110 deletions

2
.env
View File

@ -4,7 +4,7 @@ DEBUG=true
MONGODB_ADDR=mongodb://localhost:27017
MEILISEARCH_ADDR=http://localhost:7700
PORT=8000
BIND_ADDR=127.0.0.1:8000
BACKBLAZE_KEY_ID=none
BACKBLAZE_KEY=none

View File

@ -6,7 +6,8 @@ use mongodb::Client;
pub async fn connect() -> Result<Client, Error> {
info!("Initializing database connection");
let mut client_options = ClientOptions::parse(&dotenv::var("MONGODB_ADDR").unwrap()).await?;
let mongodb_addr = dotenv::var("MONGODB_ADDR").expect("`MONGO_ADDR` not in .env");
let mut client_options = ClientOptions::parse(&mongodb_addr).await?;
client_options.app_name = Some("labrinth".to_string());
Client::with_options(client_options)

View File

@ -18,10 +18,8 @@ pub use delete::DeleteFileData;
#[derive(Error, Debug)]
pub enum FileHostingError {
#[error("Error while accessing the data from remote")]
RemoteWebsiteError(#[from] reqwest::Error),
#[error("Error while serializing or deserializing JSON")]
SerDeError(#[from] serde_json::Error),
#[error("Error while accessing the data from backblaze")]
BackblazeError(#[from] reqwest::Error),
}
#[cfg(test)]

View File

@ -17,7 +17,11 @@ async fn main() -> std::io::Result<()> {
env_logger::from_env(Env::default().default_filter_or("info")).init();
dotenv::dotenv().ok();
let client = database::connect().await.unwrap();
check_env_vars();
let client = database::connect()
.await
.expect("Database connection failed");
// Get executable path
let mut exe_path = env::current_exe()?.parent().unwrap().to_path_buf();
@ -28,11 +32,11 @@ async fn main() -> std::io::Result<()> {
if env::args().any(|x| x == "regen") {
// User forced regen of indexing
info!("Forced regeneration of indexes!");
index_mods(client).await.unwrap();
} else if exe_path.exists() {
index_mods(client).await.expect("Mod indexing failed");
} else if !exe_path.exists() {
// The indexes were not created, or the version was upgraded
info!("Indexing of mods for first time...");
index_mods(client).await.unwrap();
index_mods(client).await.expect("Mod indexing failed");
// Create the lock file
File::create(exe_path)?;
}
@ -48,7 +52,32 @@ async fn main() -> std::io::Result<()> {
.service(routes::mod_search)
.default_service(web::get().to(routes::not_found))
})
.bind("127.0.0.1:".to_string() + &dotenv::var("PORT").unwrap())?
.bind(dotenv::var("BIND_ADDR").unwrap())?
.run()
.await
}
// This is so that env vars not used immediately don't panic at runtime
fn check_env_vars() {
fn check_var<T: std::str::FromStr>(var: &str) {
if dotenv::var(var)
.ok()
.and_then(|s| s.parse::<T>().ok())
.is_none()
{
log::warn!(
"Variable `{}` missing in dotenv or not of type `{}`",
var,
std::any::type_name::<T>()
)
}
}
check_var::<bool>("INDEX_CURSEFORGE");
check_var::<String>("MONGODB_ADDR");
check_var::<String>("MEILISEARCH_ADDR");
check_var::<String>("BIND_ADDR");
check_var::<String>("BACKBLAZE_KEY_ID");
check_var::<String>("BACKBLAZE_KEY");
check_var::<String>("BACKBLAZE_BUCKET_ID");
}

View File

@ -60,7 +60,7 @@ pub struct Base62Id(pub u64);
#[derive(Error, Debug)]
pub enum DecodingError {
/// Encountered a non base62 character in base62 string
#[error("Invalid character `{0:?}` in base62 encoding")]
#[error("Invalid character {0:?} in base62 encoding")]
InvalidBase62(char),
/// Encountered integer overflow when decoding a base62 id.
#[error("Base62 decoding overflowed")]

View File

@ -1,13 +1,10 @@
use crate::models::mods::SearchRequest;
use crate::search::search_for_mod;
use crate::search::{search_for_mod, SearchError};
use actix_web::{get, web, HttpResponse};
#[get("api/v1/mods")]
pub fn mod_search(web::Query(info): web::Query<SearchRequest>) -> HttpResponse {
//TODO: Fix this line with anyhow
let body = serde_json::to_string(&search_for_mod(&info).unwrap()).unwrap();
HttpResponse::Ok()
.content_type("application/json")
.body(body)
pub async fn mod_search(
web::Query(info): web::Query<SearchRequest>,
) -> Result<HttpResponse, SearchError> {
Ok(HttpResponse::Ok().json(search_for_mod(&info)?))
}

View File

@ -1,4 +1,5 @@
use crate::search::{SearchError, SearchMod};
use super::IndexingError;
use crate::search::SearchMod;
use log::info;
use serde::{Deserialize, Serialize};
@ -47,7 +48,7 @@ pub struct CurseForgeMod {
pub async fn index_curseforge(
start_index: i32,
end_index: i32,
) -> Result<Vec<SearchMod>, SearchError> {
) -> Result<Vec<SearchMod>, IndexingError> {
info!("Indexing curseforge mods!");
let mut docs_to_add: Vec<SearchMod> = vec![];
@ -60,10 +61,13 @@ pub async fn index_curseforge(
(start_index..end_index).collect::<Vec<_>>()
))
.send()
.await?;
.await
.map_err(IndexingError::CurseforgeImportError)?;
let text = &res.text().await?;
let curseforge_mods: Vec<CurseForgeMod> = serde_json::from_str(text)?;
let curseforge_mods: Vec<CurseForgeMod> = res
.json()
.await
.map_err(IndexingError::CurseforgeImportError)?;
for curseforge_mod in curseforge_mods {
if curseforge_mod.game_slug != "minecraft"
@ -78,15 +82,14 @@ pub async fn index_curseforge(
let mut using_fabric = false;
for version in curseforge_mod.game_version_latest_files {
let version_number: String = version
if let Some(parsed) = version
.game_version
.chars()
.skip(2)
.take(version.game_version.len())
.collect();
if version_number.parse::<f32>()? < 14.0 {
using_forge = true;
.get(2..)
.and_then(|f| f.parse::<f32>().ok())
{
if parsed < 14.0 {
using_forge = true;
}
}
mod_game_versions.push(version.game_version);
@ -188,17 +191,13 @@ pub async fn index_curseforge(
date_created: curseforge_mod.date_created.chars().take(10).collect(),
created: curseforge_mod
.date_created
.chars()
.filter(|c| c.is_ascii_digit())
.collect::<String>()
.parse()?,
.parse::<chrono::DateTime<chrono::Utc>>()?
.timestamp(),
date_modified: curseforge_mod.date_modified.chars().take(10).collect(),
updated: curseforge_mod
.date_modified
.chars()
.filter(|c| c.is_ascii_digit())
.collect::<String>()
.parse()?,
.parse::<chrono::DateTime<chrono::Utc>>()?
.timestamp(),
latest_version,
empty: String::from("{}{}{}"),
})

View File

@ -3,11 +3,12 @@ use futures::StreamExt;
use log::info;
use crate::database::models::Item;
use crate::database::{Mod, Version};
use crate::database::{DatabaseError, Mod, Version};
use crate::search::{SearchError, SearchMod};
use super::IndexingError;
use crate::search::SearchMod;
pub async fn index_local(client: mongodb::Client) -> Result<Vec<SearchMod>, SearchError> {
pub async fn index_local(client: mongodb::Client) -> Result<Vec<SearchMod>, IndexingError> {
info!("Indexing local mods!");
let mut docs_to_add: Vec<SearchMod> = vec![];
@ -17,17 +18,26 @@ pub async fn index_local(client: mongodb::Client) -> Result<Vec<SearchMod>, Sear
let mods = db.collection("mods");
let versions = db.collection("versions");
let mut results = mods.find(None, None).await?;
let mut results = mods
.find(None, None)
.await
.map_err(DatabaseError::LocalDatabaseError)?;
while let Some(unparsed_result) = results.next().await {
let result: Mod = *Mod::from_doc(unparsed_result?)?;
let result: Mod =
*Mod::from_doc(unparsed_result.map_err(DatabaseError::LocalDatabaseError)?)?;
let mut mod_versions = versions.find(doc! { "mod_id": result.id}, None).await?;
let mut mod_versions = versions
.find(doc! { "mod_id": result.id }, None)
.await
.map_err(DatabaseError::LocalDatabaseError)?;
let mut mod_game_versions = vec![];
while let Some(unparsed_version) = mod_versions.next().await {
let mut version: Version = *Version::from_doc(unparsed_version?)?;
let mut version = unparsed_version
.map_err(DatabaseError::LocalDatabaseError)
.and_then(Version::from_doc)?;
mod_game_versions.append(&mut version.game_versions);
}

View File

@ -4,12 +4,29 @@ pub mod local_import;
use crate::search::indexing::curseforge_import::index_curseforge;
use crate::search::indexing::local_import::index_local;
use crate::search::{SearchError, SearchMod};
use crate::search::SearchMod;
use meilisearch_sdk::client::Client;
use meilisearch_sdk::settings::Settings;
use std::collections::{HashMap, VecDeque};
use thiserror::Error;
pub async fn index_mods(db: mongodb::Client) -> Result<(), SearchError> {
#[derive(Error, Debug)]
pub enum IndexingError {
#[error("Error while connecting to the MeiliSearch database")]
IndexDBError(meilisearch_sdk::errors::Error),
#[error("Error while importing mods from CurseForge")]
CurseforgeImportError(reqwest::Error),
#[error("Error while serializing or deserializing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
#[error("Error while parsing a timestamp: {0}")]
ParseDateError(#[from] chrono::format::ParseError),
#[error("Database Error: {0}")]
DatabaseError(#[from] crate::database::DatabaseError),
#[error("Environment Error")]
EnvError(#[from] dotenv::Error),
}
pub async fn index_mods(db: mongodb::Client) -> Result<(), IndexingError> {
// Check if the index exists
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
@ -17,66 +34,73 @@ pub async fn index_mods(db: mongodb::Client) -> Result<(), SearchError> {
let mut docs_to_add: Vec<SearchMod> = vec![];
docs_to_add.append(&mut index_local(db.clone()).await?);
if dotenv::var("INDEX_CURSEFORGE")
.expect("`INDEX_CURSEFORGE` is missing in the .env file.")
if dotenv::var("INDEX_CURSEFORGE")?
.parse()
.unwrap()
.expect("`INDEX_CURSEFORGE` is not a boolean.")
{
docs_to_add.append(&mut index_curseforge(1, 400000).await?);
}
//Write Indexes
//Relevance Index
let mut relevance_index = client.get_or_create("relevance_mods").unwrap();
let mut relevance_index = client
.get_or_create("relevance_mods")
.map_err(IndexingError::IndexDBError)?;
let mut relevance_rules = default_rules();
relevance_rules.push_back("desc(downloads)".to_string());
relevance_index
.set_settings(&default_settings().with_ranking_rules(relevance_rules.into()))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
relevance_index
.add_documents(docs_to_add.clone(), Some("mod_id"))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
//Downloads Index
let mut downloads_index = client.get_or_create("downloads_mods").unwrap();
let mut downloads_index = client
.get_or_create("downloads_mods")
.map_err(IndexingError::IndexDBError)?;
let mut downloads_rules = default_rules();
downloads_rules.push_front("desc(downloads)".to_string());
downloads_index
.set_settings(&default_settings().with_ranking_rules(downloads_rules.into()))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
downloads_index
.add_documents(docs_to_add.clone(), Some("mod_id"))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
//Updated Index
let mut updated_index = client.get_or_create("updated_mods").unwrap();
let mut updated_index = client
.get_or_create("updated_mods")
.map_err(IndexingError::IndexDBError)?;
let mut updated_rules = default_rules();
updated_rules.push_front("desc(updated)".to_string());
updated_index
.set_settings(&default_settings().with_ranking_rules(updated_rules.into()))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
updated_index
.add_documents(docs_to_add.clone(), Some("mod_id"))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
//Created Index
let mut newest_index = client.get_or_create("newest_mods").unwrap();
let mut newest_index = client
.get_or_create("newest_mods")
.map_err(IndexingError::IndexDBError)?;
let mut newest_rules = default_rules();
newest_rules.push_back("desc(created)".to_string());
newest_index
.set_settings(&default_settings().with_ranking_rules(newest_rules.into()))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
newest_index
.add_documents(docs_to_add.clone(), Some("mod_id"))
.unwrap();
.map_err(IndexingError::IndexDBError)?;
Ok(())
}
@ -124,8 +148,8 @@ fn default_settings() -> Settings {
];
Settings::new()
.with_displayed_attributes(displayed_attributes.clone())
.with_searchable_attributes(searchable_attributes.clone())
.with_displayed_attributes(displayed_attributes)
.with_searchable_attributes(searchable_attributes)
.with_accept_new_fields(true)
.with_stop_words(vec![])
.with_synonyms(HashMap::new())

View File

@ -1,5 +1,7 @@
use crate::database::DatabaseError;
use crate::models::error::ApiError;
use crate::models::mods::SearchRequest;
use actix_web::http::StatusCode;
use actix_web::web::HttpResponse;
use meilisearch_sdk::client::Client;
use meilisearch_sdk::document::Document;
use meilisearch_sdk::search::Query;
@ -10,24 +12,39 @@ pub mod indexing;
#[derive(Error, Debug)]
pub enum SearchError {
#[error("Error while connection to the MeiliSearch database")]
IndexDBError(),
#[error("Error while connecting to the local server")]
LocalDatabaseError(#[from] mongodb::error::Error),
#[error("Error while accessing the data from remote")]
RemoteWebsiteError(#[from] reqwest::Error),
#[error("Error while serializing or deserializing JSON")]
#[error("Error while connecting to the MeiliSearch database")]
IndexDBError(meilisearch_sdk::errors::Error),
#[error("Error while serializing or deserializing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
#[error("Error while parsing float")]
FloatParsingError(#[from] std::num::ParseFloatError),
#[error("Error while parsing float")]
#[error("Error while parsing an integer: {0}")]
IntParsingError(#[from] std::num::ParseIntError),
#[error("Error while parsing BSON")]
DatabaseError(#[from] DatabaseError),
#[error("Environment Error")]
EnvError(#[from] dotenv::Error),
}
impl actix_web::ResponseError for SearchError {
fn status_code(&self) -> StatusCode {
match self {
SearchError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
SearchError::IndexDBError(..) => StatusCode::INTERNAL_SERVER_ERROR,
SearchError::SerDeError(..) => StatusCode::BAD_REQUEST,
SearchError::IntParsingError(..) => StatusCode::BAD_REQUEST,
}
}
fn error_response(&self) -> HttpResponse {
HttpResponse::build(self.status_code()).json(ApiError {
error: match self {
SearchError::EnvError(..) => "environment_error",
SearchError::IndexDBError(..) => "indexdb_error",
SearchError::SerDeError(..) => "invalid_input",
SearchError::IntParsingError(..) => "invalid_input",
},
description: &self.to_string(),
})
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SearchMod {
pub mod_id: i32,
@ -57,38 +74,24 @@ impl Document for SearchMod {
}
pub fn search_for_mod(info: &SearchRequest) -> Result<Vec<SearchMod>, SearchError> {
use std::borrow::Cow;
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
let search_query: &str;
let mut filters = String::new();
let mut offset = 0;
let mut index = "relevance";
let filters: Cow<_> = match (info.filters.as_deref(), info.version.as_deref()) {
(Some(f), Some(v)) => format!("({}) AND ({})", f, v).into(),
(Some(f), None) => f.into(),
(None, Some(v)) => v.into(),
(None, None) => "".into(),
};
match info.query.as_ref() {
Some(q) => search_query = q,
None => search_query = "{}{}{}",
}
if let Some(f) = info.filters.as_ref() {
filters = f.clone();
}
if let Some(v) = info.version.as_ref() {
if filters.is_empty() {
filters = v.clone();
} else {
filters = format!("({}) AND ({})", filters, v);
}
}
if let Some(o) = info.offset.as_ref() {
offset = o.parse().unwrap();
}
if let Some(s) = info.index.as_ref() {
index = s;
}
let offset = info.offset.as_deref().unwrap_or("0").parse()?;
let index = info.index.as_deref().unwrap_or("relevance");
let search_query: &str = info
.query
.as_deref()
.filter(|s| !s.is_empty())
.unwrap_or("{}{}{}");
let mut query = Query::new(search_query).with_limit(10).with_offset(offset);
@ -98,8 +101,8 @@ pub fn search_for_mod(info: &SearchRequest) -> Result<Vec<SearchMod>, SearchErro
Ok(client
.get_index(format!("{}_mods", index).as_ref())
.unwrap()
.map_err(SearchError::IndexDBError)?
.search::<SearchMod>(&query)
.unwrap()
.map_err(SearchError::IndexDBError)?
.hits)
}