feat(migration): Added automatic migration (#54)

Redblueflame authored this commit on 2020-08-28 17:48:01 +02:00; committed by GitHub
parent 2b1ed49e9a
commit 38b7d9724e
8 changed files with 123 additions and 4 deletions

.env (3 changed lines)

@@ -3,11 +3,10 @@ RUST_LOG=info,sqlx::query=warn
CDN_URL=https://cdn.modrinth.com
DATABASE_URL=postgresql://labrinth@localhost/labrinth
DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth
MEILISEARCH_ADDR=http://localhost:7700
BIND_ADDR=127.0.0.1:8000
MOCK_FILE_PATH=/tmp/modrinth
BACKBLAZE_ENABLED=false

Cargo.lock (generated; 16 changed lines)

@@ -468,6 +468,12 @@ dependencies = [
"libc",
]
[[package]]
name = "build_const"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39092a32794787acd8525ee150305ff051b0aa6cc2abaf193924f5ab05425f39"
[[package]]
name = "bumpalo"
version = "3.4.0"
@@ -568,6 +574,15 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec6763c20301ab0dc67051d1b6f4cc9132ad9e6eddcb1f10c6c53ea6d6ae2183"
[[package]]
name = "crc"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d663548de7f5cca343f1e0a48d14dcfb0e9eb4e079ec58883b7251539fa10aeb"
dependencies = [
"build_const",
]
[[package]]
name = "crc32fast"
version = "1.2.0"
@@ -1974,6 +1989,7 @@ dependencies = [
"byteorder",
"bytes",
"chrono",
"crc",
"crossbeam-channel",
"crossbeam-queue",
"crossbeam-utils",

Cargo.toml

@@ -45,7 +45,7 @@ sha1 = {version="0.6.0", features=["std"]}
git = "https://github.com/launchbadge/sqlx/"
branch = "master"
default-features = false
features = ["runtime-actix", "postgres", "chrono", "offline", "macros"]
features = ["runtime-actix", "postgres", "chrono", "offline", "macros", "migrate"]
[dependencies.sqlx-macros]
git = "https://github.com/launchbadge/sqlx/"

build.rs (new file; 60 lines)

@@ -0,0 +1,60 @@
use std::fs;
use std::path::{Path, PathBuf};
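// Build script: copy the `migrations` directory into the cargo build output
// directory so the compiled binary has the migration files available at runtime.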
fn main() {
let dir = std::env::var("OUT_DIR").unwrap();
let mut target = PathBuf::from(dir);
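// OUT_DIR is target/<profile>/build/<crate>-<hash>/out; three pops land in target/<profile>.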
target.pop();
target.pop();
target.pop();
target.push("migrations");
println!("Output: {}", target.to_str().unwrap());
copy("migrations", target).unwrap();
}
pub fn copy<U: AsRef<Path>, V: AsRef<Path>>(from: U, to: V) -> Result<(), std::io::Error> {
let mut stack = Vec::new();
stack.push(PathBuf::from(from.as_ref()));
let output_root = PathBuf::from(to.as_ref());
let input_root = PathBuf::from(from.as_ref()).components().count();
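// Number of components in the source root; skipping them turns each visited
// path into a path relative to `from`.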
while let Some(working_path) = stack.pop() {
println!("process: {:?}", &working_path);
// Generate a relative path
let src: PathBuf = working_path.components().skip(input_root).collect();
// Create a destination if missing
let dest = if src.components().count() == 0 {
output_root.clone()
} else {
output_root.join(&src)
};
if fs::metadata(&dest).is_err() {
println!(" mkdir: {:?}", dest);
fs::create_dir_all(&dest)?;
}
for entry in fs::read_dir(working_path)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
stack.push(path);
} else {
match path.file_name() {
Some(filename) => {
let dest_path = dest.join(filename);
println!(" copy: {:?} -> {:?}", &path, &dest_path);
fs::copy(&path, &dest_path)?;
}
None => {
println!("failed: {:?}", path);
}
}
}
}
}
Ok(())
}

docker-compose.yml

@@ -9,6 +9,7 @@ services:
environment:
POSTGRES_DB: postgres
POSTGRES_USER: labrinth
POSTGRES_PASSWORD: labrinth
POSTGRES_HOST_AUTH_METHOD: trust
meilisearch:
image: getmeili/meilisearch

src/database/mod.rs

@@ -4,3 +4,4 @@ mod postgres_database;
pub use models::Mod;
pub use models::Version;
pub use postgres_database::connect;
pub use postgres_database::check_for_migrations;

src/database/postgres_database.rs

@@ -1,5 +1,10 @@
use log::info;
use log::{info, debug};
use sqlx::postgres::{PgPool, PgPoolOptions};
use sqlx::migrate::{Migrator, Migrate, MigrateDatabase};
use std::path::Path;
use sqlx::{PgConnection, Connection, Postgres};
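// Folder the migrations are read from at runtime; build.rs copies it into the
// build output directory as well.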
const MIGRATION_FOLDER: &str = "migrations";
pub async fn connect() -> Result<PgPool, sqlx::Error> {
info!("Initializing database connection");
@@ -12,3 +17,38 @@ pub async fn connect() -> Result<PgPool, sqlx::Error> {
Ok(pool)
}
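// Create the database if it does not exist yet, then apply any pending migrations.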
pub async fn check_for_migrations() -> Result<(), sqlx::Error> {
let uri = &*dotenv::var("DATABASE_URL").expect("`DATABASE_URL` not in .env");
if !Postgres::database_exists(uri).await? {
info!("Creating database...");
Postgres::create_database(uri).await?;
}
info!("Applying migrations...");
run_migrations(uri).await?;
Ok(())
}
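// Apply every migration newer than the recorded schema version; already-applied
// migrations are only re-validated against their stored checksums.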
pub async fn run_migrations(uri: &str) -> Result<(), sqlx::Error> {
let migrator = Migrator::new(Path::new(MIGRATION_FOLDER)).await?;
let mut conn: PgConnection = PgConnection::connect(uri).await?;
conn.ensure_migrations_table().await?;
let (version, dirty) = conn.version().await?.unwrap_or((0, false));
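// `dirty` means a previously started migration never completed successfully.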
if dirty {
panic!("The database is dirty! Please check your database status.");
}
for migration in migrator.iter() {
if migration.version > version {
conn.apply(migration).await?;
} else {
conn.validate(migration).await?;
}
}
Ok(())
}

src/main.rs

@@ -37,6 +37,8 @@ async fn main() -> std::io::Result<()> {
check_env_vars();
database::check_for_migrations().await.expect("An error occurred while running migrations.");
// Database Connector
let pool = database::connect()
.await