Fixes in forge universal lib + other things

This commit is contained in:
Jai A 2021-11-02 19:59:10 -07:00
parent e8057a5c8a
commit fb16f25b07
No known key found for this signature in database
GPG Key ID: 2AA7E66D6ACA2201
7 changed files with 148 additions and 58 deletions

3
.env
View File

@ -10,4 +10,5 @@ S3_REGION=none
S3_BUCKET_NAME=none
DO_INTEGRATION=false
DO_ACCESS_KEY=none
DO_ACCESS_KEY=none
DO_ENDPOINT_ID=none

View File

@ -11,12 +11,12 @@ jobs:
docker:
runs-on: ubuntu-latest
steps:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Checkout
uses: actions/checkout@v2
- name: Fetch docker metadata
uses: docker/metadata-action@v3
with:
images: ghcr.io/modrinth/daedalus
-
name: Login to GitHub Images
uses: docker/login-action@v1
@ -30,4 +30,5 @@ jobs:
uses: docker/build-push-action@v2
with:
push: ${{ github.event_name != 'pull_request' }}
tags: ghcr.io/modrinth/daedalus:latest
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}

View File

@ -82,7 +82,7 @@ pub fn merge_partial_version(partial: PartialVersionInfo, merge: VersionInfo) ->
asset_index: merge.asset_index,
assets: merge.assets,
downloads: merge.downloads,
id: merge.id,
id: partial.id,
libraries: partial
.libraries
.into_iter()

View File

@ -2,13 +2,13 @@ use crate::{format_url, upload_file_to_bucket, Error};
use daedalus::download_file;
use daedalus::minecraft::Library;
use daedalus::modded::{LoaderType, LoaderVersion, Manifest, PartialVersionInfo, Version};
use futures::lock::Mutex;
use tokio::sync::Mutex;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};
pub async fn retrieve_data() -> Result<(), Error> {
pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error> {
let mut list = fetch_fabric_versions(None).await?;
let old_manifest = daedalus::modded::fetch_manifest(&*format!(
"fabric/v{}/manifest.json",
@ -23,6 +23,8 @@ pub async fn retrieve_data() -> Result<(), Error> {
Vec::new()
}));
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
if let Some(latest) = list.loader.get(0) {
let loaders_mutex = Arc::new(Mutex::new(HashMap::new()));
let visited_artifacts_mutex = Arc::new(Mutex::new(Vec::new()));
@ -50,6 +52,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
for game_version in list.game.iter_mut() {
let visited_artifacts_mutex = Arc::clone(&visited_artifacts_mutex);
let loaders_mutex = Arc::clone(&loaders_mutex);
let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex);
let versions_mutex = Arc::clone(&versions);
version_futures.push(async move {
@ -119,8 +122,9 @@ pub async fn retrieve_data() -> Result<(), Error> {
format!("{}/{}", "maven", artifact_path),
artifact.to_vec(),
Some("application/java-archive".to_string()),
uploaded_files_mutex.as_ref(),
)
.await?;
.await?;
Ok::<Library, Error>(lib)
},
@ -131,9 +135,11 @@ pub async fn retrieve_data() -> Result<(), Error> {
"fabric/v{}/versions/{}-{}.json",
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
version.inherits_from,
loader
&loader
);
let inherits_from = version.inherits_from.clone();
upload_file_to_bucket(
version_path.clone(),
serde_json::to_vec(&PartialVersionInfo {
@ -146,21 +152,25 @@ pub async fn retrieve_data() -> Result<(), Error> {
inherits_from: version.inherits_from,
libraries: libs,
processors: None,
data: None
data: None,
})?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref(),
)
.await?;
{
let mut loader_version_map = loader_version_mutex.lock().await;
loader_version_map.insert(
type_,
LoaderVersion {
id: loader,
url: format_url(&*version_path),
},
);
async move {
loader_version_map.insert(
type_,
LoaderVersion {
id: format!("{}-{}", inherits_from, loader),
url: format_url(&*version_path),
},
);
}
.await;
}
Ok::<(), Error>(())
@ -190,7 +200,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
chunk_index += 1;
let elapsed = now.elapsed();
println!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
info!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
}
}
@ -204,10 +214,15 @@ pub async fn retrieve_data() -> Result<(), Error> {
game_versions: versions.into_inner(),
})?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref(),
)
.await?;
}
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
uploaded_files.extend(uploaded_files_mutex.into_inner());
}
Ok(())
}

View File

@ -2,7 +2,9 @@ use crate::{format_url, upload_file_to_bucket, Error};
use chrono::{DateTime, Utc};
use daedalus::download_file;
use daedalus::minecraft::{Argument, ArgumentType, Library, VersionType};
use daedalus::modded::{LoaderType, LoaderVersion, Manifest, PartialVersionInfo, Processor, SidedDataEntry};
use daedalus::modded::{
LoaderType, LoaderVersion, Manifest, PartialVersionInfo, Processor, SidedDataEntry,
};
use lazy_static::lazy_static;
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
@ -11,20 +13,19 @@ use std::io::Read;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::Mutex;
use log::info;
lazy_static! {
static ref FORGE_MANIFEST_V1_QUERY: VersionReq =
VersionReq::parse(">=8.0.684, <23.5.2851").unwrap();
static ref FORGE_MANIFEST_V2_QUERY_P1: VersionReq =
VersionReq::parse(">=23.5.2851, <31.2.52").unwrap();
static ref FORGE_MANIFEST_V2_QUERY_P2: VersionReq =
VersionReq::parse(">=32.0.1, <37.0.0").unwrap();
static ref FORGE_MANIFEST_V3_QUERY: VersionReq = VersionReq::parse(">=37.0.0").unwrap();
}
pub async fn retrieve_data() -> Result<(), Error> {
pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error> {
let maven_metadata = fetch_maven_metadata(None).await?;
let old_manifest = daedalus::modded::fetch_manifest(&*format_url(&*format!(
"forge/v{}/manifest.json",
@ -40,6 +41,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
}));
let visited_assets_mutex = Arc::new(Mutex::new(Vec::new()));
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
let mut version_futures = Vec::new();
@ -66,7 +68,11 @@ pub async fn retrieve_data() -> Result<(), Error> {
let version = Version::parse(&*loader_version)?;
if FORGE_MANIFEST_V1_QUERY.matches(&version) || FORGE_MANIFEST_V2_QUERY_P1.matches(&version) || FORGE_MANIFEST_V2_QUERY_P2.matches(&version) || FORGE_MANIFEST_V3_QUERY.matches(&version) {
if FORGE_MANIFEST_V1_QUERY.matches(&version)
|| FORGE_MANIFEST_V2_QUERY_P1.matches(&version)
|| FORGE_MANIFEST_V2_QUERY_P2.matches(&version)
|| FORGE_MANIFEST_V3_QUERY.matches(&version)
{
loaders.push((loader_version_full, version))
}
}
@ -76,6 +82,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
version_futures.push(async {
let versions_mutex = Arc::clone(&versions);
let visited_assets = Arc::clone(&visited_assets_mutex);
let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex);
async move {
{
if versions_mutex.lock().await.iter().any(|x| {
@ -89,7 +96,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
}
}
println!("installer start {}", loader_version_full.clone());
info!("Forge - Installer Start {}", loader_version_full.clone());
let bytes = download_file(&*format!("https://maven.minecraftforge.net/net/minecraftforge/forge/{0}/forge-{0}-installer.jar", loader_version_full), None).await?;
let reader = std::io::Cursor::new(&*bytes);
@ -112,7 +119,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
bytes::Bytes::from(forge_universal)
};
let forge_universal_path = profile.install.file_path.clone();
let forge_universal_path = profile.install.path.clone();
let now = Instant::now();
let libs = futures::future::try_join_all(profile.version_info.libraries.into_iter().map(|mut lib| async {
@ -151,6 +158,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
format!("{}/{}", "maven", artifact_path),
artifact.to_vec(),
Some("application/java-archive".to_string()),
uploaded_files_mutex.as_ref(),
).await?;
}
@ -158,7 +166,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
})).await?;
let elapsed = now.elapsed();
println!("Elapsed lib DL: {:.2?}", elapsed);
info!("Elapsed lib DL: {:.2?}", elapsed);
let new_profile = PartialVersionInfo {
id: profile.version_info.id,
@ -183,6 +191,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
version_path.clone(),
serde_json::to_vec(&new_profile)?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref()
).await?;
let mut map = HashMap::new();
@ -243,7 +252,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
let artifact_path =
daedalus::get_path_from_artifact(&*lib.name)?;
let artifact_bytes = if &*artifact.url == "" {
let artifact_bytes = if artifact.url.is_empty() {
forge_universal_bytes.clone().unwrap_or_default()
} else {
daedalus::download_file(
@ -259,6 +268,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
format!("{}/{}", "maven", artifact_path),
artifact_bytes.to_vec(),
Some("application/java-archive".to_string()),
uploaded_files_mutex.as_ref()
).await?;
}
}
@ -267,7 +277,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
})).await?;
let elapsed = now.elapsed();
println!("Elapsed lib DL: {:.2?}", elapsed);
info!("Elapsed lib DL: {:.2?}", elapsed);
let new_profile = PartialVersionInfo {
id: version_info.id,
@ -292,6 +302,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
version_path.clone(),
serde_json::to_vec(&new_profile)?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref()
).await?;
let mut map = HashMap::new();
@ -318,7 +329,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
let mut versions_peek = version_futures.into_iter().peekable();
let mut chunk_index = 0;
while versions_peek.peek().is_some() {
println!("Chunk {} Start", chunk_index);
info!("Chunk {} Start", chunk_index);
let now = Instant::now();
let chunk: Vec<_> = versions_peek.by_ref().take(100).collect();
@ -329,7 +340,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
chunk_index += 1;
let elapsed = now.elapsed();
println!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
info!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
}
}
@ -343,10 +354,15 @@ pub async fn retrieve_data() -> Result<(), Error> {
game_versions: versions.into_inner(),
})?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref()
)
.await?;
}
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
uploaded_files.extend(uploaded_files_mutex.into_inner());
}
Ok(())
}
@ -415,4 +431,4 @@ struct ForgeInstallerProfileV2 {
pub data: HashMap<String, SidedDataEntry>,
pub libraries: Vec<Library>,
pub processors: Vec<Processor>,
}
}

View File

@ -47,15 +47,22 @@ async fn main() {
loop {
timer.tick().await;
tokio::spawn(async {
match fabric::retrieve_data().await {
let mut uploaded_files = Vec::new();
match fabric::retrieve_data(&mut uploaded_files).await {
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
match minecraft::retrieve_data().await {
match minecraft::retrieve_data(&mut uploaded_files).await {
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
match forge::retrieve_data().await {
match forge::retrieve_data(&mut uploaded_files).await {
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
match purge_digitalocean_cache(uploaded_files).await {
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
@ -97,10 +104,12 @@ fn check_env_vars() -> bool {
let do_integration = dotenv::var("DO_INTEGRATION")
.ok()
.map(|x| x.parse::<bool>().ok())
.flatten();
.flatten()
.unwrap_or(false);
if do_integration.unwrap_or(false) {
failed |= check_var::<bool>("DO_ACCESS_KEY");
if do_integration {
failed |= check_var::<String>("DO_ACCESS_KEY");
failed |= check_var::<String>("DO_ENDPOINT_ID");
}
failed
@ -126,11 +135,14 @@ pub async fn upload_file_to_bucket(
path: String,
bytes: Vec<u8>,
content_type: Option<String>,
uploaded_files: &tokio::sync::Mutex<Vec<String>>,
) -> Result<(), Error> {
let key = format!("{}/{}", &*dotenv::var("BASE_FOLDER").unwrap(), path);
CLIENT
.put_object(PutObjectRequest {
bucket: dotenv::var("S3_BUCKET_NAME").unwrap(),
key: format!("{}/{}", &*dotenv::var("BASE_FOLDER").unwrap(), path),
key: key.clone(),
body: Some(bytes.into()),
acl: Some("public-read".to_string()),
content_type,
@ -142,6 +154,11 @@ pub async fn upload_file_to_bucket(
file: format!("{}/{}", &*dotenv::var("BASE_FOLDER").unwrap(), path),
})?;
{
let mut uploaded_files = uploaded_files.lock().await;
uploaded_files.push(key);
}
Ok(())
}
@ -153,3 +170,35 @@ pub fn format_url(path: &str) -> String {
path
)
}
/// JSON request body for the DigitalOcean CDN cache-purge endpoint
/// (`DELETE /v2/cdn/endpoints/{endpoint_id}/cache`).
#[derive(serde::Serialize)]
struct PurgeCacheRequest {
    // CDN file paths to invalidate on the edge caches.
    pub files: Vec<String>,
}
pub async fn purge_digitalocean_cache(files: Vec<String>) -> Result<(), Error> {
if !dotenv::var("DO_INTEGRATION")
.ok()
.map(|x| x.parse::<bool>().ok())
.flatten()
.unwrap_or(false) {
return Ok(())
}
let client = reqwest::Client::new();
client
.delete(&format!(
"https://api.digitalocean.com/v2/cdn/endpoints/{}/cache",
&*dotenv::var("DO_ENDPOINT_ID").unwrap()
))
.header("Authorization", &*format!("Bearer {}", &*dotenv::var("DO_ACCESS_KEY").unwrap()))
.json(&PurgeCacheRequest { files })
.send().await.map_err(|err| Error::FetchError {
inner: err,
item: "purging digital ocean cache".to_string()
})?;
Ok(())
}

View File

@ -1,10 +1,10 @@
use crate::{format_url, upload_file_to_bucket, Error};
use daedalus::download_file;
use futures::lock::Mutex;
use tokio::sync::Mutex;
use std::sync::Arc;
use std::time::{Duration, Instant};
pub async fn retrieve_data() -> Result<(), Error> {
pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error> {
let old_manifest =
daedalus::minecraft::fetch_version_manifest(Some(&*crate::format_url(&*format!(
"minecraft/v{}/manifest.json",
@ -17,6 +17,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
let cloned_manifest = Arc::new(Mutex::new(manifest.clone()));
let visited_assets_mutex = Arc::new(Mutex::new(Vec::new()));
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
let now = Instant::now();
@ -38,6 +39,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
let visited_assets_mutex = Arc::clone(&visited_assets_mutex);
let cloned_manifest_mutex = Arc::clone(&cloned_manifest);
let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex);
let assets_hash = old_version.map(|x| x.assets_index_sha1.clone()).flatten();
@ -104,6 +106,7 @@ pub async fn retrieve_data() -> Result<(), Error> {
assets_path,
assets_index.to_vec(),
Some("application/json".to_string()),
uploaded_files_mutex.as_ref()
));
}
}
@ -113,13 +116,11 @@ pub async fn retrieve_data() -> Result<(), Error> {
version_path,
serde_json::to_vec(&version_info)?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref()
));
}
let now = Instant::now();
futures::future::try_join_all(upload_futures).await?;
let elapsed = now.elapsed();
println!("Spaces Upload {} Elapsed: {:.2?}", version.id, elapsed);
Ok::<(), Error>(())
}
@ -129,20 +130,22 @@ pub async fn retrieve_data() -> Result<(), Error> {
})
}
let mut versions = version_futures.into_iter().peekable();
let mut chunk_index = 0;
while versions.peek().is_some() {
let now = Instant::now();
{
let mut versions = version_futures.into_iter().peekable();
let mut chunk_index = 0;
while versions.peek().is_some() {
let now = Instant::now();
let chunk: Vec<_> = versions.by_ref().take(100).collect();
futures::future::try_join_all(chunk).await?;
let chunk: Vec<_> = versions.by_ref().take(100).collect();
futures::future::try_join_all(chunk).await?;
tokio::time::sleep(Duration::from_secs(1)).await;
tokio::time::sleep(Duration::from_secs(1)).await;
chunk_index += 1;
chunk_index += 1;
let elapsed = now.elapsed();
println!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
let elapsed = now.elapsed();
info!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
}
}
upload_file_to_bucket(
@ -152,11 +155,16 @@ pub async fn retrieve_data() -> Result<(), Error> {
),
serde_json::to_vec(&*cloned_manifest.lock().await)?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref()
)
.await?;
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
uploaded_files.extend(uploaded_files_mutex.into_inner());
}
let elapsed = now.elapsed();
println!("Elapsed: {:.2?}", elapsed);
info!("Elapsed: {:.2?}", elapsed);
Ok(())
}