From 4274a8ed6840129fb0caf1e28f9c5da02d97d34b Mon Sep 17 00:00:00 2001
From: Geometrically <18202329+Geometrically@users.noreply.github.com>
Date: Fri, 28 Jun 2024 15:44:17 -0700
Subject: [PATCH] Fix forge install issues (#18)

* Fix forge install issues

* remove mac garb
---
 daedalus/Cargo.toml           |   2 +-
 daedalus_client/Cargo.toml    |   2 +-
 daedalus_client/src/fabric.rs |  34 +++++--
 daedalus_client/src/forge.rs  |  68 +++++++-----
 daedalus_client/src/main.rs   | 111 +++++++++++++--------
 daedalus_client/src/util.rs   | 161 +++------------------------------
 6 files changed, 163 insertions(+), 215 deletions(-)

diff --git a/daedalus/Cargo.toml b/daedalus/Cargo.toml
index 1acc525..82e1610 100644
--- a/daedalus/Cargo.toml
+++ b/daedalus/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "daedalus"
-version = "0.2.0"
+version = "0.2.1"
 authors = ["Jai A "]
 edition = "2021"
 license = "MIT"
diff --git a/daedalus_client/Cargo.toml b/daedalus_client/Cargo.toml
index ce8204f..c0bdbd2 100644
--- a/daedalus_client/Cargo.toml
+++ b/daedalus_client/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "daedalus_client"
-version = "0.2.0"
+version = "0.2.1"
 authors = ["Jai A "]
 edition = "2021"
 
diff --git a/daedalus_client/src/fabric.rs b/daedalus_client/src/fabric.rs
index 8d1d943..a7e352b 100644
--- a/daedalus_client/src/fabric.rs
+++ b/daedalus_client/src/fabric.rs
@@ -17,6 +17,7 @@ pub async fn fetch_fabric(
         "fabric",
         "https://meta.fabricmc.net/v2",
         "https://maven.fabricmc.net/",
+        &[],
         semaphore,
         upload_files,
         mirror_artifacts,
@@ -34,7 +35,11 @@ pub async fn fetch_quilt(
         daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
         "quilt",
         "https://meta.quiltmc.org/v3",
-        "https://meta.quiltmc.org/",
+        "https://maven.quiltmc.org/repository/release/",
+        &[
+            // This version is broken as it contains invalid library coordinates
+            "0.17.5-beta.4",
+        ],
         semaphore,
         upload_files,
         mirror_artifacts,
@@ -48,6 +53,7 @@ async fn fetch(
     mod_loader: &str,
     meta_url: &str,
     maven_url: &str,
+    skip_versions: &[&str],
     semaphore: Arc<Semaphore>,
     upload_files: &DashMap<String, UploadFile>,
     mirror_artifacts: &DashMap<String, MirrorArtifact>,
@@ -76,6 +82,7 @@ async fn fetch(
             .game_versions
             .iter()
             .any(|x| x.loaders.iter().any(|x| x.id == version.version))
+            && !skip_versions.contains(&&*version.version)
         {
             fetch_versions.push(version);
         }
@@ -98,7 +105,11 @@ async fn fetch(
         (fetch_versions, fetch_intermediary_versions)
     } else {
         (
-            fabric_manifest.loader.iter().collect(),
+            fabric_manifest
+                .loader
+                .iter()
+                .filter(|x| !skip_versions.contains(&&*x.version))
+                .collect(),
             fabric_manifest.intermediary.iter().collect(),
         )
     };
@@ -109,7 +120,9 @@ async fn fetch(
     for x in &fetch_intermediary_versions {
         insert_mirrored_artifact(
             &x.maven,
-            maven_url.to_string(),
+            None,
+            vec![maven_url.to_string()],
+            false,
             mirror_artifacts,
         )?;
     }
@@ -142,13 +155,24 @@ async fn fetch(
                 let new_name = lib
                     .name
                     .replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
+
+                // Hard-code: This library is not present on fabric's maven, so we fetch it from MC libraries
+                if &*lib.name == "net.minecraft:launchwrapper:1.12" {
+                    lib.url = Some(
+                        "https://libraries.minecraft.net/".to_string(),
+                    );
+                }
+
                 // If a library is not intermediary, we add it to mirror artifacts to be mirrored
                 if lib.name == new_name {
                     insert_mirrored_artifact(
                         &new_name,
-                        lib.url
+                        None,
+                        vec![lib
+                            .url
                             .clone()
-                            .unwrap_or_else(|| maven_url.to_string()),
+                            .unwrap_or_else(|| maven_url.to_string())],
+                        false,
                         mirror_artifacts,
                     )?;
                 } else {
diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge.rs
index 99892f2..09c8e0a 100644
--- a/daedalus_client/src/forge.rs
+++ b/daedalus_client/src/forge.rs
@@ -1,4 +1,6 @@
-use crate::util::{download_file, fetch_json, fetch_xml, format_url};
+use crate::util::{
+    download_file, fetch_json, fetch_xml, format_url, sha1_async,
+};
 use crate::{insert_mirrored_artifact, Error, MirrorArtifact, UploadFile};
 use chrono::{DateTime, Utc};
 use daedalus::get_path_from_artifact;
@@ -246,6 +248,7 @@ async fn fetch(
         raw: bytes::Bytes,
         loader: &ForgeVersion,
         maven_url: &str,
+        mod_loader: &str,
        upload_files: &DashMap<String, UploadFile>,
        mirror_artifacts: &DashMap<String, MirrorArtifact>,
     ) -> Result {
@@ -399,15 +402,27 @@ async fn fetch(
             .into_iter()
             .map(|mut lib| {
                 // For all libraries besides the forge lib extracted, we mirror them from maven servers
-                if lib.name != install_profile.install.path {
-                    // TODO: add mirrors "https://maven.creeperhost.net/", "https://libraries.minecraft.net/"
-                    insert_mirrored_artifact(
-                        &lib.name,
-                        lib.url.clone().unwrap_or_else(|| {
-                            maven_url.to_string()
-                        }),
-                        mirror_artifacts,
-                    )?;
+                // unless the URL is empty/null or available on Minecraft's servers
+                if let Some(url) = lib.url {
+                    if lib.name != install_profile.install.path
+                        && !url.is_empty()
+                        && !url.contains(
+                            "https://libraries.minecraft.net/",
+                        )
+                    {
+                        insert_mirrored_artifact(
+                            &lib.name,
+                            None,
+                            vec![
+                                url,
+                                "https://maven.creeperhost.net/"
+                                    .to_string(),
+                                maven_url.to_string(),
+                            ],
+                            false,
+                            mirror_artifacts,
+                        )?;
+                    }
                 }
 
                 lib.url = Some(format_url("maven/"));
@@ -468,6 +483,7 @@ async fn fetch(
     async fn mirror_forge_library(
         mut zip: ZipFileReader,
         mut lib: daedalus::minecraft::Library,
+        maven_url: &str,
         upload_files: &DashMap<String, UploadFile>,
         mirror_artifacts: &DashMap<String, MirrorArtifact>,
     ) -> Result
@@ -480,7 +496,9 @@ async fn fetch(
             if !artifact.url.is_empty() {
                 insert_mirrored_artifact(
                     &lib.name,
-                    artifact.url.clone(),
+                    Some(artifact.sha1.clone()),
+                    vec![artifact.url.clone()],
+                    true,
                     mirror_artifacts,
                 )?;
 
@@ -491,10 +509,18 @@ async fn fetch(
             }
         } else if let Some(url) = &lib.url {
             if !url.is_empty() {
-                // TODO: add mirrors "https://maven.creeperhost.net/", "https://libraries.minecraft.net/"
                 insert_mirrored_artifact(
                     &lib.name,
-                    url.clone(),
+                    None,
+                    vec![
+                        url.clone(),
+                        "https://libraries.minecraft.net/"
+                            .to_string(),
+                        "https://maven.creeperhost.net/"
+                            .to_string(),
+                        maven_url.to_string(),
+                    ],
+                    false,
                     mirror_artifacts,
                 )?;
 
@@ -531,6 +557,7 @@ async fn fetch(
                 mirror_forge_library(
                     zip.clone(),
                     lib,
+                    maven_url,
                     upload_files,
                     mirror_artifacts,
                 )
@@ -560,7 +587,7 @@ async fn fetch(
         value: &str,
         upload_files: &DashMap<String, UploadFile>,
         libs: &mut Vec<daedalus::minecraft::Library>,
-        install_profile_path: Option<&str>,
+        mod_loader: &str,
         version: &ForgeVersion,
     ) -> Result {
         let extract_file =
@@ -595,11 +622,9 @@ async fn fetch(
         })?;
 
         let path = format!(
-            "{}:{}@{}",
-            install_profile_path.unwrap_or(&*format!(
-                "net.minecraftforge:forge:{}",
-                version.raw
-            )),
+            "com.modrinth.daedalus:{}-installer-extracts:{}:{}@{}",
+            mod_loader,
+            version.raw,
             file_name,
             ext
         );
@@ -634,7 +659,7 @@ async fn fetch(
                     &entry.client,
                     upload_files,
                     &mut version_info.libraries,
-                    install_profile.path.as_deref(),
+                    mod_loader,
                     loader,
                 )
                 .await?
@@ -649,7 +674,7 @@ async fn fetch(
                     &entry.server,
                     upload_files,
                     &mut version_info.libraries,
-                    install_profile.path.as_deref(),
+                    mod_loader,
                     loader,
                 )
                 .await?
@@ -686,6 +711,7 @@ async fn fetch(
                 raw,
                 loader,
                 maven_url,
+                mod_loader,
                 upload_files,
                 mirror_artifacts,
             )
diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs
index 33df709..8e934cb 100644
--- a/daedalus_client/src/main.rs
+++ b/daedalus_client/src/main.rs
@@ -72,46 +72,63 @@ async fn main() -> Result<()> {
     futures::future::try_join_all(mirror_artifacts.iter().map(|x| {
         upload_url_to_bucket_mirrors(
             format!("maven/{}", x.key()),
-            x.value().mirrors.iter().map(|x| x.key().clone()).collect(),
+            x.value()
+                .mirrors
+                .iter()
+                .map(|mirror| {
+                    if mirror.entire_url {
+                        mirror.path.clone()
+                    } else {
+                        format!("{}{}", mirror.path, x.key())
+                    }
+                })
+                .collect(),
+            x.sha1.clone(),
             &semaphore,
         )
     }))
     .await?;
 
-    if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
-        if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
-            let cache_clears = upload_files
-                .into_iter()
-                .map(|x| format_url(&x.0))
-                .chain(
-                    mirror_artifacts
-                        .into_iter()
-                        .map(|x| format_url(&format!("maven/{}", x.0))),
-                )
-                .collect::<Vec<_>>();
-
-            // Cloudflare ratelimits cache clears to 500 files per request
-            for chunk in cache_clears.chunks(500) {
-                REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
-                    .bearer_auth(&token)
-                    .json(&serde_json::json!({
+    if dotenvy::var("CLOUDFLARE_INTEGRATION")
+        .ok()
+        .and_then(|x| x.parse::<bool>().ok())
+        .unwrap_or(false)
+    {
+        if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
+            if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
+                let cache_clears = upload_files
+                    .into_iter()
+                    .map(|x| format_url(&x.0))
+                    .chain(
+                        mirror_artifacts
+                            .into_iter()
+                            .map(|x| format_url(&format!("maven/{}", x.0))),
+                    )
+                    .collect::<Vec<_>>();
+
+                // Cloudflare ratelimits cache clears to 500 files per request
+                for chunk in cache_clears.chunks(500) {
+                    REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
+                        .bearer_auth(&token)
+                        .json(&serde_json::json!({
                         "files": chunk
                     }))
-                    .send()
-                    .await
-                    .map_err(|err| {
-                        ErrorKind::Fetch {
-                            inner: err,
-                            item: "cloudflare clear cache".to_string(),
-                        }
-                    })?
-                    .error_for_status()
-                    .map_err(|err| {
-                        ErrorKind::Fetch {
-                            inner: err,
-                            item: "cloudflare clear cache".to_string(),
-                        }
-                    })?;
+                        .send()
+                        .await
+                        .map_err(|err| {
+                            ErrorKind::Fetch {
+                                inner: err,
+                                item: "cloudflare clear cache".to_string(),
+                            }
+                        })?
+                        .error_for_status()
+                        .map_err(|err| {
+                            ErrorKind::Fetch {
+                                inner: err,
+                                item: "cloudflare clear cache".to_string(),
+                            }
+                        })?;
+                }
             }
         }
     }
@@ -125,21 +142,37 @@ pub struct UploadFile {
 }
 
 pub struct MirrorArtifact {
-    pub mirrors: DashSet<String>,
+    pub sha1: Option<String>,
+    pub mirrors: DashSet<Mirror>,
 }
 
+#[derive(Eq, PartialEq, Hash)]
+pub struct Mirror {
+    path: String,
+    entire_url: bool,
+}
+
+#[tracing::instrument(skip(mirror_artifacts))]
 pub fn insert_mirrored_artifact(
     artifact: &str,
-    mirror: String,
+    sha1: Option<String>,
+    mirrors: Vec<String>,
+    entire_url: bool,
     mirror_artifacts: &DashMap<String, MirrorArtifact>,
 ) -> Result<()> {
-    mirror_artifacts
+    let mut val = mirror_artifacts
         .entry(get_path_from_artifact(artifact)?)
         .or_insert(MirrorArtifact {
+            sha1,
             mirrors: DashSet::new(),
-        })
-        .mirrors
-        .insert(mirror);
+        });
+
+    for mirror in mirrors {
+        val.mirrors.insert(Mirror {
+            path: mirror,
+            entire_url,
+        });
+    }
 
     Ok(())
 }
diff --git a/daedalus_client/src/util.rs b/daedalus_client/src/util.rs
index 0b571f0..72ff255 100644
--- a/daedalus_client/src/util.rs
+++ b/daedalus_client/src/util.rs
@@ -1,6 +1,5 @@
 use crate::{Error, ErrorKind};
-use bytes::{Bytes, BytesMut};
-use futures::StreamExt;
+use bytes::Bytes;
 use s3::creds::Credentials;
 use s3::{Bucket, Region};
 use serde::de::DeserializeOwned;
@@ -95,8 +94,9 @@ pub async fn upload_file_to_bucket(
 }
 
 pub async fn upload_url_to_bucket_mirrors(
-    base: String,
+    upload_path: String,
     mirrors: Vec<String>,
+    sha1: Option<String>,
     semaphore: &Arc<Semaphore>,
 ) -> Result<(), Error> {
     if mirrors.is_empty() {
@@ -108,8 +108,9 @@ pub async fn upload_url_to_bucket_mirrors(
     for (index, mirror) in mirrors.iter().enumerate() {
         let result = upload_url_to_bucket(
-            &base,
-            &format!("{}{}", mirror, base),
+            upload_path.clone(),
+            mirror.clone(),
+            sha1.clone(),
             semaphore,
         )
         .await;
@@ -124,152 +125,16 @@
 
 #[tracing::instrument(skip(semaphore))]
 pub async fn upload_url_to_bucket(
-    path: &str,
-    url: &str,
+    path: String,
+    url: String,
+    sha1: Option<String>,
     semaphore: &Arc<Semaphore>,
 ) -> Result<(), Error> {
-    let _permit = semaphore.acquire().await?;
-
-    const RETRIES: i32 = 3;
-    for attempt in 1..=(RETRIES + 1) {
-        tracing::trace!("Attempting streaming file upload, attempt {attempt}");
-
-        let result: Result<(), Error> = {
-            let response =
-                REQWEST_CLIENT.get(url).send().await.map_err(|err| {
-                    ErrorKind::Fetch {
-                        inner: err,
-                        item: url.to_string(),
-                    }
-                })?;
-
-            let content_type = response
-                .headers()
-                .get(reqwest::header::CONTENT_TYPE)
-                .and_then(|ct| ct.to_str().ok())
-                .unwrap_or("application/octet-stream")
-                .to_string();
-
-            let total_size = response.content_length().unwrap_or(0);
-
-            const MIN_PART_SIZE: usize = 5 * 1024 * 1024;
-
-            if total_size < MIN_PART_SIZE as u64 {
-                let data =
-                    response.bytes().await.map_err(|err| ErrorKind::Fetch {
-                        inner: err,
-                        item: url.to_string(),
-                    })?;
-                BUCKET.put_object(&path, &data).await.map_err(|err| {
-                    ErrorKind::S3 {
-                        inner: err,
-                        file: path.to_string(),
-                    }
-                })?;
-            } else {
-                let mut stream = response.bytes_stream();
-
-                let multipart = BUCKET
-                    .initiate_multipart_upload(path, &content_type)
-                    .await
-                    .map_err(|err| ErrorKind::S3 {
-                        inner: err,
-                        file: path.to_string(),
-                    })?;
-
-                let mut parts = Vec::new();
-                let mut buffer = BytesMut::new();
-
-                async fn upload_part(
-                    parts: &mut Vec<Part>,
-                    buffer: Vec<u8>,
-                    path: &str,
-                    upload_id: &str,
-                    content_type: &str,
-                ) -> Result<(), Error> {
-                    let part = BUCKET
-                        .put_multipart_chunk(
-                            buffer,
-                            path,
-                            (parts.len() + 1) as u32,
-                            upload_id,
-                            content_type,
-                        )
-                        .await
-                        .map_err(|err| ErrorKind::S3 {
-                            inner: err,
-                            file: path.to_string(),
-                        })?;
-
-                    parts.push(part);
-
-                    Ok(())
-                }
-
-                while let Some(chunk) = stream.next().await {
-                    let chunk = chunk.map_err(|err| ErrorKind::Fetch {
-                        inner: err,
-                        item: url.to_string(),
-                    })?;
-
-                    buffer.extend_from_slice(&chunk);
-
-                    if buffer.len() >= MIN_PART_SIZE {
-                        upload_part(
-                            &mut parts,
-                            buffer.to_vec(),
-                            path,
-                            &multipart.upload_id,
-                            &content_type,
-                        )
-                        .await?;
-                        buffer.clear();
-                    }
-                }
-
-                if !buffer.is_empty() {
-                    let part = BUCKET
-                        .put_multipart_chunk(
-                            buffer.to_vec(),
-                            path,
-                            (parts.len() + 1) as u32,
-                            &multipart.upload_id,
-                            &content_type,
-                        )
-                        .await
-                        .map_err(|err| ErrorKind::S3 {
-                            inner: err,
-                            file: path.to_string(),
-                        })?;
-
-                    parts.push(part);
-                }
-
-                BUCKET
-                    .complete_multipart_upload(
-                        path,
-                        &multipart.upload_id,
-                        parts,
-                    )
-                    .await
-                    .map_err(|err| ErrorKind::S3 {
-                        inner: err,
-                        file: path.to_string(),
-                    })?;
-            }
-
-            Ok(())
-        };
+    let data = download_file(&url, sha1.as_deref(), semaphore).await?;
 
-        match result {
-            Ok(_) => return Ok(()),
-            Err(_) if attempt <= RETRIES => continue,
-            Err(_) => {
-                result?;
-            }
-        }
-    }
-    unreachable!()
+    upload_file_to_bucket(path, data, None, semaphore).await?;
+    Ok(())
 }
 
 #[tracing::instrument(skip(bytes))]
@@ -294,7 +159,7 @@ pub async fn download_file(
     const RETRIES: u32 = 10;
     for attempt in 1..=(RETRIES + 1) {
         let result = REQWEST_CLIENT
-            .get(url)
+            .get(&url.replace("http://", "https://"))
            .send()
            .await
            .and_then(|x| x.error_for_status());
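
---
Note (illustration only, not part of the upstream patch): the new `Mirror` model added in `main.rs` carries a list of candidate sources per artifact instead of a single URL, and the closure in `main()` resolves each entry either as a complete URL or as a maven root plus the artifact's relative path. The sketch below restates that resolution rule as a standalone program; the `Mirror` struct matches the patch, while `resolve` is a hypothetical helper written here purely to demonstrate the rule.

// Illustration of the mirror-resolution rule introduced by this patch.
struct Mirror {
    path: String,
    entire_url: bool,
}

// Hypothetical helper (not in the codebase): expand a Mirror into the
// URL that upload_url_to_bucket_mirrors would try for a given artifact.
fn resolve(mirror: &Mirror, artifact_path: &str) -> String {
    if mirror.entire_url {
        // `path` already holds the full download URL (e.g. an installer
        // artifact URL pinned together with its sha1).
        mirror.path.clone()
    } else {
        // `path` is a maven root; append the artifact's relative path.
        format!("{}{}", mirror.path, artifact_path)
    }
}

fn main() {
    let maven_root = Mirror {
        path: "https://maven.creeperhost.net/".to_string(),
        entire_url: false,
    };
    assert_eq!(
        resolve(&maven_root, "net/minecraft/launchwrapper/1.12/launchwrapper-1.12.jar"),
        "https://maven.creeperhost.net/net/minecraft/launchwrapper/1.12/launchwrapper-1.12.jar"
    );
}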