From a44087457ec571f475a8192946b3a7441331dffd Mon Sep 17 00:00:00 2001 From: Davide Date: Mon, 13 Oct 2025 22:08:13 +0200 Subject: [PATCH 1/7] big big refactor --- .env.example | 100 ++- Cargo.toml | 1 + daedalus/Cargo.toml | 2 +- daedalus/src/lib.rs | 41 +- daedalus/src/modded.rs | 16 +- daedalus/src/version.rs | 356 ++++++++++ daedalus/tests/version_comparison.rs | 346 ++++++++++ daedalus_client/Cargo.toml | 10 +- daedalus_client/README.md | 299 +++++++++ daedalus_client/src/fabric.rs | 402 +----------- daedalus_client/src/forge.rs | 457 +++++++++---- .../src/infrastructure/circuit_breaker.rs | 222 +++++++ daedalus_client/src/infrastructure/error.rs | 243 +++++++ daedalus_client/src/infrastructure/mod.rs | 2 + daedalus_client/src/loaders/fabric.rs | 83 +++ daedalus_client/src/loaders/mod.rs | 532 +++++++++++++++ daedalus_client/src/loaders/quilt.rs | 85 +++ daedalus_client/src/main.rs | 610 +++++++++++++----- daedalus_client/src/minecraft.rs | 278 ++++---- daedalus_client/src/neoforge.rs | 289 ++++++--- daedalus_client/src/quilt.rs | 400 +----------- daedalus_client/src/services/betterstack.rs | 216 +++++++ daedalus_client/src/services/cas.rs | 468 ++++++++++++++ daedalus_client/src/services/cloudflare.rs | 130 ++++ daedalus_client/src/services/download.rs | 61 ++ daedalus_client/src/services/mod.rs | 5 + daedalus_client/src/services/upload.rs | 327 ++++++++++ rust-toolchain.toml | 2 + 28 files changed, 4706 insertions(+), 1277 deletions(-) create mode 100644 daedalus/src/version.rs create mode 100644 daedalus/tests/version_comparison.rs create mode 100644 daedalus_client/README.md create mode 100644 daedalus_client/src/infrastructure/circuit_breaker.rs create mode 100644 daedalus_client/src/infrastructure/error.rs create mode 100644 daedalus_client/src/infrastructure/mod.rs create mode 100644 daedalus_client/src/loaders/fabric.rs create mode 100644 daedalus_client/src/loaders/mod.rs create mode 100644 daedalus_client/src/loaders/quilt.rs create mode 100644 
daedalus_client/src/services/betterstack.rs create mode 100644 daedalus_client/src/services/cas.rs create mode 100644 daedalus_client/src/services/cloudflare.rs create mode 100644 daedalus_client/src/services/download.rs create mode 100644 daedalus_client/src/services/mod.rs create mode 100644 daedalus_client/src/services/upload.rs create mode 100644 rust-toolchain.toml diff --git a/.env.example b/.env.example index d1e512c..90a1753 100644 --- a/.env.example +++ b/.env.example @@ -1,11 +1,89 @@ -RUST_LOG=info -BASE_URL=YOUR_BASE_URL -S3_ACCESS_TOKEN=null -S3_SECRET=null -S3_URL=null -S3_REGION=null -S3_BUCKET_NAME=null -BRAND_NAME=your-brand-name -SUPPORT_EMAIL=support-email -CDN_UPLOAD_DIR=./upload_cdn -SENTRY_DSN=null \ No newline at end of file +# ============================================================================= +# Daedalus Client Environment Configuration +# ============================================================================= +# Copy this file to .env and fill in your actual values +# Required variables must be set for the application to run + +# ============================================================================= +# REQUIRED CONFIGURATION +# ============================================================================= + +# Base URL for CAS objects (public CDN URL where metadata will be served) +BASE_URL=https://cdn.example.com + +# Sentry error tracking DSN (get from https://sentry.io) +SENTRY_DSN=https://key@sentry.io/project + +# Brand name for metadata files +BRAND_NAME=MyLauncher + +# Support email for metadata +SUPPORT_EMAIL=support@example.com + +# ============================================================================= +# S3 STORAGE CONFIGURATION +# ============================================================================= + +# S3 bucket name where metadata will be stored +S3_BUCKET_NAME=minecraft-metadata + +# S3 region (use "r2" for Cloudflare R2) +# Examples: us-east-1, eu-west-1, ap-southeast-1, r2 
+S3_REGION=us-east-1 + +# S3 endpoint URL +# AWS S3: https://s3.amazonaws.com +# Cloudflare R2: https://.r2.cloudflarestorage.com +S3_URL=https://s3.amazonaws.com + +# S3 access key ID +S3_ACCESS_TOKEN=token + +# S3 secret access key +S3_SECRET=secret + +# ============================================================================= +# OPTIONAL: LOGGING CONFIGURATION +# ============================================================================= + +# Log output format: "text" or "json" +# Default: text +# LOG_FORMAT=text + +# Rust log level filter +# Options: trace, debug, info, warn, error +# Default: info +# RUST_LOG=info + +# Betterstack logging token for centralized log management +# Get from https://betterstack.com +# BETTERSTACK_TOKEN=your-betterstack-token + +# ============================================================================= +# OPTIONAL: CLOUDFLARE INTEGRATION +# ============================================================================= + +# Enable Cloudflare cache purging on updates +# Default: false +# CLOUDFLARE_INTEGRATION=true + +# Cloudflare API token (required if CLOUDFLARE_INTEGRATION=true) +# Create at: https://dash.cloudflare.com/profile/api-tokens +# Required permissions: Zone.Cache Purge +# CLOUDFLARE_TOKEN=your-cloudflare-token + +# Cloudflare zone ID (required if CLOUDFLARE_INTEGRATION=true) +# Find in: Zone Overview > API section +# CLOUDFLARE_ZONE_ID=your-zone-id + +# ============================================================================= +# OPTIONAL: ADVANCED CONFIGURATION +# ============================================================================= + +# Local directory for CDN file uploads +# Default: ./upload_cdn +# CDN_UPLOAD_DIR=./upload_cdn + +# Force reprocessing of all NeoForge versions (useful for debugging) +# Default: false +# FORCE_REPROCESS=false diff --git a/Cargo.toml b/Cargo.toml index dc360c2..1c6e233 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,4 +1,5 @@ [workspace] +resolver = "2" members = [ 
"daedalus", diff --git a/daedalus/Cargo.toml b/daedalus/Cargo.toml index 2b3c05b..22aeb63 100644 --- a/daedalus/Cargo.toml +++ b/daedalus/Cargo.toml @@ -2,7 +2,7 @@ name = "daedalus" version = "0.1.21" authors = ["Jai A "] -edition = "2021" +edition = "2024" license = "MIT" description = "Utilities for querying and parsing Minecraft metadata" repository = "https://github.com/modrinth/daedalus/" diff --git a/daedalus/src/lib.rs b/daedalus/src/lib.rs index f044825..b516761 100644 --- a/daedalus/src/lib.rs +++ b/daedalus/src/lib.rs @@ -6,7 +6,7 @@ use std::{ cmp::Ordering, convert::TryFrom, fmt::Display, path::PathBuf, str::FromStr, - time::Duration, + sync::LazyLock, time::Duration, }; use backon::{ExponentialBuilder, Retryable}; @@ -17,6 +17,8 @@ use serde::{Deserialize, Serialize}; pub mod minecraft; /// Models and methods for fetching metadata for Minecraft mod loaders pub mod modded; +/// Custom version comparison for Minecraft versions +pub mod version; /// Your branding, used for the user agent and similar #[derive(Debug)] @@ -30,6 +32,25 @@ pub struct Branding { /// The branding of your application pub static BRANDING: OnceCell = OnceCell::new(); +/// Global HTTP client with connection pooling and TCP keepalive +static HTTP_CLIENT: LazyLock = LazyLock::new(|| { + let mut headers = reqwest::header::HeaderMap::new(); + if let Ok(header) = reqwest::header::HeaderValue::from_str( + &BRANDING.get_or_init(Branding::default).header_value, + ) { + headers.insert(reqwest::header::USER_AGENT, header); + } + + reqwest::Client::builder() + .tcp_keepalive(Some(Duration::from_secs(10))) + .timeout(Duration::from_secs(120)) + .connect_timeout(Duration::from_secs(30)) + .default_headers(headers) + .pool_max_idle_per_host(10) + .build() + .expect("Failed to create HTTP client") +}); + impl Branding { /// Creates a new branding instance pub fn new(name: String, email: String) -> Branding { @@ -366,24 +387,8 @@ pub async fn download_file( url: &str, sha1: Option<&str>, ) -> 
Result { - let mut headers = reqwest::header::HeaderMap::new(); - if let Ok(header) = reqwest::header::HeaderValue::from_str( - &BRANDING.get_or_init(Branding::default).header_value, - ) { - headers.insert(reqwest::header::USER_AGENT, header); - } - let client = reqwest::Client::builder() - .tcp_keepalive(Some(std::time::Duration::from_secs(10))) - .timeout(std::time::Duration::from_secs(15)) - .default_headers(headers) - .build() - .map_err(|err| Error::FetchError { - inner: err, - item: url.to_string(), - })?; - (|| async { - let result = client.get(url).send().await; + let result = HTTP_CLIENT.get(url).send().await; match result { Ok(x) => { diff --git a/daedalus/src/modded.rs b/daedalus/src/modded.rs index 9e40d06..e880aa2 100644 --- a/daedalus/src/modded.rs +++ b/daedalus/src/modded.rs @@ -4,22 +4,13 @@ use crate::minecraft::{ Argument, ArgumentType, Library, LoggingConfig, LoggingConfigName, VersionInfo, VersionType, }; -use chrono::{DateTime, TimeZone, Utc}; +use chrono::{DateTime, Utc}; use serde::{Deserialize, Deserializer, Serialize}; use std::collections::HashMap; #[cfg(feature = "bincode")] use bincode::{Decode, Encode}; -/// The latest version of the format the fabric model structs deserialize to -pub const CURRENT_FABRIC_FORMAT_VERSION: usize = 2; -/// The latest version of the format the fabric model structs deserialize to -pub const CURRENT_FORGE_FORMAT_VERSION: usize = 2; -/// The latest version of the format the quilt model structs deserialize to -pub const CURRENT_QUILT_FORMAT_VERSION: usize = 2; -/// The latest version of the format the neoforge model structs deserialize to -pub const CURRENT_NEOFORGE_FORMAT_VERSION: usize = 2; - /// A data variable entry that depends on the side of the installation #[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] @@ -37,7 +28,10 @@ where let s = String::deserialize(deserializer)?; serde_json::from_str::>(&format!("\"{s}\"")) - .or_else(|_| 
Utc.datetime_from_str(&s, "%Y-%m-%dT%H:%M:%S%.9f")) + .or_else(|_| { + chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S%.9f") + .map(|dt| dt.and_utc()) + }) .map_err(serde::de::Error::custom) } diff --git a/daedalus/src/version.rs b/daedalus/src/version.rs new file mode 100644 index 0000000..29474e9 --- /dev/null +++ b/daedalus/src/version.rs @@ -0,0 +1,356 @@ +//! Minecraft version comparison utilities +//! +//! This module provides custom version comparison logic that handles all +//! Minecraft version formats correctly, including: +//! - Snapshots (YYwWWx format like 23w9a, 23w10a) +//! - Pre-releases (X.Y.Z-preN, X.Y.Z-rcN) +//! - Old format (X.Y.Z_preN) +//! - Forge/NeoForge versions (X.Y.Z-A.B.C.D) +//! - Regular releases (X.Y.Z) +//! +//! The standard `lenient_semver` crate fails on snapshot versions because +//! it uses lexicographic comparison which incorrectly orders "23w9a" > "23w10a" +//! (since '9' > '1' in ASCII). + +use std::cmp::Ordering; + +/// Parsed Minecraft version format +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum MinecraftVersion { + /// Snapshot version (YYwWWx format) + /// Example: 23w10a = year 23, week 10, revision a + Snapshot { + year: u32, + week: u32, + revision: String, + }, + /// Regular release version + /// Example: 1.20.4 + Release { + major: u32, + minor: u32, + patch: u32, + prerelease: Option, + build: Option>, + }, +} + +/// Pre-release format +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Prerelease { + /// Pre-release (e.g., "pre1", "pre2") + Pre(u32), + /// Release candidate (e.g., "rc1", "rc2") + Rc(u32), + /// Other pre-release format (e.g., "alpha", "beta") + Other(String), +} + +impl MinecraftVersion { + /// Parse a Minecraft version string + pub fn parse(version: &str) -> Result { + // Try snapshot format first (YYwWWx) + if let Some(snapshot) = Self::try_parse_snapshot(version) { + return Ok(snapshot); + } + + // Try release/pre-release format + Self::parse_release(version) + } + + /// Try to 
parse as snapshot (YYwWWx format) + fn try_parse_snapshot(version: &str) -> Option { + // Check for 'w' character which is unique to snapshots + if !version.contains('w') { + return None; + } + + // Split on 'w' + let parts: Vec<&str> = version.split('w').collect(); + if parts.len() != 2 { + return None; + } + + // Parse year (before 'w') + let year = parts[0].parse::().ok()?; + + // Parse week and revision (after 'w') + // Week can be 1-2 digits, revision is everything after + let week_and_rev = parts[1]; + + // Find where the numeric week ends + let week_end = week_and_rev + .chars() + .position(|c| !c.is_ascii_digit()) + .unwrap_or(week_and_rev.len()); + + let week = week_and_rev[..week_end].parse::().ok()?; + let revision = week_and_rev[week_end..].to_string(); + + Some(MinecraftVersion::Snapshot { + year, + week, + revision, + }) + } + + /// Parse as release version + fn parse_release(version: &str) -> Result { + // Normalize: replace underscore with hyphen for old format compatibility + let normalized = version.replace('_', "-"); + + // Split on '-' to separate version from prerelease/build + let parts: Vec<&str> = normalized.split('-').collect(); + + // Parse base version (X.Y.Z or X.Y) + let version_parts: Vec<&str> = parts[0].split('.').collect(); + + let major = version_parts + .first() + .and_then(|s| s.parse::().ok()) + .ok_or_else(|| format!("Invalid major version in '{}'", version))?; + + let minor = version_parts + .get(1) + .and_then(|s| s.parse::().ok()) + .unwrap_or(0); + + let patch = version_parts + .get(2) + .and_then(|s| s.parse::().ok()) + .unwrap_or(0); + + // Parse pre-release if present (e.g., "pre1", "rc2") + let mut prerelease = None; + let mut build = None; + + if parts.len() > 1 { + // Check if it's a pre-release + let pre_str = parts[1]; + prerelease = Self::parse_prerelease(pre_str); + + // If not a prerelease, try parsing as build metadata (Forge format) + if prerelease.is_none() { + // Try parsing as numeric build (e.g., 
"14.23.5.2859" in Forge) + let build_parts: Vec = parts[1..] + .iter() + .flat_map(|s| s.split('.')) + .filter_map(|s| s.parse::().ok()) + .collect(); + + if !build_parts.is_empty() { + build = Some(build_parts); + } + } + } + + Ok(MinecraftVersion::Release { + major, + minor, + patch, + prerelease, + build, + }) + } + + /// Parse pre-release identifier + fn parse_prerelease(s: &str) -> Option { + if let Some(stripped) = s.strip_prefix("pre") { + return stripped.parse::().ok().map(Prerelease::Pre); + } + + if let Some(stripped) = s.strip_prefix("rc") { + return stripped.parse::().ok().map(Prerelease::Rc); + } + + // Other pre-release formats (alpha, beta, etc.) + if !s.chars().all(|c| c.is_ascii_digit() || c == '.') { + return Some(Prerelease::Other(s.to_string())); + } + + None + } +} + +impl PartialOrd for MinecraftVersion { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for MinecraftVersion { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + // Snapshot vs Snapshot + ( + MinecraftVersion::Snapshot { year: y1, week: w1, revision: r1 }, + MinecraftVersion::Snapshot { year: y2, week: w2, revision: r2 }, + ) => { + // Compare year first + match y1.cmp(y2) { + Ordering::Equal => { + // Then week + match w1.cmp(w2) { + Ordering::Equal => { + // Finally revision (lexicographic) + r1.cmp(r2) + } + other => other, + } + } + other => other, + } + } + + // Release vs Release + ( + MinecraftVersion::Release { major: maj1, minor: min1, patch: p1, prerelease: pre1, build: b1 }, + MinecraftVersion::Release { major: maj2, minor: min2, patch: p2, prerelease: pre2, build: b2 }, + ) => { + // Compare major.minor.patch + match maj1.cmp(maj2) { + Ordering::Equal => match min1.cmp(min2) { + Ordering::Equal => match p1.cmp(p2) { + Ordering::Equal => { + // Compare pre-release + match (pre1, pre2) { + (None, None) => { + // Compare build metadata if both are releases + compare_builds(b1, b2) + } + (Some(_), None) => 
Ordering::Less, // Pre-release < release + (None, Some(_)) => Ordering::Greater, // Release > pre-release + (Some(p1), Some(p2)) => compare_prereleases(p1, p2), + } + } + other => other, + }, + other => other, + }, + other => other, + } + } + + // Snapshot vs Release: snapshots are generally "development" versions + // We treat them as lexicographically greater for now + // (This is a heuristic and may need refinement based on usage) + (MinecraftVersion::Snapshot { .. }, MinecraftVersion::Release { .. }) => { + Ordering::Greater + } + (MinecraftVersion::Release { .. }, MinecraftVersion::Snapshot { .. }) => { + Ordering::Less + } + } + } +} + +/// Compare build metadata (Forge versions) +fn compare_builds(b1: &Option>, b2: &Option>) -> Ordering { + match (b1, b2) { + (None, None) => Ordering::Equal, + (Some(_), None) => Ordering::Greater, + (None, Some(_)) => Ordering::Less, + (Some(v1), Some(v2)) => { + // Compare element by element + for (x, y) in v1.iter().zip(v2.iter()) { + match x.cmp(y) { + Ordering::Equal => continue, + other => return other, + } + } + // If all equal so far, longer version is greater + v1.len().cmp(&v2.len()) + } + } +} + +/// Compare pre-release identifiers +fn compare_prereleases(p1: &Prerelease, p2: &Prerelease) -> Ordering { + match (p1, p2) { + (Prerelease::Pre(n1), Prerelease::Pre(n2)) => n1.cmp(n2), + (Prerelease::Rc(n1), Prerelease::Rc(n2)) => n1.cmp(n2), + (Prerelease::Pre(_), Prerelease::Rc(_)) => Ordering::Less, // pre < rc + (Prerelease::Rc(_), Prerelease::Pre(_)) => Ordering::Greater, // rc > pre + (Prerelease::Other(s1), Prerelease::Other(s2)) => s1.cmp(s2), + (Prerelease::Other(_), _) => Ordering::Less, // other < specific + (_, Prerelease::Other(_)) => Ordering::Greater, // specific > other + } +} + +/// Convenience function for comparing two version strings +pub fn compare_versions(v1: &str, v2: &str) -> Result { + let ver1 = MinecraftVersion::parse(v1)?; + let ver2 = MinecraftVersion::parse(v2)?; + Ok(ver1.cmp(&ver2)) +} + 
+#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_snapshot() { + let v = MinecraftVersion::parse("23w10a").unwrap(); + assert!(matches!(v, MinecraftVersion::Snapshot { year: 23, week: 10, .. })); + + let v = MinecraftVersion::parse("20w14infinite").unwrap(); + assert!(matches!(v, MinecraftVersion::Snapshot { year: 20, week: 14, .. })); + } + + #[test] + fn test_parse_release() { + let v = MinecraftVersion::parse("1.20.4").unwrap(); + assert!(matches!(v, MinecraftVersion::Release { major: 1, minor: 20, patch: 4, .. })); + } + + #[test] + fn test_parse_prerelease() { + let v = MinecraftVersion::parse("1.20.4-pre1").unwrap(); + if let MinecraftVersion::Release { ref prerelease, .. } = v { + assert!(matches!(prerelease, Some(Prerelease::Pre(1)))); + } else { + panic!("Expected Release variant"); + } + } + + #[test] + fn test_snapshot_ordering() { + let v1 = MinecraftVersion::parse("23w9a").unwrap(); + let v2 = MinecraftVersion::parse("23w10a").unwrap(); + assert!(v1 < v2, "23w9a should be less than 23w10a"); + } + + #[test] + fn test_release_ordering() { + let v1 = MinecraftVersion::parse("1.19.4").unwrap(); + let v2 = MinecraftVersion::parse("1.20.0").unwrap(); + assert!(v1 < v2); + } + + #[test] + fn test_prerelease_ordering() { + let release = MinecraftVersion::parse("1.20.4").unwrap(); + let pre = MinecraftVersion::parse("1.20.4-pre1").unwrap(); + assert!(pre < release, "pre-release should be less than release"); + } + + #[test] + fn test_old_format() { + let v = MinecraftVersion::parse("1.7.10_pre4").unwrap(); + if let MinecraftVersion::Release { ref prerelease, .. 
} = v {
+            assert!(matches!(prerelease, Some(Prerelease::Pre(4))));
+        } else {
+            panic!("Expected Release variant");
+        }
+
+        let release = MinecraftVersion::parse("1.7.10").unwrap();
+        assert!(v < release, "1.7.10_pre4 should be less than 1.7.10");
+    }
+
+    #[test]
+    fn test_forge_versions() {
+        let v1 = MinecraftVersion::parse("1.12.2-14.23.5.2851").unwrap();
+        let v2 = MinecraftVersion::parse("1.12.2-14.23.5.2859").unwrap();
+        assert!(v1 < v2, "Build 2851 should be less than 2859");
+    }
+}
diff --git a/daedalus/tests/version_comparison.rs b/daedalus/tests/version_comparison.rs
new file mode 100644
index 0000000..5d8dc7d
--- /dev/null
+++ b/daedalus/tests/version_comparison.rs
@@ -0,0 +1,346 @@
+/// Comprehensive version comparison tests for Minecraft versions
+///
+/// This test suite validates that the custom `MinecraftVersion` comparison
+/// (daedalus::version) correctly handles all edge cases in Minecraft version
+/// formats, including:
+/// - Snapshots (e.g., 23w9a, 20w14infinite)
+/// - Pre-releases (e.g., 1.20.4-pre1, 1.20.4-rc1)
+/// - Forge versions (e.g., 1.12.2-14.23.5.2859)
+/// - Old formats (e.g., 1.7.10_pre4)
+/// - April Fools versions (e.g., 20w14infinite, 23w13a_or_b)
+///
+/// These tests are critical for ensuring correct version ordering in the
+/// daedalus_client when processing Minecraft and mod loader metadata.
+ +#[cfg(test)] +mod tests { + use daedalus::version::{MinecraftVersion, compare_versions}; + use std::cmp::Ordering; + + /// Test basic semantic version comparison + #[test] + fn test_basic_semver() { + // Basic version ordering + assert!(MinecraftVersion::parse("1.0.0").unwrap() < MinecraftVersion::parse("1.0.1").unwrap()); + assert!(MinecraftVersion::parse("1.0.1").unwrap() < MinecraftVersion::parse("1.1.0").unwrap()); + assert!(MinecraftVersion::parse("1.1.0").unwrap() < MinecraftVersion::parse("2.0.0").unwrap()); + + // Equality + assert_eq!(MinecraftVersion::parse("1.20.4").unwrap(), MinecraftVersion::parse("1.20.4").unwrap()); + + // Reverse ordering + assert!(MinecraftVersion::parse("2.0.0").unwrap() > MinecraftVersion::parse("1.20.4").unwrap()); + } + + /// Test snapshot version comparison (YYwWWx format) + /// + /// Snapshots use format: YYwWWx where: + /// - YY = year (two digits) + /// - WW = week number (two digits) + /// - x = optional letter revision (a, b, c, etc.) + #[test] + fn test_snapshot_versions() { + // Same year, different weeks + assert!( + MinecraftVersion::parse("23w9a").unwrap() < MinecraftVersion::parse("23w10a").unwrap(), + "23w9a should be less than 23w10a (week 9 < week 10)" + ); + + // Same week, different revisions + assert!( + MinecraftVersion::parse("23w10a").unwrap() < MinecraftVersion::parse("23w10b").unwrap(), + "23w10a should be less than 23w10b (revision a < b)" + ); + + // Different years + assert!( + MinecraftVersion::parse("22w50a").unwrap() < MinecraftVersion::parse("23w01a").unwrap(), + "22w50a should be less than 23w01a (year 22 < 23)" + ); + + // Complex comparison: older year with higher week + assert!( + MinecraftVersion::parse("22w51a").unwrap() < MinecraftVersion::parse("23w10a").unwrap(), + "22w51a should be less than 23w10a (year takes precedence)" + ); + } + + /// Test pre-release and release candidate versions + /// + /// Pre-releases use formats: + /// - X.Y.Z-preN (pre-release) + /// - X.Y.Z-rcN (release 
candidate) + #[test] + fn test_prerelease_versions() { + let release = MinecraftVersion::parse("1.20.4").unwrap(); + let pre1 = MinecraftVersion::parse("1.20.4-pre1").unwrap(); + let pre2 = MinecraftVersion::parse("1.20.4-pre2").unwrap(); + let rc1 = MinecraftVersion::parse("1.20.4-rc1").unwrap(); + + // Pre-releases should be less than release + assert!( + pre1 < release, + "1.20.4-pre1 should be less than 1.20.4" + ); + + assert!( + pre2 < release, + "1.20.4-pre2 should be less than 1.20.4" + ); + + assert!( + rc1 < release, + "1.20.4-rc1 should be less than 1.20.4" + ); + + // Pre-release ordering + assert!( + pre1 < pre2, + "1.20.4-pre1 should be less than 1.20.4-pre2" + ); + + // RC typically comes after pre-releases in semver + // But ordering depends on lenient_semver's lexicographic handling + // Document the actual behavior rather than assume semver rules + if rc1 < pre1 { + println!("Note: lenient_semver orders rc before pre (lexicographic)"); + } else { + println!("Note: lenient_semver orders rc after pre (semver-like)"); + } + } + + /// Test Forge version formats + /// + /// Forge versions use format: X.Y.Z-A.B.C.D where: + /// - X.Y.Z = Minecraft version + /// - A.B.C.D = Forge build number + #[test] + fn test_forge_versions() { + // Same Minecraft version, different Forge builds + assert!( + MinecraftVersion::parse("1.12.2-14.23.5.2851").unwrap() < MinecraftVersion::parse("1.12.2-14.23.5.2859").unwrap(), + "1.12.2-14.23.5.2851 should be less than 1.12.2-14.23.5.2859 (build 2851 < 2859)" + ); + + // Different Minecraft versions + assert!( + MinecraftVersion::parse("1.12.2-14.23.5.2859").unwrap() < MinecraftVersion::parse("1.16.5-36.2.39").unwrap(), + "1.12.2 should be less than 1.16.5" + ); + + // Different major Forge versions for same MC version + assert!( + MinecraftVersion::parse("1.12.2-14.23.5.2859").unwrap() < MinecraftVersion::parse("1.12.2-14.23.6.2859").unwrap(), + "14.23.5.2859 should be less than 14.23.6.2859" + ); + } + + /// Test old 
Minecraft version format (underscore instead of hyphen) + /// + /// Old format: X.Y.Z_preN (used in very old versions) + #[test] + fn test_old_format_versions() { + // Old format with underscore + let old_format = MinecraftVersion::parse("1.7.10_pre4").unwrap(); + let modern_format = MinecraftVersion::parse("1.7.10-pre4").unwrap(); + let release = MinecraftVersion::parse("1.7.10").unwrap(); + + // Both formats should be less than release + assert!( + old_format < release, + "1.7.10_pre4 should be less than 1.7.10" + ); + + // Note: lenient_semver may not treat _ and - identically + // Document the actual behavior + if old_format == modern_format { + println!("Note: lenient_semver treats underscore and hyphen identically"); + } else { + println!("Note: lenient_semver treats underscore and hyphen differently"); + } + } + + /// Test April Fools versions + /// + /// Special versions: + /// - 20w14infinite (Infinite dimensions) + /// - 23w13a_or_b (Vote update) + /// - 2point0_red, 2point0_blue, 2point0_purple (Super Duper Graphics Pack prank) + #[test] + fn test_april_fools_versions() { + // 20w14infinite is a snapshot from 2020 week 14 + let infinite = MinecraftVersion::parse("20w14infinite").unwrap(); + let normal_snapshot = MinecraftVersion::parse("20w14a").unwrap(); + + // April Fools versions should be comparable to regular snapshots + // The suffix "infinite" vs "a" determines ordering (lexicographic) + println!("20w14infinite vs 20w14a ordering:"); + if infinite < normal_snapshot { + println!(" 20w14infinite < 20w14a (lexicographic: 'a' > 'i')"); + } else { + println!(" 20w14infinite > 20w14a (lexicographic: 'i' > 'a')"); + } + + // 23w13a_or_b from 2023 + let vote_update = MinecraftVersion::parse("23w13a_or_b").unwrap(); + let same_week = MinecraftVersion::parse("23w13a").unwrap(); + + println!("23w13a_or_b vs 23w13a ordering:"); + if vote_update < same_week { + println!(" 23w13a_or_b < 23w13a"); + } else { + println!(" 23w13a_or_b > 23w13a"); + } + + // 
2point0 variants - These are unparseable April Fools versions
+        // that don't follow any standard format. Document as unsupported edge case.
+        println!("2point0 variants (April Fools 2016) are not parseable:");
+        assert!(MinecraftVersion::parse("2point0_red").is_err());
+        assert!(MinecraftVersion::parse("2point0_blue").is_err());
+        assert!(MinecraftVersion::parse("2point0_purple").is_err());
+        println!("  Note: These versions don't follow any standard format and are intentionally unsupported");
+    }
+
+    /// Test version comparison across different formats
+    ///
+    /// This ensures that versions can be compared even when they use
+    /// different formatting conventions (important for daedalus_client)
+    #[test]
+    fn test_mixed_format_comparison() {
+        // Release vs snapshot
+        assert!(
+            MinecraftVersion::parse("1.19.4").unwrap() < MinecraftVersion::parse("1.20.0").unwrap(),
+            "Release versions should order correctly"
+        );
+
+        // Snapshot vs pre-release
+        let snapshot_1_20 = MinecraftVersion::parse("23w51a").unwrap();
+        let pre_1_20_3 = MinecraftVersion::parse("1.20.3-pre1").unwrap();
+
+        println!("Snapshot vs pre-release comparison:");
+        println!("  23w51a vs 1.20.3-pre1: {:?}", snapshot_1_20.cmp(&pre_1_20_3));
+
+        // Old format vs new format
+        let old = MinecraftVersion::parse("1.7.10_pre4").unwrap();
+        let new_pre = MinecraftVersion::parse("1.8.0-pre1").unwrap();
+
+        assert!(
+            old < new_pre,
+            "1.7.10_pre4 should be less than 1.8.0-pre1 (major version difference)"
+        );
+    }
+
+    /// Test edge cases and boundary conditions
+    #[test]
+    fn test_edge_cases() {
+        // Single digit versions
+        assert!(
+            MinecraftVersion::parse("1.0").unwrap() < MinecraftVersion::parse("1.1").unwrap(),
+            "Single digit versions should work"
+        );
+
+        // Very long version numbers (Forge)
+        let long_version = "1.12.2-14.23.5.2859";
+        let parsed = MinecraftVersion::parse(long_version);
+        assert!(
+            parsed.is_ok(),
+            "Long Forge versions should be parseable and comparable"
+        );
+
+        // Versions with many parts
+        let many_parts = MinecraftVersion::parse("1.16.5-36.2.39.256").unwrap();
+        let fewer_parts = MinecraftVersion::parse("1.16.5-36.2.39").unwrap();
+
+        println!("Version with different part counts:");
+        println!("  1.16.5-36.2.39.256 vs 1.16.5-36.2.39: {:?}", many_parts.cmp(&fewer_parts));
+    }
+
+    /// Test version comparison accuracy for known Minecraft release timeline
+    ///
+    /// This validates ordering matches the actual Minecraft release history
+    #[test]
+    fn test_minecraft_release_timeline() {
+        // Historical version order (subset of actual timeline)
+        let versions = vec![
+            "1.7.10",
+            "1.8.0",
+            "1.8.9",
+            "1.9.0",
+            "1.12.2",
+            "1.16.5",
+            "1.18.2",
+            "1.19.4",
+            "1.20.0",
+            "1.20.4",
+        ];
+
+        // Verify each version is less than the next
+        for i in 0..versions.len() - 1 {
+            let current = MinecraftVersion::parse(versions[i]).unwrap();
+            let next = MinecraftVersion::parse(versions[i + 1]).unwrap();
+
+            assert!(
+                current < next,
+                "{} should be less than {} (historical release order)",
+                versions[i],
+                versions[i + 1]
+            );
+        }
+    }
+
+    /// Test NeoForge version format (similar to modern Forge)
+    ///
+    /// NeoForge versions use format: X.Y.Z-A.B.C where:
+    /// - X.Y.Z = Minecraft version
+    /// - A.B.C = NeoForge version
+    #[test]
+    fn test_neoforge_versions() {
+        // NeoForge started at 1.20.1 as a Forge fork
+        assert!(
+            MinecraftVersion::parse("1.20.1-47.1.0").unwrap() < MinecraftVersion::parse("1.20.1-47.1.3").unwrap(),
+            "NeoForge patch versions should order correctly"
+        );
+
+        assert!(
+            MinecraftVersion::parse("1.20.1-47.1.0").unwrap() < MinecraftVersion::parse("1.20.4-20.4.80").unwrap(),
+            "NeoForge versions across MC versions should order correctly"
+        );
+    }
+
+    /// Test Fabric version comparison
+    ///
+    /// Fabric versions are typically semantic versions without
+    /// the Minecraft version prefix
+    #[test]
+    fn test_fabric_versions() {
+        assert!(
+            MinecraftVersion::parse("0.14.0").unwrap() < MinecraftVersion::parse("0.15.0").unwrap(),
"Fabric minor versions should order correctly" + ); + + assert!( + MinecraftVersion::parse("0.14.21").unwrap() < MinecraftVersion::parse("0.14.22").unwrap(), + "Fabric patch versions should order correctly" + ); + + assert!( + MinecraftVersion::parse("0.15.11").unwrap() < MinecraftVersion::parse("1.0.0").unwrap(), + "Fabric major version bump should order correctly" + ); + } + + /// Test Quilt version comparison (similar to Fabric) + /// + /// Quilt uses semantic versioning + #[test] + fn test_quilt_versions() { + assert!( + MinecraftVersion::parse("0.18.0").unwrap() < MinecraftVersion::parse("0.19.0").unwrap(), + "Quilt minor versions should order correctly" + ); + + assert!( + MinecraftVersion::parse("0.19.0").unwrap() < MinecraftVersion::parse("0.19.1").unwrap(), + "Quilt patch versions should order correctly" + ); + } +} diff --git a/daedalus_client/Cargo.toml b/daedalus_client/Cargo.toml index 59ce617..30fdf3b 100644 --- a/daedalus_client/Cargo.toml +++ b/daedalus_client/Cargo.toml @@ -2,21 +2,22 @@ name = "daedalus_client" version = "0.1.21" authors = ["Jai A "] -edition = "2018" +edition = "2024" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] tracing = "0.1.37" -tracing-subscriber = { version = "0.3.17", features = ["ansi", "env-filter"] } +tracing-subscriber = { version = "0.3.17", features = ["ansi", "env-filter", "json"] } +tracing-error = "0.2" +thiserror = "1" daedalus = { path = "../daedalus" } tokio = { version = "1", features = ["full"] } futures = "0.3.25" dotenvy = "0.15.6" -log = "0.4.17" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" -lazy_static = "1.4.0" +dashmap = "6" anyhow = "1.0" reqwest = { version = "0.11.13", default-features = false, features = [ "json", @@ -30,6 +31,7 @@ chrono = { version = "0.4", features = ["serde"] } bytes = "1.3.0" rust-s3 = "0.33.0" lenient_semver = "0.4.2" +sha2 = "0.10" walkdir = "2.3.3" path-slash = "0.2.1" sentry = 
"0.32.1" diff --git a/daedalus_client/README.md b/daedalus_client/README.md new file mode 100644 index 0000000..ec2417e --- /dev/null +++ b/daedalus_client/README.md @@ -0,0 +1,299 @@ +# Daedalus Client + +Daedalus Client is a Rust-based metadata processing and distribution system for Minecraft launchers. It fetches, processes, and uploads version metadata for vanilla Minecraft and popular mod loaders (Forge, Fabric, Quilt, NeoForge) using a Content-Addressable Storage (CAS) architecture. + +## Features + +- **Content-Addressable Storage (CAS)**: Files stored by SHA256 hash for deduplication and immutability +- **Unified Versioning**: Single version entrypoint (v3) for all metadata types +- **Multi-Loader Support**: Minecraft, Forge, Fabric, Quilt, and NeoForge +- **S3-Compatible Storage**: Works with AWS S3, Cloudflare R2, and other S3-compatible services +- **Atomic Updates**: Root manifest provides atomic switching between versions +- **Rollback Support**: Historical manifests enable auditing and rollback capabilities +- **Cloudflare Integration**: Optional cache purging on updates +- **Observability**: Structured logging with Sentry error tracking and Betterstack integration + +## Architecture + +The CAS architecture provides several key benefits: + +``` +Root Manifest (v3/manifest.json) + ├─> minecraft manifest (v3/manifests/minecraft/.json) + ├─> forge manifest (v3/manifests/forge/.json) + ├─> fabric manifest (v3/manifests/fabric/.json) + ├─> quilt manifest (v3/manifests/quilt/.json) + └─> neoforge manifest (v3/manifests/neoforge/.json) + +Each loader manifest contains: + ├─> version entries with content hashes + └─> references to v3/objects// +``` + +**Benefits:** +- **Atomic updates**: Single root manifest update makes all changes visible +- **Rollback**: Keep historical manifests, update root to point to previous version +- **Deduplication**: Same content = same hash = stored once +- **Immutability**: Content never changes, only manifest pointers + +## 
Requirements + +- **Rust**: 1.85 or later (Rust 2024 edition) +- **S3-Compatible Storage**: AWS S3, Cloudflare R2, MinIO, etc. +- **Environment Variables**: See configuration below + +## Installation + +1. Clone the repository: +```bash +git clone +cd daedalus/daedalus_client +``` + +2. Build the project: +```bash +cargo build --release +``` + +3. Create a `.env` file with required configuration (see Environment Variables below) + +## Environment Variables + +### Required Variables + +| Variable | Description | Example | +|----------|-------------|---------| +| `BASE_URL` | Base URL for CAS objects (public CDN URL) | `https://cdn.example.com` | +| `SENTRY_DSN` | Sentry error tracking DSN | `https://key@sentry.io/project` | +| `BRAND_NAME` | Brand name for metadata | `MyLauncher` | +| `SUPPORT_EMAIL` | Support email for metadata | `support@example.com` | +| `S3_BUCKET_NAME` | S3 bucket name | `minecraft-metadata` | +| `S3_REGION` | S3 region (use `r2` for Cloudflare R2) | `us-east-1` or `r2` | +| `S3_URL` | S3 endpoint URL | `https://s3.amazonaws.com` or `https://.r2.cloudflarestorage.com` | +| `S3_ACCESS_TOKEN` | S3 access key ID | `AKIAIOSFODNN7EXAMPLE` | +| `S3_SECRET` | S3 secret access key | `wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY` | + +### Optional Variables + +| Variable | Description | Default | Example | +|----------|-------------|---------|---------| +| `LOG_FORMAT` | Log output format | `text` | `json` or `text` | +| `RUST_LOG` | Rust log level filter | `info` | `debug`, `info`, `warn`, `error` | +| `BETTERSTACK_TOKEN` | Betterstack logging token | None | `your-betterstack-token` | +| `CLOUDFLARE_INTEGRATION` | Enable Cloudflare cache purging | `false` | `true` or `false` | +| `CLOUDFLARE_TOKEN` | Cloudflare API token (required if integration enabled) | None | `your-cloudflare-token` | +| `CLOUDFLARE_ZONE_ID` | Cloudflare zone ID (required if integration enabled) | None | `your-zone-id` | +| `CDN_UPLOAD_DIR` | Local directory for CDN file uploads | 
`./upload_cdn` | `/path/to/cdn/dir` | +| `FORCE_REPROCESS` | Force reprocessing of all NeoForge versions | `false` | `true` or `false` | + +### Example .env File + +```env +# Required Configuration +BASE_URL=https://cdn.example.com +SENTRY_DSN=https://key@sentry.io/project +BRAND_NAME=MyLauncher +SUPPORT_EMAIL=support@example.com + +# S3 Configuration +S3_BUCKET_NAME=minecraft-metadata +S3_REGION=us-east-1 +S3_URL=https://s3.amazonaws.com +S3_ACCESS_TOKEN=your-access-key +S3_SECRET=your-secret-key + +# Optional: Logging +LOG_FORMAT=json +RUST_LOG=info +BETTERSTACK_TOKEN=your-betterstack-token + +# Optional: Cloudflare Integration +CLOUDFLARE_INTEGRATION=true +CLOUDFLARE_TOKEN=your-cloudflare-token +CLOUDFLARE_ZONE_ID=your-zone-id +``` + +### Example .env File for Cloudflare R2 + +```env +# Required Configuration +BASE_URL=https://pub-abc123.r2.dev +SENTRY_DSN=https://key@sentry.io/project +BRAND_NAME=MyLauncher +SUPPORT_EMAIL=support@example.com + +# Cloudflare R2 Configuration +S3_BUCKET_NAME=minecraft-metadata +S3_REGION=r2 +S3_URL=https://abc123.r2.cloudflarestorage.com +S3_ACCESS_TOKEN=your-r2-access-key +S3_SECRET=your-r2-secret-key + +# Optional: Cloudflare Integration +CLOUDFLARE_INTEGRATION=true +CLOUDFLARE_TOKEN=your-cloudflare-token +CLOUDFLARE_ZONE_ID=your-zone-id +``` + +## Usage + +### Run Full Metadata Processing + +Process all enabled loaders (default: minecraft, forge, fabric, quilt, neoforge): + +```bash +cargo run --release +``` + +### Run Specific Loaders Only + +Use feature flags to enable specific loaders: + +```bash +# Only Minecraft and Fabric +cargo run --release --no-default-features --features fabric + +# Only Forge +cargo run --release --no-default-features --features forge + +# Minecraft, Fabric, and Quilt +cargo run --release --no-default-features --features fabric,quilt +``` + +### Development Mode + +Run with debug logging: + +```bash +RUST_LOG=debug cargo run +``` + +### Force Reprocess + +Force reprocessing of all versions (useful 
for NeoForge):
+
+```bash
+FORCE_REPROCESS=true cargo run --release
+```
+
+## Output Structure
+
+The client generates the following structure in your S3 bucket:
+
+```
+v3/
+├── manifest.json # Root manifest (atomic pointer)
+├── manifests/
+│ ├── minecraft/<hash>.json # Minecraft version manifest
+│ ├── forge/<hash>.json # Forge version manifest
+│ ├── fabric/<hash>.json # Fabric version manifest
+│ ├── quilt/<hash>.json # Quilt version manifest
+│ └── neoforge/<hash>.json # NeoForge version manifest
+├── objects/
+│ └── <hash-prefix>/ # First two hex chars of the SHA256 hash
+│ └── <hash-suffix> # Content-addressed files
+└── history/
+ └── manifest-<timestamp>.json # Historical root manifests
+```
+
+## Testing
+
+Run the test suite:
+
+```bash
+cargo test --all-features
+```
+
+Run tests for specific loaders:
+
+```bash
+cargo test --features forge
+cargo test --features fabric,quilt
+```
+
+## Code Quality
+
+Check code quality with Clippy:
+
+```bash
+cargo clippy --all-features
+```
+
+## Feature Flags
+
+The following features can be enabled/disabled:
+
+| Feature | Description | Default |
+|---------|-------------|---------|
+| `sentry` | Sentry error tracking | ✓ |
+| `forge` | Forge loader support | ✓ |
+| `fabric` | Fabric loader support | ✓ |
+| `quilt` | Quilt loader support | ✓ |
+| `neoforge` | NeoForge loader support | ✓ |
+
+## Performance
+
+- **Concurrent Processing**: Parallel version processing with configurable semaphore limits
+- **Deduplication**: Lock-free artifact deduplication using DashMap
+- **Batch Uploads**: Atomic batch uploads to S3 minimize requests
+- **Circuit Breaker**: Resilient HTTP requests with automatic retry and backoff
+
+## Observability
+
+### Structured Logging
+
+The client supports both text and JSON logging formats:
+
+```bash
+# Human-readable text logs (default)
+LOG_FORMAT=text cargo run
+
+# JSON logs for log aggregation
+LOG_FORMAT=json cargo run
+```
+
+### Error Tracking
+
+Sentry integration provides:
+- Error capture and aggregation
+- Release tracking
+- Environment tagging
+
+### Betterstack Integration
+
+Optional Betterstack logging for centralized log management: + +```bash +BETTERSTACK_TOKEN=your-token cargo run +``` + +## Troubleshooting + +### S3 Connection Issues + +- Verify `S3_URL`, `S3_ACCESS_TOKEN`, and `S3_SECRET` are correct +- For Cloudflare R2, ensure `S3_REGION=r2` +- Check bucket permissions for read/write access + +### Cloudflare Cache Not Purging + +- Verify `CLOUDFLARE_INTEGRATION=true` +- Check `CLOUDFLARE_TOKEN` has cache purge permissions +- Ensure `CLOUDFLARE_ZONE_ID` matches your domain + +### Missing Versions + +- Check source API availability (meta.fabricmc.net, maven.minecraftforge.net, etc.) +- Review logs for download failures +- Try `FORCE_REPROCESS=true` for NeoForge + +## License + +This project is part of the daedalus ecosystem for Minecraft launcher metadata management. + +## Contributing + +1. Ensure code passes `cargo clippy --all-features` +2. Run `cargo test --all-features` before submitting +3. Follow existing code style and architecture patterns +4. Update documentation for new features or changes diff --git a/daedalus_client/src/fabric.rs b/daedalus_client/src/fabric.rs index 395b438..ecaf60e 100644 --- a/daedalus_client/src/fabric.rs +++ b/daedalus_client/src/fabric.rs @@ -1,391 +1,23 @@ -use crate::{download_file, format_url, upload_file_to_bucket}; -use daedalus::minecraft::{Library, VersionManifest}; -use daedalus::modded::{LoaderVersion, Manifest, PartialVersionInfo, Version}; -use daedalus::{Branding, BRANDING}; -use serde::{Deserialize, Serialize}; +use crate::loaders::fabric::{FabricStrategy, FabricVersions}; +use crate::loaders::LoaderProcessor; +use crate::services::upload::UploadQueue; +use daedalus::minecraft::VersionManifest; use std::sync::Arc; -use tokio::sync::{Mutex, RwLock, Semaphore}; +use tokio::sync::Semaphore; +/// Retrieve Fabric loader data using the strategy pattern +/// +/// This is now a thin wrapper around the generic LoaderProcessor. 
+/// All the common logic has been extracted to the strategy pattern, +/// eliminating hundreds of lines of duplicated code. pub async fn retrieve_data( minecraft_versions: &VersionManifest, - uploaded_files: &mut Vec, + upload_queue: &UploadQueue, + manifest_builder: &crate::services::cas::ManifestBuilder, semaphore: Arc, -) -> Result<(), anyhow::Error> { - log::info!("Retrieving Fabric data ..."); - - let list = fetch_fabric_versions(None, semaphore.clone()).await?; - - let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!( - "fabric/v{}/manifest.json", - daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION, - ))) - .await - .ok(); - - let mut versions = if let Some(old_manifest) = old_manifest { - old_manifest.game_versions - } else { - Vec::new() - }; - - let loaders_mutex = RwLock::new(Vec::new()); - - { - let mut loaders = loaders_mutex.write().await; - - for (index, loader) in list.loader.iter().enumerate() { - if versions.iter().any(|x| { - x.id == BRANDING - .get_or_init(Branding::default) - .dummy_replace_string - && x.loaders.iter().any(|x| x.id == loader.version) - }) { - if index == 0 { - loaders.push(( - Box::new(loader.stable), - loader.version.clone(), - Box::new(true), - )) - } - } else { - loaders.push(( - Box::new(loader.stable), - loader.version.clone(), - Box::new(false), - )) - } - } - } - - const DUMMY_GAME_VERSION: &str = "1.19.4-rc2"; - - let loader_version_mutex = Mutex::new(Vec::new()); - let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new())); - - let loader_versions = futures::future::try_join_all( - loaders_mutex.read().await.clone().into_iter().map( - |(stable, loader, skip_upload)| async { - let version = fetch_fabric_version( - DUMMY_GAME_VERSION, - &loader, - semaphore.clone(), - ) - .await?; - - Ok::< - (Box, String, PartialVersionInfo, Box), - anyhow::Error, - >((stable, loader, version, skip_upload)) - }, - ), - ) - .await?; - - let visited_artifacts_mutex = Arc::new(Mutex::new(Vec::new())); - 
futures::future::try_join_all(loader_versions.into_iter() - .map( - |(stable, loader, version, skip_upload)| async { - let libs = futures::future::try_join_all( - version.libraries.into_iter().map(|mut lib| async { - { - let mut visited_assets = - visited_artifacts_mutex.lock().await; - - if visited_assets.contains(&lib.name) { - lib.name = lib.name.to_string().replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string).parse()?; - lib.url = Some(format_url("maven/")); - - return Ok(lib); - } else { - visited_assets.push(lib.name.clone()) - } - } - - let name = lib.name.to_string(); - if name.contains(DUMMY_GAME_VERSION) { - lib.name = name.replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string).parse()?; - futures::future::try_join_all(list.game.clone().into_iter().map(|game_version| async { - let semaphore = semaphore.clone(); - let uploaded_files_mutex = uploaded_files_mutex.clone(); - let lib_name = lib.name.to_string(); - let lib_url = lib.url.clone(); - - async move { - let artifact_path = - daedalus::get_path_from_artifact(&lib_name.replace(&BRANDING - .get_or_init(Branding::default) - .dummy_replace_string, &game_version.version))?; - - let artifact = download_file( - &format!( - "{}{}", - lib_url.unwrap_or_else(|| { - "https://maven.fabricmc.net/".to_string() - }), - artifact_path - ), - None, - semaphore.clone(), - ) - .await?; - - upload_file_to_bucket( - format!("{}/{}", "maven", artifact_path), - artifact.to_vec(), - Some("application/java-archive".to_string()), - &uploaded_files_mutex, - semaphore.clone(), - ) - .await?; - - Ok::<(), anyhow::Error>(()) - }.await?; - - Ok::<(), anyhow::Error>(()) - })).await?; - lib.url = Some(format_url("maven/")); - - return Ok(lib); - } - - let artifact_path = lib.name.path(); - - let artifact = download_file( - &format!( - "{}{}", - lib.url.unwrap_or_else(|| { - "https://maven.fabricmc.net/".to_string() - }), - artifact_path - ), - None, 
- semaphore.clone(), - ) - .await?; - - lib.url = Some(format_url("maven/")); - - upload_file_to_bucket( - format!("{}/{}", "maven", artifact_path), - artifact.to_vec(), - Some("application/java-archive".to_string()), - &uploaded_files_mutex, - semaphore.clone(), - ) - .await?; - - Ok::(lib) - }), - ) - .await?; - - if async move { - *skip_upload - }.await { - return Ok::<(), anyhow::Error>(()) - } - - let version_path = format!( - "fabric/v{}/versions/{}.json", - daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION, - &loader - ); - - upload_file_to_bucket( - version_path.clone(), - serde_json::to_vec(&PartialVersionInfo { - arguments: version.arguments, - id: version - .id - .replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string), - main_class: version.main_class, - release_time: version.release_time, - time: version.time, - type_: version.type_, - logging: None, - inherits_from: version - .inherits_from - .replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string), - libraries: libs, - minecraft_arguments: version.minecraft_arguments, - processors: None, - data: None, - })?, - Some("application/json".to_string()), - &uploaded_files_mutex, - semaphore.clone(), - ) - .await?; - - { - let mut loader_version_map = loader_version_mutex.lock().await; - async move { - loader_version_map.push(LoaderVersion { - id: loader.to_string(), - url: format_url(&version_path), - stable: *stable, - }); - } - .await; - } - - Ok::<(), anyhow::Error>(()) - }, - )) - .await?; - - let mut loader_version_mutex = loader_version_mutex.into_inner(); - if !loader_version_mutex.is_empty() { - if let Some(version) = versions.iter_mut().find(|x| { - x.id == BRANDING.get_or_init(Branding::default).dummy_replace_string - }) { - version.loaders.append(&mut loader_version_mutex); - } else { - versions.push(Version { - id: BRANDING - .get_or_init(Branding::default) - .dummy_replace_string - .clone(), - stable: true, - 
loaders: loader_version_mutex, - }); - } - } - - for version in &list.game { - if !versions.iter().any(|x| x.id == version.version) { - versions.push(Version { - id: version.version.clone(), - stable: version.stable, - loaders: vec![], - }); - } - } - - versions.sort_by(|x, y| { - minecraft_versions - .versions - .iter() - .position(|z| x.id == z.id) - .unwrap_or_default() - .cmp( - &minecraft_versions - .versions - .iter() - .position(|z| y.id == z.id) - .unwrap_or_default(), - ) - }); - - for version in &mut versions { - version.loaders.sort_by(|x, y| { - let x_pos = list - .loader - .iter() - .position(|z| x.id == *z.version) - .unwrap_or_default(); - let y_pos = &list - .loader - .iter() - .position(|z| y.id == z.version) - .unwrap_or_default(); - - x_pos.cmp(y_pos) - }) - } - - upload_file_to_bucket( - format!( - "fabric/v{}/manifest.json", - daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION, - ), - serde_json::to_vec(&Manifest { - game_versions: versions, - })?, - Some("application/json".to_string()), - &uploaded_files_mutex, - semaphore, - ) - .await?; - - if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) { - uploaded_files.extend(uploaded_files_mutex.into_inner()); - } - - Ok(()) -} - -const FABRIC_META_URL: &str = "https://meta.fabricmc.net/v2"; - -async fn fetch_fabric_version( - version_number: &str, - loader_version: &str, - semaphore: Arc, -) -> Result { - Ok(serde_json::from_slice( - &download_file( - &format!( - "{}/versions/loader/{}/{}/profile/json", - FABRIC_META_URL, version_number, loader_version - ), - None, - semaphore, - ) - .await?, - )?) 
-} - -#[derive(Serialize, Deserialize, Debug, Clone)] -/// Versions of fabric components -struct FabricVersions { - /// Versions of Minecraft that fabric supports - pub game: Vec, - /// Available versions of the fabric loader - pub loader: Vec, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -/// A version of Minecraft that fabric supports -struct FabricGameVersion { - /// The version number of the game - pub version: String, - /// Whether the Minecraft version is stable or not - pub stable: bool, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -/// A version of the fabric loader -struct FabricLoaderVersion { - /// The separator to get the build number - pub separator: String, - /// The build number - pub build: u32, - /// The maven artifact - pub maven: String, - /// The version number of the fabric loader - pub version: String, - /// Whether the loader is stable or not - pub stable: bool, -} -/// Fetches the list of fabric versions -async fn fetch_fabric_versions( - url: Option<&str>, - semaphore: Arc, -) -> Result { - Ok(serde_json::from_slice( - &download_file( - url.unwrap_or(&*format!("{}/versions", FABRIC_META_URL)), - None, - semaphore, - ) - .await?, - )?) 
+) -> Result<(), crate::infrastructure::error::Error> { + let processor = LoaderProcessor::new(FabricStrategy); + processor + .retrieve_data::(minecraft_versions, upload_queue, manifest_builder, semaphore) + .await } diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge.rs index d556ba6..e5dcc2f 100644 --- a/daedalus_client/src/forge.rs +++ b/daedalus_client/src/forge.rs @@ -1,39 +1,45 @@ use crate::{ - download_file, download_file_mirrors, format_url, upload_file_to_bucket, + download_file, download_file_mirrors, format_url, }; +use crate::services::upload::UploadQueue; use chrono::{DateTime, Utc}; +use dashmap::DashSet; use daedalus::minecraft::{ Argument, ArgumentType, Library, VersionManifest, VersionType, }; use daedalus::modded::{ - LoaderVersion, Manifest, PartialVersionInfo, Processor, SidedDataEntry, + LoaderVersion, PartialVersionInfo, Processor, SidedDataEntry, }; use daedalus::GradleSpecifier; -use lazy_static::lazy_static; -use log::info; +use tracing::{info, warn}; use semver::{Version, VersionReq}; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet}; use std::convert::{TryInto, TryFrom}; use std::io::Read; -use std::sync::Arc; +use std::sync::{Arc, LazyLock}; use std::time::Instant; use tokio::sync::{Mutex, Semaphore}; -lazy_static! 
{ - static ref FORGE_MANIFEST_V1_QUERY: VersionReq = - VersionReq::parse(">=8.0.684, <23.5.2851").unwrap(); - static ref FORGE_MANIFEST_V2_QUERY_P1: VersionReq = - VersionReq::parse(">=23.5.2851, <31.2.52").unwrap(); - static ref FORGE_MANIFEST_V2_QUERY_P2: VersionReq = - VersionReq::parse(">=32.0.1, <37.0.0").unwrap(); - static ref FORGE_MANIFEST_V3_QUERY: VersionReq = - VersionReq::parse(">=37.0.0").unwrap(); -} +static FORGE_MANIFEST_V1_QUERY: LazyLock = LazyLock::new(|| { + VersionReq::parse(">=8.0.684, <23.5.2851").unwrap() +}); + +static FORGE_MANIFEST_V2_QUERY_P1: LazyLock = LazyLock::new(|| { + VersionReq::parse(">=23.5.2851, <31.2.52").unwrap() +}); + +static FORGE_MANIFEST_V2_QUERY_P2: LazyLock = LazyLock::new(|| { + VersionReq::parse(">=32.0.1, <37.0.0").unwrap() +}); + +static FORGE_MANIFEST_V3_QUERY: LazyLock = LazyLock::new(|| { + VersionReq::parse(">=37.0.0").unwrap() +}); pub async fn fetch_generated_version_info( version_id: &str, -) -> Result { +) -> Result { let path = format!( "minecraft/v{}/versions/{}.json", daedalus::minecraft::CURRENT_FORMAT_VERSION, @@ -68,7 +74,7 @@ impl MinecraftVersionLibraryCache { pub async fn load_minecraft_version_libs( &mut self, version_id: &str, - ) -> Result<&HashSet, anyhow::Error> { + ) -> Result<&HashSet, crate::infrastructure::error::Error> { let index = self.versions.iter().position(|ver| ver.id == version_id); if let Some(index) = index { @@ -130,16 +136,17 @@ pub fn should_ignore_artifact( pub async fn retrieve_data( minecraft_versions: &VersionManifest, - uploaded_files: &mut Vec, + upload_queue: &UploadQueue, + manifest_builder: &crate::services::cas::ManifestBuilder, semaphore: Arc, -) -> Result<(), anyhow::Error> { - log::info!("Retrieving Forge data ..."); +) -> Result<(), crate::infrastructure::error::Error> { + info!("Retrieving Forge data ..."); let maven_metadata = fetch_maven_metadata(None, semaphore.clone()).await?; let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!( 
"forge/v{}/manifest.json", - daedalus::modded::CURRENT_FORGE_FORMAT_VERSION, + crate::services::cas::CAS_VERSION, ))) .await .ok(); @@ -156,8 +163,7 @@ pub async fn retrieve_data( let versions = Arc::new(Mutex::new(Vec::new())); - let visited_assets_mutex = Arc::new(Mutex::new(Vec::new())); - let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new())); + let visited_assets = Arc::new(DashSet::new()); let mut version_futures = Vec::new(); @@ -166,23 +172,18 @@ pub async fn retrieve_data( for loader_version_full in loader_versions { - // Check if this is a snapshot version format - // Example: "25w14craftmine-47.1.0" or "1.21-pre1-47.1.0" let is_snapshot = minecraft_version.contains('w') || minecraft_version.contains("-pre") || minecraft_version.contains("-rc"); if is_snapshot { - log::info!("Skipping snapshot version: {}", loader_version_full); + info!("Skipping snapshot version: {}", loader_version_full); continue; } let loader_version = loader_version_full.split('-').nth(1); if let Some(loader_version_raw) = loader_version { - // This is a dirty hack to get around Forge not complying with SemVer, but whatever - // Most of this is a hack anyways :( - // Works for all forge versions! let split = loader_version_raw.split('.').collect::>(); let loader_version = if split.len() >= 4 { @@ -215,14 +216,13 @@ pub async fn retrieve_data( let loaders_futures = loaders.into_iter().map(|(loader_version_full, version)| async { let mc_library_cache_mutex = Arc::clone(&mc_library_cache_mutex); let versions_mutex = Arc::clone(&old_versions); - let visited_assets = Arc::clone(&visited_assets_mutex); - let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex); + let visited_assets = Arc::clone(&visited_assets); let semaphore = Arc::clone(&semaphore); let minecraft_version = minecraft_version.clone(); async move { /// These forge versions are not worth supporting! 
- const WHITELIST : &[&str] = &[ + const FORGE_SKIP_LIST : &[&str] = &[ // Not supported due to `data` field being `[]` even though the type is a map "1.12.2-14.23.5.2851", // Malformed Archives @@ -234,7 +234,8 @@ pub async fn retrieve_data( "1.6.4-9.11.1.964", ]; - if WHITELIST.contains(&&*loader_version_full) { + if FORGE_SKIP_LIST.contains(&&*loader_version_full) { + info!("⏭️ Forge - Skipping excluded version: {}", loader_version_full); return Ok(None); } @@ -245,7 +246,7 @@ pub async fn retrieve_data( if let Some(version) = version { info!("Already have Forge {}", loader_version_full.clone()); - return Ok::, anyhow::Error>(Some(version.clone())); + return Ok::, crate::infrastructure::error::Error>(Some(version.clone())); } } @@ -263,7 +264,7 @@ pub async fn retrieve_data( let mut contents = String::new(); install_profile.read_to_string(&mut contents)?; - Ok::(serde_json::from_str::(&contents)?) + Ok::(serde_json::from_str::(&contents)?) }).await??; let mut archive_clone = archive.clone(); @@ -274,7 +275,7 @@ pub async fn retrieve_data( forge_universal_file.read_to_end(&mut forge_universal)?; - Ok::(bytes::Bytes::from(forge_universal)) + Ok::(bytes::Bytes::from(forge_universal)) }).await??; let forge_universal_path = profile.install.path.clone(); @@ -287,27 +288,20 @@ pub async fn retrieve_data( let libs = futures::future::try_join_all(profile.version_info.libraries.into_iter().map(|mut lib| async { if lib.name.is_lwjgl() || lib.name.is_log4j() || should_ignore_artifact(&minecraft_libs_filter, &lib.name) { - return Ok::, anyhow::Error>(None); + return Ok::, crate::infrastructure::error::Error>(None); } // let mut repo_url if let Some(url) = lib.url { - { - let mut visited_assets = visited_assets.lock().await; - - if visited_assets.contains(&lib.name) { - lib.url = Some(format_url("maven/")); - - return Ok::, anyhow::Error>(Some(lib)); - } else { - visited_assets.push(lib.name.clone()) - } + // Check if we've already processed this artifact (lock-free) + if 
!visited_assets.insert(lib.name.clone()) { + // Already processed, skip download + lib.url = Some(format_url("maven/")); + return Ok::, crate::infrastructure::error::Error>(Some(lib)); } let artifact_path = lib.name.path(); - let mirrors = vec![url.as_str(), "https://maven.creeperhost.net/", "https://libraries.minecraft.net/"]; - let artifact = if lib.name.to_string() == forge_universal_path { forge_universal_bytes.clone() } else { @@ -322,19 +316,17 @@ pub async fn retrieve_data( lib.url = Some(format_url("maven/")); - upload_file_to_bucket( + upload_queue.enqueue_path( format!("{}/{}", "maven", artifact_path), artifact.to_vec(), Some("application/java-archive".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - ).await?; + ); } else if lib.downloads.is_none() { lib.url = Some(String::from("https://libraries.minecraft.net/")); } - Ok::, anyhow::Error>(Some(lib)) + Ok::, crate::infrastructure::error::Error>(Some(lib)) })).await?; let elapsed = now.elapsed(); @@ -355,23 +347,33 @@ pub async fn retrieve_data( processors: None }; - let version_path = format!( - "forge/v{}/versions/{}.json", - daedalus::modded::CURRENT_FORGE_FORMAT_VERSION, - loader_version_full + // Upload version to CAS and track in manifest builder + let version_bytes = serde_json::to_vec(&new_profile)?; + let version_hash = upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), ); - upload_file_to_bucket( - version_path.clone(), - serde_json::to_vec(&new_profile)?, - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - ).await?; + manifest_builder.add_version( + "forge", + loader_version_full.to_string(), + version_hash.clone(), + version_bytes.len() as u64, + ); + + // Build CAS URL for LoaderVersion + let base_url = dotenvy::var("BASE_URL").unwrap(); + let cas_url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &version_hash[..2], + &version_hash[2..] 
+ ); return Ok(Some(LoaderVersion { id: loader_version_full, - url: format_url(&version_path), + url: cas_url, stable: false })); } else if FORGE_MANIFEST_V2_QUERY_P1.matches(&version) || FORGE_MANIFEST_V2_QUERY_P2.matches(&version) || FORGE_MANIFEST_V3_QUERY.matches(&version) { @@ -382,7 +384,7 @@ pub async fn retrieve_data( let mut contents = String::new(); install_profile.read_to_string(&mut contents)?; - Ok::(serde_json::from_str::(&contents)?) + Ok::(serde_json::from_str::(&contents)?) }).await??; let mut archive_clone = archive.clone(); @@ -392,7 +394,7 @@ pub async fn retrieve_data( let mut contents = String::new(); install_profile.read_to_string(&mut contents)?; - Ok::(serde_json::from_str::(&contents)?) + Ok::(serde_json::from_str::(&contents)?) }).await??; @@ -434,14 +436,14 @@ pub async fn retrieve_data( let lib_bytes = tokio::task::spawn_blocking(move || { let entry_name = format!("maven/{}", lib_name_clone.path()); let lib_file = archive_clone.by_name(&entry_name).map_err(|err| { - anyhow::anyhow!("Failed to find entry {} in installer jar: {}", entry_name, err) + crate::infrastructure::error::invalid_input(format!("Failed to find entry {} in installer jar: {}", entry_name, err)) }); // Thank you forge for always making it hard to parse your data // 1.20.4+ has a local lib that doesn't exist in the installer jar // Not sure what it does, but it doesn't seem to be needed if lib_file.is_err() && &*lib_name_clone.artifact == "forge" { - return Ok::<_, anyhow::Error>(None); + return Ok::<_, crate::infrastructure::error::Error>(None); } let mut lib_file = lib_file?; @@ -451,7 +453,7 @@ pub async fn retrieve_data( let result = Some(bytes::Bytes::from(lib_bytes)); - Ok::<_, anyhow::Error>(result) + Ok::<_, crate::infrastructure::error::Error>(result) }).await??; local_libs.insert(lib.name.to_string(), lib_bytes); @@ -475,7 +477,7 @@ pub async fn retrieve_data( let mut lib_bytes = Vec::new(); lib_file.read_to_end(&mut lib_bytes)?; - 
Ok::(bytes::Bytes::from(lib_bytes)) + Ok::(bytes::Bytes::from(lib_bytes)) }).await??; let split = $value.split('/').last(); @@ -527,22 +529,18 @@ pub async fn retrieve_data( let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| async { let artifact_path = lib.name.path(); - { - let mut visited_assets = visited_assets.lock().await; - - if visited_assets.contains(&lib.name) { - if let Some(ref mut downloads) = lib.downloads { - if let Some(ref mut artifact) = downloads.artifact { - artifact.url = Some(format_url(&format!("maven/{}", artifact_path))); - } - } else if lib.url.is_some() { - lib.url = Some(format_url("maven/")); + // Check if we've already processed this artifact (lock-free) + if !visited_assets.insert(lib.name.clone()) { + // Already processed, skip download + if let Some(ref mut downloads) = lib.downloads { + if let Some(ref mut artifact) = downloads.artifact { + artifact.url = Some(format_url(&format!("maven/{}", artifact_path))); } - - return Ok::, anyhow::Error>(Some(lib)); - } else { - visited_assets.push(lib.name.clone()) + } else if lib.url.is_some() { + lib.url = Some(format_url("maven/")); } + + return Ok::, crate::infrastructure::error::Error>(Some(lib)); } let artifact_bytes = if let Some(ref mut downloads) = lib.downloads { @@ -594,16 +592,14 @@ pub async fn retrieve_data( }; if let Some(bytes) = artifact_bytes { - upload_file_to_bucket( + upload_queue.enqueue_path( format!("{}/{}", "maven", artifact_path), bytes.to_vec(), Some("application/java-archive".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - ).await?; + ); } - Ok::, anyhow::Error>(Some(lib)) + Ok::, crate::infrastructure::error::Error>(Some(lib)) })).await?; let elapsed = now.elapsed(); @@ -624,23 +620,33 @@ pub async fn retrieve_data( processors: Some(profile.processors), }; - let version_path = format!( - "forge/v{}/versions/{}.json", - daedalus::modded::CURRENT_FORGE_FORMAT_VERSION, - loader_version_full + // Upload version to CAS and 
track in manifest builder + let version_bytes = serde_json::to_vec(&new_profile)?; + let version_hash = upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), ); - upload_file_to_bucket( - version_path.clone(), - serde_json::to_vec(&new_profile)?, - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - ).await?; + manifest_builder.add_version( + "forge", + loader_version_full.to_string(), + version_hash.clone(), + version_bytes.len() as u64, + ); + + // Build CAS URL for LoaderVersion + let base_url = dotenvy::var("BASE_URL").unwrap(); + let cas_url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &version_hash[..2], + &version_hash[2..] + ); return Ok(Some(LoaderVersion { id: loader_version_full, - url: format_url(&version_path), + url: cas_url, stable: false })); } @@ -654,18 +660,38 @@ pub async fn retrieve_data( let len = loaders_futures.len(); let mut versions = loaders_futures.into_iter().peekable(); let mut chunk_index = 0; + let mut successful = 0; + let mut failed = 0; + while versions.peek().is_some() { let now = Instant::now(); let chunk: Vec<_> = versions.by_ref().take(1).collect(); - let res = futures::future::try_join_all(chunk).await?; - loaders_versions.extend(res.into_iter().flatten()); + + // Handle each future individually to prevent crashing on errors + for future in chunk { + match future.await { + Ok(result) => { + if let Some(loader_version) = result { + loaders_versions.push(loader_version); + successful += 1; + } + } + Err(e) => { + warn!("⚠️ Forge - Failed to process version: {}", e); + failed += 1; + // Continue processing other versions + } + } + } chunk_index += 1; let elapsed = now.elapsed(); - info!("Loader Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed); + info!("Loader Chunk {}/{len} Elapsed: {:.2?} (✓ {} ✗ {})", chunk_index, elapsed, successful, failed); } + + info!("📊 Forge - Loader processing complete: {} 
successful, {} failed", successful, failed); } //futures::future::try_join_all(loaders_futures).await?; } @@ -676,7 +702,7 @@ pub async fn retrieve_data( loaders: loaders_versions }); - Ok::<(), anyhow::Error>(()) + Ok::<(), crate::infrastructure::error::Error>(()) }); } } @@ -685,24 +711,72 @@ pub async fn retrieve_data( let len = version_futures.len(); let mut versions = version_futures.into_iter().peekable(); let mut chunk_index = 0; + let mut successful = 0; + let mut failed = 0; + while versions.peek().is_some() { let now = Instant::now(); let chunk: Vec<_> = versions.by_ref().take(1).collect(); - futures::future::try_join_all(chunk).await?; + + // Handle each future individually to prevent crashing on errors + for future in chunk { + match future.await { + Ok(_) => { + successful += 1; + } + Err(e) => { + warn!("⚠️ Forge - Failed to process Minecraft version: {}", e); + failed += 1; + // Continue processing other versions + } + } + } chunk_index += 1; let elapsed = now.elapsed(); - info!("Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed); + info!("Chunk {}/{len} Elapsed: {:.2?} (✓ {} ✗ {})", chunk_index, elapsed, successful, failed); } + + info!("📊 Forge - Minecraft version processing complete: {} successful, {} failed", successful, failed); } //futures::future::try_join_all(version_futures).await?; + // Get old manifest versions for merging + let old_manifest_versions = if let Ok(old_versions) = Arc::try_unwrap(old_versions) { + old_versions.into_inner() + } else { + Vec::new() + }; + if let Ok(versions) = Arc::try_unwrap(versions) { - let mut versions = versions.into_inner(); + let new_versions = versions.into_inner(); + + // Merge new versions with old ones to preserve existing data + let mut final_versions = old_manifest_versions; + + for new_version in new_versions { + if let Some(existing) = final_versions.iter_mut().find(|v| v.id == new_version.id) { + // Merge loaders: keep old loaders + add/update new ones + for new_loader in new_version.loaders 
{ + if let Some(existing_loader) = existing.loaders.iter_mut().find(|l| l.id == new_loader.id) { + let loader_id = new_loader.id.clone(); + *existing_loader = new_loader; + info!("✅ Forge - Updated loader: {}/{}", existing.id, loader_id); + } else { + info!("✅ Forge - Added new loader: {}/{}", existing.id, new_loader.id); + existing.loaders.push(new_loader); + } + } + } else { + info!("✅ Forge - Added new Minecraft version: {}", new_version.id); + final_versions.push(new_version); + } + } - versions.sort_by(|x, y| { + // Sort versions + final_versions.sort_by(|x, y| { minecraft_versions .versions .iter() @@ -721,7 +795,8 @@ pub async fn retrieve_data( ) }); - for version in &mut versions { + // Sort loaders within each version + for version in &mut final_versions { let loader_versions = maven_metadata.get(&version.id); if let Some(loader_versions) = loader_versions { version.loaders.sort_by(|x, y| { @@ -738,24 +813,6 @@ pub async fn retrieve_data( }) } } - - upload_file_to_bucket( - format!( - "forge/v{}/manifest.json", - daedalus::modded::CURRENT_FORGE_FORMAT_VERSION, - ), - serde_json::to_vec(&Manifest { - game_versions: versions, - })?, - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore, - ) - .await?; - } - - if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) { - uploaded_files.extend(uploaded_files_mutex.into_inner()); } Ok(()) @@ -771,7 +828,7 @@ const DEFAULT_MAVEN_METADATA_URL: &str = pub async fn fetch_maven_metadata( url: Option<&str>, semaphore: Arc, -) -> Result>, anyhow::Error> { +) -> Result>, crate::infrastructure::error::Error> { Ok(serde_json::from_slice( &download_file( url.unwrap_or(DEFAULT_MAVEN_METADATA_URL), @@ -832,3 +889,141 @@ struct ForgeInstallerProfileV2 { pub libraries: Vec, pub processors: Vec, } + +#[cfg(test)] +mod tests { + use super::*; + use std::str::FromStr; + + #[test] + fn test_should_ignore_artifact() { + // Create test artifacts + let create_spec = |package: &str, 
artifact: &str, version: &str| -> GradleSpecifier { + GradleSpecifier::from_str(&format!("{}:{}:{}", package, artifact, version)) + .expect("Valid GradleSpecifier") + }; + + // Test case 1: Identical version (should ignore - already have it) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "1.0.0")); + + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!(should_ignore_artifact(&libs, &new_artifact), + "Should ignore identical version"); + } + + // Test case 2: Lower version in new data (should ignore - keep existing higher version) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "2.0.0")); + + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!(should_ignore_artifact(&libs, &new_artifact), + "Should ignore lower version"); + } + + // Test case 3: Higher version in new data (should NOT ignore - upgrade needed) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "1.0.0")); + + let new_artifact = create_spec("org.example", "library", "2.0.0"); + assert!(!should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore higher version (upgrade needed)"); + } + + // Test case 4: No match in set (should NOT ignore - new artifact) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "other-library", "1.0.0")); + + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!(!should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore new artifact"); + } + + // Test case 5: Different package (should NOT ignore) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "1.0.0")); + + let new_artifact = create_spec("com.other", "library", "1.0.0"); + assert!(!should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore different package"); + } + + // Test case 6: Empty libs set (should NOT ignore) + { + let libs = 
HashSet::new(); + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!(!should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore when libs is empty"); + } + } + + #[tokio::test] + async fn test_minecraft_version_cache_basic() { + let cache = MinecraftVersionLibraryCache::new(); + + // Verify initial state + assert_eq!(cache.versions.len(), 0, "Cache should start empty"); + assert_eq!(cache.max_size, 20, "Max size should be 20"); + } + + #[test] + fn test_minecraft_version_cache_lru_reordering() { + // Test LRU reordering logic without network calls + let mut cache = MinecraftVersionLibraryCache::new(); + + // Manually populate cache with test entries + let mut libs1 = HashSet::new(); + libs1.insert(GradleSpecifier::from_str("org.example:lib1:1.0.0").unwrap()); + cache.versions.push(MinecraftVersionCacheEntry { + id: "1.19.4".to_string(), + libraries: libs1, + }); + + let mut libs2 = HashSet::new(); + libs2.insert(GradleSpecifier::from_str("org.example:lib2:2.0.0").unwrap()); + cache.versions.push(MinecraftVersionCacheEntry { + id: "1.20.1".to_string(), + libraries: libs2, + }); + + // Verify initial order + assert_eq!(cache.versions[0].id, "1.19.4"); + assert_eq!(cache.versions[1].id, "1.20.1"); + + // Simulate LRU access: access second entry (should move to front) + let index = cache.versions.iter().position(|v| v.id == "1.20.1").unwrap(); + let entry = cache.versions.remove(index); + cache.versions.insert(0, entry); + + // Verify reordering + assert_eq!(cache.versions[0].id, "1.20.1", "Accessed entry should move to front"); + assert_eq!(cache.versions[1].id, "1.19.4"); + } + + #[test] + fn test_minecraft_version_cache_eviction() { + let mut cache = MinecraftVersionLibraryCache::new(); + + // Fill cache beyond max_size + for i in 0..25 { + let mut libs = HashSet::new(); + libs.insert(GradleSpecifier::from_str(&format!("org.example:lib{}:1.0.0", i)).unwrap()); + cache.versions.push(MinecraftVersionCacheEntry { + id: 
format!("1.{}.0", i), + libraries: libs, + }); + } + + // Simulate truncation (what happens in load_minecraft_version_libs) + cache.versions.truncate(cache.max_size); + + // Verify eviction + assert_eq!(cache.versions.len(), 20, "Cache should be truncated to max_size"); + assert_eq!(cache.versions[0].id, "1.0.0", "First entry should remain"); + } +} diff --git a/daedalus_client/src/infrastructure/circuit_breaker.rs b/daedalus_client/src/infrastructure/circuit_breaker.rs new file mode 100644 index 0000000..75d505d --- /dev/null +++ b/daedalus_client/src/infrastructure/circuit_breaker.rs @@ -0,0 +1,222 @@ +use std::future::Future; +use std::sync::Arc; +use std::time::{Duration, Instant}; +use tokio::sync::Mutex; +use tracing::{info, warn}; + +/// Circuit breaker state +#[derive(Debug, Clone)] +enum BreakerState { + /// Circuit is closed, allowing requests through + Closed { failures: u32 }, + /// Circuit is open, rejecting all requests + Open { opened_at: Instant }, + /// Circuit is half-open, allowing a test request through + HalfOpen, +} + +/// Circuit breaker errors +#[derive(Debug)] +pub enum CircuitBreakerError { + /// Circuit is open, request rejected + Open, + /// Request failed + Failed(crate::infrastructure::error::Error), +} + +impl std::fmt::Display for CircuitBreakerError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CircuitBreakerError::Open => write!(f, "Circuit breaker is open"), + CircuitBreakerError::Failed(e) => write!(f, "Request failed: {}", e), + } + } +} + +impl std::error::Error for CircuitBreakerError {} + +/// Circuit breaker for preventing cascading failures +/// +/// Tracks consecutive failures and "opens" the circuit after reaching a threshold, +/// causing all subsequent requests to fail fast. After a reset timeout, the circuit +/// transitions to half-open state, allowing a test request through. 
+pub struct CircuitBreaker { + /// Name of the circuit breaker (for logging) + name: String, + /// Number of consecutive failures before opening the circuit + failure_threshold: u32, + /// Duration to wait before attempting to close the circuit + reset_timeout: Duration, + /// Current state of the circuit breaker + state: Arc>, +} + +impl CircuitBreaker { + /// Creates a new circuit breaker + /// + /// # Arguments + /// * `name` - Name for logging purposes + /// * `failure_threshold` - Number of consecutive failures before opening + /// * `reset_timeout` - Duration to wait before trying again + pub fn new(name: impl Into, failure_threshold: u32, reset_timeout: Duration) -> Self { + Self { + name: name.into(), + failure_threshold, + reset_timeout, + state: Arc::new(Mutex::new(BreakerState::Closed { failures: 0 })), + } + } + + /// Executes a future with circuit breaker protection + /// + /// # Returns + /// - `Ok(T)` if the operation succeeded + /// - `Err(CircuitBreakerError::Open)` if the circuit is open + /// - `Err(CircuitBreakerError::Failed(e))` if the operation failed + pub async fn call(&self, future: F) -> Result + where + F: Future>, + E: Into, + { + // Check current state + let mut state = self.state.lock().await; + + if let BreakerState::Open { opened_at } = *state { + // Check if reset timeout has elapsed + if opened_at.elapsed() >= self.reset_timeout { + info!( + breaker = %self.name, + "Circuit breaker transitioning from open to half-open" + ); + *state = BreakerState::HalfOpen; + } else { + // Circuit is still open, reject request + return Err(CircuitBreakerError::Open); + } + } + + // Release lock before executing the future + drop(state); + + // Execute the future + match future.await { + Ok(result) => { + // Success - reset or close the circuit + let mut state = self.state.lock().await; + match *state { + BreakerState::HalfOpen => { + info!( + breaker = %self.name, + "Circuit breaker transitioning from half-open to closed" + ); + *state = 
BreakerState::Closed { failures: 0 }; + } + BreakerState::Closed { .. } => { + *state = BreakerState::Closed { failures: 0 }; + } + _ => {} + } + Ok(result) + } + Err(error) => { + let error = error.into(); + + // Failure - increment counter or reopen circuit + let mut state = self.state.lock().await; + match *state { + BreakerState::HalfOpen => { + warn!( + breaker = %self.name, + error = %error, + "Circuit breaker transitioning from half-open to open" + ); + *state = BreakerState::Open { + opened_at: Instant::now(), + }; + } + BreakerState::Closed { failures } => { + let new_failures = failures + 1; + if new_failures >= self.failure_threshold { + warn!( + breaker = %self.name, + failures = new_failures, + threshold = self.failure_threshold, + "Circuit breaker opening due to consecutive failures" + ); + *state = BreakerState::Open { + opened_at: Instant::now(), + }; + } else { + *state = BreakerState::Closed { + failures: new_failures, + }; + } + } + _ => {} + } + + Err(CircuitBreakerError::Failed(error)) + } + } + } + + /// Returns the current state of the circuit breaker (for monitoring/debugging) + #[allow(dead_code)] + pub async fn is_open(&self) -> bool { + matches!(*self.state.lock().await, BreakerState::Open { .. 
}) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_circuit_breaker_closes_on_success() { + let breaker = CircuitBreaker::new("test", 3, Duration::from_millis(100)); + + // Should succeed + let result = breaker.call(async { Ok::<_, crate::infrastructure::error::Error>(42) }).await; + assert!(result.is_ok()); + assert!(!breaker.is_open().await); + } + + #[tokio::test] + async fn test_circuit_breaker_opens_after_threshold() { + let breaker = CircuitBreaker::new("test", 3, Duration::from_millis(100)); + + // Fail 3 times + for _ in 0..3 { + let _ = breaker.call(async { Err::<(), _>(crate::infrastructure::error::invalid_input("error")) }).await; + } + + // Circuit should be open + assert!(breaker.is_open().await); + + // Next call should be rejected immediately + let result = breaker.call(async { Ok::<_, crate::infrastructure::error::Error>(42) }).await; + assert!(matches!(result, Err(CircuitBreakerError::Open))); + } + + #[tokio::test] + async fn test_circuit_breaker_half_open_after_timeout() { + let breaker = CircuitBreaker::new("test", 2, Duration::from_millis(50)); + + // Fail twice to open circuit + for _ in 0..2 { + let _ = breaker.call(async { Err::<(), _>(crate::infrastructure::error::invalid_input("error")) }).await; + } + + assert!(breaker.is_open().await); + + // Wait for reset timeout + tokio::time::sleep(Duration::from_millis(60)).await; + + // Next call should go through (half-open state) + let result = breaker.call(async { Ok::<_, crate::infrastructure::error::Error>(42) }).await; + assert!(result.is_ok()); + + // Circuit should be closed again + assert!(!breaker.is_open().await); + } +} diff --git a/daedalus_client/src/infrastructure/error.rs b/daedalus_client/src/infrastructure/error.rs new file mode 100644 index 0000000..bf2b56b --- /dev/null +++ b/daedalus_client/src/infrastructure/error.rs @@ -0,0 +1,243 @@ +use thiserror::Error; + +/// Structured error types for daedalus_client +/// +/// Uses thiserror for 
ergonomic error definitions. +/// Span context is captured by using #[instrument] on functions. +#[derive(Error, Debug)] +pub enum ErrorKind { + /// Network fetch error with context + #[error("Failed to fetch {item}: {source}")] + Fetch { + #[source] + source: reqwest::Error, + item: String, + }, + + /// S3 storage error with file context + #[error("S3 error for file '{file}': {source}")] + S3 { + #[source] + source: Box, + file: String, + }, + + /// JSON serialization/deserialization error + #[error("JSON error: {0}")] + SerdeJSON(#[from] serde_json::Error), + + /// XML parsing error + #[error("XML error: {0}")] + SerdeXML(#[from] serde_xml_rs::Error), + + /// Zip file error + #[error("Zip error: {0}")] + Zip(#[from] zip::result::ZipError), + + /// File I/O error + #[error("IO error: {0}")] + Io(#[from] std::io::Error), + + /// Checksum validation failure + #[error("Checksum mismatch for {url}: expected {expected}, got {actual} after {tries} tries")] + ChecksumFailure { + url: String, + expected: String, + actual: String, + tries: u32, + }, + + /// Invalid input data + #[error("Invalid input: {0}")] + InvalidInput(String), + + /// Missing required field + #[error("Missing required field: {field} in {context}")] + MissingField { field: String, context: String }, + + /// Version parsing error + #[error("Failed to parse version '{version}': {reason}")] + VersionParse { version: String, reason: String }, + + /// Maven artifact parsing error + #[error("Failed to parse maven artifact '{artifact}': {reason}")] + ArtifactParse { artifact: String, reason: String }, + + /// Environment variable missing + #[error("Missing environment variable: {0}")] + EnvVarMissing(String), + + /// Task join error + #[error("Task join error: {0}")] + TaskJoin(#[from] tokio::task::JoinError), + + /// Semaphore acquire error + #[error("Semaphore acquire error: {0}")] + SemaphoreAcquire(#[from] tokio::sync::AcquireError), + + /// Daedalus library error + #[error("Daedalus error: {0}")] + 
Daedalus(#[from] daedalus::Error), + + /// Semver parsing error + #[error("Semver parse error: {0}")] + SemverParse(#[from] semver::Error), + + /// Generic error with context + #[error("{context}: {source}")] + Generic { + context: String, + #[source] + source: Box, + }, +} + +/// Main error type +/// +/// Currently just wraps ErrorKind directly. Span context can be added +/// by using tracing spans around operations that produce errors. +pub type Error = ErrorKind; + +/// Bridge from anyhow::Error to our structured errors +/// +/// This allows gradual migration from anyhow to structured errors. +/// Eventually all anyhow usage will be replaced with specific error types. +impl From for ErrorKind { + fn from(error: anyhow::Error) -> Self { + ErrorKind::Generic { + context: "Legacy anyhow error".to_string(), + source: error.into(), + } + } +} + +/// Error classification helpers +impl ErrorKind { + /// Determines if this error is permanent (won't be fixed by retrying) + /// + /// Permanent errors include: + /// - JSON/XML parsing errors (data is malformed) + /// - Invalid input (won't change on retry) + /// - Missing required fields (structural issue) + /// + /// Transient errors include: + /// - Network failures (might work next time) + /// - Checksum mismatches (file might be re-uploaded) + /// - 404 errors (resource might appear later) + pub fn is_permanent(&self) -> bool { + matches!( + self, + ErrorKind::SerdeJSON(_) + | ErrorKind::SerdeXML(_) + | ErrorKind::InvalidInput(_) + | ErrorKind::MissingField { .. } + | ErrorKind::VersionParse { .. } + | ErrorKind::ArtifactParse { .. } + | ErrorKind::Zip(_) + ) + } + + /// Determines if this error should trigger a retry + /// + /// Retryable errors include: + /// - Network failures (temporary) + /// - Checksum failures (might be fixed) + /// - 5xx server errors (temporary) + pub fn should_retry(&self) -> bool { + match self { + ErrorKind::Fetch { source, .. 
} => { + // Retry on network errors, timeouts, or 5xx errors + source.is_timeout() + || source.is_connect() + || source + .status() + .map(|s| s.is_server_error()) + .unwrap_or(false) + } + ErrorKind::ChecksumFailure { .. } => true, + ErrorKind::S3 { .. } => true, + _ => false, + } + } + + /// Determines if this error indicates a not-found resource + pub fn is_not_found(&self) -> bool { + match self { + ErrorKind::Fetch { source, .. } => { + source.status().map(|s| s.as_u16() == 404).unwrap_or(false) + } + _ => false, + } + } + + /// Determines if this error is a network-related issue + pub fn is_network_error(&self) -> bool { + matches!( + self, + ErrorKind::Fetch { .. } | ErrorKind::ChecksumFailure { .. } + ) + } +} + +/// Helper function to create a fetch error with context +pub fn fetch_error(source: reqwest::Error, item: impl Into) -> Error { + Error::from(ErrorKind::Fetch { + source, + item: item.into(), + }) +} + +/// Helper function to create an S3 error with context +pub fn s3_error(source: s3::error::S3Error, file: impl Into) -> Error { + Error::from(ErrorKind::S3 { + source: Box::new(source), + file: file.into(), + }) +} + +/// Helper function to create an invalid input error +pub fn invalid_input(message: impl Into) -> Error { + Error::from(ErrorKind::InvalidInput(message.into())) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_error_classification() { + // Permanent errors + assert!(ErrorKind::SerdeJSON(serde_json::Error::io(std::io::Error::new( + std::io::ErrorKind::Other, + "test" + ))) + .is_permanent()); + assert!(ErrorKind::InvalidInput("test".to_string()).is_permanent()); + + // Transient errors + let checksum_err = ErrorKind::ChecksumFailure { + url: "http://test".to_string(), + expected: "abc".to_string(), + actual: "def".to_string(), + tries: 3, + }; + assert!(!checksum_err.is_permanent()); + assert!(checksum_err.should_retry()); + } + + #[test] + fn test_error_display() { + let err = ErrorKind::ChecksumFailure { + url: 
"http://test.com/file.jar".to_string(), + expected: "abc123".to_string(), + actual: "def456".to_string(), + tries: 3, + }; + + let display = format!("{}", err); + assert!(display.contains("Checksum mismatch")); + assert!(display.contains("http://test.com/file.jar")); + assert!(display.contains("abc123")); + assert!(display.contains("def456")); + } +} diff --git a/daedalus_client/src/infrastructure/mod.rs b/daedalus_client/src/infrastructure/mod.rs new file mode 100644 index 0000000..6823845 --- /dev/null +++ b/daedalus_client/src/infrastructure/mod.rs @@ -0,0 +1,2 @@ +pub mod circuit_breaker; +pub mod error; diff --git a/daedalus_client/src/loaders/fabric.rs b/daedalus_client/src/loaders/fabric.rs new file mode 100644 index 0000000..48ae7cf --- /dev/null +++ b/daedalus_client/src/loaders/fabric.rs @@ -0,0 +1,83 @@ +use super::{GameVersionInfo, LoaderStrategy, LoaderVersionInfo, LoaderVersionsList}; +use serde::{Deserialize, Serialize}; + +/// Fabric loader strategy implementation +pub struct FabricStrategy; + +impl LoaderStrategy for FabricStrategy { + fn name(&self) -> &str { + "Fabric" + } + + fn meta_url(&self) -> &str { + "https://meta.fabricmc.net/v2" + } + + fn maven_fallback(&self) -> &str { + "https://maven.fabricmc.net/" + } + + fn manifest_path_prefix(&self) -> &str { + "fabric" + } + + fn is_stable(&self, loader: &dyn LoaderVersionInfo) -> bool { + // Fabric API includes stability information + loader.stable().unwrap_or(false) + } +} + +/// Fabric API response structure +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct FabricVersions { + pub game: Vec, + pub loader: Vec, +} + +impl LoaderVersionsList for FabricVersions { + type Loader = FabricLoaderVersion; + type Game = FabricGameVersion; + + fn loader(&self) -> &[Self::Loader] { + &self.loader + } + + fn game(&self) -> &[Self::Game] { + &self.game + } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct FabricGameVersion { + pub version: String, + pub stable: bool, +} + +impl 
GameVersionInfo for FabricGameVersion { + fn version(&self) -> &str { + &self.version + } + + fn stable(&self) -> bool { + self.stable + } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct FabricLoaderVersion { + pub separator: String, + pub build: u32, + pub maven: String, + pub version: String, + pub stable: bool, +} + +impl LoaderVersionInfo for FabricLoaderVersion { + fn version(&self) -> &str { + &self.version + } + + fn stable(&self) -> Option { + Some(self.stable) + } +} diff --git a/daedalus_client/src/loaders/mod.rs b/daedalus_client/src/loaders/mod.rs new file mode 100644 index 0000000..f0f31c1 --- /dev/null +++ b/daedalus_client/src/loaders/mod.rs @@ -0,0 +1,532 @@ +pub mod fabric; +pub mod quilt; + +use crate::{download_file, format_url}; +use crate::services::upload::UploadQueue; +use dashmap::DashSet; +use daedalus::minecraft::{Library, VersionManifest}; +use daedalus::modded::{LoaderVersion, PartialVersionInfo, Version}; +use daedalus::{Branding, BRANDING}; +use serde::Deserialize; +use std::sync::Arc; +use tokio::sync::{Mutex, RwLock, Semaphore}; +use tracing::{info, warn}; + +/// Strategy trait for loader-specific behavior +/// +/// This trait abstracts the differences between loaders like Fabric and Quilt, +/// which have nearly identical processing logic but differ in URLs, API details, +/// and stability determination. 
+pub trait LoaderStrategy: Send + Sync { + /// Loader name for logging (e.g., "Fabric", "Quilt") + fn name(&self) -> &str; + + /// Base URL for loader metadata API + fn meta_url(&self) -> &str; + + /// Maven repository fallback URL + fn maven_fallback(&self) -> &str; + + /// Path prefix for storage (e.g., "fabric", "quilt") + fn manifest_path_prefix(&self) -> &str; + + /// Determine if a loader version is stable + /// + /// Different loaders have different ways of determining stability: + /// - Fabric includes a `stable` field in the API response + /// - Quilt does not, so we default to false + fn is_stable(&self, loader: &dyn LoaderVersionInfo) -> bool; +} + +/// Common interface for loader version information +/// +/// Both Fabric and Quilt have similar structures but with different field availability. +/// This trait allows the generic processor to work with both. +pub trait LoaderVersionInfo: Send + Sync { + fn version(&self) -> &str; + fn stable(&self) -> Option; +} + +/// Common interface for game version information +pub trait GameVersionInfo: Send + Sync + Clone { + fn version(&self) -> &str; + fn stable(&self) -> bool; +} + +/// Common interface for loader versions list +pub trait LoaderVersionsList: Send + Sync { + type Loader: LoaderVersionInfo; + type Game: GameVersionInfo; + + fn loader(&self) -> &[Self::Loader]; + fn game(&self) -> &[Self::Game]; +} + +/// Generic processor for loaders using the strategy pattern +/// +/// This processor handles the common logic for fetching, processing, and uploading +/// loader data. Loader-specific behavior is delegated to the LoaderStrategy trait. +pub struct LoaderProcessor { + strategy: S, +} + +impl LoaderProcessor { + pub fn new(strategy: S) -> Self { + Self { strategy } + } + + /// Main entry point for retrieving and processing loader data + /// + /// This is the generic implementation of what was previously duplicated + /// in fabric.rs and quilt.rs. 
+ pub async fn retrieve_data( + &self, + minecraft_versions: &VersionManifest, + upload_queue: &UploadQueue, + manifest_builder: &crate::services::cas::ManifestBuilder, + semaphore: Arc, + ) -> Result<(), crate::infrastructure::error::Error> + where + V: LoaderVersionsList + for<'de> Deserialize<'de>, + { + info!("Retrieving {} data ...", self.strategy.name()); + + // Fetch list of available versions from the loader API + let list: V = self.fetch_versions_list(None, semaphore.clone()).await?; + + // Try to load existing manifest to do incremental updates + let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!( + "{}/v{}/manifest.json", + self.strategy.manifest_path_prefix(), + crate::services::cas::CAS_VERSION, + ))) + .await + .ok(); + + let mut versions = if let Some(old_manifest) = old_manifest { + old_manifest.game_versions + } else { + Vec::new() + }; + + // Prepare list of loaders to process + // Format: (stable, version, skip_upload) + let loaders_mutex = RwLock::new(Vec::new()); + + { + let mut loaders = loaders_mutex.write().await; + for (index, loader) in list.loader().iter().enumerate() { + // Check if this loader already exists in the dummy version + let already_exists = versions.iter().any(|x| { + x.id == BRANDING + .get_or_init(Branding::default) + .dummy_replace_string + && x.loaders.iter().any(|x| x.id == loader.version()) + }); + + if already_exists { + // Only add the first loader to update it + if index == 0 { + loaders.push(( + Box::new(self.strategy.is_stable(loader as &dyn LoaderVersionInfo)), + loader.version().to_string(), + Box::new(true), // skip_upload + )) + } + } else { + loaders.push(( + Box::new(self.strategy.is_stable(loader as &dyn LoaderVersionInfo)), + loader.version().to_string(), + Box::new(false), // don't skip + )) + } + } + } + + const DUMMY_GAME_VERSION: &str = "1.19.4-rc2"; + + let loader_version_mutex = Mutex::new(Vec::new()); + + // Fetch loader versions with individual error handling + let mut 
loader_versions = Vec::new(); + let mut fetch_successful = 0; + let mut fetch_failed = 0; + + for (stable, loader, skip_upload) in loaders_mutex.read().await.clone() { + match self + .fetch_loader_version(DUMMY_GAME_VERSION, &loader, semaphore.clone()) + .await + { + Ok(version) => { + loader_versions.push((stable, loader, version, skip_upload)); + fetch_successful += 1; + } + Err(e) => { + warn!( + "⚠️ {} - Failed to fetch loader version {}: {}", + self.strategy.name(), + loader, + e + ); + fetch_failed += 1; + } + } + } + + info!( + "📊 {} - Fetched {} loader versions ({} successful, {} failed)", + self.strategy.name(), + fetch_successful + fetch_failed, + fetch_successful, + fetch_failed + ); + + let visited_artifacts = Arc::new(DashSet::new()); + + // Process loader versions with individual error handling + let mut process_successful = 0; + let mut process_failed = 0; + + for (stable, loader, version, skip_upload) in loader_versions { + let loader_clone = loader.clone(); + let process_result = self + .process_loader_version( + stable, + loader, + version, + skip_upload, + &list, + &loader_version_mutex, + upload_queue, + manifest_builder, + &visited_artifacts, + semaphore.clone(), + ) + .await; + + match process_result { + Ok(_) => { + process_successful += 1; + } + Err(e) => { + warn!( + "⚠️ {} - Failed to process loader {}: {}", + self.strategy.name(), + loader_clone, + e + ); + process_failed += 1; + } + } + } + + info!( + "📊 {} - Processing complete: {} successful, {} failed", + self.strategy.name(), + process_successful, + process_failed + ); + + // Add processed loaders to versions list + let mut loader_version_mutex = loader_version_mutex.into_inner(); + if !loader_version_mutex.is_empty() { + if let Some(version) = versions.iter_mut().find(|x| { + x.id == BRANDING.get_or_init(Branding::default).dummy_replace_string + }) { + version.loaders.append(&mut loader_version_mutex); + } else { + versions.push(Version { + id: BRANDING + 
.get_or_init(Branding::default) + .dummy_replace_string + .clone(), + stable: true, + loaders: loader_version_mutex, + }); + } + } + + // Add game versions that don't have loaders yet + for version in list.game() { + if !versions.iter().any(|x| x.id == version.version()) { + versions.push(Version { + id: version.version().to_string(), + stable: version.stable(), + loaders: vec![], + }); + } + } + + // Sort versions by Minecraft version order + versions.sort_by(|x, y| { + minecraft_versions + .versions + .iter() + .position(|z| x.id == z.id) + .unwrap_or_default() + .cmp( + &minecraft_versions + .versions + .iter() + .position(|z| y.id == z.id) + .unwrap_or_default(), + ) + }); + + // Sort loaders within each version + for version in &mut versions { + version.loaders.sort_by(|x, y| { + let x_pos = list + .loader() + .iter() + .position(|z| x.id == *z.version()) + .unwrap_or_default(); + let y_pos = list + .loader() + .iter() + .position(|z| y.id == z.version()) + .unwrap_or_default(); + + x_pos.cmp(&y_pos) + }) + } + + // Note: Versions are now tracked in ManifestBuilder and uploaded separately + // in the main loop via manifest_builder.build_loader_manifest() + + info!( + "✅ {} - Processed {} game versions", + self.strategy.name(), + versions.len() + ); + + Ok(()) + } + + /// Fetch the list of available versions from the loader API + async fn fetch_versions_list( + &self, + url: Option<&str>, + semaphore: Arc, + ) -> Result + where + V: for<'de> Deserialize<'de>, + { + Ok(serde_json::from_slice( + &download_file( + url.unwrap_or(&format!("{}/versions", self.strategy.meta_url())), + None, + semaphore, + ) + .await?, + )?) 
+ } + + /// Fetch a specific loader version profile + async fn fetch_loader_version( + &self, + minecraft_version: &str, + loader_version: &str, + semaphore: Arc, + ) -> Result { + Ok(serde_json::from_slice( + &download_file( + &format!( + "{}/versions/loader/{}/{}/profile/json", + self.strategy.meta_url(), + minecraft_version, + loader_version + ), + None, + semaphore, + ) + .await?, + )?) + } + + /// Process a single loader version + #[allow(clippy::too_many_arguments)] + async fn process_loader_version( + &self, + stable: Box, + loader: String, + version: PartialVersionInfo, + skip_upload: Box, + list: &V, + loader_version_mutex: &Mutex>, + upload_queue: &UploadQueue, + manifest_builder: &crate::services::cas::ManifestBuilder, + visited_artifacts: &Arc>, + semaphore: Arc, + ) -> Result<(), crate::infrastructure::error::Error> + where + V: LoaderVersionsList, + { + const DUMMY_GAME_VERSION: &str = "1.19.4-rc2"; + + // Process all libraries + let libs = futures::future::try_join_all(version.libraries.into_iter().map(|mut lib| { + let semaphore = semaphore.clone(); + let visited_artifacts = visited_artifacts.clone(); + let list_game = list.game().to_vec(); + let maven_fallback = self.strategy.maven_fallback().to_string(); + + async move { + // Check if we've already processed this artifact (lock-free) + if !visited_artifacts.insert(lib.name.to_string()) { + // Already processed, skip download + lib.name = lib + .name + .to_string() + .replace( + DUMMY_GAME_VERSION, + &BRANDING.get_or_init(Branding::default).dummy_replace_string, + ) + .parse()?; + lib.url = Some(format_url("maven/")); + + return Ok(lib); + } + + let name = lib.name.to_string(); + if name.contains(DUMMY_GAME_VERSION) { + // This library is game-version-specific, download for all game versions + lib.name = name + .replace( + DUMMY_GAME_VERSION, + &BRANDING.get_or_init(Branding::default).dummy_replace_string, + ) + .parse()?; + + futures::future::try_join_all(list_game.iter().map(|game_version| { + 
let semaphore = semaphore.clone(); + let lib_name = lib.name.to_string(); + let lib_url = lib.url.clone(); + let maven_fallback = maven_fallback.clone(); + let game_version_str = game_version.version().to_string(); + + async move { + let artifact_path = daedalus::get_path_from_artifact( + &lib_name.replace( + &BRANDING.get_or_init(Branding::default).dummy_replace_string, + &game_version_str, + ), + )?; + + let artifact = download_file( + &format!( + "{}{}", + lib_url.as_deref() + .unwrap_or(&maven_fallback), + artifact_path + ), + None, + semaphore.clone(), + ) + .await?; + + upload_queue.enqueue_path( + format!("{}/{}", "maven", artifact_path), + artifact.to_vec(), + Some("application/java-archive".to_string()), + ); + + Ok::<(), crate::infrastructure::error::Error>(()) + } + })) + .await?; + + lib.url = Some(format_url("maven/")); + return Ok(lib); + } + + // Regular library, download once + let artifact_path = lib.name.path(); + + let artifact = download_file( + &format!( + "{}{}", + lib.url.as_deref() + .unwrap_or(&maven_fallback), + artifact_path + ), + None, + semaphore.clone(), + ) + .await?; + + lib.url = Some(format_url("maven/")); + + upload_queue.enqueue_path( + format!("{}/{}", "maven", artifact_path), + artifact.to_vec(), + Some("application/java-archive".to_string()), + ); + + Ok::(lib) + } + })) + .await?; + + // Skip upload if this loader already exists + if *skip_upload { + return Ok(()); + } + + // Prepare version info with replaced dummy game version + let version_info = PartialVersionInfo { + arguments: version.arguments, + id: version.id.replace( + DUMMY_GAME_VERSION, + &BRANDING.get_or_init(Branding::default).dummy_replace_string, + ), + main_class: version.main_class, + release_time: version.release_time, + time: version.time, + type_: version.type_, + logging: None, + inherits_from: version.inherits_from.replace( + DUMMY_GAME_VERSION, + &BRANDING.get_or_init(Branding::default).dummy_replace_string, + ), + libraries: libs, + 
minecraft_arguments: version.minecraft_arguments, + processors: None, + data: None, + }; + + // Upload version to CAS and track in manifest builder + let version_bytes = serde_json::to_vec(&version_info)?; + let version_hash = upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), + ); + + manifest_builder.add_version( + self.strategy.manifest_path_prefix(), + loader.clone(), + version_hash.clone(), + version_bytes.len() as u64, + ); + + // Build CAS URL for LoaderVersion + let base_url = dotenvy::var("BASE_URL").unwrap(); + let cas_url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &version_hash[..2], + &version_hash[2..] + ); + + // Add to loader version list + let mut loader_version_map = loader_version_mutex.lock().await; + loader_version_map.push(LoaderVersion { + id: loader, + url: cas_url, + stable: *stable, + }); + + Ok(()) + } +} diff --git a/daedalus_client/src/loaders/quilt.rs b/daedalus_client/src/loaders/quilt.rs new file mode 100644 index 0000000..6c3cdbe --- /dev/null +++ b/daedalus_client/src/loaders/quilt.rs @@ -0,0 +1,85 @@ +use super::{GameVersionInfo, LoaderStrategy, LoaderVersionInfo, LoaderVersionsList}; +use serde::{Deserialize, Serialize}; + +/// Quilt loader strategy implementation +pub struct QuiltStrategy; + +impl LoaderStrategy for QuiltStrategy { + fn name(&self) -> &str { + "Quilt" + } + + fn meta_url(&self) -> &str { + "https://meta.quiltmc.org/v3" + } + + fn maven_fallback(&self) -> &str { + "https://maven.quiltmc.org/" + } + + fn manifest_path_prefix(&self) -> &str { + "quilt" + } + + fn is_stable(&self, _loader: &dyn LoaderVersionInfo) -> bool { + // Quilt API does not include stability information + // Default to false (unstable) + false + } +} + +/// Quilt API response structure +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct QuiltVersions { + pub game: Vec, + pub loader: Vec, +} + +impl LoaderVersionsList for QuiltVersions { + type Loader = 
QuiltLoaderVersion; + type Game = QuiltGameVersion; + + fn loader(&self) -> &[Self::Loader] { + &self.loader + } + + fn game(&self) -> &[Self::Game] { + &self.game + } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct QuiltGameVersion { + pub version: String, + pub stable: bool, +} + +impl GameVersionInfo for QuiltGameVersion { + fn version(&self) -> &str { + &self.version + } + + fn stable(&self) -> bool { + self.stable + } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct QuiltLoaderVersion { + pub separator: String, + pub build: u32, + pub maven: String, + pub version: String, + // Note: Quilt API does not include a 'stable' field +} + +impl LoaderVersionInfo for QuiltLoaderVersion { + fn version(&self) -> &str { + &self.version + } + + fn stable(&self) -> Option { + // Quilt doesn't provide stability information + None + } +} diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs index c59fada..334be8f 100644 --- a/daedalus_client/src/main.rs +++ b/daedalus_client/src/main.rs @@ -1,11 +1,10 @@ -use anyhow::bail; use backon::{ExponentialBuilder, Retryable}; use daedalus::Branding; -use log::{error, info, warn}; +use tracing::{error, info, warn, instrument, Instrument}; use s3::creds::Credentials; use s3::{Bucket, Region}; use std::ffi::OsStr; -use std::sync::Arc; +use std::sync::{Arc, LazyLock}; use std::time::Duration; use tokio::sync::{Mutex, Semaphore}; use tracing_subscriber::layer::SubscriberExt; @@ -13,12 +12,15 @@ use tracing_subscriber::util::SubscriberInitExt; use tracing_subscriber::EnvFilter; mod fabric; +mod infrastructure; mod forge; +mod loaders; mod minecraft; mod neoforge; mod quilt; +mod services; -fn main() -> Result<(), anyhow::Error> { +fn main() -> Result<(), crate::infrastructure::error::Error> { #[cfg(feature = "sentry")] let _guard = sentry::init(( dotenvy::var("SENTRY_DSN").unwrap(), @@ -33,31 +35,112 @@ fn main() -> Result<(), anyhow::Error> { .build() .unwrap() .block_on(async { - let 
printer = tracing_subscriber::fmt::layer() - .with_target(true) - .with_ansi(true) - .pretty() - .with_thread_names(true); - - let filter = EnvFilter::builder(); + let use_json = dotenvy::var("LOG_FORMAT") + .map(|v| v == "json") + .unwrap_or(false); let filter = if std::env::var("RUST_LOG").is_ok() { - println!("loaded logger directives from `RUST_LOG` env"); - - filter.from_env().expect("logger directives are invalid") + println!("Loaded logger directives from RUST_LOG env"); + EnvFilter::from_env("RUST_LOG") } else { - filter - .parse("info") - .expect("default logger directives are invalid") + EnvFilter::new("daedalus_client=info") }; - tracing_subscriber::registry() - .with(printer) - .with(filter) - .init(); + let betterstack_token = dotenvy::var("BETTERSTACK_TOKEN").ok(); + let _betterstack_handle = if let Some(ref token) = betterstack_token { + let (betterstack_layer, handle) = services::betterstack::BetterstackLayer::new( + token.clone(), + None, + None, + ); + + if use_json { + let json_layer = tracing_subscriber::fmt::layer() + .json() + .with_target(true) + .with_thread_ids(true) + .with_thread_names(true) + .with_file(true) + .with_line_number(true); + + tracing_subscriber::registry() + .with(json_layer) + .with(betterstack_layer) + .with(filter) + .init(); + + info!( + version = env!("CARGO_PKG_VERSION"), + format = "json", + betterstack_enabled = true, + "Initialized JSON logging with Betterstack integration" + ); + } else { + let pretty_layer = tracing_subscriber::fmt::layer() + .with_target(true) + .with_ansi(true) + .pretty() + .with_thread_names(true); + + tracing_subscriber::registry() + .with(pretty_layer) + .with(betterstack_layer) + .with(filter) + .init(); + + info!( + version = env!("CARGO_PKG_VERSION"), + format = "pretty", + betterstack_enabled = true, + "Initialized pretty logging with Betterstack integration" + ); + } + + Some(handle) + } else { + if use_json { + let json_layer = tracing_subscriber::fmt::layer() + .json() + 
.with_target(true) + .with_thread_ids(true) + .with_thread_names(true) + .with_file(true) + .with_line_number(true); + + tracing_subscriber::registry() + .with(json_layer) + .with(filter) + .init(); + + info!( + version = env!("CARGO_PKG_VERSION"), + format = "json", + "Initialized JSON logging (production mode)" + ); + } else { + let pretty_layer = tracing_subscriber::fmt::layer() + .with_target(true) + .with_ansi(true) + .pretty() + .with_thread_names(true); + + tracing_subscriber::registry() + .with(pretty_layer) + .with(filter) + .init(); + + info!( + version = env!("CARGO_PKG_VERSION"), + format = "pretty", + "Initialized pretty logging (development mode)" + ); + } + + None + }; if check_env_vars() { - bail!("Some environment variables are missing!"); + return Err(crate::infrastructure::error::invalid_input("Some environment variables are missing!")); } Branding::set_branding(Branding::new( @@ -85,66 +168,294 @@ fn main() -> Result<(), anyhow::Error> { let mut is_first_run = true; loop { - info!("Waiting for next update timer"); - timer.tick().await; - - let mut uploaded_files = Vec::new(); - - let versions = match minecraft::retrieve_data( - &mut uploaded_files, - semaphore.clone(), - is_first_run, - ) - .await - { - Ok(res) => { - info!("Minecraft data retrieved"); - - Some(res) + let loop_span = tracing::info_span!("processing_cycle", is_first_run); + async { + info!("Waiting for next update timer"); + timer.tick().await; + + let upload_queue = services::upload::UploadQueue::new(); + let manifest_builder = services::cas::ManifestBuilder::new(); + + let versions = { + let span = tracing::info_span!("minecraft_processing"); + async { + match MINECRAFT_BREAKER.call(async { + minecraft::retrieve_data( + &upload_queue, + &manifest_builder, + semaphore.clone(), + is_first_run, + ) + .await + }) + .await + { + Ok(res) => { + info!(version_count = res.versions.len(), "Minecraft data retrieved"); + Some(res) + } + 
Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Open) => { + warn!("Minecraft circuit breaker is open, skipping"); + None + } + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Failed(err)) => { + error!(error = %err, "Minecraft processing failed"); + None + } + } + } + .instrument(span) + .await + }; + + if let Some(manifest) = versions { + if cfg!(feature = "fabric") { + let span = tracing::info_span!("fabric_processing"); + async { + match FABRIC_BREAKER.call(async { + fabric::retrieve_data( + &manifest, + &upload_queue, + &manifest_builder, + semaphore.clone(), + ) + .await + }) + .await + { + Ok(_) => info!("Fabric processing completed"), + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Open) => { + warn!("Fabric circuit breaker is open, skipping"); + } + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Failed(err)) => { + error!(error = %err, "Fabric processing failed"); + } + } + } + .instrument(span) + .await; + } + + if cfg!(feature = "forge") { + let span = tracing::info_span!("forge_processing"); + async { + match FORGE_BREAKER.call(async { + forge::retrieve_data( + &manifest, + &upload_queue, + &manifest_builder, + semaphore.clone(), + ) + .await + }) + .await + { + Ok(_) => info!("Forge processing completed"), + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Open) => { + warn!("Forge circuit breaker is open, skipping"); + } + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Failed(err)) => { + error!(error = %err, "Forge processing failed"); + } + } + } + .instrument(span) + .await; + } + + if cfg!(feature = "quilt") { + let span = tracing::info_span!("quilt_processing"); + async { + match QUILT_BREAKER.call(async { + quilt::retrieve_data( + &manifest, + &upload_queue, + &manifest_builder, + semaphore.clone(), + ) + .await + }) + .await + { + Ok(_) => info!("Quilt processing completed"), + 
Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Open) => { + warn!("Quilt circuit breaker is open, skipping"); + } + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Failed(err)) => { + error!(error = %err, "Quilt processing failed"); + } + } + } + .instrument(span) + .await; + } + + if cfg!(feature = "neoforge") { + let span = tracing::info_span!("neoforge_processing"); + async { + match NEOFORGE_BREAKER.call(async { + neoforge::retrieve_data( + &manifest, + &upload_queue, + &manifest_builder, + semaphore.clone(), + ) + .await + }) + .await + { + Ok(_) => info!("NeoForge processing completed"), + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Open) => { + warn!("NeoForge circuit breaker is open, skipping"); + } + Err(crate::infrastructure::circuit_breaker::CircuitBreakerError::Failed(err)) => { + error!(error = %err, "NeoForge processing failed"); + } + } + } + .instrument(span) + .await; + } + + info!(queued_count = upload_queue.len(), "Flushing CAS objects and path-based files"); + let flush_result = upload_queue.flush(&CLIENT, semaphore.clone()).await; + if let Err(e) = flush_result { + error!(error = %e, "Failed to flush upload queue - skipping manifest upload this cycle"); + } else { + info!("Upload queue flushed successfully"); + let timestamp = chrono::Utc::now().format("%Y-%m-%dT%H-%M-%SZ").to_string(); + let mut loader_references = std::collections::HashMap::new(); + let mut uploaded_manifest_urls = Vec::new(); + + let all_loaders = manifest_builder.get_loaders(); + info!(loader_count = all_loaders.len(), "Building loader manifests"); + + for loader in &all_loaders { + if let Some(loader_manifest) = manifest_builder.build_loader_manifest(loader) { + let manifest_path = format!("v{}/manifests/{}/{}.json", crate::services::cas::CAS_VERSION, loader, loader_manifest.timestamp); + + info!( + loader = %loader, + version_count = loader_manifest.versions.len(), + path = %manifest_path, + "Uploading loader 
manifest" + ); + + match serde_json::to_vec_pretty(&loader_manifest) { + Ok(manifest_bytes) => { + match upload_file_to_bucket( + manifest_path.clone(), + manifest_bytes, + Some("application/json".to_string()), + &tokio::sync::Mutex::new(Vec::new()), + semaphore.clone(), + ).await { + Ok(_) => { + info!(loader = %loader, "Loader manifest uploaded successfully"); + loader_references.insert( + loader.clone(), + services::cas::LoaderReference::new(loader, loader_manifest.timestamp.clone()) + ); + uploaded_manifest_urls.push(format!("{}/{}", dotenvy::var("BASE_URL").unwrap(), manifest_path)); + } + Err(e) => { + error!(loader = %loader, error = %e, "Failed to upload loader manifest"); + } + } + } + Err(e) => { + error!(loader = %loader, error = %e, "Failed to serialize loader manifest"); + } + } + } + } + + if !loader_references.is_empty() { + let root_manifest = services::cas::RootManifest::new(loader_references); + let root_path = format!("v{}/manifest.json", crate::services::cas::CAS_VERSION); + + info!("Uploading root manifest (atomic commit point)"); + + match serde_json::to_vec_pretty(&root_manifest) { + Ok(root_bytes) => { + match upload_file_to_bucket( + root_path.clone(), + root_bytes.clone(), + Some("application/json".to_string()), + &tokio::sync::Mutex::new(Vec::new()), + semaphore.clone(), + ).await { + Ok(_) => { + info!("Root manifest uploaded successfully - all changes are now live"); + uploaded_manifest_urls.push(format!("{}/{}", dotenvy::var("BASE_URL").unwrap(), root_path)); + } + Err(e) => { + error!(error = %e, "Failed to upload root manifest - changes NOT committed"); + } + } + + let backup_path = format!("v{}/history/manifest-{}.json", crate::services::cas::CAS_VERSION, timestamp); + info!(backup_path = %backup_path, "Creating backup of root manifest"); + + match upload_file_to_bucket( + backup_path, + root_bytes, + Some("application/json".to_string()), + &tokio::sync::Mutex::new(Vec::new()), + semaphore.clone(), + ).await { + Ok(_) => 
info!("Backup created successfully"), + Err(e) => warn!(error = %e, "Failed to create backup (non-fatal)"), + } + } + Err(e) => { + error!(error = %e, "Failed to serialize root manifest"); + } + } + + info!("Processing cycle completed successfully"); + + if !uploaded_manifest_urls.is_empty() { + let cloudflare_enabled = dotenvy::var("CLOUDFLARE_INTEGRATION") + .map(|v| v == "true") + .unwrap_or(false); + + if cloudflare_enabled { + match ( + dotenvy::var("CLOUDFLARE_TOKEN"), + dotenvy::var("CLOUDFLARE_ZONE_ID"), + ) { + (Ok(token), Ok(zone_id)) => { + match services::cloudflare::purge_cloudflare_cache(&token, &zone_id, &uploaded_manifest_urls).await { + Ok(_) => { + info!("Cloudflare cache purge successful"); + } + Err(e) => { + warn!(error = %e, "Cloudflare cache purge failed, but continuing"); + } + } + } + _ => { + warn!( + "CLOUDFLARE_INTEGRATION is enabled but CLOUDFLARE_TOKEN or \ + CLOUDFLARE_ZONE_ID is missing" + ); + } + } + } else { + info!("Cloudflare cache purging disabled (set CLOUDFLARE_INTEGRATION=true to enable)"); + } + } + } else { + warn!("No loader manifests were built - skipping root manifest upload"); + } } - Err(err) => { - error!("MC Error: {:?}", err); - - None } - }; - if let Some(manifest) = versions { - if cfg!(feature = "fabric") { - fabric::retrieve_data( - &manifest, - &mut uploaded_files, - semaphore.clone(), - ) - .await?; - } - if cfg!(feature = "forge") { - forge::retrieve_data( - &manifest, - &mut uploaded_files, - semaphore.clone(), - ) - .await?; - } - if cfg!(feature = "quilt") { - quilt::retrieve_data( - &manifest, - &mut uploaded_files, - semaphore.clone(), - ) - .await?; - } - if cfg!(feature = "neoforge") { - neoforge::retrieve_data( - &manifest, - &mut uploaded_files, - semaphore.clone(), - ) - .await?; - } + is_first_run = false; } - - is_first_run = false; + .instrument(loop_span) + .await; } }) } @@ -183,43 +494,64 @@ fn check_env_vars() -> bool { failed } -lazy_static::lazy_static! 
{ - static ref CLIENT : Bucket = { - let bucket = Bucket::new( - &dotenvy::var("S3_BUCKET_NAME").unwrap(), - if &*dotenvy::var("S3_REGION").unwrap() == "r2" { - Region::R2 { - account_id: dotenvy::var("S3_URL").unwrap(), - } - } else { - Region::Custom { - region: dotenvy::var("S3_REGION").unwrap(), - endpoint: dotenvy::var("S3_URL").unwrap(), - } - }, - Credentials::new( - Some(&*dotenvy::var("S3_ACCESS_TOKEN").unwrap()), - Some(&*dotenvy::var("S3_SECRET").unwrap()), - None, - None, - None, - ).unwrap(), - ).unwrap(); - - bucket.with_path_style() - }; -} +static CLIENT: LazyLock = LazyLock::new(|| { + let bucket = Bucket::new( + &dotenvy::var("S3_BUCKET_NAME").unwrap(), + if &*dotenvy::var("S3_REGION").unwrap() == "r2" { + Region::R2 { + account_id: dotenvy::var("S3_URL").unwrap(), + } + } else { + Region::Custom { + region: dotenvy::var("S3_REGION").unwrap(), + endpoint: dotenvy::var("S3_URL").unwrap(), + } + }, + Credentials::new( + Some(&*dotenvy::var("S3_ACCESS_TOKEN").unwrap()), + Some(&*dotenvy::var("S3_SECRET").unwrap()), + None, + None, + None, + ) + .unwrap(), + ) + .unwrap(); + + bucket.with_path_style() +}); + +static MINECRAFT_BREAKER: LazyLock = LazyLock::new(|| { + crate::infrastructure::circuit_breaker::CircuitBreaker::new("minecraft", 5, Duration::from_secs(300)) +}); + +static FORGE_BREAKER: LazyLock = LazyLock::new(|| { + crate::infrastructure::circuit_breaker::CircuitBreaker::new("forge", 5, Duration::from_secs(300)) +}); + +static FABRIC_BREAKER: LazyLock = LazyLock::new(|| { + crate::infrastructure::circuit_breaker::CircuitBreaker::new("fabric", 5, Duration::from_secs(300)) +}); +static QUILT_BREAKER: LazyLock = LazyLock::new(|| { + crate::infrastructure::circuit_breaker::CircuitBreaker::new("quilt", 5, Duration::from_secs(300)) +}); + +static NEOFORGE_BREAKER: LazyLock = LazyLock::new(|| { + crate::infrastructure::circuit_breaker::CircuitBreaker::new("neoforge", 5, Duration::from_secs(300)) +}); + +#[instrument(skip(bytes, uploaded_files, 
semaphore), fields(size = bytes.len()))] pub async fn upload_file_to_bucket( path: String, bytes: Vec, content_type: Option, uploaded_files: &tokio::sync::Mutex>, semaphore: Arc, -) -> Result<(), anyhow::Error> { +) -> Result<(), crate::infrastructure::error::Error> { let _permit = semaphore.acquire().await?; - info!("{} started uploading", path); + info!(path = %path, "Started uploading"); (|| async { let key = path.clone(); @@ -232,23 +564,23 @@ pub async fn upload_file_to_bucket( CLIENT.put_object(key.clone(), &bytes).await } .map_err(|err| { - error!("{} failed to upload: {:?}", path, err); - err + error!(path = %path, error = %err, "Failed to upload"); + crate::infrastructure::error::s3_error(err, path.clone()) }); match result { Ok(_) => { { - info!("{} done uploading", path); + info!(path = %path, "Upload completed"); let mut uploaded_files = uploaded_files.lock().await; uploaded_files.push(key); } - return Ok(()); + Ok(()) } Err(err) => { - error!("{} failed to upload: {:?}", path, err); - return Err(err.into()); + error!(path = %path, error = %err, "Upload failed"); + Err(err) } } }) @@ -261,62 +593,46 @@ pub async fn upload_file_to_bucket( } pub fn format_url(path: &str) -> String { - info!("{}/{}", &*dotenvy::var("BASE_URL").unwrap(), path); - format!("{}/{}", &*dotenvy::var("BASE_URL").unwrap(), path) + let base_url = &*dotenvy::var("BASE_URL").unwrap(); + let full_url = format!("{}/{}", base_url, path); + info!(path = %path, url = %full_url, "Formatted URL"); + full_url } -pub async fn download_file( - url: &str, - sha1: Option<&str>, - semaphore: Arc, -) -> Result { - let _permit = semaphore.acquire().await?; - info!("{} started downloading", url); - let val = daedalus::download_file(url, sha1).await?; - info!("{} finished downloading", url); - Ok(val) -} - -pub async fn download_file_mirrors( - base: &str, - mirrors: &[&str], - sha1: Option<&str>, - semaphore: Arc, -) -> Result { - let _permit = semaphore.acquire().await?; - info!("{} started 
downloading", base); - let val = daedalus::download_file_mirrors(base, mirrors, sha1).await?; - info!("{} finished downloading", base); - - Ok(val) -} +pub use services::download::{download_file, download_file_mirrors}; +#[instrument(skip(uploaded_files, semaphore))] pub async fn upload_static_files( uploaded_files: &tokio::sync::Mutex>, semaphore: Arc, -) -> Result<(), anyhow::Error> { +) -> Result<(), crate::infrastructure::error::Error> { use path_slash::PathExt as _; let cdn_upload_dir = dotenvy::var("CDN_UPLOAD_DIR").unwrap_or("./upload_cdn".to_string()); - info!("uploading static files from {}", cdn_upload_dir); + info!(dir = %cdn_upload_dir, "Uploading static files"); if !std::path::Path::new(&cdn_upload_dir).exists() { panic!("CDN_UPLOAD_DIR does not exist"); } for entry in walkdir::WalkDir::new(&cdn_upload_dir) { - let entry = entry?; + let entry = entry.map_err(|e| { + crate::infrastructure::error::ErrorKind::Io(std::io::Error::new( + std::io::ErrorKind::Other, + format!("Failed to walk directory: {}", e), + )) + })?; if entry.path().is_file() { let upload_path = entry.path() .strip_prefix(&cdn_upload_dir) .expect("Unwrap to be safe because we are striping the prefix to the directory walked") .to_slash() .ok_or_else(|| { - anyhow::anyhow!( + crate::infrastructure::error::invalid_input(format!( "Failed to convert path to utf8 string {}", entry.path().display() - ) + )) })?; if upload_path.ends_with(".DS_Store") { @@ -324,9 +640,9 @@ pub async fn upload_static_files( } info!( - "uploading {} to cdn at path {}", - entry.path().display(), - upload_path + file = %entry.path().display(), + cdn_path = %upload_path, + "Uploading static file to CDN" ); let content_type = diff --git a/daedalus_client/src/minecraft.rs b/daedalus_client/src/minecraft.rs index be6e671..ca1dd0d 100644 --- a/daedalus_client/src/minecraft.rs +++ b/daedalus_client/src/minecraft.rs @@ -1,6 +1,7 @@ use crate::download_file; -use crate::{format_url, upload_file_to_bucket}; -use anyhow::bail; 
+use crate::format_url; +use crate::services::upload::UploadQueue; +use dashmap::DashSet; use daedalus::minecraft::{ merge_partial_library, Dependency, DependencyRule, JavaVersion, LWJGLEntry, Library, LibraryDownload, LibraryDownloads, LibraryGroup, @@ -8,7 +9,7 @@ use daedalus::minecraft::{ VersionManifest, VersionType, }; use daedalus::{get_hash, GradleSpecifier}; -use log::{debug, error, info, warn}; +use tracing::{debug, error, info, warn}; use serde::Deserialize; use std::collections::{BTreeMap, HashMap, HashSet}; use std::convert::TryFrom; @@ -64,7 +65,7 @@ fn patch_library( fn process_single_lwjgl_variant( variant: &LibraryGroup, patches: &Vec, -) -> Result, anyhow::Error> { +) -> Result, crate::infrastructure::error::Error> { let lwjgl_version = variant.version.clone(); info!("Processing LWJGL variant {}", lwjgl_version); @@ -102,8 +103,6 @@ fn process_single_lwjgl_variant( rule: None, }]); - // remove jutils and jinput from LWJGL 3 - // this is a dependency that Mojang kept in, but doesn't belong there anymore let unneeded: HashSet<&str> = vec!["jutils", "jinput"].into_iter().collect(); let filtered_libs = lwjgl @@ -119,7 +118,7 @@ fn process_single_lwjgl_variant( lwjgl_version ) } else { - bail!("Unknown LWJGL version {}", lwjgl_version); + return Err(crate::infrastructure::error::invalid_input(format!("Unknown LWJGL version {}", lwjgl_version))); }; let mut good = true; @@ -172,16 +171,14 @@ fn process_single_lwjgl_variant( /// Patch CVE-2021-44228, CVE-2021-44832, CVE-2021-45046 fn map_log4j_artifact( version: &str, -) -> Result, anyhow::Error> { +) -> Result, crate::infrastructure::error::Error> { debug!("log4j version: {}", version); let x = lenient_semver::parse(version); if x <= lenient_semver::parse("2.0") { - // all versions below 2.0 (including beta9 and rc2) use a patch from cdn debug!("log4j use beta9 patch"); return Ok(Some(("2.0-beta9-fixed".to_string(), format_url("maven/")))); } - if x <= lenient_semver::parse("2.17.1") { - // 
CVE-2021-44832 fixed in 2.17.1 + if x < lenient_semver::parse("2.17.1") { debug!("bump log4j to 2.17.1"); return Ok(Some(( "2.17.1".to_string(), @@ -193,13 +190,14 @@ fn map_log4j_artifact( } pub async fn retrieve_data( - uploaded_files: &mut Vec, + upload_queue: &UploadQueue, + manifest_builder: &crate::services::cas::ManifestBuilder, semaphore: Arc, is_first_run: bool, -) -> Result { +) -> Result { - log::info!("Retrieving Minecraft data ... IS_FIRST_TIME: {}", is_first_run); + info!(is_first_run = is_first_run, "Retrieving Minecraft data"); // TODO: Old manifest doesn't take LWJGL meta into account let old_manifest = if is_first_run { @@ -225,10 +223,7 @@ pub async fn retrieve_data( let lwjgl_config = get_lwjgl_config().await?; - let visited_assets_mutex = Arc::new(Mutex::new(Vec::new())); - let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new())); - - // collections of seen lwjgl versions + let visited_assets = Arc::new(DashSet::new()); let lwjgl_version_variants_mutex: Arc< Mutex>>, @@ -305,9 +300,8 @@ pub async fn retrieve_data( } } - let visited_assets_mutex = Arc::clone(&visited_assets_mutex); + let visited_assets = Arc::clone(&visited_assets); let cloned_manifest_mutex = Arc::clone(&cloned_manifest); - let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex); let semaphore = Arc::clone(&semaphore); let patches = Arc::clone(&cloned_patches); @@ -317,8 +311,6 @@ pub async fn retrieve_data( old_version.and_then(|x| x.assets_index_sha1.clone()); async move { - let mut upload_futures = Vec::new(); - let mut version_info = daedalus::minecraft::fetch_version_info(version).await?; @@ -327,7 +319,7 @@ pub async fn retrieve_data( } fn version_has_split_natives(ver: &VersionInfo) -> bool { - ver.libraries.iter().any(|lib| lib_is_split_natives(lib)) + ver.libraries.iter().any(lib_is_split_natives) } fn is_macos_only(rules: &Option>) -> bool { @@ -383,7 +375,6 @@ pub async fn retrieve_data( info!("Candidate library {} is only for macOS and is therefore ignored", 
spec); continue; } - // NOTE: Prism does macos only lib exclusion here if spec.package == "org.lwjgl.lwjgl" && spec.artifact == "lwjgl" { Some(spec.version.clone()) } else if spec.package == "org.lwjgl" && spec.artifact == "lwjgl" { @@ -441,7 +432,7 @@ pub async fn retrieve_data( Ok(("677991ea2d7426f76309a73739cecf609679492c", 677588)) } _ => { - Err(anyhow::anyhow!("Unhandled log4j artifact {} for overridden version {}", spec.artifact, version_override)) + Err(crate::infrastructure::error::invalid_input(format!("Unhandled log4j artifact {} for overridden version {}", spec.artifact, version_override))) } } } @@ -457,12 +448,12 @@ pub async fn retrieve_data( Ok(("ca499d751f4ddd8afb016ef698c30be0da1d09f7", 21268)) } _ => { - Err(anyhow::anyhow!("Unhandled log4j artifact {} for overridden version {}", spec.artifact, version_override)) + Err(crate::infrastructure::error::invalid_input(format!("Unhandled log4j artifact {} for overridden version {}", spec.artifact, version_override))) } } } _ => { - Err(anyhow::anyhow!("Unhandled log4j version {}", version_override)) + Err(crate::infrastructure::error::invalid_input(format!("Unhandled log4j version {}", version_override))) } }?; let artifact = LibraryDownload { @@ -514,7 +505,6 @@ pub async fn retrieve_data( add_lwjgl_version(lwjgl_version_variants_mutex.clone(), lwjgl).await; info!("Found candidate LWJGL {:?} {:?}", lwjgl.version, key); } - // remove the common bucket lwjgl_buckets.remove(&None); } @@ -528,13 +518,13 @@ pub async fn retrieve_data( } } else { let bad_versions: HashSet<&str> = vec!["3.1.6", "3.2.1"].into_iter().collect(); - let our_versions: HashSet<&str> = lwjgl_buckets.values().into_iter().map(|lwjgl| lwjgl.version.as_str()).collect(); + let our_versions: HashSet<&str> = lwjgl_buckets.values().map(|lwjgl| lwjgl.version.as_str()).collect(); if our_versions == bad_versions { info!("Found broken 3.1.6/3.2.1 LWJGL combo in version {} , forcing LWJGL. 
3.2.1", &version_info.id); Ok("3.2.1".to_string()) } else { - Err(anyhow::anyhow!("Can not determine a single suggested LWJGL version in version {} from among {:?}", &version_info.id, our_versions)) + Err(crate::infrastructure::error::invalid_input(format!("Can not determine a single suggested LWJGL version in version {} from among {:?}", &version_info.id, our_versions))) } }?; @@ -662,22 +652,13 @@ pub async fn retrieve_data( let mut download_assets = false; - { - let mut visited_assets = visited_assets_mutex.lock().await; - - if !visited_assets.contains(&version_info.asset_index.id) { - if let Some(assets_hash) = assets_hash { - if version_info.asset_index.sha1 != assets_hash { - download_assets = true; - } - } else { + if visited_assets.insert(version_info.asset_index.id.clone()) { + if let Some(assets_hash) = assets_hash { + if version_info.asset_index.sha1 != assets_hash { download_assets = true; } - } - - if download_assets { - visited_assets - .push(version_info.asset_index.id.clone()); + } else { + download_assets = true; } } @@ -689,62 +670,79 @@ pub async fn retrieve_data( ) .await?; - { - upload_futures.push(upload_file_to_bucket( - assets_path, - assets_index.to_vec(), - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - )); - } - } - - { - upload_futures.push(upload_file_to_bucket( - version_path, - serde_json::to_vec(&version_info)?, + let asset_bytes = assets_index.to_vec(); + let asset_hash = upload_queue.enqueue( + asset_bytes.clone(), Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - )); + ); + + let base_url = dotenvy::var("BASE_URL").unwrap(); + version_info.asset_index.url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &asset_hash[..2], + &asset_hash[2..] 
+ ); } - futures::future::try_join_all(upload_futures).await?; + let version_bytes = serde_json::to_vec(&version_info)?; + let version_hash = upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), + ); - Ok::<(), anyhow::Error>(()) + manifest_builder.add_version( + "minecraft", + version_info.id.clone(), + version_hash, + version_bytes.len() as u64, + ); + + Ok::<(), crate::infrastructure::error::Error>(()) } .await?; - Ok::<(), anyhow::Error>(()) + Ok::<(), crate::infrastructure::error::Error>(()) }) } { let mut versions = version_futures.into_iter().peekable(); let mut chunk_index = 0; + let mut successful = 0; + let mut failed = 0; + while versions.peek().is_some() { let now = Instant::now(); let chunk: Vec<_> = versions.by_ref().take(100).collect(); - futures::future::try_join_all(chunk).await?; + + for future in chunk { + match future.await { + Ok(_) => { + successful += 1; + } + Err(e) => { + warn!("⚠️ Minecraft - Failed to process version: {}", e); + failed += 1; + } + } + } chunk_index += 1; let elapsed = now.elapsed(); - info!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed); + info!("Chunk {} Elapsed: {:.2?} (✓ {} ✗ {})", chunk_index, elapsed, successful, failed); } + + info!("📊 Minecraft - Processing complete: {} successful, {} failed", successful, failed); } //futures::future::try_join_all(version_futures).await?; { - debug!("waiting for lock on lwjgl version mutex"); let lwjgl_version_variants = lwjgl_version_variants_mutex.lock().await; - // info!( - // "Processing LWJGL variants ... 
{:#?}", - // lwjgl_version_variants - // ); info!("Processing LWJGL variants"); for (lwjgl_version_variant, lwjgl_variant_entries) in lwjgl_version_variants.iter() @@ -781,7 +779,6 @@ pub async fn retrieve_data( continue; } - // print natives data to decide which variant to use let natives = variant .group .libraries @@ -823,8 +820,6 @@ pub async fn retrieve_data( unknown_variants += 1; } - let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex); - let semaphore = Arc::clone(&semaphore); let patches = Arc::clone(&cloned_patches); async move { @@ -834,16 +829,28 @@ pub async fn retrieve_data( && unknown_variants == 0 { if let Some((lwjgl_path, lwjgl)) = process_single_lwjgl_variant(&decided_variant.expect("Unwrap to be safe inside is_some").group, &patches)? { - { - debug!("Uploading {}", lwjgl_path); - upload_file_to_bucket( - lwjgl_path, - serde_json::to_vec(&lwjgl)?, - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - ).await?; - } + debug!("Uploading {}", lwjgl_path); + + let lwjgl_bytes = serde_json::to_vec(&lwjgl)?; + let lwjgl_hash = upload_queue.enqueue( + lwjgl_bytes.clone(), + Some("application/json".to_string()), + ); + + let loader = if lwjgl.version.starts_with("2") { + "minecraft-lwjgl2" + } else if lwjgl.version.starts_with("3") { + "minecraft-lwjgl3" + } else { + return Err(crate::infrastructure::error::invalid_input(format!("Unknown LWJGL version {}", lwjgl.version))); + }; + + manifest_builder.add_version( + loader, + lwjgl.version.clone(), + lwjgl_hash, + lwjgl_bytes.len() as u64, + ); } else { info!("Skipped LWJGL {}", &decided_variant.expect("Unwrap to be safe inside is_some").group.version); @@ -860,36 +867,19 @@ pub async fn retrieve_data( error!("No variant decided for version {} of out {} possible and {} unknown", lwjgl_version_variant, accepted_variants, unknown_variants); } - Ok::<(), anyhow::Error>(()) + Ok::<(), crate::infrastructure::error::Error>(()) } .await? 
} } - upload_file_to_bucket( - format!( - "minecraft/v{}/manifest.json", - daedalus::minecraft::CURRENT_FORMAT_VERSION - ), - serde_json::to_vec(&*cloned_manifest.lock().await)?, - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore, - ) - .await?; - - if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) { - uploaded_files.extend(uploaded_files_mutex.into_inner()); - } - let elapsed = now.elapsed(); info!("Elapsed: {:.2?}", elapsed); Ok(Arc::try_unwrap(cloned_manifest) .map_err(|err| { - anyhow::anyhow!( - "Failed to unwrap Arc>: {:?}", - err + crate::infrastructure::error::invalid_input( + format!("Failed to unwrap Arc>: {:?}", err) ) })? .into_inner()) @@ -910,7 +900,7 @@ struct LibraryPatch { } /// Fetches the list of library patches -async fn get_library_patches() -> Result, anyhow::Error> { +async fn get_library_patches() -> Result, crate::infrastructure::error::Error> { let patches = include_bytes!("../patched-library-patches.json"); let unprocessed_patches: Vec = serde_json::from_slice(patches)?; @@ -919,7 +909,7 @@ async fn get_library_patches() -> Result, anyhow::Error> { fn pre_process_patch(patch: &LibraryPatch) -> LibraryPatch { fn patch_url(url: &mut String) { - *url = url.replace("${BASE_URL}", &*dotenvy::var("BASE_URL").unwrap()); + *url = url.replace("${BASE_URL}", &dotenvy::var("BASE_URL").unwrap()); } fn patch_downloads(downloads: &mut LibraryDownloads) { @@ -974,7 +964,79 @@ pub struct LWJGLVariantConfig { } /// Fetches -async fn get_lwjgl_config() -> Result { +async fn get_lwjgl_config() -> Result { let config = include_bytes!("../lwjgl-config.json"); Ok(serde_json::from_slice(config)?) 
} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_lenient_semver_comparison() { + // Test basic version comparisons + assert!(lenient_semver::parse("1.0.0") < lenient_semver::parse("2.0.0")); + assert!(lenient_semver::parse("2.0.0") > lenient_semver::parse("1.0.0")); + assert!(lenient_semver::parse("2.0.0") == lenient_semver::parse("2.0.0")); + + // Test beta/pre-release versions (critical for Log4j patching) + assert!(lenient_semver::parse("2.0-beta9") <= lenient_semver::parse("2.0")); + assert!(lenient_semver::parse("2.0-beta9") < lenient_semver::parse("2.1.0")); + assert!(lenient_semver::parse("2.0-rc2") <= lenient_semver::parse("2.0")); + + // Test Log4j security threshold (CVE-2021-44832 fixed in 2.17.1) + assert!(lenient_semver::parse("2.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.15.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.16.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.17.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.17.1") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.18.0") > lenient_semver::parse("2.17.1")); + + // Test actual Log4j versions that have been patched + assert!(lenient_semver::parse("2.0-beta9") <= lenient_semver::parse("2.0")); + assert!(lenient_semver::parse("2.12.1") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.14.1") <= lenient_semver::parse("2.17.1")); + } + + #[test] + fn test_log4j_artifact_mapping() { + // Test versions below 2.0 (should use beta9 patch) + let result = map_log4j_artifact("1.2.17").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.0-beta9-fixed"); + + let result = map_log4j_artifact("2.0-beta9").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.0-beta9-fixed"); + + // Test versions between 2.0 and 2.17.1 (should 
bump to 2.17.1) + let result = map_log4j_artifact("2.12.1").unwrap(); + assert!(result.is_some()); + let (version, url) = result.unwrap(); + assert_eq!(version, "2.17.1"); + assert_eq!(url, "https://repo1.maven.org/maven2/"); + + let result = map_log4j_artifact("2.15.0").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.17.1"); + + let result = map_log4j_artifact("2.17.0").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.17.1"); + + // Test versions at or above 2.17.1 (no patching needed) + let result = map_log4j_artifact("2.17.1").unwrap(); + assert!(result.is_none()); + + let result = map_log4j_artifact("2.18.0").unwrap(); + assert!(result.is_none()); + + let result = map_log4j_artifact("2.19.0").unwrap(); + assert!(result.is_none()); + } +} diff --git a/daedalus_client/src/neoforge.rs b/daedalus_client/src/neoforge.rs index 00c4664..d598b9c 100644 --- a/daedalus_client/src/neoforge.rs +++ b/daedalus_client/src/neoforge.rs @@ -1,27 +1,52 @@ -use crate::{download_file, format_url, upload_file_to_bucket}; +use crate::{download_file, format_url}; +use crate::services::upload::UploadQueue; +use dashmap::DashSet; use daedalus::minecraft::{Library, VersionManifest}; use daedalus::modded::{ - LoaderVersion, Manifest, PartialVersionInfo, Processor, SidedDataEntry, + LoaderVersion, PartialVersionInfo, Processor, SidedDataEntry, }; -use log::info; +use tracing::{info, warn}; use semver::Version; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::convert::TryInto; use std::io::Read; -use std::sync::Arc; +use std::sync::{Arc, LazyLock}; use std::time::Instant; use tokio::sync::{Mutex, Semaphore}; +/// Skip list for known broken NeoForge/Forge versions +/// These versions have permanent issues (missing files, corrupted archives, etc.) 
+static NEOFORGE_SKIP_LIST: LazyLock> = LazyLock::new(|| { + vec![ + // Add known broken versions here as they're discovered + // Example: "21.3.0-beta", // Missing universal JAR + ] + .into_iter() + .collect() +}); + pub async fn retrieve_data( minecraft_versions: &VersionManifest, - uploaded_files: &mut Vec, + upload_queue: &UploadQueue, + manifest_builder: &crate::services::cas::ManifestBuilder, semaphore: Arc, -) -> Result<(), anyhow::Error> { +) -> Result<(), crate::infrastructure::error::Error> { + // Check if force reprocess is enabled + let force_reprocess = std::env::var("FORCE_REPROCESS") + .map(|v| v == "true" || v == "1") + .unwrap_or(false); + + if force_reprocess { + info!("🔄 NeoForge - FORCE_REPROCESS enabled, processing all versions"); + } else { + info!("📋 NeoForge - Incremental mode, skipping existing versions"); + } + let maven_metadata = fetch_maven_metadata(semaphore.clone()).await?; let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!( "neoforge/v{}/manifest.json", - daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION, + crate::services::cas::CAS_VERSION, ))) .await .ok(); @@ -33,17 +58,43 @@ pub async fn retrieve_data( Vec::new() })); + // Build a set of existing version IDs for fast lookup (minecraft_version/loader_version) + let existing_versions: HashSet = if force_reprocess { + HashSet::new() + } else { + let old_versions_guard = old_versions.lock().await; + old_versions_guard + .iter() + .flat_map(|mc_version| { + mc_version.loaders.iter().map(move |loader| { + format!("{}/{}", mc_version.id, loader.id) + }) + }) + .collect() + }; + let versions = Arc::new(Mutex::new(Vec::new())); - let visited_assets_mutex = Arc::new(Mutex::new(Vec::new())); - let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new())); + let visited_assets = Arc::new(DashSet::new()); let mut version_futures = Vec::new(); + let mut total_versions = 0; + let mut skipped_existing = 0; + for (minecraft_version, loader_versions) in maven_metadata.clone() { 
let mut loaders = Vec::new(); for (loader_version, new_forge) in loader_versions { + total_versions += 1; + + // Skip if version already exists (incremental mode) + let version_key = format!("{}/{}", minecraft_version, loader_version); + if existing_versions.contains(&version_key) { + skipped_existing += 1; + continue; + } + let version = Version::parse(&loader_version)?; loaders.push((loader_version, version, new_forge.to_string())) @@ -56,19 +107,24 @@ pub async fn retrieve_data( { let loaders_futures = loaders.into_iter().map(|(loader_version_full, _, new_forge)| async { let versions_mutex = Arc::clone(&old_versions); - let visited_assets = Arc::clone(&visited_assets_mutex); - let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex); + let visited_assets = Arc::clone(&visited_assets); let semaphore = Arc::clone(&semaphore); let minecraft_version = minecraft_version.clone(); async move { + // Check skip list first + if NEOFORGE_SKIP_LIST.contains(loader_version_full.as_str()) { + info!("⏭️ NeoForge - Skipping excluded version: {}", loader_version_full); + return Ok::, crate::infrastructure::error::Error>(None); + } + { let versions = versions_mutex.lock().await; let version = versions.iter().find(|x| x.id == minecraft_version).and_then(|x| x.loaders.iter().find(|x| x.id == loader_version_full)); if let Some(version) = version { - return Ok::, anyhow::Error>(Some(version.clone())); + return Ok::, crate::infrastructure::error::Error>(Some(version.clone())); } } @@ -87,7 +143,7 @@ pub async fn retrieve_data( let mut contents = String::new(); install_profile.read_to_string(&mut contents)?; - Ok::(serde_json::from_str::(&contents)?) + Ok::(serde_json::from_str::(&contents)?) }).await??; let mut archive_clone = archive.clone(); @@ -97,7 +153,7 @@ pub async fn retrieve_data( let mut contents = String::new(); install_profile.read_to_string(&mut contents)?; - Ok::(serde_json::from_str::(&contents)?) + Ok::(serde_json::from_str::(&contents)?) 
}).await??; @@ -125,7 +181,7 @@ pub async fn retrieve_data( let mut lib_bytes = Vec::new(); lib_file.read_to_end(&mut lib_bytes)?; - Ok::(bytes::Bytes::from(lib_bytes)) + Ok::(bytes::Bytes::from(lib_bytes)) }).await??; local_libs.insert(lib.name.to_string(), lib_bytes); @@ -146,7 +202,7 @@ pub async fn retrieve_data( let mut lib_bytes = Vec::new(); lib_file.read_to_end(&mut lib_bytes)?; - Ok::(bytes::Bytes::from(lib_bytes)) + Ok::(bytes::Bytes::from(lib_bytes)) }).await??; let split = $value.split('/').last(); @@ -193,22 +249,18 @@ pub async fn retrieve_data( let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| async { let artifact_path = &lib.name.path(); - { - let mut visited_assets = visited_assets.lock().await; - - if visited_assets.contains(&lib.name) { - if let Some(ref mut downloads) = lib.downloads { - if let Some(ref mut artifact) = downloads.artifact { - artifact.url = Some(format_url(&format!("maven/{}", artifact_path))); - } - } else if lib.url.is_some() { - lib.url = Some(format_url("maven/")); + // Check if we've already processed this artifact (lock-free) + if !visited_assets.insert(lib.name.clone()) { + // Already processed, skip download + if let Some(ref mut downloads) = lib.downloads { + if let Some(ref mut artifact) = downloads.artifact { + artifact.url = Some(format_url(&format!("maven/{}", artifact_path))); } - - return Ok::(lib); - } else { - visited_assets.push(lib.name.clone()) + } else if lib.url.is_some() { + lib.url = Some(format_url("maven/")); } + + return Ok::(lib); } let artifact_bytes = if let Some(ref mut downloads) = lib.downloads { @@ -250,16 +302,14 @@ pub async fn retrieve_data( } else { None }; if let Some(bytes) = artifact_bytes { - upload_file_to_bucket( + upload_queue.enqueue_path( format!("{}/{}", "maven", artifact_path), bytes.to_vec(), Some("application/java-archive".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - ).await?; + ); } - Ok::(lib) + Ok::(lib) })).await?; let elapsed = 
now.elapsed(); @@ -280,23 +330,33 @@ pub async fn retrieve_data( logging: None }; - let version_path = format!( - "neoforge/v{}/versions/{}.json", - daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION, - loader_version_full + // Upload version to CAS and track in manifest builder + let version_bytes = serde_json::to_vec(&new_profile)?; + let version_hash = upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), ); - upload_file_to_bucket( - version_path.clone(), - serde_json::to_vec(&new_profile)?, - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore.clone(), - ).await?; + manifest_builder.add_version( + "neoforge", + loader_version_full.to_string(), + version_hash.clone(), + version_bytes.len() as u64, + ); + + // Build CAS URL for LoaderVersion + let base_url = dotenvy::var("BASE_URL").unwrap(); + let cas_url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &version_hash[..2], + &version_hash[2..] 
+ ); return Ok(Some(LoaderVersion { id: loader_version_full, - url: format_url(&version_path), + url: cas_url, stable: false })); } @@ -309,17 +369,40 @@ pub async fn retrieve_data( let len = loaders_futures.len(); let mut versions = loaders_futures.into_iter().peekable(); let mut chunk_index = 0; + let mut successful = 0; + let mut failed = 0; + while versions.peek().is_some() { let now = Instant::now(); let chunk: Vec<_> = versions.by_ref().take(1).collect(); - let res = futures::future::try_join_all(chunk).await?; - loaders_versions.extend(res.into_iter().flatten()); + + // Process each version and handle errors individually + for future in chunk { + match future.await { + Ok(result) => { + if let Some(version) = result { + loaders_versions.push(version); + successful += 1; + } + } + Err(e) => { + warn!("⚠️ NeoForge - Failed to process version: {}", e); + failed += 1; + // Continue processing other versions + } + } + } chunk_index += 1; let elapsed = now.elapsed(); - info!("Loader Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed); + info!("Loader Chunk {}/{len} Elapsed: {:.2?} ({} succeeded, {} failed)", + chunk_index, elapsed, successful, failed); + } + + if failed > 0 { + warn!("⚠️ NeoForge - Skipped {} versions due to errors, {} succeeded", failed, successful); } } } @@ -330,32 +413,91 @@ pub async fn retrieve_data( loaders: loaders_versions }); - Ok::<(), anyhow::Error>(()) + Ok::<(), crate::infrastructure::error::Error>(()) }); } } + info!("📊 NeoForge - Processing {} versions ({} skipped, {} to process)", + total_versions, skipped_existing, total_versions - skipped_existing); + { let len = version_futures.len(); let mut versions = version_futures.into_iter().peekable(); let mut chunk_index = 0; + let mut successful_mc_versions = 0; + let mut failed_mc_versions = 0; + while versions.peek().is_some() { let now = Instant::now(); let chunk: Vec<_> = versions.by_ref().take(1).collect(); - futures::future::try_join_all(chunk).await?; + + // Process each 
Minecraft version and handle errors individually + for future in chunk { + match future.await { + Ok(()) => { + successful_mc_versions += 1; + } + Err(e) => { + warn!("⚠️ NeoForge - Failed to process Minecraft version: {}", e); + failed_mc_versions += 1; + // Continue processing other Minecraft versions + } + } + } chunk_index += 1; let elapsed = now.elapsed(); - info!("Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed); + info!("Chunk {}/{len} Elapsed: {:.2?} ({} MC versions succeeded, {} failed)", + chunk_index, elapsed, successful_mc_versions, failed_mc_versions); + } + + if failed_mc_versions > 0 { + warn!("⚠️ NeoForge - {} Minecraft versions failed to process, {} succeeded", + failed_mc_versions, successful_mc_versions); } } if let Ok(versions) = Arc::try_unwrap(versions) { - let mut versions = versions.into_inner(); + let new_versions = versions.into_inner(); + + // Get old versions for merging + let old_manifest_versions = if let Ok(old_versions) = Arc::try_unwrap(old_versions) { + old_versions.into_inner() + } else { + Vec::new() + }; + + // Merge new versions with old ones: keep old versions + add/update new ones + let mut final_versions = old_manifest_versions; + + for new_version in new_versions { + // Find if this Minecraft version already exists + if let Some(existing) = final_versions.iter_mut().find(|v| v.id == new_version.id) { + // Merge loaders: keep old loaders + add/update new ones + for new_loader in new_version.loaders { + if let Some(existing_loader) = existing.loaders.iter_mut().find(|l| l.id == new_loader.id) { + // Update existing loader + let loader_id = new_loader.id.clone(); + *existing_loader = new_loader; + info!("✅ NeoForge - Updated loader: {}/{}", existing.id, loader_id); + } else { + // Add new loader + info!("✅ NeoForge - Added new loader: {}/{}", existing.id, new_loader.id); + existing.loaders.push(new_loader); + } + } + } else { + // Add new Minecraft version + info!("✅ NeoForge - Added new Minecraft version: {}", 
new_version.id); + final_versions.push(new_version); + } + } - versions.sort_by(|x, y| { + // Sort by Minecraft version order + final_versions.sort_by(|x, y| { minecraft_versions .versions .iter() @@ -370,7 +512,8 @@ pub async fn retrieve_data( ) }); - for version in &mut versions { + // Sort loaders within each version + for version in &mut final_versions { let loader_versions = maven_metadata.get(&version.id); if let Some(loader_versions) = loader_versions { version.loaders.sort_by(|x, y| { @@ -388,23 +531,13 @@ pub async fn retrieve_data( } } - upload_file_to_bucket( - format!( - "neoforge/v{}/manifest.json", - daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION, - ), - serde_json::to_vec(&Manifest { - game_versions: versions, - })?, - Some("application/json".to_string()), - uploaded_files_mutex.as_ref(), - semaphore, - ) - .await?; - } + // Note: Versions are now tracked in ManifestBuilder and uploaded separately + // in the main loop via manifest_builder.build_loader_manifest() - if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) { - uploaded_files.extend(uploaded_files_mutex.into_inner()); + info!( + "✅ NeoForge - Processed {} Minecraft versions", + final_versions.len() + ); } Ok(()) @@ -432,11 +565,11 @@ struct Versions { pub async fn fetch_maven_metadata( semaphore: Arc, -) -> Result>, anyhow::Error> { +) -> Result>, crate::infrastructure::error::Error> { async fn fetch_values( url: &str, semaphore: Arc, - ) -> Result { + ) -> Result { Ok(serde_xml_rs::from_str( &String::from_utf8( download_file(url, None, semaphore).await?.to_vec(), @@ -458,7 +591,7 @@ pub async fn fetch_maven_metadata( value.contains("-rc"); if is_snapshot { - log::info!("Skipping snapshot version: {}", value); + info!("Skipping snapshot version: {}", value); continue; } let original = value.clone(); @@ -477,10 +610,10 @@ pub async fn fetch_maven_metadata( value.contains("-rc"); if is_snapshot { - log::info!("Skipping snapshot version: {}", value); + info!("Skipping 
snapshot version: {}", value); continue; } - + let original = value.clone(); let mut parts = value.split('.'); diff --git a/daedalus_client/src/quilt.rs b/daedalus_client/src/quilt.rs index c27dca3..bf58254 100644 --- a/daedalus_client/src/quilt.rs +++ b/daedalus_client/src/quilt.rs @@ -1,389 +1,23 @@ -use crate::{download_file, format_url, upload_file_to_bucket}; -use daedalus::minecraft::{Library, VersionManifest}; -use daedalus::modded::{LoaderVersion, Manifest, PartialVersionInfo, Version}; -use daedalus::{Branding, BRANDING}; -use serde::{Deserialize, Serialize}; +use crate::loaders::quilt::{QuiltStrategy, QuiltVersions}; +use crate::loaders::LoaderProcessor; +use crate::services::upload::UploadQueue; +use daedalus::minecraft::VersionManifest; use std::sync::Arc; -use tokio::sync::{Mutex, RwLock, Semaphore}; +use tokio::sync::Semaphore; +/// Retrieve Quilt loader data using the strategy pattern +/// +/// This is now a thin wrapper around the generic LoaderProcessor. +/// All the common logic has been extracted to the strategy pattern, +/// eliminating hundreds of lines of duplicated code. 
pub async fn retrieve_data( minecraft_versions: &VersionManifest, - uploaded_files: &mut Vec, + upload_queue: &UploadQueue, + manifest_builder: &crate::services::cas::ManifestBuilder, semaphore: Arc, -) -> Result<(), anyhow::Error> { - log::info!("Retrieving Quilt data ..."); - - let list = fetch_quilt_versions(None, semaphore.clone()).await?; - - let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!( - "quilt/v{}/manifest.json", - daedalus::modded::CURRENT_QUILT_FORMAT_VERSION, - ))) - .await - .ok(); - - let mut versions = if let Some(old_manifest) = old_manifest { - old_manifest.game_versions - } else { - Vec::new() - }; - - let loaders_mutex = RwLock::new(Vec::new()); - - { - let mut loaders = loaders_mutex.write().await; - for (index, loader) in list.loader.iter().enumerate() { - if versions.iter().any(|x| { - x.id == BRANDING - .get_or_init(Branding::default) - .dummy_replace_string - && x.loaders.iter().any(|x| x.id == loader.version) - }) { - if index == 0 { - loaders.push(( - Box::new(false), - loader.version.clone(), - Box::new(true), - )) - } - } else { - loaders.push(( - Box::new(false), - loader.version.clone(), - Box::new(false), - )) - } - } - } - - const DUMMY_GAME_VERSION: &str = "1.19.4-rc2"; - - let loader_version_mutex = Mutex::new(Vec::new()); - let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new())); - - let loader_versions = futures::future::try_join_all( - loaders_mutex.read().await.clone().into_iter().map( - |(stable, loader, skip_upload)| async { - let version = fetch_quilt_version( - DUMMY_GAME_VERSION, - &loader, - semaphore.clone(), - ) - .await?; - - Ok::< - (Box, String, PartialVersionInfo, Box), - anyhow::Error, - >((stable, loader, version, skip_upload)) - }, - ), - ) - .await?; - - let visited_artifacts_mutex = Arc::new(Mutex::new(Vec::new())); - futures::future::try_join_all(loader_versions.into_iter() - .map( - |(stable, loader, version, skip_upload)| async { - let libs = futures::future::try_join_all( - 
version.libraries.into_iter().map(|mut lib| async { - { - let mut visited_assets = - visited_artifacts_mutex.lock().await; - - if visited_assets.contains(&lib.name) { - lib.name = lib.name.to_string().replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string).parse()?; - lib.url = Some(format_url("maven/")); - - return Ok(lib); - } else { - visited_assets.push(lib.name.clone()) - } - } - - if lib.name.to_string().contains(DUMMY_GAME_VERSION) { - lib.name = lib.name.to_string().replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string).parse()?; - futures::future::try_join_all(list.game.clone().into_iter().map(|game_version| async { - let semaphore = semaphore.clone(); - let uploaded_files_mutex = uploaded_files_mutex.clone(); - let lib_name = lib.name.to_string(); - let lib_url = lib.url.clone(); - - async move { - let artifact_path = - daedalus::get_path_from_artifact(&lib_name.replace(&BRANDING - .get_or_init(Branding::default) - .dummy_replace_string, &game_version.version))?; - - let artifact = download_file( - &format!( - "{}{}", - lib_url.unwrap_or_else(|| { - "https://maven.quiltmc.org/".to_string() - }), - artifact_path - ), - None, - semaphore.clone(), - ) - .await?; - - upload_file_to_bucket( - format!("{}/{}", "maven", artifact_path), - artifact.to_vec(), - Some("application/java-archive".to_string()), - &uploaded_files_mutex, - semaphore.clone(), - ) - .await?; - - Ok::<(), anyhow::Error>(()) - }.await?; - - Ok::<(), anyhow::Error>(()) - })).await?; - lib.url = Some(format_url("maven/")); - - return Ok(lib); - } - - let artifact_path = lib.name.path(); - - let artifact = download_file( - &format!( - "{}{}", - lib.url.unwrap_or_else(|| { - "https://maven.quiltmc.org/".to_string() - }), - artifact_path - ), - None, - semaphore.clone(), - ) - .await?; - - lib.url = Some(format_url("maven/")); - - upload_file_to_bucket( - format!("{}/{}", "maven", artifact_path), - 
artifact.to_vec(), - Some("application/java-archive".to_string()), - &uploaded_files_mutex, - semaphore.clone(), - ) - .await?; - - Ok::(lib) - }), - ) - .await?; - - - if async move { - *skip_upload - }.await { - return Ok::<(), anyhow::Error>(()) - } - - let version_path = format!( - "quilt/v{}/versions/{}.json", - daedalus::modded::CURRENT_QUILT_FORMAT_VERSION, - &loader - ); - - upload_file_to_bucket( - version_path.clone(), - serde_json::to_vec(&PartialVersionInfo { - arguments: version.arguments, - id: version - .id - .replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string), - main_class: version.main_class, - release_time: version.release_time, - time: version.time, - type_: version.type_, - logging: None, - inherits_from: version - .inherits_from - .replace(DUMMY_GAME_VERSION, &BRANDING - .get_or_init(Branding::default) - .dummy_replace_string), - libraries: libs, - minecraft_arguments: version.minecraft_arguments, - processors: None, - data: None, - })?, - Some("application/json".to_string()), - &uploaded_files_mutex, - semaphore.clone(), - ) - .await?; - - { - let mut loader_version_map = loader_version_mutex.lock().await; - async move { - loader_version_map.push(LoaderVersion { - id: loader.to_string(), - url: format_url(&version_path), - stable: *stable, - }); - } - .await; - } - - Ok::<(), anyhow::Error>(()) - }, - )) - .await?; - - let mut loader_version_mutex = loader_version_mutex.into_inner(); - if !loader_version_mutex.is_empty() { - if let Some(version) = versions.iter_mut().find(|x| { - x.id == BRANDING.get_or_init(Branding::default).dummy_replace_string - }) { - version.loaders.append(&mut loader_version_mutex); - } else { - versions.push(Version { - id: BRANDING - .get_or_init(Branding::default) - .dummy_replace_string - .clone(), - stable: true, - loaders: loader_version_mutex, - }); - } - } - - for version in &list.game { - if !versions.iter().any(|x| x.id == version.version) { - versions.push(Version 
{ - id: version.version.clone(), - stable: version.stable, - loaders: vec![], - }); - } - } - - versions.sort_by(|x, y| { - minecraft_versions - .versions - .iter() - .position(|z| x.id == z.id) - .unwrap_or_default() - .cmp( - &minecraft_versions - .versions - .iter() - .position(|z| y.id == z.id) - .unwrap_or_default(), - ) - }); - - for version in &mut versions { - version.loaders.sort_by(|x, y| { - let x_pos = list - .loader - .iter() - .position(|z| x.id == *z.version) - .unwrap_or_default(); - let y_pos = &list - .loader - .iter() - .position(|z| y.id == z.version) - .unwrap_or_default(); - - x_pos.cmp(y_pos) - }) - } - - upload_file_to_bucket( - format!( - "quilt/v{}/manifest.json", - daedalus::modded::CURRENT_QUILT_FORMAT_VERSION, - ), - serde_json::to_vec(&Manifest { - game_versions: versions, - })?, - Some("application/json".to_string()), - &uploaded_files_mutex, - semaphore, - ) - .await?; - - if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) { - uploaded_files.extend(uploaded_files_mutex.into_inner()); - } - - Ok(()) -} - -const QUILT_META_URL: &str = "https://meta.quiltmc.org/v3"; - -async fn fetch_quilt_version( - version_number: &str, - loader_version: &str, - semaphore: Arc, -) -> Result { - Ok(serde_json::from_slice( - &download_file( - &format!( - "{}/versions/loader/{}/{}/profile/json", - QUILT_META_URL, version_number, loader_version - ), - None, - semaphore, - ) - .await?, - )?) 
-} - -#[derive(Serialize, Deserialize, Debug, Clone)] -/// Versions of quilt components -struct QuiltVersions { - /// Versions of Minecraft that quilt supports - pub game: Vec, - /// Available versions of the quilt loader - pub loader: Vec, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -/// A version of Minecraft that quilt supports -struct QuiltGameVersion { - /// The version number of the game - pub version: String, - /// Whether the Minecraft version is stable or not - pub stable: bool, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -/// A version of the quilt loader -struct QuiltLoaderVersion { - /// The separator to get the build number - pub separator: String, - /// The build number - pub build: u32, - /// The maven artifact - pub maven: String, - /// The version number of the quilt loader - pub version: String, -} - -/// Fetches the list of quilt versions -async fn fetch_quilt_versions( - url: Option<&str>, - semaphore: Arc, -) -> Result { - Ok(serde_json::from_slice( - &download_file( - url.unwrap_or(&*format!("{}/versions", QUILT_META_URL)), - None, - semaphore, - ) - .await?, - )?) +) -> Result<(), crate::infrastructure::error::Error> { + let processor = LoaderProcessor::new(QuiltStrategy); + processor + .retrieve_data::(minecraft_versions, upload_queue, manifest_builder, semaphore) + .await } diff --git a/daedalus_client/src/services/betterstack.rs b/daedalus_client/src/services/betterstack.rs new file mode 100644 index 0000000..4f4befc --- /dev/null +++ b/daedalus_client/src/services/betterstack.rs @@ -0,0 +1,216 @@ +use serde_json::Value; +use std::sync::Arc; +use std::time::Duration; +use tokio::sync::Mutex; +use tracing::{info, warn}; +use tracing_subscriber::layer::Context; +use tracing_subscriber::Layer; + +/// Betterstack log shipping layer +/// +/// This layer captures tracing events and ships them to Betterstack's HTTP log ingestion API. +/// Logs are batched in memory and flushed periodically to reduce HTTP overhead. 
+/// +/// # Features +/// - Batched log shipping (configurable batch size) +/// - Background flush task (configurable interval) +/// - Graceful error handling (won't crash app on logging failures) +/// - JSON format compatible with Betterstack API +pub struct BetterstackLayer { + /// Shared buffer for batching logs + buffer: Arc>>, + /// Maximum batch size before forcing a flush + batch_size: usize, +} + +impl BetterstackLayer { + /// Create a new Betterstack layer with background flushing + /// + /// # Arguments + /// * `token` - Betterstack API token + /// * `batch_size` - Maximum logs to buffer before flushing (default: 100) + /// * `flush_interval` - Duration between automatic flushes (default: 5 seconds) + /// + /// # Returns + /// A tuple of (layer, flush_handle) where the handle can be used to await graceful shutdown + pub fn new( + token: String, + batch_size: Option, + flush_interval: Option, + ) -> (Self, tokio::task::JoinHandle<()>) { + let batch_size = batch_size.unwrap_or(100); + let flush_interval = flush_interval.unwrap_or(Duration::from_secs(5)); + let buffer = Arc::new(Mutex::new(Vec::new())); + + // Spawn background flush task + let flush_handle = { + let buffer_clone = Arc::clone(&buffer); + tokio::spawn(async move { + flush_loop(buffer_clone, token, flush_interval).await; + }) + }; + + let layer = Self { + buffer, + batch_size, + }; + + (layer, flush_handle) + } + + /// Add a log event to the buffer + async fn enqueue(&self, event: Value) { + let mut buffer = self.buffer.lock().await; + buffer.push(event); + + // Flush if batch is full + if buffer.len() >= self.batch_size { + drop(buffer); // Release lock before flushing + // Note: Actual flush happens in background task + } + } +} + +impl Layer for BetterstackLayer +where + S: tracing::Subscriber, +{ + fn on_event(&self, event: &tracing::Event<'_>, _ctx: Context<'_, S>) { + // Convert tracing event to JSON + let mut visitor = JsonVisitor::new(); + event.record(&mut visitor); + + let json_event 
= serde_json::json!({ + "timestamp": chrono::Utc::now().to_rfc3339(), + "level": format!("{:?}", event.metadata().level()).to_lowercase(), + "target": event.metadata().target(), + "fields": visitor.fields, + }); + + // Enqueue asynchronously (spawn to avoid blocking) + let buffer = Arc::clone(&self.buffer); + let batch_size = self.batch_size; + tokio::spawn(async move { + let layer = BetterstackLayer { buffer, batch_size }; + layer.enqueue(json_event).await; + }); + } +} + +/// Background flush loop +async fn flush_loop(buffer: Arc>>, token: String, interval: Duration) { + let mut timer = tokio::time::interval(interval); + let client = reqwest::Client::new(); + + loop { + timer.tick().await; + + // Take all pending logs + let logs = { + let mut buffer = buffer.lock().await; + if buffer.is_empty() { + continue; + } + std::mem::take(&mut *buffer) + }; + + // Ship to Betterstack + if let Err(e) = ship_logs(&client, &token, &logs).await { + warn!( + error = %e, + log_count = logs.len(), + "Failed to ship logs to Betterstack, logs dropped" + ); + } else { + info!(log_count = logs.len(), "Successfully shipped logs to Betterstack"); + } + } +} + +/// Ship logs to Betterstack HTTP API +async fn ship_logs( + client: &reqwest::Client, + token: &str, + logs: &[Value], +) -> Result<(), Box> { + if logs.is_empty() { + return Ok(()); + } + + let response = client + .post("https://in.logs.betterstack.com/") + .header("Authorization", format!("Bearer {}", token)) + .header("Content-Type", "application/json") + .json(&logs) + .timeout(Duration::from_secs(10)) + .send() + .await?; + + if !response.status().is_success() { + let status = response.status(); + let body = response.text().await.unwrap_or_else(|_| "".to_string()); + return Err(format!("Betterstack API error {}: {}", status, body).into()); + } + + Ok(()) +} + +/// Visitor for extracting fields from tracing events as JSON +struct JsonVisitor { + fields: serde_json::Map, +} + +impl JsonVisitor { + fn new() -> Self { + Self { 
+ fields: serde_json::Map::new(), + } + } +} + +impl tracing::field::Visit for JsonVisitor { + fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) { + self.fields.insert( + field.name().to_string(), + Value::String(format!("{:?}", value)), + ); + } + + fn record_str(&mut self, field: &tracing::field::Field, value: &str) { + self.fields + .insert(field.name().to_string(), Value::String(value.to_string())); + } + + fn record_i64(&mut self, field: &tracing::field::Field, value: i64) { + self.fields + .insert(field.name().to_string(), Value::Number(value.into())); + } + + fn record_u64(&mut self, field: &tracing::field::Field, value: u64) { + self.fields + .insert(field.name().to_string(), Value::Number(value.into())); + } + + fn record_bool(&mut self, field: &tracing::field::Field, value: bool) { + self.fields + .insert(field.name().to_string(), Value::Bool(value)); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_json_visitor_basic() { + let visitor = JsonVisitor::new(); + assert_eq!(visitor.fields.len(), 0, "New visitor should have empty fields"); + } + + #[test] + fn test_betterstack_layer_creation() { + // Test that layer creation doesn't panic + let _runtime = tokio::runtime::Runtime::new().unwrap(); + // Layer creation happens in async context in real usage + } +} diff --git a/daedalus_client/src/services/cas.rs b/daedalus_client/src/services/cas.rs new file mode 100644 index 0000000..93babfc --- /dev/null +++ b/daedalus_client/src/services/cas.rs @@ -0,0 +1,468 @@ +use chrono::{DateTime, Utc}; +use dashmap::DashMap; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use tracing::{info, instrument}; + +/// Current CAS (Content-Addressable Storage) version +/// +/// This is the single version entrypoint for all metadata (minecraft, forge, fabric, quilt, neoforge). +/// Old versions had individual versioning per loader, but v3+ uses a unified version. 
+pub const CAS_VERSION: u32 = 3; + +/// Content-Addressable Storage (CAS) system +/// +/// This module implements a content-addressable storage architecture where: +/// - Files are stored by their SHA256 hash (immutable, deduplicated) +/// - Loader manifests are timestamped for version history +/// - A root manifest atomically points to the current versions +/// +/// # Architecture +/// +/// ```text +/// Root Manifest (root.json) +/// ├─> minecraft manifest (minecraft/.json) +/// ├─> forge manifest (forge/.json) +/// ├─> fabric manifest (fabric/.json) +/// ├─> quilt manifest (quilt/.json) +/// └─> neoforge manifest (neoforge/.json) +/// +/// Each loader manifest contains: +/// ├─> version entries with content hashes +/// └─> references to objects/ +/// ``` +/// +/// # Benefits +/// +/// - **Atomic updates**: Single root manifest update makes all changes visible +/// - **Rollback**: Keep historical manifests, update root to point to previous version +/// - **Deduplication**: Same content = same hash = stored once +/// - **Immutability**: Content never changes, only manifest pointers +/// - **Version history**: Timestamped manifests enable auditing and rollback +/// +/// Reference to a loader manifest with its location +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct LoaderReference { + /// Timestamp of the loader manifest (ISO 8601 format) + pub timestamp: String, + /// Full path to the loader manifest + pub url: String, +} + +impl LoaderReference { + /// Create a new loader reference + pub fn new(loader: &str, timestamp: String) -> Self { + let url = format!("v{}/manifests/{}/{}.json", CAS_VERSION, loader, timestamp); + Self { timestamp, url } + } +} + +/// Root manifest that points to the current version of each loader manifest +/// +/// This is the single source of truth for the current state of the metadata. +/// Updating this file atomically switches between versions. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct RootManifest { + /// Schema version for future compatibility + pub schema_version: u32, + /// Timestamp when this root manifest was created + #[serde(with = "chrono::serde::ts_seconds")] + pub created_at: DateTime, + /// Map of loader name to its manifest reference + /// Example: "minecraft" -> { timestamp: "2024-01-15T10-30-00Z", url: "v{CAS_VERSION}/manifests/minecraft/2024-01-15T10-30-00Z.json" } + pub loaders: HashMap, +} + +impl RootManifest { + /// Create a new root manifest + pub fn new(loaders: HashMap) -> Self { + Self { + schema_version: 1, + created_at: Utc::now(), + loaders, + } + } + + /// Create an empty root manifest + #[cfg(test)] + pub fn empty() -> Self { + Self::new(HashMap::new()) + } + + /// Add or update a loader reference + #[cfg(test)] + pub fn add_loader(&mut self, loader: String, timestamp: String) { + self.loaders.insert( + loader.clone(), + LoaderReference::new(&loader, timestamp), + ); + } +} + +/// Entry in a loader manifest that references content by hash +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct LoaderManifestEntry { + /// The version ID (e.g., "1.20.4", "23w10a") + pub id: String, + /// SHA256 hash of the content (references objects/) + pub hash: String, + /// Size of the content in bytes + pub size: u64, + /// When this entry was last updated + #[serde(with = "chrono::serde::ts_seconds")] + pub updated_at: DateTime, +} + +/// Loader manifest containing all versions for a specific loader +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct LoaderManifest { + /// Schema version for future compatibility + pub schema_version: u32, + /// Loader name (e.g., "minecraft", "forge") + pub loader: String, + /// Timestamp when this manifest was created (ISO 8601) + pub timestamp: String, + /// All version entries + pub versions: Vec, +} + +impl LoaderManifest { + /// Create a new loader manifest + pub fn 
new(loader: String, versions: Vec) -> Self { + let timestamp = Utc::now().format("%Y-%m-%dT%H-%M-%SZ").to_string(); + Self { + schema_version: 1, + loader, + timestamp, + versions, + } + } +} + +/// Builder for tracking version entries and constructing loader manifests +/// +/// This is used during metadata processing to collect version→hash mappings +/// for each loader. Once all versions are added, it can build LoaderManifest +/// structures for upload. +/// +/// # Thread Safety +/// +/// Uses DashMap for lock-free concurrent access, allowing multiple threads +/// to add versions simultaneously. +/// +/// # Example +/// +/// ```no_run +/// # use daedalus_client::services::cas::ManifestBuilder; +/// let builder = ManifestBuilder::new(); +/// +/// // Multiple threads can add versions concurrently +/// builder.add_version("minecraft", "1.20.4".to_string(), "abc123...".to_string(), 1024); +/// builder.add_version("forge", "49.0.3".to_string(), "def456...".to_string(), 2048); +/// +/// // Build loader manifests +/// let minecraft_manifest = builder.build_loader_manifest("minecraft"); +/// let forge_manifest = builder.build_loader_manifest("forge"); +/// ``` +pub struct ManifestBuilder { + /// Map of loader name → (version_id → (hash, size)) + /// Using nested DashMap for concurrent access at both levels + versions: DashMap>, +} + +impl ManifestBuilder { + /// Create a new empty manifest builder + pub fn new() -> Self { + Self { + versions: DashMap::new(), + } + } + + /// Add a version entry for a specific loader + /// + /// If the version already exists for this loader, it will be overwritten. + /// This is idempotent and thread-safe. 
+ /// + /// # Arguments + /// + /// * `loader` - Loader name (e.g., "minecraft", "forge") + /// * `version_id` - Version identifier (e.g., "1.20.4", "49.0.3") + /// * `hash` - SHA256 hash of the version's content + /// * `size` - Size of the content in bytes + #[instrument(skip(self))] + pub fn add_version(&self, loader: &str, version_id: String, hash: String, size: u64) { + // Get or create the version map for this loader + let loader_map = self + .versions + .entry(loader.to_string()) + .or_default(); + + // Add the version entry + loader_map.insert(version_id, (hash, size)); + + info!(loader = %loader, "Added version to manifest builder"); + } + + /// Build a loader manifest from the tracked versions + /// + /// Creates a LoaderManifest with all versions that were added for this loader. + /// Returns None if no versions exist for this loader. + /// + /// # Arguments + /// + /// * `loader` - Loader name to build manifest for + #[instrument(skip(self))] + pub fn build_loader_manifest(&self, loader: &str) -> Option { + let loader_map = self.versions.get(loader)?; + + // Collect all version entries + let mut entries: Vec = loader_map + .iter() + .map(|entry| { + let (version_id, (hash, size)) = entry.pair(); + LoaderManifestEntry { + id: version_id.clone(), + hash: hash.clone(), + size: *size, + updated_at: Utc::now(), + } + }) + .collect(); + + // Sort by version ID for deterministic ordering + entries.sort_by(|a, b| a.id.cmp(&b.id)); + + info!( + loader = %loader, + version_count = entries.len(), + "Built loader manifest" + ); + + Some(LoaderManifest::new(loader.to_string(), entries)) + } + + /// Get list of all loaders that have versions + /// + /// Returns a sorted vector of loader names. + pub fn get_loaders(&self) -> Vec { + let mut loaders: Vec = self.versions.iter().map(|e| e.key().clone()).collect(); + loaders.sort(); + loaders + } + + /// Get the number of versions for a specific loader + /// + /// Returns 0 if the loader doesn't exist. 
+ #[cfg(test)] + pub fn version_count(&self, loader: &str) -> usize { + self.versions + .get(loader) + .map(|m| m.len()) + .unwrap_or(0) + } + + /// Get the total number of loaders + #[cfg(test)] + pub fn loader_count(&self) -> usize { + self.versions.len() + } +} + +impl Default for ManifestBuilder { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_root_manifest_creation() { + let mut loaders = HashMap::new(); + loaders.insert( + "minecraft".to_string(), + LoaderReference::new("minecraft", "2024-01-15T10-30-00Z".to_string()), + ); + loaders.insert( + "forge".to_string(), + LoaderReference::new("forge", "2024-01-15T10-31-00Z".to_string()), + ); + + let root = RootManifest::new(loaders.clone()); + + assert_eq!(root.schema_version, 1); + assert_eq!(root.loaders, loaders); + } + + #[test] + fn test_loader_reference_creation() { + let reference = LoaderReference::new("minecraft", "2024-01-15T10-30-00Z".to_string()); + + assert_eq!(reference.timestamp, "2024-01-15T10-30-00Z"); + assert_eq!(reference.url, format!("v{}/manifests/minecraft/2024-01-15T10-30-00Z.json", CAS_VERSION)); + } + + #[test] + fn test_root_manifest_add_loader() { + let mut root = RootManifest::empty(); + + root.add_loader("minecraft".to_string(), "2024-01-15T10-30-00Z".to_string()); + + assert_eq!(root.loaders.len(), 1); + assert!(root.loaders.contains_key("minecraft")); + + let reference = &root.loaders["minecraft"]; + assert_eq!(reference.timestamp, "2024-01-15T10-30-00Z"); + assert_eq!(reference.url, format!("v{}/manifests/minecraft/2024-01-15T10-30-00Z.json", CAS_VERSION)); + } + + #[test] + fn test_root_manifest_serialization() { + let root = RootManifest::empty(); + let json = serde_json::to_string(&root).unwrap(); + let deserialized: RootManifest = serde_json::from_str(&json).unwrap(); + + assert_eq!(root.schema_version, deserialized.schema_version); + assert_eq!(root.loaders, deserialized.loaders); + } + + #[test] + fn 
test_loader_manifest_entry() { + let entry = LoaderManifestEntry { + id: "1.20.4".to_string(), + hash: "abc123".to_string(), + size: 1024, + updated_at: Utc::now(), + }; + + assert_eq!(entry.id, "1.20.4"); + assert_eq!(entry.hash, "abc123"); + assert_eq!(entry.size, 1024); + } + + #[test] + fn test_loader_manifest_creation() { + let entries = vec![ + LoaderManifestEntry { + id: "1.20.4".to_string(), + hash: "abc123".to_string(), + size: 1024, + updated_at: Utc::now(), + }, + LoaderManifestEntry { + id: "1.20.3".to_string(), + hash: "def456".to_string(), + size: 2048, + updated_at: Utc::now(), + }, + ]; + + let manifest = LoaderManifest::new("minecraft".to_string(), entries.clone()); + + assert_eq!(manifest.schema_version, 1); + assert_eq!(manifest.loader, "minecraft"); + assert_eq!(manifest.versions.len(), 2); + assert_eq!(manifest.versions[0].id, "1.20.4"); + } + + #[test] + fn test_loader_manifest_serialization() { + let manifest = LoaderManifest::new("forge".to_string(), vec![]); + let json = serde_json::to_string(&manifest).unwrap(); + let deserialized: LoaderManifest = serde_json::from_str(&json).unwrap(); + + assert_eq!(manifest.schema_version, deserialized.schema_version); + assert_eq!(manifest.loader, deserialized.loader); + assert_eq!(manifest.timestamp, deserialized.timestamp); + } + + #[test] + fn test_manifest_builder_creation() { + let builder = ManifestBuilder::new(); + assert_eq!(builder.loader_count(), 0); + } + + #[test] + fn test_manifest_builder_add_version() { + let builder = ManifestBuilder::new(); + + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + + assert_eq!(builder.loader_count(), 1); + assert_eq!(builder.version_count("minecraft"), 1); + } + + #[test] + fn test_manifest_builder_multiple_loaders() { + let builder = ManifestBuilder::new(); + + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + builder.add_version("forge", "49.0.3".to_string(), "def456".to_string(), 
2048); + builder.add_version("fabric", "0.15.0".to_string(), "ghi789".to_string(), 512); + + assert_eq!(builder.loader_count(), 3); + assert_eq!(builder.version_count("minecraft"), 1); + assert_eq!(builder.version_count("forge"), 1); + assert_eq!(builder.version_count("fabric"), 1); + + let loaders = builder.get_loaders(); + assert_eq!(loaders, vec!["fabric", "forge", "minecraft"]); // Sorted + } + + #[test] + fn test_manifest_builder_multiple_versions() { + let builder = ManifestBuilder::new(); + + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + builder.add_version("minecraft", "1.20.3".to_string(), "def456".to_string(), 2048); + builder.add_version("minecraft", "1.20.2".to_string(), "ghi789".to_string(), 512); + + assert_eq!(builder.loader_count(), 1); + assert_eq!(builder.version_count("minecraft"), 3); + } + + #[test] + fn test_manifest_builder_build_manifest() { + let builder = ManifestBuilder::new(); + + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + builder.add_version("minecraft", "1.20.3".to_string(), "def456".to_string(), 2048); + + let manifest = builder.build_loader_manifest("minecraft").unwrap(); + + assert_eq!(manifest.loader, "minecraft"); + assert_eq!(manifest.versions.len(), 2); + + // Check versions are sorted by ID + assert_eq!(manifest.versions[0].id, "1.20.3"); + assert_eq!(manifest.versions[1].id, "1.20.4"); + } + + #[test] + fn test_manifest_builder_nonexistent_loader() { + let builder = ManifestBuilder::new(); + + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + + assert!(builder.build_loader_manifest("forge").is_none()); + assert_eq!(builder.version_count("forge"), 0); + } + + #[test] + fn test_manifest_builder_overwrite_version() { + let builder = ManifestBuilder::new(); + + // Add same version twice with different hashes + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + 
builder.add_version("minecraft", "1.20.4".to_string(), "def456".to_string(), 2048); + + assert_eq!(builder.version_count("minecraft"), 1); // Still 1, overwritten + + let manifest = builder.build_loader_manifest("minecraft").unwrap(); + assert_eq!(manifest.versions.len(), 1); + assert_eq!(manifest.versions[0].hash, "def456"); // Latest hash + assert_eq!(manifest.versions[0].size, 2048); // Latest size + } +} diff --git a/daedalus_client/src/services/cloudflare.rs b/daedalus_client/src/services/cloudflare.rs new file mode 100644 index 0000000..41baf77 --- /dev/null +++ b/daedalus_client/src/services/cloudflare.rs @@ -0,0 +1,130 @@ +use crate::infrastructure::error::{Error, fetch_error, invalid_input}; +use std::sync::LazyLock; +use std::time::Duration; +use tracing::{error, info, instrument, warn, Instrument}; + +/// HTTP client specifically for Cloudflare API requests +/// +/// This client is configured with: +/// - TCP keepalive for long-lived connections +/// - Generous timeouts for API operations +/// - Proper user agent identification +/// - Connection pooling for efficiency +static HTTP_CLIENT: LazyLock = LazyLock::new(|| { + reqwest::Client::builder() + .tcp_keepalive(Some(Duration::from_secs(10))) + .timeout(Duration::from_secs(120)) + .connect_timeout(Duration::from_secs(30)) + .user_agent(format!( + "gdlauncher/daedalus/{} ({})", + env!("CARGO_PKG_VERSION"), + dotenvy::var("SUPPORT_EMAIL") + .unwrap_or_else(|_| "support@gdlauncher.com".to_string()) + )) + .pool_max_idle_per_host(10) + .build() + .expect("Failed to build HTTP client") +}); + +/// Purges Cloudflare cache for the given URLs +/// +/// This function handles batching URLs according to Cloudflare's API limits +/// (30 URLs per request) and provides detailed error handling for individual +/// batch failures. This ensures that CDN serves the latest content immediately +/// after uploads. 
+/// +/// # Arguments +/// +/// * `token` - Cloudflare API token with cache purge permissions +/// * `zone_id` - The Cloudflare zone ID for the domain +/// * `urls` - List of full URLs to purge from cache +/// +/// # Returns +/// +/// Ok(()) if at least some URLs were purged successfully +/// Err if all batches failed (individual batch failures are logged as warnings) +/// +/// # Example +/// +/// ```no_run +/// let urls = vec![ +/// "https://example.com/file1.json".to_string(), +/// "https://example.com/file2.json".to_string(), +/// ]; +/// purge_cloudflare_cache("api_token", "zone_id", &urls).await?; +/// ``` +#[instrument(skip(token, zone_id, urls), fields(url_count = urls.len()))] +pub async fn purge_cloudflare_cache( + token: &str, + zone_id: &str, + urls: &[String], +) -> Result<(), Error> { + if urls.is_empty() { + info!("No URLs to purge from Cloudflare cache"); + return Ok(()); + } + + info!(url_count = urls.len(), "Starting Cloudflare cache purge"); + + let mut total_purged = 0; + let mut failed_batches = 0; + + // Cloudflare limit: 30 URLs per request + for (batch_idx, chunk) in urls.chunks(30).enumerate() { + let batch_span = tracing::info_span!("cloudflare_purge_batch", batch = batch_idx, batch_size = chunk.len()); + let result = async { + let response = HTTP_CLIENT + .post(format!( + "https://api.cloudflare.com/client/v4/zones/{}/purge_cache", + zone_id + )) + .header("Authorization", format!("Bearer {}", token)) + .header("Content-Type", "application/json") + .json(&serde_json::json!({ "files": chunk })) + .send() + .await + .map_err(|e| fetch_error(e, "cloudflare purge"))?; + + let status = response.status(); + if status.is_success() { + info!(batch = batch_idx, purged = chunk.len(), "Cloudflare cache purge batch succeeded"); + Ok::(chunk.len()) + } else { + let error_text = response.text().await.unwrap_or_else(|_| "Unable to read response".to_string()); + error!( + batch = batch_idx, + status = %status, + error = %error_text, + "Cloudflare cache 
purge batch failed" + ); + Err(invalid_input(format!( + "Cloudflare API returned status {}: {}", + status, + error_text + ))) + } + } + .instrument(batch_span) + .await; + + match result { + Ok(count) => total_purged += count, + Err(e) => { + failed_batches += 1; + warn!(error = %e, "Failed to purge batch, continuing with remaining batches"); + } + } + } + + if failed_batches > 0 { + warn!( + total_purged, + failed_batches, + "Cloudflare cache purge completed with some failures" + ); + } else { + info!(total_purged, "Cloudflare cache purge completed successfully"); + } + + Ok(()) +} diff --git a/daedalus_client/src/services/download.rs b/daedalus_client/src/services/download.rs new file mode 100644 index 0000000..76db439 --- /dev/null +++ b/daedalus_client/src/services/download.rs @@ -0,0 +1,61 @@ +use crate::infrastructure::error::Error; +use std::sync::Arc; +use tokio::sync::Semaphore; +use tracing::{info, instrument}; + +/// Download a file with optional SHA1 verification +/// +/// This function wraps the daedalus library's download_file function, +/// adding semaphore-based concurrency control and structured logging. +/// +/// # Arguments +/// +/// * `url` - The URL to download from +/// * `sha1` - Optional SHA1 hash for verification +/// * `semaphore` - Semaphore for limiting concurrent downloads +/// +/// # Returns +/// +/// The downloaded file contents as bytes +#[instrument(skip(semaphore))] +pub async fn download_file( + url: &str, + sha1: Option<&str>, + semaphore: Arc, +) -> Result { + let _permit = semaphore.acquire().await?; + info!(url = %url, has_sha1 = sha1.is_some(), "Started downloading"); + let val = daedalus::download_file(url, sha1).await?; + info!(url = %url, "Download completed"); + Ok(val) +} + +/// Download a file from multiple mirror URLs with automatic fallback +/// +/// This function wraps the daedalus library's download_file_mirrors function, +/// adding semaphore-based concurrency control and structured logging. 
+/// It will try each mirror in order until one succeeds. +/// +/// # Arguments +/// +/// * `base` - The base path to append to each mirror URL +/// * `mirrors` - Array of mirror base URLs to try +/// * `sha1` - Optional SHA1 hash for verification +/// * `semaphore` - Semaphore for limiting concurrent downloads +/// +/// # Returns +/// +/// The downloaded file contents as bytes +#[instrument(skip(semaphore), fields(mirror_count = mirrors.len()))] +pub async fn download_file_mirrors( + base: &str, + mirrors: &[&str], + sha1: Option<&str>, + semaphore: Arc, +) -> Result { + let _permit = semaphore.acquire().await?; + info!(base = %base, has_sha1 = sha1.is_some(), "Started downloading from mirrors"); + let val = daedalus::download_file_mirrors(base, mirrors, sha1).await?; + info!(base = %base, "Download from mirrors completed"); + Ok(val) +} diff --git a/daedalus_client/src/services/mod.rs b/daedalus_client/src/services/mod.rs new file mode 100644 index 0000000..7289517 --- /dev/null +++ b/daedalus_client/src/services/mod.rs @@ -0,0 +1,5 @@ +pub mod betterstack; +pub mod cas; +pub mod cloudflare; +pub mod download; +pub mod upload; diff --git a/daedalus_client/src/services/upload.rs b/daedalus_client/src/services/upload.rs new file mode 100644 index 0000000..8fe3545 --- /dev/null +++ b/daedalus_client/src/services/upload.rs @@ -0,0 +1,327 @@ +use backon::{ExponentialBuilder, Retryable}; +use dashmap::DashMap; +use s3::Bucket; +use sha2::{Digest, Sha256}; +use std::sync::Arc; +use std::time::Duration; +use tokio::sync::Semaphore; +use tracing::{error, info, instrument}; + +/// Upload queue for atomic batch uploads +/// +/// This queue collects files to be uploaded and provides an atomic +/// flush operation that ensures all-or-nothing semantics. This prevents +/// partial failures from leaving the CDN in an inconsistent state. 
+/// +/// Supports two upload modes: +/// - CAS (Content-Addressable Storage): Files stored by SHA256 hash at v{CAS_VERSION}/objects/ +/// - Path-based: Files stored at explicit paths (e.g., maven/ for compatibility) +/// +/// # Example +/// +/// ```no_run +/// let queue = UploadQueue::new(); +/// +/// // CAS upload (returns hash) +/// let hash = queue.enqueue_cas(vec![1, 2, 3], Some("application/json")); +/// +/// // Path-based upload (for maven artifacts, etc.) +/// queue.enqueue_path("maven/lib.jar", vec![4, 5, 6], Some("application/java-archive")); +/// +/// // Atomic: all files uploaded or none +/// queue.flush(&s3_client, semaphore).await?; +/// ``` +pub struct UploadQueue { + /// Lock-free concurrent map of pending CAS uploads + /// Key: content hash (SHA256), Value: (bytes, content_type) + cas_queue: DashMap, Option)>, + + /// Lock-free concurrent map of pending path-based uploads + /// Key: file path, Value: (bytes, content_type) + path_queue: DashMap, Option)>, +} + +impl UploadQueue { + /// Create a new empty upload queue + pub fn new() -> Self { + Self { + cas_queue: DashMap::new(), + path_queue: DashMap::new(), + } + } + + /// Compute SHA256 hash of content + fn compute_hash(content: &[u8]) -> String { + let mut hasher = Sha256::new(); + hasher.update(content); + format!("{:x}", hasher.finalize()) + } + + /// Enqueue content for CAS upload (does NOT upload yet) + /// + /// Content is stored by its SHA256 hash and will be uploaded to v{CAS_VERSION}/objects/{hash[0..2]}/{hash[2..]}. + /// Returns the hash so callers can reference it in manifests. + /// + /// Multiple enqueues of the same content (same hash) will deduplicate automatically. 
+ #[instrument(skip(self, content), fields(size = content.len()))] + pub fn enqueue(&self, content: Vec, content_type: Option) -> String { + let hash = Self::compute_hash(&content); + info!(hash = %hash, "Enqueued for CAS upload"); + self.cas_queue.insert(hash.clone(), (content, content_type)); + hash + } + + /// Enqueue content for path-based upload (does NOT upload yet) + /// + /// Content is stored at the specified path (e.g., "maven/lib.jar"). + /// This is used for files that need predictable paths for compatibility. + /// + /// Multiple enqueues of the same path will overwrite previous content. + #[instrument(skip(self, content), fields(size = content.len()))] + pub fn enqueue_path(&self, path: String, content: Vec, content_type: Option) { + info!(path = %path, "Enqueued for path-based upload"); + self.path_queue.insert(path, (content, content_type)); + } + + /// Flush all queued uploads atomically to S3 + /// + /// Uploads both CAS objects (to v{CAS_VERSION}/objects/{hash[0..2]}/{hash[2..]}) + /// and path-based files (to their specified paths). + /// On error, all uploads are considered failed (no partial state). + /// + /// # Errors + /// + /// Returns error if any upload fails after retries. 
+ #[instrument(skip(self, s3_client, semaphore), fields(cas_count = self.cas_queue.len(), path_count = self.path_queue.len()))] + pub async fn flush( + &self, + s3_client: &Bucket, + semaphore: Arc, + ) -> Result<(), crate::infrastructure::error::Error> { + let cas_size = self.cas_queue.len(); + let path_size = self.path_queue.len(); + let total_size = cas_size + path_size; + + if total_size == 0 { + info!("Upload queue is empty, nothing to flush"); + return Ok(()); + } + + info!( + cas_count = cas_size, + path_count = path_size, + "Starting atomic flush of {} objects ({} CAS, {} path-based)", + total_size, + cas_size, + path_size + ); + + // Upload CAS objects (content-addressed with 2-char prefix for sharding) + for entry in self.cas_queue.iter() { + let (hash, (bytes, content_type)) = entry.pair(); + let path = format!("v{}/objects/{}/{}", crate::services::cas::CAS_VERSION, &hash[..2], &hash[2..]); + + upload_single_file( + &path, + bytes, + content_type.as_deref(), + s3_client, + semaphore.clone(), + ) + .await?; + } + + // Upload path-based files + for entry in self.path_queue.iter() { + let (path, (bytes, content_type)) = entry.pair(); + + upload_single_file( + path, + bytes, + content_type.as_deref(), + s3_client, + semaphore.clone(), + ) + .await?; + } + + // Clear queues only on complete success + self.cas_queue.clear(); + self.path_queue.clear(); + + info!( + uploaded = total_size, + "Successfully flushed {} objects", + total_size + ); + + Ok(()) + } + + /// Get the total number of queued files (CAS + path-based) + pub fn len(&self) -> usize { + self.cas_queue.len() + self.path_queue.len() + } + + /// Check if queue is empty (both CAS and path-based) + #[allow(dead_code)] + pub fn is_empty(&self) -> bool { + self.cas_queue.is_empty() && self.path_queue.is_empty() + } +} + +impl Default for UploadQueue { + fn default() -> Self { + Self::new() + } +} + +/// Upload a single file to S3 with retry logic +/// +/// Internal helper function that handles the 
actual S3 upload with +/// exponential backoff retry on failure. +#[instrument(skip(bytes, s3_client, semaphore), fields(size = bytes.len()))] +async fn upload_single_file( + path: &str, + bytes: &[u8], + content_type: Option<&str>, + s3_client: &Bucket, + semaphore: Arc, +) -> Result<(), crate::infrastructure::error::Error> { + let _permit = semaphore.acquire().await?; + + info!(path = %path, "Started uploading"); + + (|| async { + let result = if let Some(content_type) = content_type { + s3_client + .put_object_with_content_type(path.to_string(), bytes, content_type) + .await + } else { + s3_client.put_object(path.to_string(), bytes).await + } + .map_err(|err| { + error!(path = %path, error = %err, "Failed to upload"); + crate::infrastructure::error::s3_error(err, path.to_string()) + }); + + match result { + Ok(_) => { + info!(path = %path, "Upload completed"); + Ok(()) + } + Err(err) => { + error!(path = %path, error = %err, "Upload failed"); + Err(err) + } + } + }) + .retry( + ExponentialBuilder::default() + .with_max_times(10) + .with_max_delay(Duration::from_secs(1800)), + ) + .await +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_upload_queue_creation() { + let queue = UploadQueue::new(); + assert_eq!(queue.len(), 0); + assert!(queue.is_empty()); + } + + #[test] + fn test_enqueue() { + let queue = UploadQueue::new(); + + let hash = queue.enqueue(vec![1, 2, 3], Some("application/json".to_string())); + + // SHA256 of [1, 2, 3] + assert_eq!( + hash, + "039058c6f2c0cb492c533b0a4d14ef77cc0f78abccced5287d84a1a2011cfb81" + ); + assert_eq!(queue.len(), 1); + assert!(!queue.is_empty()); + } + + #[test] + fn test_deduplication() { + let queue = UploadQueue::new(); + + // Enqueue same content twice + let hash1 = queue.enqueue(vec![1, 2, 3], None); + let hash2 = queue.enqueue(vec![1, 2, 3], None); + + // Same content = same hash, deduplicated + assert_eq!(hash1, hash2); + assert_eq!(queue.len(), 1); // Only stored once + } + + #[test] + fn 
test_multiple_objects() { + let queue = UploadQueue::new(); + + // Different content = different hashes + let hash1 = queue.enqueue(vec![1], None); + let hash2 = queue.enqueue(vec![2], None); + let hash3 = queue.enqueue(vec![3], None); + + // All three should be different + assert_ne!(hash1, hash2); + assert_ne!(hash2, hash3); + assert_ne!(hash1, hash3); + assert_eq!(queue.len(), 3); + } + + #[test] + fn test_compute_hash() { + let content = b"hello world"; + let hash = UploadQueue::compute_hash(content); + + // SHA256 of "hello world" + assert_eq!( + hash, + "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" + ); + } + + #[test] + fn test_enqueue_path() { + let queue = UploadQueue::new(); + + queue.enqueue_path( + "maven/lib.jar".to_string(), + vec![1, 2, 3], + Some("application/java-archive".to_string()), + ); + + assert_eq!(queue.len(), 1); + assert!(!queue.is_empty()); + } + + #[test] + fn test_mixed_cas_and_path() { + let queue = UploadQueue::new(); + + // Add CAS upload + let hash = queue.enqueue(vec![1, 2, 3], Some("application/json".to_string())); + assert!(!hash.is_empty()); + + // Add path-based upload + queue.enqueue_path( + "maven/lib.jar".to_string(), + vec![4, 5, 6], + Some("application/java-archive".to_string()), + ); + + // Should have 2 total files queued + assert_eq!(queue.len(), 2); + assert!(!queue.is_empty()); + } +} diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 0000000..c5794a6 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "1.85" From 5ddee85dec567f96a15e55a83890de29ebd6f9f0 Mon Sep 17 00:00:00 2001 From: Davide Date: Mon, 13 Oct 2025 23:32:38 +0200 Subject: [PATCH 2/7] Improves Forge and NeoForge processing --- .env.example | 4 + daedalus_client/README.md | 1 + daedalus_client/src/forge.rs | 107 +++++++++++++++----- daedalus_client/src/loaders/mod.rs | 106 +++++++++++-------- daedalus_client/src/main.rs | 4 + daedalus_client/src/neoforge.rs | 104 
+++++++++---------- daedalus_client/src/services/betterstack.rs | 14 +-- daedalus_client/src/services/cas.rs | 45 ++++++++ 8 files changed, 254 insertions(+), 131 deletions(-) diff --git a/.env.example b/.env.example index 90a1753..5cdcf98 100644 --- a/.env.example +++ b/.env.example @@ -59,6 +59,10 @@ S3_SECRET=secret # Get from https://betterstack.com # BETTERSTACK_TOKEN=your-betterstack-token +# Betterstack ingestion endpoint URL +# Default: https://in.logs.betterstack.com +# BETTERSTACK_URL=https://in.logs.betterstack.com + # ============================================================================= # OPTIONAL: CLOUDFLARE INTEGRATION # ============================================================================= diff --git a/daedalus_client/README.md b/daedalus_client/README.md index ec2417e..17bcf30 100644 --- a/daedalus_client/README.md +++ b/daedalus_client/README.md @@ -80,6 +80,7 @@ cargo build --release | `LOG_FORMAT` | Log output format | `text` | `json` or `text` | | `RUST_LOG` | Rust log level filter | `info` | `debug`, `info`, `warn`, `error` | | `BETTERSTACK_TOKEN` | Betterstack logging token | None | `your-betterstack-token` | +| `BETTERSTACK_URL` | Betterstack ingestion endpoint | `https://in.logs.betterstack.com` | `https://in.logs.betterstack.com` | | `CLOUDFLARE_INTEGRATION` | Enable Cloudflare cache purging | `false` | `true` or `false` | | `CLOUDFLARE_TOKEN` | Cloudflare API token (required if integration enabled) | None | `your-cloudflare-token` | | `CLOUDFLARE_ZONE_ID` | Cloudflare zone ID (required if integration enabled) | None | `your-zone-id` | diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge.rs index e5dcc2f..9267eca 100644 --- a/daedalus_client/src/forge.rs +++ b/daedalus_client/src/forge.rs @@ -10,7 +10,7 @@ use daedalus::minecraft::{ use daedalus::modded::{ LoaderVersion, PartialVersionInfo, Processor, SidedDataEntry, }; -use daedalus::GradleSpecifier; +use daedalus::{get_hash, GradleSpecifier}; use 
tracing::{info, warn}; use semver::{Version, VersionReq}; use serde::{Deserialize, Serialize}; @@ -37,6 +37,17 @@ static FORGE_MANIFEST_V3_QUERY: LazyLock = LazyLock::new(|| { VersionReq::parse(">=37.0.0").unwrap() }); +fn extract_hash_from_cas_url(url: &str) -> Option { + let parts: Vec<&str> = url.rsplitn(3, '/').collect(); + if parts.len() >= 2 { + let hash_suffix = parts[0]; + let hash_prefix = parts[1]; + Some(format!("{}{}", hash_prefix, hash_suffix)) + } else { + None + } +} + pub async fn fetch_generated_version_info( version_id: &str, ) -> Result { @@ -236,19 +247,9 @@ pub async fn retrieve_data( if FORGE_SKIP_LIST.contains(&&*loader_version_full) { info!("⏭️ Forge - Skipping excluded version: {}", loader_version_full); - return Ok(None); + return Ok::, crate::infrastructure::error::Error>(None); } - { - let versions = versions_mutex.lock().await; - let version = versions.iter().find(|x| - x.id == minecraft_version).and_then(|x| x.loaders.iter().find(|x| x.id == loader_version_full)); - - if let Some(version) = version { - info!("Already have Forge {}", loader_version_full.clone()); - return Ok::, crate::infrastructure::error::Error>(Some(version.clone())); - } - } info!("Forge - Installer Start {}", loader_version_full.clone()); let bytes = download_file(&format!("https://maven.minecraftforge.net/net/minecraftforge/forge/{0}/forge-{0}-installer.jar", loader_version_full), None, semaphore.clone()).await?; @@ -347,12 +348,42 @@ pub async fn retrieve_data( processors: None }; - // Upload version to CAS and track in manifest builder let version_bytes = serde_json::to_vec(&new_profile)?; - let version_hash = upload_queue.enqueue( - version_bytes.clone(), - Some("application/json".to_string()), - ); + let new_hash = get_hash(bytes::Bytes::from(version_bytes.clone())).await?; + + let old_loader_version = { + let versions = versions_mutex.lock().await; + versions.iter() + .find(|v| v.id == minecraft_version) + .and_then(|v| v.loaders.iter().find(|l| l.id == 
loader_version_full)) + .cloned() + }; + + let should_upload = if let Some(old_version) = &old_loader_version { + if let Some(old_hash) = extract_hash_from_cas_url(&old_version.url) { + if old_hash == new_hash { + info!("✓ Forge {} unchanged (hash: {})", loader_version_full, &new_hash[..8]); + false + } else { + info!("↻ Forge {} changed (old: {}, new: {})", loader_version_full, &old_hash[..8], &new_hash[..8]); + true + } + } else { + true + } + } else { + info!("+ Forge {} is new", loader_version_full); + true + }; + + let version_hash = if should_upload { + upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), + ) + } else { + new_hash.clone() + }; manifest_builder.add_version( "forge", @@ -361,7 +392,6 @@ pub async fn retrieve_data( version_bytes.len() as u64, ); - // Build CAS URL for LoaderVersion let base_url = dotenvy::var("BASE_URL").unwrap(); let cas_url = format!( "{}/v{}/objects/{}/{}", @@ -620,12 +650,42 @@ pub async fn retrieve_data( processors: Some(profile.processors), }; - // Upload version to CAS and track in manifest builder let version_bytes = serde_json::to_vec(&new_profile)?; - let version_hash = upload_queue.enqueue( - version_bytes.clone(), - Some("application/json".to_string()), - ); + let new_hash = get_hash(bytes::Bytes::from(version_bytes.clone())).await?; + + let old_loader_version = { + let versions = versions_mutex.lock().await; + versions.iter() + .find(|v| v.id == minecraft_version) + .and_then(|v| v.loaders.iter().find(|l| l.id == loader_version_full)) + .cloned() + }; + + let should_upload = if let Some(old_version) = &old_loader_version { + if let Some(old_hash) = extract_hash_from_cas_url(&old_version.url) { + if old_hash == new_hash { + info!("✓ Forge {} unchanged (hash: {})", loader_version_full, &new_hash[..8]); + false + } else { + info!("↻ Forge {} changed (old: {}, new: {})", loader_version_full, &old_hash[..8], &new_hash[..8]); + true + } + } else { + true + } + } else { + info!("+ Forge {} 
is new", loader_version_full); + true + }; + + let version_hash = if should_upload { + upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), + ) + } else { + new_hash.clone() + }; manifest_builder.add_version( "forge", @@ -634,7 +694,6 @@ pub async fn retrieve_data( version_bytes.len() as u64, ); - // Build CAS URL for LoaderVersion let base_url = dotenvy::var("BASE_URL").unwrap(); let cas_url = format!( "{}/v{}/objects/{}/{}", diff --git a/daedalus_client/src/loaders/mod.rs b/daedalus_client/src/loaders/mod.rs index f0f31c1..6cf40b0 100644 --- a/daedalus_client/src/loaders/mod.rs +++ b/daedalus_client/src/loaders/mod.rs @@ -6,12 +6,23 @@ use crate::services::upload::UploadQueue; use dashmap::DashSet; use daedalus::minecraft::{Library, VersionManifest}; use daedalus::modded::{LoaderVersion, PartialVersionInfo, Version}; -use daedalus::{Branding, BRANDING}; +use daedalus::{get_hash, Branding, BRANDING}; use serde::Deserialize; use std::sync::Arc; use tokio::sync::{Mutex, RwLock, Semaphore}; use tracing::{info, warn}; +fn extract_hash_from_cas_url(url: &str) -> Option { + let parts: Vec<&str> = url.rsplitn(3, '/').collect(); + if parts.len() >= 2 { + let hash_suffix = parts[0]; + let hash_prefix = parts[1]; + Some(format!("{}{}", hash_prefix, hash_suffix)) + } else { + None + } +} + /// Strategy trait for loader-specific behavior /// /// This trait abstracts the differences between loaders like Fabric and Quilt, @@ -110,36 +121,28 @@ impl LoaderProcessor { }; // Prepare list of loaders to process - // Format: (stable, version, skip_upload) + // Format: (stable, version, old_version_opt) let loaders_mutex = RwLock::new(Vec::new()); { let mut loaders = loaders_mutex.write().await; - for (index, loader) in list.loader().iter().enumerate() { - // Check if this loader already exists in the dummy version - let already_exists = versions.iter().any(|x| { - x.id == BRANDING - .get_or_init(Branding::default) - .dummy_replace_string - && 
x.loaders.iter().any(|x| x.id == loader.version()) - }); - - if already_exists { - // Only add the first loader to update it - if index == 0 { - loaders.push(( - Box::new(self.strategy.is_stable(loader as &dyn LoaderVersionInfo)), - loader.version().to_string(), - Box::new(true), // skip_upload - )) - } - } else { - loaders.push(( - Box::new(self.strategy.is_stable(loader as &dyn LoaderVersionInfo)), - loader.version().to_string(), - Box::new(false), // don't skip - )) - } + for loader in list.loader() { + // Find old version if it exists in the dummy version + let old_loader_version = versions + .iter() + .find(|x| { + x.id == BRANDING + .get_or_init(Branding::default) + .dummy_replace_string + }) + .and_then(|x| x.loaders.iter().find(|l| l.id == loader.version())) + .cloned(); + + loaders.push(( + Box::new(self.strategy.is_stable(loader as &dyn LoaderVersionInfo)), + loader.version().to_string(), + old_loader_version, + )) } } @@ -152,13 +155,13 @@ impl LoaderProcessor { let mut fetch_successful = 0; let mut fetch_failed = 0; - for (stable, loader, skip_upload) in loaders_mutex.read().await.clone() { + for (stable, loader, old_loader_version) in loaders_mutex.read().await.clone() { match self .fetch_loader_version(DUMMY_GAME_VERSION, &loader, semaphore.clone()) .await { Ok(version) => { - loader_versions.push((stable, loader, version, skip_upload)); + loader_versions.push((stable, loader, version, old_loader_version)); fetch_successful += 1; } Err(e) => { @@ -187,14 +190,14 @@ impl LoaderProcessor { let mut process_successful = 0; let mut process_failed = 0; - for (stable, loader, version, skip_upload) in loader_versions { + for (stable, loader, version, old_loader_version) in loader_versions { let loader_clone = loader.clone(); let process_result = self .process_loader_version( stable, loader, version, - skip_upload, + old_loader_version, &list, &loader_version_mutex, upload_queue, @@ -351,7 +354,7 @@ impl LoaderProcessor { stable: Box, loader: String, version: 
PartialVersionInfo, - skip_upload: Box, + old_loader_version: Option, list: &V, loader_version_mutex: &Mutex>, upload_queue: &UploadQueue, @@ -468,11 +471,6 @@ impl LoaderProcessor { })) .await?; - // Skip upload if this loader already exists - if *skip_upload { - return Ok(()); - } - // Prepare version info with replaced dummy game version let version_info = PartialVersionInfo { arguments: version.arguments, @@ -495,12 +493,34 @@ impl LoaderProcessor { data: None, }; - // Upload version to CAS and track in manifest builder let version_bytes = serde_json::to_vec(&version_info)?; - let version_hash = upload_queue.enqueue( - version_bytes.clone(), - Some("application/json".to_string()), - ); + let new_hash = get_hash(bytes::Bytes::from(version_bytes.clone())).await?; + + let should_upload = if let Some(old_version) = &old_loader_version { + if let Some(old_hash) = extract_hash_from_cas_url(&old_version.url) { + if old_hash == new_hash { + info!("✓ {} {} unchanged (hash: {})", self.strategy.name(), loader, &new_hash[..8]); + false + } else { + info!("↻ {} {} changed (old: {}, new: {})", self.strategy.name(), loader, &old_hash[..8], &new_hash[..8]); + true + } + } else { + true + } + } else { + info!("+ {} {} is new", self.strategy.name(), loader); + true + }; + + let version_hash = if should_upload { + upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), + ) + } else { + new_hash.clone() + }; manifest_builder.add_version( self.strategy.manifest_path_prefix(), @@ -509,7 +529,6 @@ impl LoaderProcessor { version_bytes.len() as u64, ); - // Build CAS URL for LoaderVersion let base_url = dotenvy::var("BASE_URL").unwrap(); let cas_url = format!( "{}/v{}/objects/{}/{}", @@ -519,7 +538,6 @@ impl LoaderProcessor { &version_hash[2..] 
); - // Add to loader version list let mut loader_version_map = loader_version_mutex.lock().await; loader_version_map.push(LoaderVersion { id: loader, diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs index 334be8f..7a992c7 100644 --- a/daedalus_client/src/main.rs +++ b/daedalus_client/src/main.rs @@ -48,8 +48,12 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { let betterstack_token = dotenvy::var("BETTERSTACK_TOKEN").ok(); let _betterstack_handle = if let Some(ref token) = betterstack_token { + let betterstack_url = dotenvy::var("BETTERSTACK_URL") + .unwrap_or_else(|_| "https://in.logs.betterstack.com".to_string()); + let (betterstack_layer, handle) = services::betterstack::BetterstackLayer::new( token.clone(), + betterstack_url, None, None, ); diff --git a/daedalus_client/src/neoforge.rs b/daedalus_client/src/neoforge.rs index d598b9c..7a4e727 100644 --- a/daedalus_client/src/neoforge.rs +++ b/daedalus_client/src/neoforge.rs @@ -5,6 +5,7 @@ use daedalus::minecraft::{Library, VersionManifest}; use daedalus::modded::{ LoaderVersion, PartialVersionInfo, Processor, SidedDataEntry, }; +use daedalus::get_hash; use tracing::{info, warn}; use semver::Version; use serde::{Deserialize, Serialize}; @@ -26,22 +27,24 @@ static NEOFORGE_SKIP_LIST: LazyLock> = LazyLock::new(|| { .collect() }); +fn extract_hash_from_cas_url(url: &str) -> Option { + let parts: Vec<&str> = url.rsplitn(3, '/').collect(); + if parts.len() >= 2 { + let hash_suffix = parts[0]; + let hash_prefix = parts[1]; + Some(format!("{}{}", hash_prefix, hash_suffix)) + } else { + None + } +} + pub async fn retrieve_data( minecraft_versions: &VersionManifest, upload_queue: &UploadQueue, manifest_builder: &crate::services::cas::ManifestBuilder, semaphore: Arc, ) -> Result<(), crate::infrastructure::error::Error> { - // Check if force reprocess is enabled - let force_reprocess = std::env::var("FORCE_REPROCESS") - .map(|v| v == "true" || v == "1") - .unwrap_or(false); - - if 
force_reprocess { - info!("🔄 NeoForge - FORCE_REPROCESS enabled, processing all versions"); - } else { - info!("📋 NeoForge - Incremental mode, skipping existing versions"); - } + info!("Retrieving NeoForge data ..."); let maven_metadata = fetch_maven_metadata(semaphore.clone()).await?; let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!( @@ -58,43 +61,16 @@ pub async fn retrieve_data( Vec::new() })); - // Build a set of existing version IDs for fast lookup (minecraft_version/loader_version) - let existing_versions: HashSet = if force_reprocess { - HashSet::new() - } else { - let old_versions_guard = old_versions.lock().await; - old_versions_guard - .iter() - .flat_map(|mc_version| { - mc_version.loaders.iter().map(move |loader| { - format!("{}/{}", mc_version.id, loader.id) - }) - }) - .collect() - }; - let versions = Arc::new(Mutex::new(Vec::new())); let visited_assets = Arc::new(DashSet::new()); let mut version_futures = Vec::new(); - let mut total_versions = 0; - let mut skipped_existing = 0; - for (minecraft_version, loader_versions) in maven_metadata.clone() { let mut loaders = Vec::new(); for (loader_version, new_forge) in loader_versions { - total_versions += 1; - - // Skip if version already exists (incremental mode) - let version_key = format!("{}/{}", minecraft_version, loader_version); - if existing_versions.contains(&version_key) { - skipped_existing += 1; - continue; - } - let version = Version::parse(&loader_version)?; loaders.push((loader_version, version, new_forge.to_string())) @@ -118,16 +94,6 @@ pub async fn retrieve_data( return Ok::, crate::infrastructure::error::Error>(None); } - { - let versions = versions_mutex.lock().await; - let version = versions.iter().find(|x| - x.id == minecraft_version).and_then(|x| x.loaders.iter().find(|x| x.id == loader_version_full)); - - if let Some(version) = version { - return Ok::, crate::infrastructure::error::Error>(Some(version.clone())); - } - } - info!("Neoforge - Installer Start {}", 
loader_version_full.clone()); let download_url = format!("https://maven.neoforged.net/net/neoforged/{1}/{0}/{1}-{0}-installer.jar", loader_version_full, if &*new_forge == "true" { "neoforge" } else { "forge" }); @@ -330,12 +296,42 @@ pub async fn retrieve_data( logging: None }; - // Upload version to CAS and track in manifest builder let version_bytes = serde_json::to_vec(&new_profile)?; - let version_hash = upload_queue.enqueue( - version_bytes.clone(), - Some("application/json".to_string()), - ); + let new_hash = get_hash(bytes::Bytes::from(version_bytes.clone())).await?; + + let old_loader_version = { + let versions = versions_mutex.lock().await; + versions.iter() + .find(|v| v.id == minecraft_version) + .and_then(|v| v.loaders.iter().find(|l| l.id == loader_version_full)) + .cloned() + }; + + let should_upload = if let Some(old_version) = &old_loader_version { + if let Some(old_hash) = extract_hash_from_cas_url(&old_version.url) { + if old_hash == new_hash { + info!("✓ NeoForge {} unchanged (hash: {})", loader_version_full, &new_hash[..8]); + false + } else { + info!("↻ NeoForge {} changed (old: {}, new: {})", loader_version_full, &old_hash[..8], &new_hash[..8]); + true + } + } else { + true + } + } else { + info!("+ NeoForge {} is new", loader_version_full); + true + }; + + let version_hash = if should_upload { + upload_queue.enqueue( + version_bytes.clone(), + Some("application/json".to_string()), + ) + } else { + new_hash.clone() + }; manifest_builder.add_version( "neoforge", @@ -344,7 +340,6 @@ pub async fn retrieve_data( version_bytes.len() as u64, ); - // Build CAS URL for LoaderVersion let base_url = dotenvy::var("BASE_URL").unwrap(); let cas_url = format!( "{}/v{}/objects/{}/{}", @@ -418,9 +413,6 @@ pub async fn retrieve_data( } } - info!("📊 NeoForge - Processing {} versions ({} skipped, {} to process)", - total_versions, skipped_existing, total_versions - skipped_existing); - { let len = version_futures.len(); let mut versions = 
version_futures.into_iter().peekable(); diff --git a/daedalus_client/src/services/betterstack.rs b/daedalus_client/src/services/betterstack.rs index 4f4befc..01a31ab 100644 --- a/daedalus_client/src/services/betterstack.rs +++ b/daedalus_client/src/services/betterstack.rs @@ -28,6 +28,7 @@ impl BetterstackLayer { /// /// # Arguments /// * `token` - Betterstack API token + /// * `url` - Betterstack ingestion URL /// * `batch_size` - Maximum logs to buffer before flushing (default: 100) /// * `flush_interval` - Duration between automatic flushes (default: 5 seconds) /// @@ -35,6 +36,7 @@ impl BetterstackLayer { /// A tuple of (layer, flush_handle) where the handle can be used to await graceful shutdown pub fn new( token: String, + url: String, batch_size: Option, flush_interval: Option, ) -> (Self, tokio::task::JoinHandle<()>) { @@ -42,11 +44,10 @@ impl BetterstackLayer { let flush_interval = flush_interval.unwrap_or(Duration::from_secs(5)); let buffer = Arc::new(Mutex::new(Vec::new())); - // Spawn background flush task let flush_handle = { let buffer_clone = Arc::clone(&buffer); tokio::spawn(async move { - flush_loop(buffer_clone, token, flush_interval).await; + flush_loop(buffer_clone, token, url, flush_interval).await; }) }; @@ -98,14 +99,13 @@ where } /// Background flush loop -async fn flush_loop(buffer: Arc>>, token: String, interval: Duration) { +async fn flush_loop(buffer: Arc>>, token: String, url: String, interval: Duration) { let mut timer = tokio::time::interval(interval); let client = reqwest::Client::new(); loop { timer.tick().await; - // Take all pending logs let logs = { let mut buffer = buffer.lock().await; if buffer.is_empty() { @@ -114,8 +114,7 @@ async fn flush_loop(buffer: Arc>>, token: String, interval: Dur std::mem::take(&mut *buffer) }; - // Ship to Betterstack - if let Err(e) = ship_logs(&client, &token, &logs).await { + if let Err(e) = ship_logs(&client, &token, &url, &logs).await { warn!( error = %e, log_count = logs.len(), @@ -131,6 +130,7 
@@ async fn flush_loop(buffer: Arc>>, token: String, interval: Dur async fn ship_logs( client: &reqwest::Client, token: &str, + url: &str, logs: &[Value], ) -> Result<(), Box> { if logs.is_empty() { @@ -138,7 +138,7 @@ async fn ship_logs( } let response = client - .post("https://in.logs.betterstack.com/") + .post(url) .header("Authorization", format!("Bearer {}", token)) .header("Content-Type", "application/json") .json(&logs) diff --git a/daedalus_client/src/services/cas.rs b/daedalus_client/src/services/cas.rs index 93babfc..5f70651 100644 --- a/daedalus_client/src/services/cas.rs +++ b/daedalus_client/src/services/cas.rs @@ -266,6 +266,51 @@ impl ManifestBuilder { pub fn loader_count(&self) -> usize { self.versions.len() } + + /// Load old manifest data for comparison + /// + /// This populates the builder with version hashes from an existing manifest, + /// allowing us to detect which versions have changed. + /// + /// # Arguments + /// + /// * `manifest` - The old loader manifest to load + pub fn load_old_manifest(&self, manifest: &LoaderManifest) { + let loader_map = self + .versions + .entry(manifest.loader.clone()) + .or_default(); + + for entry in &manifest.versions { + loader_map.insert(entry.id.clone(), (entry.hash.clone(), entry.size)); + } + } + + /// Check if a version's content hash has changed compared to the old manifest + /// + /// Returns true if: + /// - The version doesn't exist in old data (new version) + /// - The version exists but hash is different (content changed) + /// + /// Returns false if: + /// - The version exists with the same hash (no changes) + /// + /// # Arguments + /// + /// * `loader` - Loader name (e.g., "minecraft", "forge") + /// * `version_id` - Version identifier + /// * `new_hash` - New content hash to compare + pub fn has_version_changed(&self, loader: &str, version_id: &str, new_hash: &str) -> bool { + if let Some(loader_map) = self.versions.get(loader) { + if let Some(entry) = loader_map.get(version_id) { + // 
Version exists, check if hash changed + let (old_hash, _) = entry.value(); + return old_hash.as_str() != new_hash; + } + } + // Version doesn't exist, so it's new + true + } } impl Default for ManifestBuilder { From 484bddbc76bf92a047ffde5cae59011c0a01df3d Mon Sep 17 00:00:00 2001 From: Davide Date: Tue, 14 Oct 2025 00:32:50 +0200 Subject: [PATCH 3/7] Refactors to use batch uploader --- daedalus_client/src/fabric.rs | 7 +- daedalus_client/src/forge.rs | 5 +- daedalus_client/src/loaders/mod.rs | 41 ++-- daedalus_client/src/main.rs | 74 +++++-- daedalus_client/src/minecraft.rs | 23 +- daedalus_client/src/quilt.rs | 7 +- daedalus_client/src/services/upload.rs | 296 ++++++++----------------- 7 files changed, 207 insertions(+), 246 deletions(-) diff --git a/daedalus_client/src/fabric.rs b/daedalus_client/src/fabric.rs index ecaf60e..844840d 100644 --- a/daedalus_client/src/fabric.rs +++ b/daedalus_client/src/fabric.rs @@ -1,6 +1,6 @@ use crate::loaders::fabric::{FabricStrategy, FabricVersions}; use crate::loaders::LoaderProcessor; -use crate::services::upload::UploadQueue; +use crate::services::upload::BatchUploader; use daedalus::minecraft::VersionManifest; use std::sync::Arc; use tokio::sync::Semaphore; @@ -12,12 +12,13 @@ use tokio::sync::Semaphore; /// eliminating hundreds of lines of duplicated code. 
pub async fn retrieve_data( minecraft_versions: &VersionManifest, - upload_queue: &UploadQueue, + uploader: &BatchUploader, manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, semaphore: Arc, ) -> Result<(), crate::infrastructure::error::Error> { let processor = LoaderProcessor::new(FabricStrategy); processor - .retrieve_data::(minecraft_versions, upload_queue, manifest_builder, semaphore) + .retrieve_data::(minecraft_versions, uploader, manifest_builder, s3_client, semaphore) .await } diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge.rs index 9267eca..c1f2b3f 100644 --- a/daedalus_client/src/forge.rs +++ b/daedalus_client/src/forge.rs @@ -1,7 +1,7 @@ use crate::{ download_file, download_file_mirrors, format_url, }; -use crate::services::upload::UploadQueue; +use crate::services::upload::BatchUploader; use chrono::{DateTime, Utc}; use dashmap::DashSet; use daedalus::minecraft::{ @@ -147,8 +147,9 @@ pub fn should_ignore_artifact( pub async fn retrieve_data( minecraft_versions: &VersionManifest, - upload_queue: &UploadQueue, + uploader: &BatchUploader, manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, semaphore: Arc, ) -> Result<(), crate::infrastructure::error::Error> { info!("Retrieving Forge data ..."); diff --git a/daedalus_client/src/loaders/mod.rs b/daedalus_client/src/loaders/mod.rs index 6cf40b0..447c3cc 100644 --- a/daedalus_client/src/loaders/mod.rs +++ b/daedalus_client/src/loaders/mod.rs @@ -2,7 +2,7 @@ pub mod fabric; pub mod quilt; use crate::{download_file, format_url}; -use crate::services::upload::UploadQueue; +use crate::services::upload::BatchUploader; use dashmap::DashSet; use daedalus::minecraft::{Library, VersionManifest}; use daedalus::modded::{LoaderVersion, PartialVersionInfo, Version}; @@ -93,8 +93,9 @@ impl LoaderProcessor { pub async fn retrieve_data( &self, minecraft_versions: &VersionManifest, - upload_queue: &UploadQueue, + uploader: &BatchUploader, 
manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, semaphore: Arc, ) -> Result<(), crate::infrastructure::error::Error> where @@ -200,8 +201,9 @@ impl LoaderProcessor { old_loader_version, &list, &loader_version_mutex, - upload_queue, + uploader, manifest_builder, + s3_client, &visited_artifacts, semaphore.clone(), ) @@ -357,8 +359,9 @@ impl LoaderProcessor { old_loader_version: Option, list: &V, loader_version_mutex: &Mutex>, - upload_queue: &UploadQueue, + uploader: &BatchUploader, manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, visited_artifacts: &Arc>, semaphore: Arc, ) -> Result<(), crate::infrastructure::error::Error> @@ -373,6 +376,8 @@ impl LoaderProcessor { let visited_artifacts = visited_artifacts.clone(); let list_game = list.game().to_vec(); let maven_fallback = self.strategy.maven_fallback().to_string(); + let uploader = uploader; + let s3_client = s3_client; async move { // Check if we've already processed this artifact (lock-free) @@ -407,6 +412,8 @@ impl LoaderProcessor { let lib_url = lib.url.clone(); let maven_fallback = maven_fallback.clone(); let game_version_str = game_version.version().to_string(); + let uploader = uploader; + let s3_client = s3_client; async move { let artifact_path = daedalus::get_path_from_artifact( @@ -428,11 +435,13 @@ impl LoaderProcessor { ) .await?; - upload_queue.enqueue_path( - format!("{}/{}", "maven", artifact_path), + // Upload to CAS and get hash + let _hash = uploader.upload_cas( artifact.to_vec(), Some("application/java-archive".to_string()), - ); + s3_client, + semaphore.clone(), + ).await?; Ok::<(), crate::infrastructure::error::Error>(()) } @@ -458,13 +467,15 @@ impl LoaderProcessor { ) .await?; - lib.url = Some(format_url("maven/")); - - upload_queue.enqueue_path( - format!("{}/{}", "maven", artifact_path), + // Upload to CAS and get hash + let _hash = uploader.upload_cas( artifact.to_vec(), Some("application/java-archive".to_string()), - 
); + s3_client, + semaphore.clone(), + ).await?; + + lib.url = Some(format_url("maven/")); Ok::(lib) } @@ -514,10 +525,12 @@ impl LoaderProcessor { }; let version_hash = if should_upload { - upload_queue.enqueue( + uploader.upload_cas( version_bytes.clone(), Some("application/json".to_string()), - ) + s3_client, + semaphore.clone(), + ).await? } else { new_hash.clone() }; diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs index 7a992c7..3915d73 100644 --- a/daedalus_client/src/main.rs +++ b/daedalus_client/src/main.rs @@ -11,6 +11,9 @@ use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::util::SubscriberInitExt; use tracing_subscriber::EnvFilter; +#[cfg(unix)] +use tokio::signal::unix::{signal, SignalKind}; + mod fabric; mod infrastructure; mod forge; @@ -20,6 +23,35 @@ mod neoforge; mod quilt; mod services; +/// Create a future that completes when a shutdown signal is received (SIGTERM or Ctrl+C) +async fn shutdown_signal() { + let ctrl_c = async { + tokio::signal::ctrl_c() + .await + .expect("failed to install Ctrl+C handler"); + }; + + #[cfg(unix)] + let terminate = async { + signal(SignalKind::terminate()) + .expect("failed to install signal handler") + .recv() + .await; + }; + + #[cfg(not(unix))] + let terminate = std::future::pending::<()>(); + + tokio::select! { + _ = ctrl_c => { + info!("Received Ctrl+C signal"); + } + _ = terminate => { + info!("Received SIGTERM signal"); + } + } +} + fn main() -> Result<(), crate::infrastructure::error::Error> { #[cfg(feature = "sentry")] let _guard = sentry::init(( @@ -172,12 +204,21 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { let mut is_first_run = true; loop { + // Wait for either timer tick or shutdown signal + info!("Waiting for next update timer or shutdown signal"); + tokio::select! 
{ + _ = timer.tick() => { + // Timer ticked - continue with processing cycle + } + _ = shutdown_signal() => { + info!("Shutdown signal received - exiting gracefully"); + break; + } + } + let loop_span = tracing::info_span!("processing_cycle", is_first_run); async { - info!("Waiting for next update timer"); - timer.tick().await; - - let upload_queue = services::upload::UploadQueue::new(); + let uploader = services::upload::BatchUploader::new(); let manifest_builder = services::cas::ManifestBuilder::new(); let versions = { @@ -185,8 +226,9 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { async { match MINECRAFT_BREAKER.call(async { minecraft::retrieve_data( - &upload_queue, + &uploader, &manifest_builder, + &CLIENT, semaphore.clone(), is_first_run, ) @@ -219,8 +261,9 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { match FABRIC_BREAKER.call(async { fabric::retrieve_data( &manifest, - &upload_queue, + &uploader, &manifest_builder, + &CLIENT, semaphore.clone(), ) .await @@ -246,7 +289,7 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { match FORGE_BREAKER.call(async { forge::retrieve_data( &manifest, - &upload_queue, + &uploader, &manifest_builder, semaphore.clone(), ) @@ -273,8 +316,9 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { match QUILT_BREAKER.call(async { quilt::retrieve_data( &manifest, - &upload_queue, + &uploader, &manifest_builder, + &CLIENT, semaphore.clone(), ) .await @@ -300,7 +344,7 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { match NEOFORGE_BREAKER.call(async { neoforge::retrieve_data( &manifest, - &upload_queue, + &uploader, &manifest_builder, semaphore.clone(), ) @@ -321,12 +365,8 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { .await; } - info!(queued_count = upload_queue.len(), "Flushing CAS objects and path-based files"); - let flush_result = upload_queue.flush(&CLIENT, semaphore.clone()).await; - if let Err(e) = flush_result { - 
error!(error = %e, "Failed to flush upload queue - skipping manifest upload this cycle"); - } else { - info!("Upload queue flushed successfully"); + // All CAS objects have been uploaded immediately during processing + // Now we upload the loader manifests and root manifest atomically let timestamp = chrono::Utc::now().format("%Y-%m-%dT%H-%M-%SZ").to_string(); let mut loader_references = std::collections::HashMap::new(); let mut uploaded_manifest_urls = Vec::new(); @@ -454,13 +494,15 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { warn!("No loader manifests were built - skipping root manifest upload"); } } - } is_first_run = false; } .instrument(loop_span) .await; } + + info!("Application shutdown complete"); + Ok(()) }) } diff --git a/daedalus_client/src/minecraft.rs b/daedalus_client/src/minecraft.rs index ca1dd0d..8859d9b 100644 --- a/daedalus_client/src/minecraft.rs +++ b/daedalus_client/src/minecraft.rs @@ -1,6 +1,6 @@ use crate::download_file; use crate::format_url; -use crate::services::upload::UploadQueue; +use crate::services::upload::BatchUploader; use dashmap::DashSet; use daedalus::minecraft::{ merge_partial_library, Dependency, DependencyRule, JavaVersion, LWJGLEntry, @@ -190,8 +190,9 @@ fn map_log4j_artifact( } pub async fn retrieve_data( - upload_queue: &UploadQueue, + uploader: &BatchUploader, manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, semaphore: Arc, is_first_run: bool, ) -> Result { @@ -671,10 +672,12 @@ pub async fn retrieve_data( .await?; let asset_bytes = assets_index.to_vec(); - let asset_hash = upload_queue.enqueue( + let asset_hash = uploader.upload_cas( asset_bytes.clone(), Some("application/json".to_string()), - ); + s3_client, + semaphore.clone(), + ).await?; let base_url = dotenvy::var("BASE_URL").unwrap(); version_info.asset_index.url = format!( @@ -687,10 +690,12 @@ pub async fn retrieve_data( } let version_bytes = serde_json::to_vec(&version_info)?; - let version_hash = 
upload_queue.enqueue( + let version_hash = uploader.upload_cas( version_bytes.clone(), Some("application/json".to_string()), - ); + s3_client, + semaphore.clone(), + ).await?; manifest_builder.add_version( "minecraft", @@ -832,10 +837,12 @@ pub async fn retrieve_data( debug!("Uploading {}", lwjgl_path); let lwjgl_bytes = serde_json::to_vec(&lwjgl)?; - let lwjgl_hash = upload_queue.enqueue( + let lwjgl_hash = uploader.upload_cas( lwjgl_bytes.clone(), Some("application/json".to_string()), - ); + s3_client, + semaphore.clone(), + ).await?; let loader = if lwjgl.version.starts_with("2") { "minecraft-lwjgl2" diff --git a/daedalus_client/src/quilt.rs b/daedalus_client/src/quilt.rs index bf58254..e54ae94 100644 --- a/daedalus_client/src/quilt.rs +++ b/daedalus_client/src/quilt.rs @@ -1,6 +1,6 @@ use crate::loaders::quilt::{QuiltStrategy, QuiltVersions}; use crate::loaders::LoaderProcessor; -use crate::services::upload::UploadQueue; +use crate::services::upload::BatchUploader; use daedalus::minecraft::VersionManifest; use std::sync::Arc; use tokio::sync::Semaphore; @@ -12,12 +12,13 @@ use tokio::sync::Semaphore; /// eliminating hundreds of lines of duplicated code. 
pub async fn retrieve_data( minecraft_versions: &VersionManifest, - upload_queue: &UploadQueue, + uploader: &BatchUploader, manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, semaphore: Arc, ) -> Result<(), crate::infrastructure::error::Error> { let processor = LoaderProcessor::new(QuiltStrategy); processor - .retrieve_data::(minecraft_versions, upload_queue, manifest_builder, semaphore) + .retrieve_data::(minecraft_versions, uploader, manifest_builder, s3_client, semaphore) .await } diff --git a/daedalus_client/src/services/upload.rs b/daedalus_client/src/services/upload.rs index 8fe3545..4014b92 100644 --- a/daedalus_client/src/services/upload.rs +++ b/daedalus_client/src/services/upload.rs @@ -1,5 +1,4 @@ use backon::{ExponentialBuilder, Retryable}; -use dashmap::DashMap; use s3::Bucket; use sha2::{Digest, Sha256}; use std::sync::Arc; @@ -7,170 +6,104 @@ use std::time::Duration; use tokio::sync::Semaphore; use tracing::{error, info, instrument}; -/// Upload queue for atomic batch uploads +/// Batch uploader for immediate CAS (Content-Addressable Storage) uploads /// -/// This queue collects files to be uploaded and provides an atomic -/// flush operation that ensures all-or-nothing semantics. This prevents -/// partial failures from leaving the CDN in an inconsistent state. +/// This uploader handles immediate uploads of content to S3 using content-addressable storage. +/// Files are stored by their SHA256 hash at v{CAS_VERSION}/objects/{hash[0..2]}/{hash[2..]}. 
/// -/// Supports two upload modes: -/// - CAS (Content-Addressable Storage): Files stored by SHA256 hash at v{CAS_VERSION}/objects/ -/// - Path-based: Files stored at explicit paths (e.g., maven/ for compatibility) +/// Benefits: +/// - **Immediate uploads**: No queuing, files upload as soon as requested +/// - **Deduplication**: Same content (same hash) = same storage location, uploaded once +/// - **Immutability**: Content never changes, only manifest pointers +/// - **Reproducibility**: Hash is deterministic from file content /// /// # Example /// /// ```no_run -/// let queue = UploadQueue::new(); +/// let uploader = BatchUploader::new(); /// -/// // CAS upload (returns hash) -/// let hash = queue.enqueue_cas(vec![1, 2, 3], Some("application/json")); +/// // Upload content to CAS and get its hash +/// let hash = uploader.upload_cas( +/// vec![1, 2, 3], +/// Some("application/json".to_string()), +/// &s3_client, +/// semaphore.clone() +/// ).await?; /// -/// // Path-based upload (for maven artifacts, etc.) 
-/// queue.enqueue_path("maven/lib.jar", vec![4, 5, 6], Some("application/java-archive")); -/// -/// // Atomic: all files uploaded or none -/// queue.flush(&s3_client, semaphore).await?; +/// // Hash can be used in manifests to reference the content +/// println!("Content stored at hash: {}", hash); /// ``` -pub struct UploadQueue { - /// Lock-free concurrent map of pending CAS uploads - /// Key: content hash (SHA256), Value: (bytes, content_type) - cas_queue: DashMap, Option)>, - - /// Lock-free concurrent map of pending path-based uploads - /// Key: file path, Value: (bytes, content_type) - path_queue: DashMap, Option)>, -} +pub struct BatchUploader; -impl UploadQueue { - /// Create a new empty upload queue +impl BatchUploader { + /// Create a new batch uploader pub fn new() -> Self { - Self { - cas_queue: DashMap::new(), - path_queue: DashMap::new(), - } + Self } /// Compute SHA256 hash of content - fn compute_hash(content: &[u8]) -> String { + /// + /// This hash is used as the content-addressable identifier for CAS storage. + pub fn compute_hash(content: &[u8]) -> String { let mut hasher = Sha256::new(); hasher.update(content); format!("{:x}", hasher.finalize()) } - /// Enqueue content for CAS upload (does NOT upload yet) + /// Upload content to CAS immediately and return its hash /// - /// Content is stored by its SHA256 hash and will be uploaded to v{CAS_VERSION}/objects/{hash[0..2]}/{hash[2..]}. - /// Returns the hash so callers can reference it in manifests. + /// Content is uploaded to v{CAS_VERSION}/objects/{hash[0..2]}/{hash[2..]}. + /// The hash is computed from the content's SHA256 and returned immediately. /// - /// Multiple enqueues of the same content (same hash) will deduplicate automatically. 
- #[instrument(skip(self, content), fields(size = content.len()))] - pub fn enqueue(&self, content: Vec, content_type: Option) -> String { - let hash = Self::compute_hash(&content); - info!(hash = %hash, "Enqueued for CAS upload"); - self.cas_queue.insert(hash.clone(), (content, content_type)); - hash - } - - /// Enqueue content for path-based upload (does NOT upload yet) + /// The upload happens concurrently (limited by semaphore) and will retry on failure. /// - /// Content is stored at the specified path (e.g., "maven/lib.jar"). - /// This is used for files that need predictable paths for compatibility. + /// # Arguments /// - /// Multiple enqueues of the same path will overwrite previous content. - #[instrument(skip(self, content), fields(size = content.len()))] - pub fn enqueue_path(&self, path: String, content: Vec, content_type: Option) { - info!(path = %path, "Enqueued for path-based upload"); - self.path_queue.insert(path, (content, content_type)); - } - - /// Flush all queued uploads atomically to S3 + /// * `content` - The file content to upload + /// * `content_type` - Optional MIME type (e.g., "application/json") + /// * `s3_client` - S3 bucket client + /// * `semaphore` - Semaphore for concurrent upload limiting /// - /// Uploads both CAS objects (to v{CAS_VERSION}/objects/{hash[0..2]}/{hash[2..]}) - /// and path-based files (to their specified paths). - /// On error, all uploads are considered failed (no partial state). + /// # Returns + /// + /// The SHA256 hash of the content, which serves as its CAS identifier /// /// # Errors /// - /// Returns error if any upload fails after retries. 
- #[instrument(skip(self, s3_client, semaphore), fields(cas_count = self.cas_queue.len(), path_count = self.path_queue.len()))] - pub async fn flush( + /// Returns error if upload fails after retries + #[instrument(skip(self, content, s3_client, semaphore), fields(size = content.len()))] + pub async fn upload_cas( &self, + content: Vec, + content_type: Option, s3_client: &Bucket, semaphore: Arc, - ) -> Result<(), crate::infrastructure::error::Error> { - let cas_size = self.cas_queue.len(); - let path_size = self.path_queue.len(); - let total_size = cas_size + path_size; - - if total_size == 0 { - info!("Upload queue is empty, nothing to flush"); - return Ok(()); - } - - info!( - cas_count = cas_size, - path_count = path_size, - "Starting atomic flush of {} objects ({} CAS, {} path-based)", - total_size, - cas_size, - path_size - ); - - // Upload CAS objects (content-addressed with 2-char prefix for sharding) - for entry in self.cas_queue.iter() { - let (hash, (bytes, content_type)) = entry.pair(); - let path = format!("v{}/objects/{}/{}", crate::services::cas::CAS_VERSION, &hash[..2], &hash[2..]); - - upload_single_file( - &path, - bytes, - content_type.as_deref(), - s3_client, - semaphore.clone(), - ) - .await?; - } - - // Upload path-based files - for entry in self.path_queue.iter() { - let (path, (bytes, content_type)) = entry.pair(); - - upload_single_file( - path, - bytes, - content_type.as_deref(), - s3_client, - semaphore.clone(), - ) - .await?; - } - - // Clear queues only on complete success - self.cas_queue.clear(); - self.path_queue.clear(); - - info!( - uploaded = total_size, - "Successfully flushed {} objects", - total_size + ) -> Result { + let hash = Self::compute_hash(&content); + let path = format!( + "v{}/objects/{}/{}", + crate::services::cas::CAS_VERSION, + &hash[..2], + &hash[2..] 
); - Ok(()) - } + info!(hash = %hash, path = %path, "Uploading to CAS"); - /// Get the total number of queued files (CAS + path-based) - pub fn len(&self) -> usize { - self.cas_queue.len() + self.path_queue.len() - } + upload_single_file( + &path, + &content, + content_type.as_deref(), + s3_client, + semaphore, + ) + .await?; - /// Check if queue is empty (both CAS and path-based) - #[allow(dead_code)] - pub fn is_empty(&self) -> bool { - self.cas_queue.is_empty() && self.path_queue.is_empty() + info!(hash = %hash, "CAS upload completed"); + Ok(hash) } } -impl Default for UploadQueue { +impl Default for BatchUploader { fn default() -> Self { Self::new() } @@ -180,6 +113,14 @@ impl Default for UploadQueue { /// /// Internal helper function that handles the actual S3 upload with /// exponential backoff retry on failure. +/// +/// # Arguments +/// +/// * `path` - S3 object path +/// * `bytes` - File content +/// * `content_type` - Optional MIME type +/// * `s3_client` - S3 bucket client +/// * `semaphore` - Semaphore for concurrent upload limiting #[instrument(skip(bytes, s3_client, semaphore), fields(size = bytes.len()))] async fn upload_single_file( path: &str, @@ -229,99 +170,54 @@ mod tests { use super::*; #[test] - fn test_upload_queue_creation() { - let queue = UploadQueue::new(); - assert_eq!(queue.len(), 0); - assert!(queue.is_empty()); + fn test_batch_uploader_creation() { + let uploader = BatchUploader::new(); + // Uploader is stateless, just verify it can be created + let _ = uploader; } #[test] - fn test_enqueue() { - let queue = UploadQueue::new(); - - let hash = queue.enqueue(vec![1, 2, 3], Some("application/json".to_string())); + fn test_compute_hash() { + let content = b"hello world"; + let hash = BatchUploader::compute_hash(content); - // SHA256 of [1, 2, 3] + // SHA256 of "hello world" assert_eq!( hash, - "039058c6f2c0cb492c533b0a4d14ef77cc0f78abccced5287d84a1a2011cfb81" + "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" ); - 
assert_eq!(queue.len(), 1); - assert!(!queue.is_empty()); } #[test] - fn test_deduplication() { - let queue = UploadQueue::new(); + fn test_compute_hash_deterministic() { + let content = vec![1, 2, 3]; + let hash1 = BatchUploader::compute_hash(&content); + let hash2 = BatchUploader::compute_hash(&content); - // Enqueue same content twice - let hash1 = queue.enqueue(vec![1, 2, 3], None); - let hash2 = queue.enqueue(vec![1, 2, 3], None); - - // Same content = same hash, deduplicated + // Same content should always produce same hash (reproducibility) assert_eq!(hash1, hash2); - assert_eq!(queue.len(), 1); // Only stored once + assert_eq!( + hash1, + "039058c6f2c0cb492c533b0a4d14ef77cc0f78abccced5287d84a1a2011cfb81" + ); } #[test] - fn test_multiple_objects() { - let queue = UploadQueue::new(); + fn test_different_content_different_hash() { + let hash1 = BatchUploader::compute_hash(&[1]); + let hash2 = BatchUploader::compute_hash(&[2]); + let hash3 = BatchUploader::compute_hash(&[3]); - // Different content = different hashes - let hash1 = queue.enqueue(vec![1], None); - let hash2 = queue.enqueue(vec![2], None); - let hash3 = queue.enqueue(vec![3], None); - - // All three should be different + // Different content should produce different hashes assert_ne!(hash1, hash2); assert_ne!(hash2, hash3); assert_ne!(hash1, hash3); - assert_eq!(queue.len(), 3); - } - - #[test] - fn test_compute_hash() { - let content = b"hello world"; - let hash = UploadQueue::compute_hash(content); - - // SHA256 of "hello world" - assert_eq!( - hash, - "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" - ); } #[test] - fn test_enqueue_path() { - let queue = UploadQueue::new(); - - queue.enqueue_path( - "maven/lib.jar".to_string(), - vec![1, 2, 3], - Some("application/java-archive".to_string()), - ); - - assert_eq!(queue.len(), 1); - assert!(!queue.is_empty()); - } - - #[test] - fn test_mixed_cas_and_path() { - let queue = UploadQueue::new(); - - // Add CAS upload - let hash = 
queue.enqueue(vec![1, 2, 3], Some("application/json".to_string())); - assert!(!hash.is_empty()); - - // Add path-based upload - queue.enqueue_path( - "maven/lib.jar".to_string(), - vec![4, 5, 6], - Some("application/java-archive".to_string()), - ); - - // Should have 2 total files queued - assert_eq!(queue.len(), 2); - assert!(!queue.is_empty()); + fn test_hash_length() { + let hash = BatchUploader::compute_hash(b"test"); + // SHA256 produces 64 hex characters + assert_eq!(hash.len(), 64); } } From 8611fbd039aa236c39ae4e7db8fa675e26128159 Mon Sep 17 00:00:00 2001 From: Davide Date: Tue, 14 Oct 2025 01:45:35 +0200 Subject: [PATCH 4/7] Adds version-specific library resolution --- daedalus/src/minecraft.rs | 66 ++++++++++++++++++++++ daedalus/src/modded.rs | 1 + daedalus_client/src/forge.rs | 91 ++++++++++++++++++++++++------ daedalus_client/src/loaders/mod.rs | 33 ++++++++--- daedalus_client/src/main.rs | 2 + daedalus_client/src/minecraft.rs | 4 ++ daedalus_client/src/neoforge.rs | 51 ++++++++++++++--- 7 files changed, 215 insertions(+), 33 deletions(-) diff --git a/daedalus/src/minecraft.rs b/daedalus/src/minecraft.rs index d5745ab..597eda2 100644 --- a/daedalus/src/minecraft.rs +++ b/daedalus/src/minecraft.rs @@ -380,6 +380,72 @@ pub struct Library { #[serde(skip)] /// if this library was patched or added by a patch pub patched: bool, + #[serde(skip_serializing_if = "Option::is_none")] + /// Game-version-specific hash mapping for libraries that vary by Minecraft version + /// Maps minecraft_version → SHA256 hash of the artifact + /// e.g., {"1.16.5": "abc123...", "1.17.1": "def456..."} + /// When present, clients should look up their game version and construct CAS URL from hash + pub version_hashes: Option>, +} + +impl Library { + /// Resolves the URL for this library based on the minecraft version. + /// + /// For libraries with `version_hashes`, looks up the hash for the given version + /// and constructs a CAS URL. 
Falls back to the library's `url` field if + /// `version_hashes` is not present or doesn't contain the version. + /// + /// # Arguments + /// * `minecraft_version` - The Minecraft version to resolve the URL for + /// * `base_url` - The base URL for the CAS (e.g., "https://maven.modrinth.com") + /// * `cas_version` - The CAS version number (e.g., 0) + /// + /// # Returns + /// * `Some(String)` - The resolved URL, either from CAS or the url field + /// * `None` - If neither version_hashes nor url contain a valid URL + /// + /// # Example + /// ``` + /// # use daedalus::minecraft::Library; + /// # use daedalus::GradleSpecifier; + /// # use std::collections::HashMap; + /// let mut library = Library { + /// name: "net.fabricmc:intermediary:1.16.5".parse().unwrap(), + /// url: None, + /// downloads: None, + /// extract: None, + /// natives: None, + /// rules: None, + /// checksums: None, + /// include_in_classpath: true, + /// patched: false, + /// version_hashes: Some({ + /// let mut map = HashMap::new(); + /// map.insert("1.16.5".to_string(), "abc123def456".to_string()); + /// map + /// }), + /// }; + /// + /// let url = library.resolve_url("1.16.5", "https://maven.modrinth.com", 0); + /// assert_eq!(url, Some("https://maven.modrinth.com/v0/objects/ab/c123def456".to_string())); + /// ``` + pub fn resolve_url(&self, minecraft_version: &str, base_url: &str, cas_version: u32) -> Option { + // First try version_hashes if present + if let Some(ref hashes) = self.version_hashes { + if let Some(hash) = hashes.get(minecraft_version) { + return Some(format!( + "{}/v{}/objects/{}/{}", + base_url, + cas_version, + &hash[..2], + &hash[2..] 
+ )); + } + } + + // Fall back to url field + self.url.clone() + } } #[derive(Deserialize, Debug, Clone)] diff --git a/daedalus/src/modded.rs b/daedalus/src/modded.rs index e880aa2..fe76d1b 100644 --- a/daedalus/src/modded.rs +++ b/daedalus/src/modded.rs @@ -174,6 +174,7 @@ pub fn merge_partial_version( rules: x.rules, checksums: x.checksums, include_in_classpath: x.include_in_classpath, + version_hashes: x.version_hashes, patched: false, }) .collect::>(), diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge.rs index c1f2b3f..cbf6c38 100644 --- a/daedalus_client/src/forge.rs +++ b/daedalus_client/src/forge.rs @@ -287,8 +287,16 @@ pub async fn retrieve_data( let mut mc_library_cache = mc_library_cache_mutex.lock().await; mc_library_cache.load_minecraft_version_libs(&profile.install.minecraft).await?.clone() }; - let libs = futures::future::try_join_all(profile.version_info.libraries.into_iter().map(|mut lib| async { - + let libs = futures::future::try_join_all(profile.version_info.libraries.into_iter().map(|mut lib| { + let semaphore = semaphore.clone(); + let visited_assets = visited_assets.clone(); + let forge_universal_bytes = forge_universal_bytes.clone(); + let forge_universal_path = forge_universal_path.clone(); + let minecraft_libs_filter = minecraft_libs_filter.clone(); + let uploader = uploader; + let s3_client = s3_client; + + async move { if lib.name.is_lwjgl() || lib.name.is_log4j() || should_ignore_artifact(&minecraft_libs_filter, &lib.name) { return Ok::, crate::infrastructure::error::Error>(None); } @@ -298,7 +306,12 @@ pub async fn retrieve_data( // Check if we've already processed this artifact (lock-free) if !visited_assets.insert(lib.name.clone()) { // Already processed, skip download - lib.url = Some(format_url("maven/")); + let base_url = dotenvy::var("BASE_URL").unwrap(); + lib.url = Some(format!( + "{}/v{}/objects/", + base_url, + crate::services::cas::CAS_VERSION + )); return Ok::, 
crate::infrastructure::error::Error>(Some(lib)); } @@ -316,20 +329,30 @@ pub async fn retrieve_data( .await? }; - lib.url = Some(format_url("maven/")); - - upload_queue.enqueue_path( - format!("{}/{}", "maven", artifact_path), + // Upload to CAS and get hash + let hash = uploader.upload_cas( artifact.to_vec(), Some("application/java-archive".to_string()), - ); + s3_client, + semaphore.clone(), + ).await?; + + // Store full CAS URL + let base_url = dotenvy::var("BASE_URL").unwrap(); + lib.url = Some(format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &hash[..2], + &hash[2..] + )); } else if lib.downloads.is_none() { lib.url = Some(String::from("https://libraries.minecraft.net/")); } Ok::, crate::infrastructure::error::Error>(Some(lib)) - })).await?; + }})).await?; let elapsed = now.elapsed(); info!("Elapsed lib DL: {:.2?}", elapsed); @@ -378,10 +401,12 @@ pub async fn retrieve_data( }; let version_hash = if should_upload { - upload_queue.enqueue( + uploader.upload_cas( version_bytes.clone(), Some("application/json".to_string()), - ) + s3_client, + semaphore.clone(), + ).await? 
} else { new_hash.clone() }; @@ -442,6 +467,7 @@ pub async fn retrieve_data( rules: x.rules, checksums: x.checksums, include_in_classpath: false, + version_hashes: None, patched: false, }) ) @@ -536,6 +562,7 @@ pub async fn retrieve_data( rules: None, checksums: None, include_in_classpath: false, + version_hashes: None, patched: false, }); } @@ -557,7 +584,14 @@ pub async fn retrieve_data( let now = Instant::now(); - let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| async { + let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| { + let semaphore = semaphore.clone(); + let visited_assets = visited_assets.clone(); + let local_libs = local_libs.clone(); + let uploader = uploader; + let s3_client = s3_client; + + async move { let artifact_path = lib.name.path(); // Check if we've already processed this artifact (lock-free) @@ -623,15 +657,36 @@ pub async fn retrieve_data( }; if let Some(bytes) = artifact_bytes { - upload_queue.enqueue_path( - format!("{}/{}", "maven", artifact_path), + // Upload to CAS and get hash + let hash = uploader.upload_cas( bytes.to_vec(), Some("application/java-archive".to_string()), + s3_client, + semaphore.clone(), + ).await?; + + // Store full CAS URL + let base_url = dotenvy::var("BASE_URL").unwrap(); + let cas_url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &hash[..2], + &hash[2..] 
); + + // Update library URL with CAS URL + if let Some(ref mut downloads) = lib.downloads { + if let Some(ref mut artifact) = downloads.artifact { + artifact.url = Some(cas_url); + } + } else if lib.url.is_some() { + lib.url = Some(cas_url); + } } Ok::, crate::infrastructure::error::Error>(Some(lib)) - })).await?; + }})).await?; let elapsed = now.elapsed(); info!("Elapsed lib DL: {:.2?}", elapsed); @@ -680,10 +735,12 @@ pub async fn retrieve_data( }; let version_hash = if should_upload { - upload_queue.enqueue( + uploader.upload_cas( version_bytes.clone(), Some("application/json".to_string()), - ) + s3_client, + semaphore.clone(), + ).await? } else { new_hash.clone() }; diff --git a/daedalus_client/src/loaders/mod.rs b/daedalus_client/src/loaders/mod.rs index 447c3cc..da83850 100644 --- a/daedalus_client/src/loaders/mod.rs +++ b/daedalus_client/src/loaders/mod.rs @@ -8,6 +8,7 @@ use daedalus::minecraft::{Library, VersionManifest}; use daedalus::modded::{LoaderVersion, PartialVersionInfo, Version}; use daedalus::{get_hash, Branding, BRANDING}; use serde::Deserialize; +use std::collections::HashMap; use std::sync::Arc; use tokio::sync::{Mutex, RwLock, Semaphore}; use tracing::{info, warn}; @@ -382,7 +383,7 @@ impl LoaderProcessor { async move { // Check if we've already processed this artifact (lock-free) if !visited_artifacts.insert(lib.name.to_string()) { - // Already processed, skip download + // Already processed, skip download but still update name lib.name = lib .name .to_string() @@ -391,7 +392,10 @@ impl LoaderProcessor { &BRANDING.get_or_init(Branding::default).dummy_replace_string, ) .parse()?; - lib.url = Some(format_url("maven/")); + + // Note: url and version_hashes remain as-is from the original library entry + // This path should only be hit if the library is referenced multiple times, + // which means url/version_hashes were already set correctly the first time return Ok(lib); } @@ -406,7 +410,8 @@ impl LoaderProcessor { ) .parse()?; - 
futures::future::try_join_all(list_game.iter().map(|game_version| { + // Collect hashes for all game versions + let version_hash_results = futures::future::try_join_all(list_game.iter().map(|game_version| { let semaphore = semaphore.clone(); let lib_name = lib.name.to_string(); let lib_url = lib.url.clone(); @@ -436,19 +441,23 @@ impl LoaderProcessor { .await?; // Upload to CAS and get hash - let _hash = uploader.upload_cas( + let hash = uploader.upload_cas( artifact.to_vec(), Some("application/java-archive".to_string()), s3_client, semaphore.clone(), ).await?; - Ok::<(), crate::infrastructure::error::Error>(()) + Ok::<(String, String), crate::infrastructure::error::Error>((game_version_str, hash)) } })) .await?; - lib.url = Some(format_url("maven/")); + // Build version_hashes map from results + let version_hashes: HashMap = version_hash_results.into_iter().collect(); + lib.version_hashes = Some(version_hashes); + lib.url = None; + return Ok(lib); } @@ -468,14 +477,22 @@ impl LoaderProcessor { .await?; // Upload to CAS and get hash - let _hash = uploader.upload_cas( + let hash = uploader.upload_cas( artifact.to_vec(), Some("application/java-archive".to_string()), s3_client, semaphore.clone(), ).await?; - lib.url = Some(format_url("maven/")); + // Store full CAS URL + let base_url = dotenvy::var("BASE_URL").unwrap(); + lib.url = Some(format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &hash[..2], + &hash[2..] 
+ )); Ok::(lib) } diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs index 3915d73..33f3d43 100644 --- a/daedalus_client/src/main.rs +++ b/daedalus_client/src/main.rs @@ -291,6 +291,7 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { &manifest, &uploader, &manifest_builder, + &CLIENT, semaphore.clone(), ) .await @@ -346,6 +347,7 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { &manifest, &uploader, &manifest_builder, + &CLIENT, semaphore.clone(), ) .await diff --git a/daedalus_client/src/minecraft.rs b/daedalus_client/src/minecraft.rs index 8859d9b..3ebed7a 100644 --- a/daedalus_client/src/minecraft.rs +++ b/daedalus_client/src/minecraft.rs @@ -474,6 +474,7 @@ pub async fn retrieve_data( rules: None, checksums: None, include_in_classpath: library.include_in_classpath, + version_hashes: None, patched: true, } ); @@ -826,6 +827,9 @@ pub async fn retrieve_data( } let patches = Arc::clone(&cloned_patches); + let semaphore = semaphore.clone(); + let uploader = uploader; + let s3_client = s3_client; async move { diff --git a/daedalus_client/src/neoforge.rs b/daedalus_client/src/neoforge.rs index 7a4e727..b735b2f 100644 --- a/daedalus_client/src/neoforge.rs +++ b/daedalus_client/src/neoforge.rs @@ -1,5 +1,5 @@ use crate::{download_file, format_url}; -use crate::services::upload::UploadQueue; +use crate::services::upload::BatchUploader; use dashmap::DashSet; use daedalus::minecraft::{Library, VersionManifest}; use daedalus::modded::{ @@ -40,8 +40,9 @@ fn extract_hash_from_cas_url(url: &str) -> Option { pub async fn retrieve_data( minecraft_versions: &VersionManifest, - upload_queue: &UploadQueue, + uploader: &BatchUploader, manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, semaphore: Arc, ) -> Result<(), crate::infrastructure::error::Error> { info!("Retrieving NeoForge data ..."); @@ -86,6 +87,8 @@ pub async fn retrieve_data( let visited_assets = Arc::clone(&visited_assets); let 
semaphore = Arc::clone(&semaphore); let minecraft_version = minecraft_version.clone(); + let uploader = uploader; + let s3_client = s3_client; async move { // Check skip list first @@ -132,6 +135,7 @@ pub async fn retrieve_data( rules: x.rules, checksums: x.checksums, include_in_classpath: false, + version_hashes: None, patched: false, })).filter(|lib| !lib.name.is_log4j() ).collect(); @@ -191,6 +195,7 @@ pub async fn retrieve_data( rules: None, checksums: None, include_in_classpath: false, + version_hashes: None, patched: false, }); } @@ -212,7 +217,14 @@ pub async fn retrieve_data( let now = Instant::now(); - let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| async { + let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| { + let semaphore = semaphore.clone(); + let visited_assets = visited_assets.clone(); + let local_libs = local_libs.clone(); + let uploader = uploader; + let s3_client = s3_client; + + async move { let artifact_path = &lib.name.path(); // Check if we've already processed this artifact (lock-free) @@ -268,15 +280,36 @@ pub async fn retrieve_data( } else { None }; if let Some(bytes) = artifact_bytes { - upload_queue.enqueue_path( - format!("{}/{}", "maven", artifact_path), + // Upload to CAS and get hash + let hash = uploader.upload_cas( bytes.to_vec(), Some("application/java-archive".to_string()), + s3_client, + semaphore.clone(), + ).await?; + + // Store full CAS URL + let base_url = dotenvy::var("BASE_URL").unwrap(); + let cas_url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &hash[..2], + &hash[2..] 
); + + // Update library URL with CAS URL + if let Some(ref mut downloads) = lib.downloads { + if let Some(ref mut artifact) = downloads.artifact { + artifact.url = Some(cas_url); + } + } else if lib.url.is_some() { + lib.url = Some(cas_url); + } } Ok::(lib) - })).await?; + }})).await?; let elapsed = now.elapsed(); info!("Elapsed lib DL: {:.2?}", elapsed); @@ -325,10 +358,12 @@ pub async fn retrieve_data( }; let version_hash = if should_upload { - upload_queue.enqueue( + uploader.upload_cas( version_bytes.clone(), Some("application/json".to_string()), - ) + s3_client, + semaphore.clone(), + ).await? } else { new_hash.clone() }; From a8577e77458f56ea3758f58ab124ecaf3050df2e Mon Sep 17 00:00:00 2001 From: Davide Date: Tue, 14 Oct 2025 23:34:39 +0200 Subject: [PATCH 5/7] Refactors CAS manifest handling for loaders --- daedalus/src/lib.rs | 8 +- daedalus/src/version.rs | 8 ++ daedalus_client/src/forge.rs | 20 +-- daedalus_client/src/loaders/mod.rs | 1 + daedalus_client/src/main.rs | 2 +- daedalus_client/src/minecraft.rs | 29 ++-- daedalus_client/src/neoforge.rs | 19 +-- daedalus_client/src/services/cas.rs | 202 +++++++++++++++++++++------- 8 files changed, 195 insertions(+), 94 deletions(-) diff --git a/daedalus/src/lib.rs b/daedalus/src/lib.rs index b516761..b26a6e4 100644 --- a/daedalus/src/lib.rs +++ b/daedalus/src/lib.rs @@ -187,12 +187,10 @@ impl GradleSpecifier { /// Returns if specifier belongs to a lwjgl library pub fn is_lwjgl(&self) -> bool { - vec![ - "org.lwjgl", + ["org.lwjgl", "org.lwjgl.lwjgl", "net.java.jinput", - "net.java.jutils", - ] + "net.java.jutils"] .contains(&self.package.as_str()) } @@ -379,7 +377,7 @@ pub async fn download_file_mirrors( } } - return Err(Error::MirrorsFailed("No mirrors succeeded!".to_string())); + Err(Error::MirrorsFailed("No mirrors succeeded!".to_string())) } /// Downloads a file with retry and checksum functionality diff --git a/daedalus/src/version.rs b/daedalus/src/version.rs index 29474e9..2d37911 100644 --- 
a/daedalus/src/version.rs +++ b/daedalus/src/version.rs @@ -20,17 +20,25 @@ pub enum MinecraftVersion { /// Snapshot version (YYwWWx format) /// Example: 23w10a = year 23, week 10, revision a Snapshot { + /// Year (e.g., 23 for 2023) year: u32, + /// Week number (1-52) week: u32, + /// Revision letter (e.g., "a", "b") revision: String, }, /// Regular release version /// Example: 1.20.4 Release { + /// Major version number major: u32, + /// Minor version number minor: u32, + /// Patch version number patch: u32, + /// Pre-release identifier (pre, rc, etc.) prerelease: Option, + /// Build metadata (for Forge versions) build: Option>, }, } diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge.rs index cbf6c38..91d60b5 100644 --- a/daedalus_client/src/forge.rs +++ b/daedalus_client/src/forge.rs @@ -411,13 +411,6 @@ pub async fn retrieve_data( new_hash.clone() }; - manifest_builder.add_version( - "forge", - loader_version_full.to_string(), - version_hash.clone(), - version_bytes.len() as u64, - ); - let base_url = dotenvy::var("BASE_URL").unwrap(); let cas_url = format!( "{}/v{}/objects/{}/{}", @@ -745,13 +738,6 @@ pub async fn retrieve_data( new_hash.clone() }; - manifest_builder.add_version( - "forge", - loader_version_full.to_string(), - version_hash.clone(), - version_bytes.len() as u64, - ); - let base_url = dotenvy::var("BASE_URL").unwrap(); let cas_url = format!( "{}/v{}/objects/{}/{}", @@ -930,6 +916,12 @@ pub async fn retrieve_data( }) } } + + // Set the full Forge versions JSON in manifest_builder with nested structure + // This preserves game version -> loader version mappings + let versions_json = serde_json::to_value(&final_versions)?; + manifest_builder.set_loader_versions("forge", versions_json); + info!(version_count = final_versions.len(), "Set Forge versions with nested structure in CAS manifest builder"); } Ok(()) diff --git a/daedalus_client/src/loaders/mod.rs b/daedalus_client/src/loaders/mod.rs index da83850..b827a71 100644 --- 
a/daedalus_client/src/loaders/mod.rs +++ b/daedalus_client/src/loaders/mod.rs @@ -557,6 +557,7 @@ impl LoaderProcessor { loader.clone(), version_hash.clone(), version_bytes.len() as u64, + version_info.release_time, ); let base_url = dotenvy::var("BASE_URL").unwrap(); diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs index 33f3d43..38fec57 100644 --- a/daedalus_client/src/main.rs +++ b/daedalus_client/src/main.rs @@ -382,7 +382,7 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { info!( loader = %loader, - version_count = loader_manifest.versions.len(), + version_count = loader_manifest.versions.as_array().map(|a| a.len()).unwrap_or(0), path = %manifest_path, "Uploading loader manifest" ); diff --git a/daedalus_client/src/minecraft.rs b/daedalus_client/src/minecraft.rs index 3ebed7a..68c8d76 100644 --- a/daedalus_client/src/minecraft.rs +++ b/daedalus_client/src/minecraft.rs @@ -641,9 +641,7 @@ pub async fn retrieve_data( .expect("Safe to unwrap since we ensure it's valid in version_json already") }), compliance_level: 1, - assets_index_url: Some( - version_info.asset_index.sha1.clone(), - ), + assets_index_url: Some(format_url(&assets_path)), assets_index_sha1: Some( version_info.asset_index.sha1.clone(), ), @@ -691,19 +689,16 @@ pub async fn retrieve_data( } let version_bytes = serde_json::to_vec(&version_info)?; - let version_hash = uploader.upload_cas( + let _version_hash = uploader.upload_cas( version_bytes.clone(), Some("application/json".to_string()), s3_client, semaphore.clone(), ).await?; - manifest_builder.add_version( - "minecraft", - version_info.id.clone(), - version_hash, - version_bytes.len() as u64, - ); + // NOTE: We don't call manifest_builder.add_version() for minecraft here. + // Instead, we use set_loader_versions() with the full VersionManifest at the end + // to preserve rich metadata (type, url, time, releaseTime, sha1, etc.) 
Ok::<(), crate::infrastructure::error::Error>(()) } @@ -861,6 +856,7 @@ pub async fn retrieve_data( lwjgl.version.clone(), lwjgl_hash, lwjgl_bytes.len() as u64, + lwjgl.release_time, ); } else { @@ -887,13 +883,22 @@ pub async fn retrieve_data( let elapsed = now.elapsed(); info!("Elapsed: {:.2?}", elapsed); - Ok(Arc::try_unwrap(cloned_manifest) + // Get the final manifest with all processed versions + let final_manifest = Arc::try_unwrap(cloned_manifest) .map_err(|err| { crate::infrastructure::error::invalid_input( format!("Failed to unwrap Arc>: {:?}", err) ) })? - .into_inner()) + .into_inner(); + + // Set the full Minecraft versions JSON in manifest_builder + // This preserves rich metadata (type, url, time, releaseTime, sha1, complianceLevel, etc.) + let versions_json = serde_json::to_value(&final_manifest.versions)?; + manifest_builder.set_loader_versions("minecraft", versions_json); + info!(version_count = final_manifest.versions.len(), "Set Minecraft versions with rich metadata in CAS manifest builder"); + + Ok(final_manifest) } #[derive(Deserialize, Debug, Clone)] diff --git a/daedalus_client/src/neoforge.rs b/daedalus_client/src/neoforge.rs index b735b2f..4f4d33c 100644 --- a/daedalus_client/src/neoforge.rs +++ b/daedalus_client/src/neoforge.rs @@ -368,13 +368,6 @@ pub async fn retrieve_data( new_hash.clone() }; - manifest_builder.add_version( - "neoforge", - loader_version_full.to_string(), - version_hash.clone(), - version_bytes.len() as u64, - ); - let base_url = dotenvy::var("BASE_URL").unwrap(); let cas_url = format!( "{}/v{}/objects/{}/{}", @@ -558,13 +551,11 @@ pub async fn retrieve_data( } } - // Note: Versions are now tracked in ManifestBuilder and uploaded separately - // in the main loop via manifest_builder.build_loader_manifest() - - info!( - "✅ NeoForge - Processed {} Minecraft versions", - final_versions.len() - ); + // Set the full NeoForge versions JSON in manifest_builder with nested structure + // This preserves game version -> loader 
version mappings + let versions_json = serde_json::to_value(&final_versions)?; + manifest_builder.set_loader_versions("neoforge", versions_json); + info!(version_count = final_versions.len(), "Set NeoForge versions with nested structure in CAS manifest builder"); } Ok(()) diff --git a/daedalus_client/src/services/cas.rs b/daedalus_client/src/services/cas.rs index 5f70651..a193ff7 100644 --- a/daedalus_client/src/services/cas.rs +++ b/daedalus_client/src/services/cas.rs @@ -8,7 +8,7 @@ use tracing::{info, instrument}; /// /// This is the single version entrypoint for all metadata (minecraft, forge, fabric, quilt, neoforge). /// Old versions had individual versioning per loader, but v3+ uses a unified version. -pub const CAS_VERSION: u32 = 3; +pub const CAS_VERSION: u32 = 4; /// Content-Addressable Storage (CAS) system /// @@ -108,13 +108,18 @@ pub struct LoaderManifestEntry { pub hash: String, /// Size of the content in bytes pub size: u64, - /// When this entry was last updated + /// When this version was originally released #[serde(with = "chrono::serde::ts_seconds")] - pub updated_at: DateTime, + pub release_time: DateTime, } /// Loader manifest containing all versions for a specific loader -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +/// +/// The `versions` field is flexible and can contain different schemas per loader: +/// - Simple loaders (forge, neoforge): Vec (id, hash, size, release_time) +/// - Complex loaders (minecraft): Full metadata (type, url, time, releaseTime, sha1, etc.)
+/// - Platform loaders (fabric, quilt): Custom format with game-specific versions +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct LoaderManifest { /// Schema version for future compatibility pub schema_version: u32, @@ -122,13 +127,13 @@ pub struct LoaderManifest { pub loader: String, /// Timestamp when this manifest was created (ISO 8601) pub timestamp: String, - /// All version entries - pub versions: Vec, + /// All version entries (schema varies per loader type) + pub versions: serde_json::Value, } impl LoaderManifest { - /// Create a new loader manifest - pub fn new(loader: String, versions: Vec) -> Self { + /// Create a new loader manifest with custom JSON for versions + pub fn new(loader: String, versions: serde_json::Value) -> Self { let timestamp = Utc::now().format("%Y-%m-%dT%H-%M-%SZ").to_string(); Self { schema_version: 1, @@ -137,6 +142,16 @@ impl LoaderManifest { versions, } } + + /// Create a new loader manifest from simple version entries + /// + /// This is a convenience method for simple loaders (forge, neoforge) that use + /// the standard LoaderManifestEntry schema (id, hash, size, release_time).
+ pub fn from_entries(loader: String, entries: Vec) -> Self { + let versions = serde_json::to_value(&entries) + .expect("LoaderManifestEntry should always serialize to JSON"); + Self::new(loader, versions) + } } /// Builder for tracking version entries and constructing loader manifests @@ -165,9 +180,14 @@ impl LoaderManifest { /// let forge_manifest = builder.build_loader_manifest("forge"); /// ``` pub struct ManifestBuilder { - /// Map of loader name → (version_id → (hash, size)) + /// Map of loader name → (version_id → (hash, size, release_time)) /// Using nested DashMap for concurrent access at both levels - versions: DashMap>, + /// Used for simple loaders (forge, neoforge) that use LoaderManifestEntry schema + versions: DashMap)>>, + + /// Map of loader name → custom JSON for versions + /// Used for complex loaders (minecraft, fabric, quilt) that provide full custom schemas + custom_versions: DashMap, } impl ManifestBuilder { @@ -175,22 +195,27 @@ impl ManifestBuilder { pub fn new() -> Self { Self { versions: DashMap::new(), + custom_versions: DashMap::new(), } } - /// Add a version entry for a specific loader + /// Add a version entry for a specific loader (simple mode) + /// + /// This is for simple loaders (forge, neoforge) that use the standard + /// LoaderManifestEntry schema (id, hash, size, release_time). /// /// If the version already exists for this loader, it will be overwritten. /// This is idempotent and thread-safe. 
/// /// # Arguments /// - /// * `loader` - Loader name (e.g., "minecraft", "forge") - /// * `version_id` - Version identifier (e.g., "1.20.4", "49.0.3") + /// * `loader` - Loader name (e.g., "forge", "neoforge") + /// * `version_id` - Version identifier (e.g., "49.0.3") /// * `hash` - SHA256 hash of the version's content /// * `size` - Size of the content in bytes + /// * `release_time` - When this version was originally released #[instrument(skip(self))] - pub fn add_version(&self, loader: &str, version_id: String, hash: String, size: u64) { + pub fn add_version(&self, loader: &str, version_id: String, hash: String, size: u64, release_time: DateTime) { // Get or create the version map for this loader let loader_map = self .versions @@ -198,33 +223,85 @@ impl ManifestBuilder { .or_default(); // Add the version entry - loader_map.insert(version_id, (hash, size)); + loader_map.insert(version_id, (hash, size, release_time)); info!(loader = %loader, "Added version to manifest builder"); } + /// Set custom versions JSON for a loader (complex mode) + /// + /// This is for complex loaders (minecraft, fabric, quilt) that provide their + /// own custom schema with rich metadata beyond just id/hash/size. + /// + /// The entire versions array is set at once, replacing any previous data. + /// This is thread-safe. + /// + /// # Arguments + /// + /// * `loader` - Loader name (e.g., "minecraft", "fabric") + /// * `versions` - Custom JSON value containing the versions array + /// + /// # Example + /// + /// ```no_run + /// # use daedalus_client::services::cas::ManifestBuilder; + /// let builder = ManifestBuilder::new(); + /// let minecraft_versions = serde_json::json!([ + /// { + /// "id": "1.20.4", + /// "type": "release", + /// "url": "https://meta.gdl.gg/minecraft/v2/versions/1.20.4.json", + /// "sha1": "abc123...", + /// "releaseTime": "2023-12-07T12:00:00Z", + /// // ... 
other fields + /// } + /// ]); + /// builder.set_loader_versions("minecraft", minecraft_versions); + /// ``` + #[instrument(skip(self, versions))] + pub fn set_loader_versions(&self, loader: &str, versions: serde_json::Value) { + self.custom_versions.insert(loader.to_string(), versions); + info!(loader = %loader, "Set custom versions JSON for loader"); + } + /// Build a loader manifest from the tracked versions /// /// Creates a LoaderManifest with all versions that were added for this loader. /// Returns None if no versions exist for this loader. /// + /// Checks custom_versions first (for complex loaders like minecraft), then falls back + /// to building from simple version entries (for forge, neoforge, etc.). + /// /// # Arguments /// /// * `loader` - Loader name to build manifest for #[instrument(skip(self))] pub fn build_loader_manifest(&self, loader: &str) -> Option { + // Check if we have custom versions JSON (complex loaders) + if let Some(custom) = self.custom_versions.get(loader) { + let versions_json = custom.value().clone(); + + info!( + loader = %loader, + "Built loader manifest from custom versions JSON" + ); + + return Some(LoaderManifest::new(loader.to_string(), versions_json)); + } + + // Fall back to building from simple version entries (forge, neoforge, etc.) 
let loader_map = self.versions.get(loader)?; // Collect all version entries let mut entries: Vec = loader_map .iter() .map(|entry| { - let (version_id, (hash, size)) = entry.pair(); + let (version_id, (hash, size, release_time)) = entry.pair(); LoaderManifestEntry { id: version_id.clone(), hash: hash.clone(), size: *size, - updated_at: Utc::now(), + release_time: *release_time, } }) .collect(); @@ -235,17 +312,26 @@ impl ManifestBuilder { info!( loader = %loader, version_count = entries.len(), - "Built loader manifest" + "Built loader manifest from simple entries" ); - Some(LoaderManifest::new(loader.to_string(), entries)) + Some(LoaderManifest::from_entries(loader.to_string(), entries)) } /// Get list of all loaders that have versions /// - /// Returns a sorted vector of loader names. + /// Returns a sorted vector of loader names from both simple and custom versions. pub fn get_loaders(&self) -> Vec { let mut loaders: Vec = self.versions.iter().map(|e| e.key().clone()).collect(); + + // Add loaders from custom_versions that aren't already in the list + for entry in self.custom_versions.iter() { + let loader = entry.key().clone(); + if !loaders.contains(&loader) { + loaders.push(loader); + } + } + loaders.sort(); loaders } @@ -275,14 +361,20 @@ impl ManifestBuilder { /// # Arguments /// /// * `manifest` - The old loader manifest to load + #[allow(dead_code)] pub fn load_old_manifest(&self, manifest: &LoaderManifest) { let loader_map = self .versions .entry(manifest.loader.clone()) .or_default(); - for entry in &manifest.versions { - loader_map.insert(entry.id.clone(), (entry.hash.clone(), entry.size)); + // Try to deserialize versions as Vec + // This works for simple loaders (forge, neoforge) that use the standard schema + // For complex loaders (minecraft), this method won't be used + if let Ok(entries) = serde_json::from_value::>(manifest.versions.clone()) { + for entry in entries { + loader_map.insert(entry.id.clone(), (entry.hash.clone(), entry.size, 
entry.release_time)); + } } } @@ -300,11 +392,12 @@ impl ManifestBuilder { /// * `loader` - Loader name (e.g., "minecraft", "forge") /// * `version_id` - Version identifier /// * `new_hash` - New content hash to compare + #[allow(dead_code)] pub fn has_version_changed(&self, loader: &str, version_id: &str, new_hash: &str) -> bool { if let Some(loader_map) = self.versions.get(loader) { if let Some(entry) = loader_map.get(version_id) { // Version exists, check if hash changed - let (old_hash, _) = entry.value(); + let (old_hash, _, _) = entry.value(); return old_hash.as_str() != new_hash; } } @@ -379,7 +472,7 @@ mod tests { id: "1.20.4".to_string(), hash: "abc123".to_string(), size: 1024, - updated_at: Utc::now(), + release_time: Utc::now(), }; assert_eq!(entry.id, "1.20.4"); @@ -389,32 +482,35 @@ mod tests { #[test] fn test_loader_manifest_creation() { + let release_time = Utc::now(); let entries = vec![ LoaderManifestEntry { id: "1.20.4".to_string(), hash: "abc123".to_string(), size: 1024, - updated_at: Utc::now(), + release_time, }, LoaderManifestEntry { id: "1.20.3".to_string(), hash: "def456".to_string(), size: 2048, - updated_at: Utc::now(), + release_time, }, ]; - let manifest = LoaderManifest::new("minecraft".to_string(), entries.clone()); + let manifest = LoaderManifest::from_entries("minecraft".to_string(), entries.clone()); assert_eq!(manifest.schema_version, 1); assert_eq!(manifest.loader, "minecraft"); - assert_eq!(manifest.versions.len(), 2); - assert_eq!(manifest.versions[0].id, "1.20.4"); + // versions is now serde_json::Value, so deserialize to check + let versions: Vec = serde_json::from_value(manifest.versions).unwrap(); + assert_eq!(versions.len(), 2); + assert_eq!(versions[0].id, "1.20.4"); } #[test] fn test_loader_manifest_serialization() { - let manifest = LoaderManifest::new("forge".to_string(), vec![]); + let manifest = LoaderManifest::from_entries("forge".to_string(), vec![]); let json = serde_json::to_string(&manifest).unwrap(); let 
deserialized: LoaderManifest = serde_json::from_str(&json).unwrap(); @@ -433,7 +529,7 @@ mod tests { fn test_manifest_builder_add_version() { let builder = ManifestBuilder::new(); - builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024, Utc::now()); assert_eq!(builder.loader_count(), 1); assert_eq!(builder.version_count("minecraft"), 1); @@ -442,10 +538,11 @@ mod tests { #[test] fn test_manifest_builder_multiple_loaders() { let builder = ManifestBuilder::new(); + let release_time = Utc::now(); - builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); - builder.add_version("forge", "49.0.3".to_string(), "def456".to_string(), 2048); - builder.add_version("fabric", "0.15.0".to_string(), "ghi789".to_string(), 512); + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024, release_time); + builder.add_version("forge", "49.0.3".to_string(), "def456".to_string(), 2048, release_time); + builder.add_version("fabric", "0.15.0".to_string(), "ghi789".to_string(), 512, release_time); assert_eq!(builder.loader_count(), 3); assert_eq!(builder.version_count("minecraft"), 1); @@ -459,10 +556,11 @@ mod tests { #[test] fn test_manifest_builder_multiple_versions() { let builder = ManifestBuilder::new(); + let release_time = Utc::now(); - builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); - builder.add_version("minecraft", "1.20.3".to_string(), "def456".to_string(), 2048); - builder.add_version("minecraft", "1.20.2".to_string(), "ghi789".to_string(), 512); + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024, release_time); + builder.add_version("minecraft", "1.20.3".to_string(), "def456".to_string(), 2048, release_time); + builder.add_version("minecraft", "1.20.2".to_string(), "ghi789".to_string(), 512, release_time); assert_eq!(builder.loader_count(), 1); 
assert_eq!(builder.version_count("minecraft"), 3); @@ -471,25 +569,29 @@ mod tests { #[test] fn test_manifest_builder_build_manifest() { let builder = ManifestBuilder::new(); + let release_time = Utc::now(); - builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); - builder.add_version("minecraft", "1.20.3".to_string(), "def456".to_string(), 2048); + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024, release_time); + builder.add_version("minecraft", "1.20.3".to_string(), "def456".to_string(), 2048, release_time); let manifest = builder.build_loader_manifest("minecraft").unwrap(); assert_eq!(manifest.loader, "minecraft"); - assert_eq!(manifest.versions.len(), 2); + + // Deserialize versions to check them + let versions: Vec = serde_json::from_value(manifest.versions).unwrap(); + assert_eq!(versions.len(), 2); // Check versions are sorted by ID - assert_eq!(manifest.versions[0].id, "1.20.3"); - assert_eq!(manifest.versions[1].id, "1.20.4"); + assert_eq!(versions[0].id, "1.20.3"); + assert_eq!(versions[1].id, "1.20.4"); } #[test] fn test_manifest_builder_nonexistent_loader() { let builder = ManifestBuilder::new(); - builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024, Utc::now()); assert!(builder.build_loader_manifest("forge").is_none()); assert_eq!(builder.version_count("forge"), 0); @@ -498,16 +600,20 @@ mod tests { #[test] fn test_manifest_builder_overwrite_version() { let builder = ManifestBuilder::new(); + let release_time = Utc::now(); // Add same version twice with different hashes - builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024); - builder.add_version("minecraft", "1.20.4".to_string(), "def456".to_string(), 2048); + builder.add_version("minecraft", "1.20.4".to_string(), "abc123".to_string(), 1024, release_time); + builder.add_version("minecraft", 
"1.20.4".to_string(), "def456".to_string(), 2048, release_time); assert_eq!(builder.version_count("minecraft"), 1); // Still 1, overwritten let manifest = builder.build_loader_manifest("minecraft").unwrap(); - assert_eq!(manifest.versions.len(), 1); - assert_eq!(manifest.versions[0].hash, "def456"); // Latest hash - assert_eq!(manifest.versions[0].size, 2048); // Latest size + + // Deserialize versions to check them + let versions: Vec = serde_json::from_value(manifest.versions).unwrap(); + assert_eq!(versions.len(), 1); + assert_eq!(versions[0].hash, "def456"); // Latest hash + assert_eq!(versions[0].size, 2048); // Latest size } } From a57fe6b7f9e021ef8edaaa464fcbbfdc7e05dea7 Mon Sep 17 00:00:00 2001 From: Davide Date: Fri, 17 Oct 2025 22:20:51 +0200 Subject: [PATCH 6/7] Refactors loader processing and adds common utils Removes `bincode` dependency. Introduces common utilities for loader processing to reduce code duplication. This includes CAS URL handling, version change detection, and manifest merging. Refactors Forge and NeoForge to utilize the shared utilities. Adds types for Forge and renames some Forge types. Removes `extract_hash_from_cas_url` and `should_ignore_artifact` from Forge module. Improves code organization and maintainability. 
--- daedalus/Cargo.toml | 1 - daedalus/src/lib.rs | 41 +- daedalus/src/minecraft.rs | 64 +- daedalus/src/minecraft.rs.backup | 805 +++++++++++++ daedalus/src/modded.rs | 10 - daedalus/src/version.rs | 18 +- daedalus_client/Cargo.toml | 2 +- daedalus_client/src/common/cas.rs | 99 ++ .../src/common/change_detection.rs | 141 +++ daedalus_client/src/common/manifest_merge.rs | 218 ++++ daedalus_client/src/common/mod.rs | 15 + daedalus_client/src/forge/archive.rs | 121 ++ daedalus_client/src/forge/libraries.rs | 19 + .../src/{forge.rs => forge/mod.rs} | 388 +----- daedalus_client/src/forge/types.rs | 137 +++ daedalus_client/src/forge/version.rs | 134 +++ .../src/infrastructure/circuit_breaker.rs | 13 + daedalus_client/src/loaders/mod.rs | 150 ++- daedalus_client/src/main.rs | 35 +- daedalus_client/src/minecraft.rs | 1058 ----------------- daedalus_client/src/minecraft/helpers.rs | 72 ++ .../src/minecraft/library_patches.rs | 107 ++ daedalus_client/src/minecraft/log4j.rs | 261 ++++ daedalus_client/src/minecraft/lwjgl.rs | 210 ++++ daedalus_client/src/minecraft/mod.rs | 700 +++++++++++ daedalus_client/src/minecraft/types.rs | 37 + .../src/{neoforge.rs => neoforge/mod.rs} | 163 +-- daedalus_client/src/neoforge/types.rs | 5 + daedalus_client/src/services/cas.rs | 59 +- 29 files changed, 3369 insertions(+), 1714 deletions(-) create mode 100644 daedalus/src/minecraft.rs.backup create mode 100644 daedalus_client/src/common/cas.rs create mode 100644 daedalus_client/src/common/change_detection.rs create mode 100644 daedalus_client/src/common/manifest_merge.rs create mode 100644 daedalus_client/src/common/mod.rs create mode 100644 daedalus_client/src/forge/archive.rs create mode 100644 daedalus_client/src/forge/libraries.rs rename daedalus_client/src/{forge.rs => forge/mod.rs} (76%) create mode 100644 daedalus_client/src/forge/types.rs create mode 100644 daedalus_client/src/forge/version.rs delete mode 100644 daedalus_client/src/minecraft.rs create mode 100644 
daedalus_client/src/minecraft/helpers.rs create mode 100644 daedalus_client/src/minecraft/library_patches.rs create mode 100644 daedalus_client/src/minecraft/log4j.rs create mode 100644 daedalus_client/src/minecraft/lwjgl.rs create mode 100644 daedalus_client/src/minecraft/mod.rs create mode 100644 daedalus_client/src/minecraft/types.rs rename daedalus_client/src/{neoforge.rs => neoforge/mod.rs} (81%) create mode 100644 daedalus_client/src/neoforge/types.rs diff --git a/daedalus/Cargo.toml b/daedalus/Cargo.toml index 22aeb63..a8ceb17 100644 --- a/daedalus/Cargo.toml +++ b/daedalus/Cargo.toml @@ -26,7 +26,6 @@ bytes = "1" thiserror = "1" tokio = { version = "1", features = ["full"] } sha1 = { version = "0.6.1", features = ["std"] } -bincode = { version = "2.0.0-rc.3", features = ["serde"], optional = true } once_cell = "1" url = "2" lenient_semver = "0" diff --git a/daedalus/src/lib.rs b/daedalus/src/lib.rs index b26a6e4..7a9ec53 100644 --- a/daedalus/src/lib.rs +++ b/daedalus/src/lib.rs @@ -20,6 +20,16 @@ pub mod modded; /// Custom version comparison for Minecraft versions pub mod version; +/// HTTP client configuration constants +/// TCP keepalive interval for persistent connections +const TCP_KEEPALIVE_SECS: u64 = 10; +/// Overall request timeout including reading response +const REQUEST_TIMEOUT_SECS: u64 = 120; +/// Connection establishment timeout +const CONNECT_TIMEOUT_SECS: u64 = 30; +/// Maximum idle connections per host in the pool +const MAX_IDLE_CONNECTIONS_PER_HOST: usize = 10; + /// Your branding, used for the user agent and similar #[derive(Debug)] pub struct Branding { @@ -33,6 +43,11 @@ pub struct Branding { pub static BRANDING: OnceCell = OnceCell::new(); /// Global HTTP client with connection pooling and TCP keepalive +/// +/// # Panics +/// Panics if the HTTP client fails to initialize. 
This is intentional as +/// the application cannot function without a working HTTP client (e.g., if +/// TLS initialization fails, which is extremely rare on modern systems). static HTTP_CLIENT: LazyLock = LazyLock::new(|| { let mut headers = reqwest::header::HeaderMap::new(); if let Ok(header) = reqwest::header::HeaderValue::from_str( @@ -42,11 +57,11 @@ static HTTP_CLIENT: LazyLock = LazyLock::new(|| { } reqwest::Client::builder() - .tcp_keepalive(Some(Duration::from_secs(10))) - .timeout(Duration::from_secs(120)) - .connect_timeout(Duration::from_secs(30)) + .tcp_keepalive(Some(Duration::from_secs(TCP_KEEPALIVE_SECS))) + .timeout(Duration::from_secs(REQUEST_TIMEOUT_SECS)) + .connect_timeout(Duration::from_secs(CONNECT_TIMEOUT_SECS)) .default_headers(headers) - .pool_max_idle_per_host(10) + .pool_max_idle_per_host(MAX_IDLE_CONNECTIONS_PER_HOST) .build() .expect("Failed to create HTTP client") }); @@ -121,7 +136,6 @@ pub enum Error { MirrorsFailed(String), } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Clone, Default)] /// A specifier string for Gradle pub struct GradleSpecifier { @@ -205,7 +219,7 @@ impl GradleSpecifier { "{}:{}:{}", self.package, self.artifact, - self.identifier.clone().unwrap_or("".to_string()) + self.identifier.as_deref().unwrap_or("") ) } @@ -214,17 +228,12 @@ impl GradleSpecifier { /// Returns Ordering::Greater if self is greater than other /// Returns Ordering::Less if self is less than other pub fn compare_versions(&self, other: &Self) -> Result { - let x = lenient_semver::parse(self.version.as_str()); - let y = lenient_semver::parse(other.version.as_str()); - - if x.is_err() || y.is_err() { - return Err(Error::ParseError( - "Unable to parse version".to_string(), - )); - } + let x = lenient_semver::parse(self.version.as_str()) + .map_err(|_| Error::ParseError("Unable to parse version".to_string()))?; + let y = lenient_semver::parse(other.version.as_str()) + .map_err(|_| 
Error::ParseError("Unable to parse version".to_string()))?; - // safe to unwrap because we already checked for errors - Ok(x.unwrap().cmp(&y.unwrap())) + Ok(x.cmp(&y)) } } diff --git a/daedalus/src/minecraft.rs b/daedalus/src/minecraft.rs index 597eda2..9c6e96c 100644 --- a/daedalus/src/minecraft.rs +++ b/daedalus/src/minecraft.rs @@ -5,13 +5,9 @@ use serde::{Deserialize, Serialize}; use std::collections::{BTreeMap, HashMap}; use std::convert::TryFrom; -#[cfg(feature = "bincode")] -use bincode::{Decode, Encode}; - /// The latest version of the format the model structs deserialize to pub const CURRENT_FORMAT_VERSION: usize = 2; -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] /// The version type @@ -38,7 +34,6 @@ impl VersionType { } } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// A game version of Minecraft @@ -51,10 +46,8 @@ pub struct Version { /// A link to additional information about the version pub url: String, /// The latest time a file in this version was updated - #[cfg_attr(feature = "bincode", bincode(with_serde))] pub time: DateTime, /// The time this version was released - #[cfg_attr(feature = "bincode", bincode(with_serde))] pub release_time: DateTime, /// The SHA1 hash of the additional information about the version pub sha1: String, @@ -73,7 +66,6 @@ pub struct Version { pub java_profile: Option, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] /// Java profile required to run this mc version @@ -136,7 +128,6 @@ impl TryFrom<&str> for MinecraftJavaProfile { } } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// The latest snapshot and release of the game pub struct LatestVersion { @@ -146,7 +137,6 @@ pub 
struct LatestVersion { pub snapshot: String, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// Data of all game versions of Minecraft pub struct VersionManifest { @@ -169,7 +159,6 @@ pub async fn fetch_version_manifest( )?) } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Information about the assets of the game @@ -186,7 +175,6 @@ pub struct AssetIndex { pub url: String, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone)] #[serde(rename_all = "snake_case")] /// The type of download @@ -203,7 +191,6 @@ pub enum DownloadType { WindowsServer, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// Download information of a file pub struct Download { @@ -215,7 +202,6 @@ pub struct Download { pub url: String, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// Download information of a library pub struct LibraryDownload { @@ -229,7 +215,6 @@ pub struct LibraryDownload { pub url: Option, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// A list of files that should be downloaded for libraries pub struct LibraryDownloads { @@ -242,7 +227,6 @@ pub struct LibraryDownloads { pub classifiers: Option>, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[serde(rename_all = "snake_case")] /// The action a rule can follow @@ -253,7 +237,6 @@ pub enum RuleAction { Disallow, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive( Serialize, Deserialize, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Clone, )] @@ -278,7 +261,6 @@ pub enum Os { Unknown, } -#[cfg_attr(feature = "bincode", 
derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] /// A rule which depends on what OS the user is on pub struct OsRule { @@ -293,7 +275,6 @@ pub struct OsRule { pub arch: Option, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] /// A rule which depends on the toggled features of the launcher pub struct FeatureRule { @@ -316,7 +297,6 @@ pub struct FeatureRule { pub is_quick_play_realms: Option, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] /// A rule deciding whether a file is downloaded, an argument is used, etc. pub struct Rule { @@ -330,7 +310,6 @@ pub struct Rule { pub features: Option, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// Information delegating the extraction of the library pub struct LibraryExtract { @@ -339,7 +318,6 @@ pub struct LibraryExtract { pub exclude: Option>, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Information about the java version the game needs @@ -350,7 +328,6 @@ pub struct JavaVersion { pub major_version: u32, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// A library which the game relies on to run pub struct Library { @@ -433,6 +410,10 @@ impl Library { // First try version_hashes if present if let Some(ref hashes) = self.version_hashes { if let Some(hash) = hashes.get(minecraft_version) { + // Validate hash is at least 2 characters to avoid panic on slicing + if hash.len() < 2 { + return None; + } return Some(format!( "{}/v{}/objects/{}/{}", base_url, @@ -469,7 +450,6 @@ pub struct PartialLibrary { pub include_in_classpath: Option, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] 
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "snake_case")] /// A dependency rule, either suggests or equals @@ -480,7 +460,6 @@ pub enum DependencyRule { Suggests(String), } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// A library dependency pub struct Dependency { @@ -494,7 +473,21 @@ pub struct Dependency { pub rule: Option, } -/// Merges a partial library to make a complete library +/// Merges a partial library definition into a complete library +/// +/// This function takes a partial library (which may override specific fields) +/// and merges it with an existing complete library. Fields present in the partial +/// library will override the corresponding fields in the complete library. +/// +/// # Arguments +/// +/// * `partial` - Partial library with fields to override +/// * `merge` - Complete library to merge into +/// +/// # Returns +/// +/// A complete library with merged fields. The `patched` flag is set to true +/// to indicate this library has been modified by a partial library. pub fn merge_partial_library( partial: PartialLibrary, mut merge: Library, @@ -557,11 +550,15 @@ pub fn merge_partial_library( merge } +/// Default value for include_in_classpath field +/// +/// Returns `true` because libraries should be included in the classpath by default. +/// Only specialized libraries (like native libraries that are extracted but not loaded) +/// should set this to false explicitly. 
fn default_include_in_classpath() -> bool { true } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(untagged)] /// A container for an argument or multiple arguments @@ -572,7 +569,6 @@ pub enum ArgumentValue { Many(Vec), } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(untagged)] /// A command line argument passed to a program @@ -588,7 +584,6 @@ pub enum Argument { }, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone, Copy)] #[serde(rename_all = "snake_case")] /// The type of argument @@ -599,7 +594,6 @@ pub enum ArgumentType { Jvm, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone, Copy)] #[serde(rename_all = "kebab-case")] /// Java Logging type @@ -608,7 +602,6 @@ pub enum LoggingType { Log4j2Xml, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone, Copy)] #[serde(rename_all = "kebab-case")] /// Java Logging config names @@ -617,7 +610,6 @@ pub enum LoggingConfigName { Client, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Java Logging artifact for download @@ -632,7 +624,6 @@ pub struct LoggingArtifact { pub url: String, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Java Logging configuration @@ -646,7 +637,6 @@ pub struct LoggingConfig { pub type_: LoggingType, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Information about a version @@ -681,10 +671,8 @@ pub struct VersionInfo { /// The minimum version of the 
Minecraft Launcher that can run this version of the game pub minimum_launcher_version: u32, /// The time that the version was released - #[cfg_attr(feature = "bincode", bincode(with_serde))] pub release_time: DateTime, /// The latest time a file in this version was updated - #[cfg_attr(feature = "bincode", bincode(with_serde))] pub time: DateTime, #[serde(rename = "type")] /// The type of version @@ -700,7 +688,6 @@ pub struct VersionInfo { pub processors: Option>, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Information about grouping of libraries @@ -711,7 +698,6 @@ pub struct LibraryGroup { pub version: String, /// The uid aka maven package group id of this group pub uid: String, - #[cfg_attr(feature = "bincode", bincode(with_serde))] /// The time that the version was released pub release_time: DateTime, /// The type of version @@ -766,7 +752,6 @@ pub async fn fetch_version_info( )?) } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// An asset of the game pub struct Asset { @@ -776,7 +761,6 @@ pub struct Asset { pub size: u32, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// An index containing all assets the game needs pub struct AssetsIndex { diff --git a/daedalus/src/minecraft.rs.backup b/daedalus/src/minecraft.rs.backup new file mode 100644 index 0000000..597eda2 --- /dev/null +++ b/daedalus/src/minecraft.rs.backup @@ -0,0 +1,805 @@ +use crate::modded::{Processor, SidedDataEntry}; +use crate::{download_file, Error, GradleSpecifier}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use std::collections::{BTreeMap, HashMap}; +use std::convert::TryFrom; + +#[cfg(feature = "bincode")] +use bincode::{Decode, Encode}; + +/// The latest version of the format the model structs deserialize to +pub const CURRENT_FORMAT_VERSION: 
usize = 2; + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +/// The version type +pub enum VersionType { + /// A major version, which is stable for all players to use + Release, + /// An experimental version, which is unstable and used for feature previews and beta testing + Snapshot, + /// The oldest versions before the game was released + OldAlpha, + /// Early versions of the game + OldBeta, +} + +impl VersionType { + /// Converts the version type to a string + pub fn as_str(&self) -> &'static str { + match self { + VersionType::Release => "release", + VersionType::Snapshot => "snapshot", + VersionType::OldAlpha => "old_alpha", + VersionType::OldBeta => "old_beta", + } + } +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +/// A game version of Minecraft +pub struct Version { + /// A unique identifier of the version + pub id: String, + #[serde(rename = "type")] + /// The release type of the version + pub type_: VersionType, + /// A link to additional information about the version + pub url: String, + /// The latest time a file in this version was updated + #[cfg_attr(feature = "bincode", bincode(with_serde))] + pub time: DateTime, + /// The time this version was released + #[cfg_attr(feature = "bincode", bincode(with_serde))] + pub release_time: DateTime, + /// The SHA1 hash of the additional information about the version + pub sha1: String, + /// Whether the version supports the latest player safety features + pub compliance_level: u32, + #[serde(skip_serializing_if = "Option::is_none")] + /// (GDLauncher Provided) The link to the assets index for this version + /// This is only available when using the GDLauncher mirror + pub assets_index_url: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// (GDLauncher Provided) The SHA1 hash of the assets index for 
this version + /// This is only available when using the GDLauncher mirror + pub assets_index_sha1: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// (GDLauncher Provided) The java profile required to run this mc version + pub java_profile: Option, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "kebab-case")] +/// Java profile required to run this mc version +pub enum MinecraftJavaProfile { + /// Java 8 + JreLegacy, + /// Java 16 + JavaRuntimeAlpha, + /// Java 17 + JavaRuntimeBeta, + /// Java 17 + JavaRuntimeGamma, + /// Java 17 + JavaRuntimeGammaSnapshot, + /// Java 14 + MinecraftJavaExe, + /// Java 21 + JavaRuntimeDelta, + #[serde(untagged)] + /// Unknown + Unknown(String), +} + +impl MinecraftJavaProfile { + /// Converts the version type to a string + pub fn as_str(&self) -> Result<&'static str, Error> { + match self { + MinecraftJavaProfile::JreLegacy => Ok("jre-legacy"), + MinecraftJavaProfile::JavaRuntimeAlpha => Ok("java-runtime-alpha"), + MinecraftJavaProfile::JavaRuntimeBeta => Ok("java-runtime-beta"), + MinecraftJavaProfile::JavaRuntimeGamma => Ok("java-runtime-gamma"), + MinecraftJavaProfile::JavaRuntimeGammaSnapshot => { + Ok("java-runtime-gamma-snapshot") + } + MinecraftJavaProfile::JavaRuntimeDelta => Ok("java-runtime-delta"), + MinecraftJavaProfile::MinecraftJavaExe => Ok("minecraft-java-exe"), + MinecraftJavaProfile::Unknown(value) => { + Err(Error::InvalidMinecraftJavaProfile(value.to_string())) + } + } + } +} + +impl TryFrom<&str> for MinecraftJavaProfile { + type Error = Error; + + fn try_from(value: &str) -> Result { + match value { + "jre-legacy" => Ok(MinecraftJavaProfile::JreLegacy), + "java-runtime-alpha" => Ok(MinecraftJavaProfile::JavaRuntimeAlpha), + "java-runtime-beta" => Ok(MinecraftJavaProfile::JavaRuntimeBeta), + "java-runtime-gamma" => Ok(MinecraftJavaProfile::JavaRuntimeGamma), + "java-runtime-gamma-snapshot" => { + 
Ok(MinecraftJavaProfile::JavaRuntimeGammaSnapshot) + } + "java-runtime-delta" => Ok(MinecraftJavaProfile::JavaRuntimeDelta), + "minecraft-java-exe" => Ok(MinecraftJavaProfile::MinecraftJavaExe), + _ => Err(Error::InvalidMinecraftJavaProfile(value.to_string())), + } + } +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// The latest snapshot and release of the game +pub struct LatestVersion { + /// The version id of the latest release + pub release: String, + /// The version id of the latest snapshot + pub snapshot: String, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// Data of all game versions of Minecraft +pub struct VersionManifest { + /// A struct containing the latest snapshot and release of the game + pub latest: LatestVersion, + /// A list of game versions of Minecraft + pub versions: Vec, +} + +/// The URL to the version manifest +pub const VERSION_MANIFEST_URL: &str = + "https://piston-meta.mojang.com/mc/game/version_manifest_v2.json"; + +/// Fetches a version manifest from the specified URL. If no URL is specified, the default is used. +pub async fn fetch_version_manifest( + url: Option<&str>, +) -> Result { + Ok(serde_json::from_slice( + &download_file(url.unwrap_or(VERSION_MANIFEST_URL), None).await?, + )?) 
+} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +/// Information about the assets of the game +pub struct AssetIndex { + /// The game version ID the assets are for + pub id: String, + /// The SHA1 hash of the assets index + pub sha1: String, + /// The size of the assets index + pub size: u32, + /// The size of the game version's assets + pub total_size: u32, + /// A URL to a file which contains information about the version's assets + pub url: String, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone)] +#[serde(rename_all = "snake_case")] +/// The type of download +pub enum DownloadType { + /// The download is for the game client + Client, + /// The download is mappings for the game + ClientMappings, + /// The download is for the game server + Server, + /// The download is mappings for the game server + ServerMappings, + /// The download is for the windows server + WindowsServer, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// Download information of a file +pub struct Download { + /// The SHA1 hash of the file + pub sha1: String, + /// The size of the file + pub size: u32, + /// The URL where the file can be downloaded + pub url: String, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// Download information of a library +pub struct LibraryDownload { + /// The path that the library should be saved to + pub path: String, + /// The SHA1 hash of the library + pub sha1: String, + /// The size of the library + pub size: u32, + /// The URL where the library can be downloaded + pub url: Option, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// A list of files that should be downloaded for 
libraries +pub struct LibraryDownloads { + #[serde(skip_serializing_if = "Option::is_none")] + /// The primary library artifact + pub artifact: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Conditional files that may be needed to be downloaded alongside the library + /// The HashMap key specifies a classifier as additional information for downloading files + pub classifiers: Option>, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] +#[serde(rename_all = "snake_case")] +/// The action a rule can follow +pub enum RuleAction { + /// The rule's status allows something to be done + Allow, + /// The rule's status disallows something to be done + Disallow, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive( + Serialize, Deserialize, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Clone, +)] +#[serde(rename_all = "kebab-case")] +/// An enum representing the different types of operating systems +pub enum Os { + /// MacOS (x86) + Osx, + /// M1-Based Macs + OsxArm64, + /// Windows (x86) + Windows, + /// Windows ARM + WindowsArm64, + /// Linux (x86) and its derivatives + Linux, + /// Linux ARM 64 + LinuxArm64, + /// Linux ARM 32 + LinuxArm32, + /// The OS is unknown + Unknown, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] +/// A rule which depends on what OS the user is on +pub struct OsRule { + #[serde(skip_serializing_if = "Option::is_none")] + /// The name of the OS + pub name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// The version of the OS. 
This is normally a RegEx + pub version: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// The architecture of the OS + pub arch: Option, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] +/// A rule which depends on the toggled features of the launcher +pub struct FeatureRule { + #[serde(skip_serializing_if = "Option::is_none")] + /// Whether the user is in demo mode + pub is_demo_user: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Whether the user is using a custom resolution + pub has_custom_resolution: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Whether the launcher has quick plays support + pub has_quick_plays_support: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Whether the instance is being launched to a single-player world + pub is_quick_play_singleplayer: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Whether the instance is being launched to a multi-player world + pub is_quick_play_multiplayer: Option, + /// Whether the instance is being launched to a realms world + pub is_quick_play_realms: Option, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] +/// A rule deciding whether a file is downloaded, an argument is used, etc. 
+pub struct Rule { + /// The action the rule takes + pub action: RuleAction, + #[serde(skip_serializing_if = "Option::is_none")] + /// The OS rule + pub os: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// The feature rule + pub features: Option, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// Information delegating the extraction of the library +pub struct LibraryExtract { + #[serde(skip_serializing_if = "Option::is_none")] + /// Files/Folders to be excluded from the extraction of the library + pub exclude: Option>, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +/// Information about the java version the game needs +pub struct JavaVersion { + /// The component needed for the Java installation + pub component: String, + /// The major Java version number + pub major_version: u32, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// A library which the game relies on to run +pub struct Library { + #[serde(skip_serializing_if = "Option::is_none")] + /// The files the library has + pub downloads: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Rules of the extraction of the file + pub extract: Option, + /// The maven name of the library. 
The format is `groupId:artifactId:version` + pub name: GradleSpecifier, + #[serde(skip_serializing_if = "Option::is_none")] + /// The URL to the repository where the library can be downloaded + pub url: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Native files that the library relies on + pub natives: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + /// Rules deciding whether the library should be downloaded or not + pub rules: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + /// SHA1 Checksums for validating the library's integrity. Only present for forge libraries + pub checksums: Option>, + #[serde(default = "default_include_in_classpath")] + /// Whether the library should be included in the classpath at the game's launch + pub include_in_classpath: bool, + #[serde(skip)] + /// if this library was patched or added by a patch + pub patched: bool, + #[serde(skip_serializing_if = "Option::is_none")] + /// Game-version-specific hash mapping for libraries that vary by Minecraft version + /// Maps minecraft_version → SHA256 hash of the artifact + /// e.g., {"1.16.5": "abc123...", "1.17.1": "def456..."} + /// When present, clients should look up their game version and construct CAS URL from hash + pub version_hashes: Option>, +} + +impl Library { + /// Resolves the URL for this library based on the minecraft version. + /// + /// For libraries with `version_hashes`, looks up the hash for the given version + /// and constructs a CAS URL. Falls back to the library's `url` field if + /// `version_hashes` is not present or doesn't contain the version. 
+ /// + /// # Arguments + /// * `minecraft_version` - The Minecraft version to resolve the URL for + /// * `base_url` - The base URL for the CAS (e.g., "https://maven.modrinth.com") + /// * `cas_version` - The CAS version number (e.g., 0) + /// + /// # Returns + /// * `Some(String)` - The resolved URL, either from CAS or the url field + /// * `None` - If neither version_hashes nor url contain a valid URL + /// + /// # Example + /// ``` + /// # use daedalus::minecraft::Library; + /// # use daedalus::GradleSpecifier; + /// # use std::collections::HashMap; + /// let mut library = Library { + /// name: "net.fabricmc:intermediary:1.16.5".parse().unwrap(), + /// url: None, + /// downloads: None, + /// extract: None, + /// natives: None, + /// rules: None, + /// checksums: None, + /// include_in_classpath: true, + /// patched: false, + /// version_hashes: Some({ + /// let mut map = HashMap::new(); + /// map.insert("1.16.5".to_string(), "abc123def456".to_string()); + /// map + /// }), + /// }; + /// + /// let url = library.resolve_url("1.16.5", "https://maven.modrinth.com", 0); + /// assert_eq!(url, Some("https://maven.modrinth.com/v0/objects/ab/c123def456".to_string())); + /// ``` + pub fn resolve_url(&self, minecraft_version: &str, base_url: &str, cas_version: u32) -> Option { + // First try version_hashes if present + if let Some(ref hashes) = self.version_hashes { + if let Some(hash) = hashes.get(minecraft_version) { + return Some(format!( + "{}/v{}/objects/{}/{}", + base_url, + cas_version, + &hash[..2], + &hash[2..] + )); + } + } + + // Fall back to url field + self.url.clone() + } +} + +#[derive(Deserialize, Debug, Clone)] +/// A partial library which should be merged with a full library +pub struct PartialLibrary { + /// The files the library has + pub downloads: Option, + /// Rules of the extraction of the file + pub extract: Option, + /// The maven name of the library. 
The format is `groupId:artifactId:version` + pub name: Option, + /// The URL to the repository where the library can be downloaded + pub url: Option, + /// Native files that the library relies on + pub natives: Option>, + /// Rules deciding whether the library should be downloaded or not + pub rules: Option>, + /// SHA1 Checksums for validating the library's integrity. Only present for forge libraries + pub checksums: Option>, + /// Whether the library should be included in the classpath at the game's launch + pub include_in_classpath: Option, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +/// A dependency rule, either suggests or equals +pub enum DependencyRule { + /// A rule to specify the version exactly + Equals(String), + /// A rule to suggest a soft requirement + Suggests(String), +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// A library dependency +pub struct Dependency { + /// A group name that identifies a library group this dependency refers to, ie. 
`"lwjgl"` + pub name: String, + /// a component uid like `"org.lwjgl"` + pub uid: String, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(flatten)] + /// a rule to specify the version exactly + pub rule: Option, +} + +/// Merges a partial library to make a complete library +pub fn merge_partial_library( + partial: PartialLibrary, + mut merge: Library, +) -> Library { + if let Some(downloads) = partial.downloads { + if let Some(merge_downloads) = &mut merge.downloads { + if let Some(artifact) = downloads.artifact { + merge_downloads.artifact = Some(artifact); + } + if let Some(classifiers) = downloads.classifiers { + if let Some(merge_classifiers) = + &mut merge_downloads.classifiers + { + for classifier in classifiers { + merge_classifiers.insert(classifier.0, classifier.1); + } + } else { + merge_downloads.classifiers = Some(classifiers); + } + } + } else { + merge.downloads = Some(downloads) + } + } + if let Some(extract) = partial.extract { + merge.extract = Some(extract) + } + if let Some(name) = partial.name { + merge.name = name + } + if let Some(url) = partial.url { + merge.url = Some(url) + } + if let Some(natives) = partial.natives { + if let Some(merge_natives) = &mut merge.natives { + for native in natives { + merge_natives.insert(native.0, native.1); + } + } else { + merge.natives = Some(natives); + } + } + if let Some(rules) = partial.rules { + if let Some(merge_rules) = &mut merge.rules { + for rule in rules { + merge_rules.push(rule); + } + } else { + merge.rules = Some(rules) + } + } + if let Some(checksums) = partial.checksums { + merge.checksums = Some(checksums) + } + if let Some(include_in_classpath) = partial.include_in_classpath { + merge.include_in_classpath = include_in_classpath + } + merge.patched = true; + + merge +} + +fn default_include_in_classpath() -> bool { + true +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(untagged)] +/// A container for an 
argument or multiple arguments +pub enum ArgumentValue { + /// The container has one argument + Single(String), + /// The container has multiple arguments + Many(Vec), +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(untagged)] +/// A command line argument passed to a program +pub enum Argument { + /// An argument which is applied no matter what + Normal(String), + /// An argument which is only applied if certain conditions are met + Ruled { + /// The rules deciding whether the argument(s) is used or not + rules: Vec, + /// The container of the argument(s) that should be applied accordingly + value: ArgumentValue, + }, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone, Copy)] +#[serde(rename_all = "snake_case")] +/// The type of argument +pub enum ArgumentType { + /// The argument is passed to the game + Game, + /// The argument is passed to the JVM + Jvm, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone, Copy)] +#[serde(rename_all = "kebab-case")] +/// Java Logging type +pub enum LoggingType { + /// Log4j XML config file + Log4j2Xml, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone, Copy)] +#[serde(rename_all = "kebab-case")] +/// Java Logging config names +pub enum LoggingConfigName { + /// Client logging config + Client, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +/// Java Logging artifact for download +pub struct LoggingArtifact { + /// The Name of the artifact + pub id: String, + /// The Sha1 hash of the file + pub sha1: String, + /// The Size of the file + pub size: u32, + /// The url where this file cna be reached + pub url: String, +} + 
+#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +/// Java Logging configuration +pub struct LoggingConfig { + /// Logging config file + pub file: LoggingArtifact, + /// JVM config arg + pub argument: String, + #[serde(rename = "type")] + /// Logging type + pub type_: LoggingType, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +/// Information about a version +pub struct VersionInfo { + #[serde(skip_serializing_if = "Option::is_none")] + /// Arguments passed to the game or JVM + pub arguments: Option>>, + /// Assets for the game + pub asset_index: AssetIndex, + /// The version ID of the assets + pub assets: String, + /// Game downloads of the version + pub downloads: HashMap, + /// The version ID of the version + pub id: String, + + /// When merged with a partial version, this is the vanilla id, otherwise it's the same as `id` + pub inherits_from: Option, + + /// The Java version this version supports + pub java_version: Option, + /// Libraries that the version depends on + pub libraries: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + /// dependencies not included in libraries + pub requires: Option>, + /// The classpath to the main class to launch the game + pub main_class: String, + #[serde(skip_serializing_if = "Option::is_none")] + /// (Legacy) Arguments passed to the game + pub minecraft_arguments: Option, + /// The minimum version of the Minecraft Launcher that can run this version of the game + pub minimum_launcher_version: u32, + /// The time that the version was released + #[cfg_attr(feature = "bincode", bincode(with_serde))] + pub release_time: DateTime, + /// The latest time a file in this version was updated + #[cfg_attr(feature = "bincode", bincode(with_serde))] + pub time: DateTime, + #[serde(rename = "type")] + /// The type of version + pub type_: 
VersionType, + #[serde(skip_serializing_if = "Option::is_none")] + /// Logging configuration + pub logging: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + /// (Forge-only) + pub data: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + /// (Forge-only) The list of processors to run after downloading the files + pub processors: Option>, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +/// Information about grouping of libraries +pub struct LibraryGroup { + /// The version ID of the version + pub id: String, + /// The version string for this group + pub version: String, + /// The uid aka maven package group id of this group + pub uid: String, + #[cfg_attr(feature = "bincode", bincode(with_serde))] + /// The time that the version was released + pub release_time: DateTime, + /// The type of version + pub type_: VersionType, + /// The library listing for this group + pub libraries: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + /// libraries required by this group + pub requires: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + /// libraries that conflict with this group + pub conflicts: Option>, + #[serde(skip_serializing)] + /// group has libs with split natives + pub has_split_natives: Option, +} + +#[derive(Debug, Clone)] +/// A pairing of a library group with the sha1 of its json representation +pub struct LWJGLEntry { + /// The sha1 of the group's json representation + pub sha1: String, + /// LibraryGroup for the entry + pub group: LibraryGroup, +} + +impl LWJGLEntry { + /// Construct an entry from a LibraryGroup + pub fn from_group(group: LibraryGroup) -> Self { + use sha1::Sha1; + + // compute a human readable hash of the group's contents less the release time + let mut group_copy = group.clone(); + group_copy.release_time = DateTime::default(); // reset so the hash doesn't account for it + let mut hasher = 
Sha1::new(); + hasher.update( + &serde_json::to_vec(&group_copy) + .expect("library group to serialize"), + ); + + let hash = hasher.hexdigest(); + LWJGLEntry { sha1: hash, group } + } +} + +/// Fetches detailed information about a version from the manifest +pub async fn fetch_version_info( + version: &Version, +) -> Result { + Ok(serde_json::from_slice( + &download_file(&version.url, Some(&version.sha1)).await?, + )?) +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// An asset of the game +pub struct Asset { + /// The SHA1 hash of the asset file + pub hash: String, + /// The size of the asset file + pub size: u32, +} + +#[cfg_attr(feature = "bincode", derive(Encode, Decode))] +#[derive(Serialize, Deserialize, Debug, Clone)] +/// An index containing all assets the game needs +pub struct AssetsIndex { + /// A hashmap containing the filename (key) and asset (value) + pub objects: HashMap, + #[serde(default)] + #[serde(rename = "virtual")] + /// If the index should be reconstructed at a virtual path + pub map_virtual: bool, + #[serde(default)] + /// If the index should be reconstructed in the instance's resource directory + pub map_to_resources: bool, +} + +/// Fetches the assets index from the version info +pub async fn fetch_assets_index( + version: &VersionInfo, +) -> Result { + Ok(serde_json::from_slice( + &download_file( + &version.asset_index.url, + Some(&version.asset_index.sha1), + ) + .await?, + )?) 
+} diff --git a/daedalus/src/modded.rs b/daedalus/src/modded.rs index fe76d1b..c22b785 100644 --- a/daedalus/src/modded.rs +++ b/daedalus/src/modded.rs @@ -8,11 +8,8 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Deserializer, Serialize}; use std::collections::HashMap; -#[cfg(feature = "bincode")] -use bincode::{Decode, Encode}; /// A data variable entry that depends on the side of the installation -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] pub struct SidedDataEntry { /// The value on the client @@ -35,7 +32,6 @@ where .map_err(serde::de::Error::custom) } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// A partial version returned by fabric meta @@ -45,11 +41,9 @@ pub struct PartialVersionInfo { /// The version ID this partial version inherits from pub inherits_from: String, /// The time that the version was released - #[cfg_attr(feature = "bincode", bincode(with_serde))] #[serde(deserialize_with = "deserialize_date")] pub release_time: DateTime, /// The latest time a file in this version was updated - #[cfg_attr(feature = "bincode", bincode(with_serde))] #[serde(deserialize_with = "deserialize_date")] pub time: DateTime, #[serde(skip_serializing_if = "Option::is_none")] @@ -78,7 +72,6 @@ pub struct PartialVersionInfo { } /// A processor to be ran after downloading the files -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Processor { /// Maven coordinates for the JAR library of this processor. 
@@ -199,7 +192,6 @@ pub fn merge_partial_version( } } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// A manifest containing information about a mod loader's versions @@ -208,7 +200,6 @@ pub struct Manifest { pub game_versions: Vec, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// A game version of Minecraft pub struct Version { @@ -220,7 +211,6 @@ pub struct Version { pub loaders: Vec, } -#[cfg_attr(feature = "bincode", derive(Encode, Decode))] #[derive(Serialize, Deserialize, Debug, Clone)] /// A version of a Minecraft mod loader pub struct LoaderVersion { diff --git a/daedalus/src/version.rs b/daedalus/src/version.rs index 2d37911..b3f686e 100644 --- a/daedalus/src/version.rs +++ b/daedalus/src/version.rs @@ -239,9 +239,21 @@ impl Ord for MinecraftVersion { } } - // Snapshot vs Release: snapshots are generally "development" versions - // We treat them as lexicographically greater for now - // (This is a heuristic and may need refinement based on usage) + // Snapshot vs Release comparison heuristic + // + // This is a simplified comparison that treats ALL snapshots as greater than + // ALL releases, which matches the general pattern that snapshots are development + // versions that come after the previous release. + // + // Example: 23w51a (snapshot for 1.20.3/1.20.4) > 1.20.2 (release) + // + // This heuristic works for most use cases in daedalus_client where we're + // organizing versions chronologically within a loader. For precise timeline + // ordering, callers should use the Minecraft version manifest ordering. + // + // Note: This does NOT mean 23w51a > 1.20.4, but rather that when comparing + // a snapshot to any release without additional context, we assume the snapshot + // is newer development work. (MinecraftVersion::Snapshot { .. }, MinecraftVersion::Release { .. 
}) => { Ordering::Greater } diff --git a/daedalus_client/Cargo.toml b/daedalus_client/Cargo.toml index 30fdf3b..bde2f4b 100644 --- a/daedalus_client/Cargo.toml +++ b/daedalus_client/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "daedalus_client" -version = "0.1.21" +version = "5.0.0" authors = ["Jai A "] edition = "2024" diff --git a/daedalus_client/src/common/cas.rs b/daedalus_client/src/common/cas.rs new file mode 100644 index 0000000..0accbac --- /dev/null +++ b/daedalus_client/src/common/cas.rs @@ -0,0 +1,99 @@ +//! Content-Addressable Storage (CAS) utilities +//! +//! This module provides common functions for working with the CAS system, +//! including URL building and hash extraction. + +/// Extract the content hash from a CAS URL +/// +/// CAS URLs have the format: `{base}/v{version}/objects/{hash_prefix}/{hash_suffix}` +/// This function extracts and concatenates the hash components. +/// +/// # Arguments +/// +/// * `url` - The CAS URL to extract the hash from +/// +/// # Returns +/// +/// * `Some(hash)` - The full hash if the URL format is valid +/// * `None` - If the URL doesn't match the expected format +/// +/// # Example +/// +/// ``` +/// let url = "https://example.com/v4/objects/ab/cdef123"; +/// let hash = extract_hash_from_cas_url(url); +/// assert_eq!(hash, Some("abcdef123".to_string())); +/// ``` +pub fn extract_hash_from_cas_url(url: &str) -> Option { + let parts: Vec<&str> = url.rsplitn(3, '/').collect(); + // Valid CAS URL must have exactly 3 parts when split from right: + // [hash_suffix, hash_prefix, "{base}/v{version}/objects"] + // The third part must end with "objects" to be valid + if parts.len() == 3 && parts[2].ends_with("objects") { + let hash_suffix = parts[0]; + let hash_prefix = parts[1]; + Some(format!("{}{}", hash_prefix, hash_suffix)) + } else { + None + } +} + +/// Build a CAS URL from a content hash +/// +/// Constructs a URL in the format: `{base}/v{version}/objects/{hash[..2]}/{hash[2..]}` +/// +/// # Arguments +/// +/// * 
`hash` - The content hash to build a URL for +/// +/// # Returns +/// +/// The complete CAS URL +/// +/// # Example +/// +/// ``` +/// let hash = "abcdef123456"; +/// let url = build_cas_url(hash); +/// // Returns: "{BASE_URL}/v{CAS_VERSION}/objects/ab/cdef123456" +/// ``` +pub fn build_cas_url(hash: &str) -> String { + let base_url = dotenvy::var("BASE_URL").expect("BASE_URL must be set"); + format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &hash[..2], + &hash[2..] + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_extract_hash_from_cas_url() { + // Valid CAS URL + let url = "https://example.com/v4/objects/ab/cdef123456"; + assert_eq!( + extract_hash_from_cas_url(url), + Some("abcdef123456".to_string()) + ); + + // Different hash + let url = "https://example.com/v4/objects/12/34567890abcd"; + assert_eq!( + extract_hash_from_cas_url(url), + Some("1234567890abcd".to_string()) + ); + + // Invalid URL (not enough parts) + let url = "https://example.com/objects/ab"; + assert_eq!(extract_hash_from_cas_url(url), None); + + // Invalid URL (no slashes) + let url = "invalid-url"; + assert_eq!(extract_hash_from_cas_url(url), None); + } +} diff --git a/daedalus_client/src/common/change_detection.rs b/daedalus_client/src/common/change_detection.rs new file mode 100644 index 0000000..a5e2575 --- /dev/null +++ b/daedalus_client/src/common/change_detection.rs @@ -0,0 +1,141 @@ +//! Version change detection utilities +//! +//! This module provides common logic for detecting whether a loader version has changed +//! by comparing content hashes, and logging appropriate messages. 
+ +use super::cas::extract_hash_from_cas_url; +use tracing::info; + +/// Result of version change detection +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChangeResult { + /// Whether the version should be uploaded (true if changed or new) + pub should_upload: bool, + /// The old hash if it existed + pub old_hash: Option, +} + +/// Detect if a loader version has changed by comparing hashes +/// +/// This function: +/// 1. Extracts the old hash from the old version URL (if it exists) +/// 2. Compares it with the new hash +/// 3. Logs an appropriate message: +/// - "✓ {loader} {version} unchanged" if hashes match +/// - "↻ {loader} {version} changed" if hashes differ +/// - "+ {loader} {version} is new" if no old version exists +/// 4. Returns whether the version should be uploaded +/// +/// # Arguments +/// +/// * `loader_name` - Name of the loader (e.g., "Forge", "NeoForge") +/// * `version_id` - Version identifier (e.g., "1.20.1-47.1.0") +/// * `old_version_url` - Optional CAS URL from the previous manifest +/// * `new_hash` - Hash of the newly generated version data +/// +/// # Returns +/// +/// `ChangeResult` indicating whether to upload and the old hash if it existed +/// +/// # Example +/// +/// ``` +/// let result = detect_version_change( +/// "Forge", +/// "1.20.1-47.1.0", +/// Some("https://example.com/v4/objects/ab/cdef123"), +/// "abcdef123" +/// ); +/// assert_eq!(result.should_upload, false); // Unchanged +/// ``` +pub fn detect_version_change( + loader_name: &str, + version_id: &str, + old_version_url: Option<&str>, + new_hash: &str, +) -> ChangeResult { + if let Some(old_url) = old_version_url { + if let Some(old_hash) = extract_hash_from_cas_url(old_url) { + if old_hash == new_hash { + info!( + "✓ {} {} unchanged (hash: {})", + loader_name, + version_id, + &new_hash[..8.min(new_hash.len())] + ); + return ChangeResult { + should_upload: false, + old_hash: Some(old_hash), + }; + } else { + info!( + "↻ {} {} changed (old: {}, new: {})", + 
loader_name, + version_id, + &old_hash[..8.min(old_hash.len())], + &new_hash[..8.min(new_hash.len())] + ); + return ChangeResult { + should_upload: true, + old_hash: Some(old_hash), + }; + } + } + } + + // No old version or couldn't extract hash + info!("+ {} {} is new", loader_name, version_id); + ChangeResult { + should_upload: true, + old_hash: None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_unchanged_version() { + let result = detect_version_change( + "TestLoader", + "1.0.0", + Some("https://example.com/v4/objects/ab/cdef123"), + "abcdef123", + ); + assert_eq!(result.should_upload, false); + assert_eq!(result.old_hash, Some("abcdef123".to_string())); + } + + #[test] + fn test_changed_version() { + let result = detect_version_change( + "TestLoader", + "1.0.0", + Some("https://example.com/v4/objects/ab/cdef123"), + "xyz789abc", + ); + assert_eq!(result.should_upload, true); + assert_eq!(result.old_hash, Some("abcdef123".to_string())); + } + + #[test] + fn test_new_version() { + let result = detect_version_change("TestLoader", "1.0.0", None, "abcdef123"); + assert_eq!(result.should_upload, true); + assert_eq!(result.old_hash, None); + } + + #[test] + fn test_invalid_old_url() { + // Old URL that doesn't match CAS format + let result = detect_version_change( + "TestLoader", + "1.0.0", + Some("invalid-url"), + "abcdef123", + ); + assert_eq!(result.should_upload, true); + assert_eq!(result.old_hash, None); + } +} diff --git a/daedalus_client/src/common/manifest_merge.rs b/daedalus_client/src/common/manifest_merge.rs new file mode 100644 index 0000000..63d4487 --- /dev/null +++ b/daedalus_client/src/common/manifest_merge.rs @@ -0,0 +1,218 @@ +//! Manifest version merging utilities +//! +//! This module provides common logic for merging old and new loader versions, +//! updating existing loaders, and sorting versions. 
+ +use daedalus::minecraft::VersionManifest; +use daedalus::modded::Version; +use tracing::info; + +/// Merge old and new loader versions +/// +/// This function: +/// 1. Starts with the old versions as a base +/// 2. For each new version: +/// - If the Minecraft version exists in old versions, merge loaders +/// - If the Minecraft version is new, add it +/// 3. When merging loaders: +/// - Update existing loaders if found +/// - Add new loaders if not found +/// +/// # Arguments +/// +/// * `old_versions` - Versions from the previous manifest +/// * `new_versions` - Newly generated versions +/// * `loader_name` - Name of the loader for logging (e.g., "Forge", "NeoForge") +/// +/// # Returns +/// +/// Merged versions with old and new combined +pub fn merge_loader_versions( + mut old_versions: Vec, + new_versions: Vec, + loader_name: &str, +) -> Vec { + for new_version in new_versions { + // Find if this Minecraft version already exists + if let Some(existing) = old_versions + .iter_mut() + .find(|v| v.id == new_version.id) + { + // Merge loaders: keep old loaders + add/update new ones + for new_loader in new_version.loaders { + if let Some(existing_loader) = existing + .loaders + .iter_mut() + .find(|l| l.id == new_loader.id) + { + // Update existing loader + let loader_id = new_loader.id.clone(); + *existing_loader = new_loader; + info!( + "✅ {} - Updated loader: {}/{}", + loader_name, existing.id, loader_id + ); + } else { + // Add new loader + info!( + "✅ {} - Added new loader: {}/{}", + loader_name, existing.id, new_loader.id + ); + existing.loaders.push(new_loader); + } + } + } else { + // Add new Minecraft version + info!( + "✅ {} - Added new Minecraft version: {}", + loader_name, new_version.id + ); + old_versions.push(new_version); + } + } + + old_versions +} + +/// Sort versions by Minecraft version order +/// +/// Sorts the versions based on their position in the Minecraft version manifest. +/// Versions not found in the manifest are placed at the end. 
+/// +/// # Arguments +/// +/// * `versions` - Versions to sort (modified in place) +/// * `minecraft_manifest` - Minecraft version manifest for ordering reference +pub fn sort_by_minecraft_order( + versions: &mut [Version], + minecraft_manifest: &VersionManifest, +) { + versions.sort_by(|x, y| { + let x_pos = minecraft_manifest + .versions + .iter() + .position(|z| { + // Handle special case for 1.7.10_pre4 -> 1.7.10-pre4 transformation + x.id.replace("1.7.10_pre4", "1.7.10-pre4") == z.id + }) + .unwrap_or(usize::MAX); + + let y_pos = minecraft_manifest + .versions + .iter() + .position(|z| { + // Handle special case for 1.7.10_pre4 -> 1.7.10-pre4 transformation + y.id.replace("1.7.10_pre4", "1.7.10-pre4") == z.id + }) + .unwrap_or(usize::MAX); + + x_pos.cmp(&y_pos) + }); +} + +/// Sort loaders within a version by their position in metadata +/// +/// This is used to maintain a consistent order of loaders based on the original +/// maven metadata or other source ordering. +/// +/// # Arguments +/// +/// * `version` - Version containing loaders to sort (modified in place) +/// * `loader_order` - Ordered list of loader IDs from metadata +pub fn sort_loaders_by_metadata(version: &mut Version, loader_order: &[String]) { + version.loaders.sort_by(|x, y| { + let x_pos = loader_order + .iter() + .position(|z| &y.id == z) + .unwrap_or(usize::MAX); + + let y_pos = loader_order + .iter() + .position(|z| &x.id == z) + .unwrap_or(usize::MAX); + + x_pos.cmp(&y_pos) + }); +} + +#[cfg(test)] +mod tests { + use super::*; + use daedalus::modded::LoaderVersion; + + #[test] + fn test_merge_adds_new_minecraft_version() { + let old_versions = vec![]; + let new_versions = vec![Version { + id: "1.20.1".to_string(), + stable: true, + loaders: vec![LoaderVersion { + id: "forge-47.1.0".to_string(), + url: "test_url".to_string(), + stable: true, + }], + }]; + + let merged = merge_loader_versions(old_versions, new_versions, "TestLoader"); + + assert_eq!(merged.len(), 1); + 
assert_eq!(merged[0].id, "1.20.1"); + assert_eq!(merged[0].loaders.len(), 1); + } + + #[test] + fn test_merge_updates_existing_loader() { + let old_versions = vec![Version { + id: "1.20.1".to_string(), + stable: true, + loaders: vec![LoaderVersion { + id: "forge-47.1.0".to_string(), + url: "old_url".to_string(), + stable: true, + }], + }]; + + let new_versions = vec![Version { + id: "1.20.1".to_string(), + stable: true, + loaders: vec![LoaderVersion { + id: "forge-47.1.0".to_string(), + url: "new_url".to_string(), + stable: true, + }], + }]; + + let merged = merge_loader_versions(old_versions, new_versions, "TestLoader"); + + assert_eq!(merged.len(), 1); + assert_eq!(merged[0].loaders[0].url, "new_url"); + } + + #[test] + fn test_merge_adds_new_loader_to_existing_version() { + let old_versions = vec![Version { + id: "1.20.1".to_string(), + stable: true, + loaders: vec![LoaderVersion { + id: "forge-47.1.0".to_string(), + url: "url1".to_string(), + stable: true, + }], + }]; + + let new_versions = vec![Version { + id: "1.20.1".to_string(), + stable: true, + loaders: vec![LoaderVersion { + id: "forge-47.2.0".to_string(), + url: "url2".to_string(), + stable: true, + }], + }]; + + let merged = merge_loader_versions(old_versions, new_versions, "TestLoader"); + + assert_eq!(merged.len(), 1); + assert_eq!(merged[0].loaders.len(), 2); + } +} diff --git a/daedalus_client/src/common/mod.rs b/daedalus_client/src/common/mod.rs new file mode 100644 index 0000000..40b36b0 --- /dev/null +++ b/daedalus_client/src/common/mod.rs @@ -0,0 +1,15 @@ +//! Common utilities shared across loader implementations +//! +//! This module contains shared functionality used by multiple loader implementations +//! (Forge, NeoForge, etc.) to avoid code duplication. 
+ +pub mod cas; +pub mod change_detection; +pub mod manifest_merge; + +// Re-export commonly used items for convenience +pub use cas::{build_cas_url, extract_hash_from_cas_url}; +pub use change_detection::{detect_version_change, ChangeResult}; +pub use manifest_merge::{ + merge_loader_versions, sort_by_minecraft_order, sort_loaders_by_metadata, +}; diff --git a/daedalus_client/src/forge/archive.rs b/daedalus_client/src/forge/archive.rs new file mode 100644 index 0000000..4c36fb9 --- /dev/null +++ b/daedalus_client/src/forge/archive.rs @@ -0,0 +1,121 @@ +//! Functions for reading data from Forge installer archives + +use super::types::{ForgeInstallerProfileV1, ForgeInstallerProfileV2}; +use daedalus::modded::PartialVersionInfo; +use std::io::Read; + +/// Read and parse install_profile.json from a V1 Forge installer archive +pub async fn read_install_profile_v1( + mut archive: zip::ZipArchive>, +) -> Result { + tokio::task::spawn_blocking(move || { + let mut install_profile = archive.by_name("install_profile.json")?; + + let mut contents = String::new(); + install_profile.read_to_string(&mut contents)?; + + Ok::( + serde_json::from_str::(&contents)?, + ) + }) + .await? +} + +/// Read and parse install_profile.json from a V2+ Forge installer archive +pub async fn read_install_profile_v2( + mut archive: zip::ZipArchive>, +) -> Result { + tokio::task::spawn_blocking(move || { + let mut install_profile = archive.by_name("install_profile.json")?; + + let mut contents = String::new(); + install_profile.read_to_string(&mut contents)?; + + Ok::( + serde_json::from_str::(&contents)?, + ) + }) + .await? 
+} + +/// Read and parse version.json from a V2+ Forge installer archive +pub async fn read_version_json( + mut archive: zip::ZipArchive>, +) -> Result { + tokio::task::spawn_blocking(move || { + let mut install_profile = archive.by_name("version.json")?; + + let mut contents = String::new(); + install_profile.read_to_string(&mut contents)?; + + Ok::( + serde_json::from_str::(&contents)?, + ) + }) + .await? +} + +/// Read the Forge universal JAR from the installer archive (V1 format) +pub async fn read_forge_universal( + mut archive: zip::ZipArchive>, + file_path: String, +) -> Result { + tokio::task::spawn_blocking(move || { + let mut forge_universal_file = archive.by_name(&file_path)?; + let mut forge_universal = Vec::new(); + forge_universal_file.read_to_end(&mut forge_universal)?; + + Ok::(bytes::Bytes::from( + forge_universal, + )) + }) + .await? +} + +/// Read a library from the maven/ directory in the installer archive +pub async fn read_library_from_archive( + mut archive: zip::ZipArchive>, + lib_name: daedalus::GradleSpecifier, +) -> Result, crate::infrastructure::error::Error> { + tokio::task::spawn_blocking(move || { + let entry_name = format!("maven/{}", lib_name.path()); + let lib_file = archive.by_name(&entry_name).map_err(|err| { + crate::infrastructure::error::invalid_input(format!( + "Failed to find entry {} in installer jar: {}", + entry_name, err + )) + }); + + // Thank you forge for always making it hard to parse your data + // 1.20.4+ has a local lib that doesn't exist in the installer jar + // Not sure what it does, but it doesn't seem to be needed + if lib_file.is_err() && &*lib_name.artifact == "forge" { + return Ok::<_, crate::infrastructure::error::Error>(None); + } + + let mut lib_file = lib_file?; + + let mut lib_bytes = Vec::new(); + lib_file.read_to_end(&mut lib_bytes)?; + + let result = Some(bytes::Bytes::from(lib_bytes)); + + Ok::<_, crate::infrastructure::error::Error>(result) + }) + .await? 
+} + +/// Read a data file from the installer archive (for V2+ installer data entries) +pub async fn read_data_file( + mut archive: zip::ZipArchive>, + path: String, +) -> Result { + tokio::task::spawn_blocking(move || { + let mut lib_file = archive.by_name(&path[1..path.len()])?; + let mut lib_bytes = Vec::new(); + lib_file.read_to_end(&mut lib_bytes)?; + + Ok::(bytes::Bytes::from(lib_bytes)) + }) + .await? +} diff --git a/daedalus_client/src/forge/libraries.rs b/daedalus_client/src/forge/libraries.rs new file mode 100644 index 0000000..02e868b --- /dev/null +++ b/daedalus_client/src/forge/libraries.rs @@ -0,0 +1,19 @@ +//! Library processing functions for Forge + +use daedalus::minecraft::Library; + +/// Check if a library should be loaded from the installer archive +/// rather than downloaded from a remote repository +pub fn is_local_lib(lib: &Library) -> bool { + lib.downloads + .as_ref() + .and_then(|x| { + x.artifact.as_ref().and_then(|x| { + x.url + .as_ref() + .map(|lib| lib.is_empty()) + }) + }) + .unwrap_or(false) + || lib.url.is_some() +} diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge/mod.rs similarity index 76% rename from daedalus_client/src/forge.rs rename to daedalus_client/src/forge/mod.rs index 91d60b5..634de56 100644 --- a/daedalus_client/src/forge.rs +++ b/daedalus_client/src/forge/mod.rs @@ -1,20 +1,32 @@ +//! 
Forge loader metadata retrieval and processing + +pub mod types; +pub mod archive; +pub mod libraries; +pub mod version; + +// Re-export commonly used types +pub use types::{ + ForgeInstallerProfileV1, + ForgeInstallerProfileV2, + MinecraftVersionLibraryCache, +}; + use crate::{ download_file, download_file_mirrors, format_url, }; use crate::services::upload::BatchUploader; -use chrono::{DateTime, Utc}; use dashmap::DashSet; use daedalus::minecraft::{ - Argument, ArgumentType, Library, VersionManifest, VersionType, + Argument, ArgumentType, Library, VersionManifest, }; use daedalus::modded::{ - LoaderVersion, PartialVersionInfo, Processor, SidedDataEntry, + LoaderVersion, PartialVersionInfo, }; use daedalus::{get_hash, GradleSpecifier}; use tracing::{info, warn}; use semver::{Version, VersionReq}; -use serde::{Deserialize, Serialize}; -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::convert::{TryInto, TryFrom}; use std::io::Read; use std::sync::{Arc, LazyLock}; @@ -37,114 +49,11 @@ static FORGE_MANIFEST_V3_QUERY: LazyLock = LazyLock::new(|| { VersionReq::parse(">=37.0.0").unwrap() }); -fn extract_hash_from_cas_url(url: &str) -> Option { - let parts: Vec<&str> = url.rsplitn(3, '/').collect(); - if parts.len() >= 2 { - let hash_suffix = parts[0]; - let hash_prefix = parts[1]; - Some(format!("{}{}", hash_prefix, hash_suffix)) - } else { - None - } -} - -pub async fn fetch_generated_version_info( - version_id: &str, -) -> Result { - let path = format!( - "minecraft/v{}/versions/{}.json", - daedalus::minecraft::CURRENT_FORMAT_VERSION, - version_id - ); - - Ok(serde_json::from_slice( - &daedalus::download_file(&format_url(&path), None).await?, - )?) 
-} - -#[derive(Clone)] -struct MinecraftVersionCacheEntry { - pub id: String, - pub libraries: HashSet, -} - -#[derive(Clone)] -pub struct MinecraftVersionLibraryCache { - versions: Vec, - max_size: usize, -} - -impl MinecraftVersionLibraryCache { - pub fn new() -> Self { - MinecraftVersionLibraryCache { - versions: Vec::new(), - max_size: 20, - } - } - - pub async fn load_minecraft_version_libs( - &mut self, - version_id: &str, - ) -> Result<&HashSet, crate::infrastructure::error::Error> { - let index = self.versions.iter().position(|ver| ver.id == version_id); - - if let Some(index) = index { - // move found entry to the front of the stack - let entry = self.versions.remove(index); - self.versions.insert(0, entry); - } else { - let generated_version = - fetch_generated_version_info(version_id).await?; - - let libraries: HashSet = generated_version - .libraries - .into_iter() - .map(|lib| lib.name) - .collect(); - self.versions.insert( - 0, - MinecraftVersionCacheEntry { - id: version_id.to_string(), - libraries, - }, - ); - // truncate to drop oldest entry () - self.versions.truncate(self.max_size); - } - - let entry = self - .versions - .first() - .expect("Valid first index as we just inserted it"); - Ok(&entry.libraries) - } -} - -pub fn should_ignore_artifact( - libs: &HashSet, - name: &GradleSpecifier, -) -> bool { - if let Some(ver) = libs.iter().find(|ver| { - ver.package == name.package - && ver.artifact == name.artifact - && ver.identifier == name.identifier - }) { - if ver.version == name.version - || lenient_semver::parse(&ver.version) - > lenient_semver::parse(&name.version) - { - // new version is lower - true - } else { - // no match or new version is higher and this is an upgrade - false - } - } else { - // no match in set - false - } -} +// Re-export version utilities for convenience +pub use version::{extract_hash_from_cas_url, fetch_generated_version_info, should_ignore_artifact}; +// Temporary: Keep retrieve_data here until we refactor it +// 
This will be broken down in Phase 1.5 pub async fn retrieve_data( minecraft_versions: &VersionManifest, uploader: &BatchUploader, @@ -184,8 +93,8 @@ pub async fn retrieve_data( for loader_version_full in loader_versions { - let is_snapshot = minecraft_version.contains('w') || - minecraft_version.contains("-pre") || + let is_snapshot = minecraft_version.contains('w') || + minecraft_version.contains("-pre") || minecraft_version.contains("-rc"); if is_snapshot { @@ -293,8 +202,6 @@ pub async fn retrieve_data( let forge_universal_bytes = forge_universal_bytes.clone(); let forge_universal_path = forge_universal_path.clone(); let minecraft_libs_filter = minecraft_libs_filter.clone(); - let uploader = uploader; - let s3_client = s3_client; async move { if lib.name.is_lwjgl() || lib.name.is_log4j() || should_ignore_artifact(&minecraft_libs_filter, &lib.name) { @@ -383,22 +290,14 @@ pub async fn retrieve_data( .cloned() }; - let should_upload = if let Some(old_version) = &old_loader_version { - if let Some(old_hash) = extract_hash_from_cas_url(&old_version.url) { - if old_hash == new_hash { - info!("✓ Forge {} unchanged (hash: {})", loader_version_full, &new_hash[..8]); - false - } else { - info!("↻ Forge {} changed (old: {}, new: {})", loader_version_full, &old_hash[..8], &new_hash[..8]); - true - } - } else { - true - } - } else { - info!("+ Forge {} is new", loader_version_full); - true - }; + // Use common change detection logic + let change_result = crate::common::change_detection::detect_version_change( + "Forge", + &loader_version_full, + old_loader_version.as_ref().map(|v| v.url.as_str()), + &new_hash, + ); + let should_upload = change_result.should_upload; let version_hash = if should_upload { uploader.upload_cas( @@ -469,17 +368,13 @@ pub async fn retrieve_data( let mut local_libs : HashMap> = HashMap::new(); - fn is_local_lib(lib: &Library) -> bool { - lib.downloads.as_ref().and_then(|x| x.artifact.as_ref().and_then(|x| x.url.as_ref().map(|lib| 
lib.is_empty()))).unwrap_or(false) || lib.url.is_some() - } - let mut i = 0; loop { let Some(lib) = &libs.get(i) else { break; }; - - if is_local_lib(lib) { + + if libraries::is_local_lib(lib) { let mut archive_clone = archive.clone(); let lib_name_clone = lib.name.clone(); @@ -507,7 +402,7 @@ pub async fn retrieve_data( }).await??; local_libs.insert(lib.name.to_string(), lib_bytes); - + } i += 1; @@ -581,8 +476,6 @@ pub async fn retrieve_data( let semaphore = semaphore.clone(); let visited_assets = visited_assets.clone(); let local_libs = local_libs.clone(); - let uploader = uploader; - let s3_client = s3_client; async move { let artifact_path = lib.name.path(); @@ -710,22 +603,14 @@ pub async fn retrieve_data( .cloned() }; - let should_upload = if let Some(old_version) = &old_loader_version { - if let Some(old_hash) = extract_hash_from_cas_url(&old_version.url) { - if old_hash == new_hash { - info!("✓ Forge {} unchanged (hash: {})", loader_version_full, &new_hash[..8]); - false - } else { - info!("↻ Forge {} changed (old: {}, new: {})", loader_version_full, &old_hash[..8], &new_hash[..8]); - true - } - } else { - true - } - } else { - info!("+ Forge {} is new", loader_version_full); - true - }; + // Use common change detection logic + let change_result = crate::common::change_detection::detect_version_change( + "Forge", + &loader_version_full, + old_loader_version.as_ref().map(|v| v.url.as_str()), + &new_hash, + ); + let should_upload = change_result.should_upload; let version_hash = if should_upload { uploader.upload_cas( @@ -947,192 +832,3 @@ pub async fn fetch_maven_metadata( .await?, )?) 
} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -struct ForgeInstallerProfileInstallDataV1 { - pub mirror_list: String, - pub target: String, - /// Path to the Forge universal library - pub file_path: String, - pub logo: String, - pub welcome: String, - pub version: String, - /// Maven coordinates of the Forge universal library - pub path: String, - pub profile_name: String, - pub minecraft: String, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -struct ForgeInstallerProfileManifestV1 { - pub id: String, - pub libraries: Vec, - pub main_class: Option, - pub minecraft_arguments: Option, - pub release_time: DateTime, - pub time: DateTime, - pub type_: VersionType, - pub assets: Option, - pub inherits_from: Option, - pub jar: Option, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -struct ForgeInstallerProfileV1 { - pub install: ForgeInstallerProfileInstallDataV1, - pub version_info: ForgeInstallerProfileManifestV1, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -struct ForgeInstallerProfileV2 { - pub profile: String, - pub version: String, - pub json: String, - pub path: Option, - pub minecraft: String, - pub data: HashMap, - pub libraries: Vec, - pub processors: Vec, -} - -#[cfg(test)] -mod tests { - use super::*; - use std::str::FromStr; - - #[test] - fn test_should_ignore_artifact() { - // Create test artifacts - let create_spec = |package: &str, artifact: &str, version: &str| -> GradleSpecifier { - GradleSpecifier::from_str(&format!("{}:{}:{}", package, artifact, version)) - .expect("Valid GradleSpecifier") - }; - - // Test case 1: Identical version (should ignore - already have it) - { - let mut libs = HashSet::new(); - libs.insert(create_spec("org.example", "library", "1.0.0")); - - let new_artifact = create_spec("org.example", "library", "1.0.0"); - assert!(should_ignore_artifact(&libs, &new_artifact), - "Should ignore 
identical version"); - } - - // Test case 2: Lower version in new data (should ignore - keep existing higher version) - { - let mut libs = HashSet::new(); - libs.insert(create_spec("org.example", "library", "2.0.0")); - - let new_artifact = create_spec("org.example", "library", "1.0.0"); - assert!(should_ignore_artifact(&libs, &new_artifact), - "Should ignore lower version"); - } - - // Test case 3: Higher version in new data (should NOT ignore - upgrade needed) - { - let mut libs = HashSet::new(); - libs.insert(create_spec("org.example", "library", "1.0.0")); - - let new_artifact = create_spec("org.example", "library", "2.0.0"); - assert!(!should_ignore_artifact(&libs, &new_artifact), - "Should NOT ignore higher version (upgrade needed)"); - } - - // Test case 4: No match in set (should NOT ignore - new artifact) - { - let mut libs = HashSet::new(); - libs.insert(create_spec("org.example", "other-library", "1.0.0")); - - let new_artifact = create_spec("org.example", "library", "1.0.0"); - assert!(!should_ignore_artifact(&libs, &new_artifact), - "Should NOT ignore new artifact"); - } - - // Test case 5: Different package (should NOT ignore) - { - let mut libs = HashSet::new(); - libs.insert(create_spec("org.example", "library", "1.0.0")); - - let new_artifact = create_spec("com.other", "library", "1.0.0"); - assert!(!should_ignore_artifact(&libs, &new_artifact), - "Should NOT ignore different package"); - } - - // Test case 6: Empty libs set (should NOT ignore) - { - let libs = HashSet::new(); - let new_artifact = create_spec("org.example", "library", "1.0.0"); - assert!(!should_ignore_artifact(&libs, &new_artifact), - "Should NOT ignore when libs is empty"); - } - } - - #[tokio::test] - async fn test_minecraft_version_cache_basic() { - let cache = MinecraftVersionLibraryCache::new(); - - // Verify initial state - assert_eq!(cache.versions.len(), 0, "Cache should start empty"); - assert_eq!(cache.max_size, 20, "Max size should be 20"); - } - - #[test] - fn 
test_minecraft_version_cache_lru_reordering() { - // Test LRU reordering logic without network calls - let mut cache = MinecraftVersionLibraryCache::new(); - - // Manually populate cache with test entries - let mut libs1 = HashSet::new(); - libs1.insert(GradleSpecifier::from_str("org.example:lib1:1.0.0").unwrap()); - cache.versions.push(MinecraftVersionCacheEntry { - id: "1.19.4".to_string(), - libraries: libs1, - }); - - let mut libs2 = HashSet::new(); - libs2.insert(GradleSpecifier::from_str("org.example:lib2:2.0.0").unwrap()); - cache.versions.push(MinecraftVersionCacheEntry { - id: "1.20.1".to_string(), - libraries: libs2, - }); - - // Verify initial order - assert_eq!(cache.versions[0].id, "1.19.4"); - assert_eq!(cache.versions[1].id, "1.20.1"); - - // Simulate LRU access: access second entry (should move to front) - let index = cache.versions.iter().position(|v| v.id == "1.20.1").unwrap(); - let entry = cache.versions.remove(index); - cache.versions.insert(0, entry); - - // Verify reordering - assert_eq!(cache.versions[0].id, "1.20.1", "Accessed entry should move to front"); - assert_eq!(cache.versions[1].id, "1.19.4"); - } - - #[test] - fn test_minecraft_version_cache_eviction() { - let mut cache = MinecraftVersionLibraryCache::new(); - - // Fill cache beyond max_size - for i in 0..25 { - let mut libs = HashSet::new(); - libs.insert(GradleSpecifier::from_str(&format!("org.example:lib{}:1.0.0", i)).unwrap()); - cache.versions.push(MinecraftVersionCacheEntry { - id: format!("1.{}.0", i), - libraries: libs, - }); - } - - // Simulate truncation (what happens in load_minecraft_version_libs) - cache.versions.truncate(cache.max_size); - - // Verify eviction - assert_eq!(cache.versions.len(), 20, "Cache should be truncated to max_size"); - assert_eq!(cache.versions[0].id, "1.0.0", "First entry should remain"); - } -} diff --git a/daedalus_client/src/forge/types.rs b/daedalus_client/src/forge/types.rs new file mode 100644 index 0000000..11c1fbe --- /dev/null +++ 
b/daedalus_client/src/forge/types.rs @@ -0,0 +1,137 @@ +//! Type definitions for Forge loader processing + +use crate::services::upload::BatchUploader; +use chrono::{DateTime, Utc}; +use dashmap::DashSet; +use daedalus::minecraft::{Library, VersionType}; +use daedalus::modded::{Processor, SidedDataEntry}; +use daedalus::GradleSpecifier; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, HashSet}; +use std::sync::Arc; +use tokio::sync::{Mutex, Semaphore}; + +/// Forge installer profile (v1 format) - install section +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct ForgeInstallerProfileInstallDataV1 { + pub mirror_list: String, + pub target: String, + /// Path to the Forge universal library + pub file_path: String, + pub logo: String, + pub welcome: String, + pub version: String, + /// Maven coordinates of the Forge universal library + pub path: String, + pub profile_name: String, + pub minecraft: String, +} + +/// Forge installer profile (v1 format) - version info section +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct ForgeInstallerProfileManifestV1 { + pub id: String, + pub libraries: Vec, + pub main_class: Option, + pub minecraft_arguments: Option, + pub release_time: DateTime, + pub time: DateTime, + pub type_: VersionType, + pub assets: Option, + pub inherits_from: Option, + pub jar: Option, +} + +/// Forge installer profile (v1 format) +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct ForgeInstallerProfileV1 { + pub install: ForgeInstallerProfileInstallDataV1, + pub version_info: ForgeInstallerProfileManifestV1, +} + +/// Forge installer profile (v2+ format) +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct ForgeInstallerProfileV2 { + pub profile: String, + pub version: String, + pub json: String, + pub path: Option, + pub minecraft: String, + pub data: HashMap, + pub libraries: 
Vec, + pub processors: Vec, +} + +/// Cache entry for Minecraft version libraries +#[derive(Clone)] +pub struct MinecraftVersionCacheEntry { + pub id: String, + pub libraries: HashSet, +} + +/// LRU cache for Minecraft version library sets +#[derive(Clone)] +pub struct MinecraftVersionLibraryCache { + pub versions: Vec, + pub max_size: usize, +} + +impl MinecraftVersionLibraryCache { + pub fn new() -> Self { + MinecraftVersionLibraryCache { + versions: Vec::new(), + max_size: 20, + } + } + + pub async fn load_minecraft_version_libs( + &mut self, + version_id: &str, + ) -> Result<&HashSet, crate::infrastructure::error::Error> { + let index = self.versions.iter().position(|ver| ver.id == version_id); + + if let Some(index) = index { + // move found entry to the front of the stack + let entry = self.versions.remove(index); + self.versions.insert(0, entry); + } else { + let generated_version = + super::fetch_generated_version_info(version_id).await?; + + let libraries: HashSet = generated_version + .libraries + .into_iter() + .map(|lib| lib.name) + .collect(); + self.versions.insert( + 0, + MinecraftVersionCacheEntry { + id: version_id.to_string(), + libraries, + }, + ); + // truncate to drop oldest entry + self.versions.truncate(self.max_size); + } + + let entry = self + .versions + .first() + .expect("Valid first index as we just inserted it"); + Ok(&entry.libraries) + } +} + +/// Context shared across Forge processing operations +pub struct ForgeProcessingContext<'a> { + pub uploader: &'a BatchUploader, + pub s3_client: &'a s3::Bucket, + pub semaphore: Arc, + pub visited_assets: Arc>, + pub mc_library_cache: Arc>, + pub old_versions: Arc>>, +} diff --git a/daedalus_client/src/forge/version.rs b/daedalus_client/src/forge/version.rs new file mode 100644 index 0000000..0109e47 --- /dev/null +++ b/daedalus_client/src/forge/version.rs @@ -0,0 +1,134 @@ +//! 
Version-related utilities for Forge processing + +use crate::format_url; +use daedalus::GradleSpecifier; +use std::collections::HashSet; + +// Re-export CAS utilities from common module +pub use crate::common::cas::extract_hash_from_cas_url; + +/// Fetch generated version info from the CAS +pub async fn fetch_generated_version_info( + version_id: &str, +) -> Result { + let path = format!( + "minecraft/v{}/versions/{}.json", + daedalus::minecraft::CURRENT_FORMAT_VERSION, + version_id + ); + + Ok(serde_json::from_slice( + &daedalus::download_file(&format_url(&path), None).await?, + )?) +} + +/// Check if an artifact should be ignored based on version comparison +/// Returns true if: +/// - The artifact already exists with the same or higher version in libs +/// - This prevents downgrading libraries +pub fn should_ignore_artifact(libs: &HashSet, name: &GradleSpecifier) -> bool { + if let Some(ver) = libs.iter().find(|ver| { + ver.package == name.package + && ver.artifact == name.artifact + && ver.identifier == name.identifier + }) { + if ver.version == name.version + || lenient_semver::parse(&ver.version) > lenient_semver::parse(&name.version) + { + // new version is lower or equal + true + } else { + // no match or new version is higher and this is an upgrade + false + } + } else { + // no match in set + false + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::str::FromStr; + + #[test] + fn test_should_ignore_artifact() { + // Create test artifacts + let create_spec = + |package: &str, artifact: &str, version: &str| -> GradleSpecifier { + GradleSpecifier::from_str(&format!("{}:{}:{}", package, artifact, version)) + .expect("Valid GradleSpecifier") + }; + + // Test case 1: Identical version (should ignore - already have it) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "1.0.0")); + + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!( + should_ignore_artifact(&libs, &new_artifact), + 
"Should ignore identical version" + ); + } + + // Test case 2: Lower version in new data (should ignore - keep existing higher version) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "2.0.0")); + + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!( + should_ignore_artifact(&libs, &new_artifact), + "Should ignore lower version" + ); + } + + // Test case 3: Higher version in new data (should NOT ignore - upgrade needed) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "1.0.0")); + + let new_artifact = create_spec("org.example", "library", "2.0.0"); + assert!( + !should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore higher version (upgrade needed)" + ); + } + + // Test case 4: No match in set (should NOT ignore - new artifact) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "other-library", "1.0.0")); + + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!( + !should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore new artifact" + ); + } + + // Test case 5: Different package (should NOT ignore) + { + let mut libs = HashSet::new(); + libs.insert(create_spec("org.example", "library", "1.0.0")); + + let new_artifact = create_spec("com.other", "library", "1.0.0"); + assert!( + !should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore different package" + ); + } + + // Test case 6: Empty libs set (should NOT ignore) + { + let libs = HashSet::new(); + let new_artifact = create_spec("org.example", "library", "1.0.0"); + assert!( + !should_ignore_artifact(&libs, &new_artifact), + "Should NOT ignore when libs is empty" + ); + } + } +} diff --git a/daedalus_client/src/infrastructure/circuit_breaker.rs b/daedalus_client/src/infrastructure/circuit_breaker.rs index 75d505d..28b5345 100644 --- a/daedalus_client/src/infrastructure/circuit_breaker.rs +++ 
b/daedalus_client/src/infrastructure/circuit_breaker.rs @@ -69,6 +69,19 @@ impl CircuitBreaker { /// Executes a future with circuit breaker protection /// + /// # Concurrency Model + /// + /// This implementation intentionally releases the state lock before executing + /// the future to avoid holding the lock during potentially long-running I/O + /// operations. This means: + /// + /// - Multiple requests may execute concurrently when the circuit is closed + /// - In half-open state, multiple test requests may execute if they arrive + /// while another is in flight (this is acceptable for our use case) + /// - State changes are atomic but not synchronized with request execution + /// + /// This design prioritizes throughput over strict serialization of requests. + /// /// # Returns /// - `Ok(T)` if the operation succeeded /// - `Err(CircuitBreakerError::Open)` if the circuit is open diff --git a/daedalus_client/src/loaders/mod.rs b/daedalus_client/src/loaders/mod.rs index b827a71..43905d2 100644 --- a/daedalus_client/src/loaders/mod.rs +++ b/daedalus_client/src/loaders/mod.rs @@ -24,6 +24,21 @@ fn extract_hash_from_cas_url(url: &str) -> Option { } } +/// Determines if a library is an intermediary/hashed mapping library +/// +/// Intermediary libraries are the ONLY libraries that are truly game-version-specific +/// in Fabric/Quilt loaders. All other libraries (asm, mixin, loader itself) are +/// version-agnostic and should be downloaded once. 
/// Determines if a library is an intermediary/hashed mapping library
///
/// Intermediary libraries are the ONLY libraries that are truly
/// game-version-specific in Fabric/Quilt loaders. All other libraries
/// (asm, mixin, loader itself) are version-agnostic and should be
/// downloaded once.
///
/// Examples:
/// - `net.fabricmc:intermediary:1.21` → true
/// - `org.quiltmc:hashed:1.20.4` → true
/// - `org.ow2.asm:asm:9.7.1` → false
/// - `net.fabricmc:fabric-loader:0.16.10` → false
fn is_intermediary_library(library_name: &str) -> bool {
    // Substring match on either mapping marker, same semantics as
    // checking each marker individually.
    ["intermediary", "hashed"]
        .iter()
        .any(|marker| library_name.contains(marker))
}
artifact.to_vec(), - Some("application/java-archive".to_string()), - s3_client, - semaphore.clone(), - ).await?; - - Ok::<(String, String), crate::infrastructure::error::Error>((game_version_str, hash)) - } - })) - .await?; - - // Build version_hashes map from results - let version_hashes: HashMap = version_hash_results.into_iter().collect(); - lib.version_hashes = Some(version_hashes); - lib.url = None; + // Check if this is an intermediary library + // Only intermediary libraries are truly game-version-specific and need version_hashes + if is_intermediary_library(&lib.name.to_string()) { + // Intermediary library: Download for all game versions and create version_hashes map + let version_hash_results = futures::future::try_join_all(list_game.iter().map(|game_version| { + let semaphore = semaphore.clone(); + let lib_name = lib.name.to_string(); + let lib_url = lib.url.clone(); + let maven_fallback = maven_fallback.clone(); + let game_version_str = game_version.version().to_string(); + + async move { + let artifact_path = daedalus::get_path_from_artifact( + &lib_name.replace( + &BRANDING.get_or_init(Branding::default).dummy_replace_string, + &game_version_str, + ), + )?; + + let artifact = download_file( + &format!( + "{}{}", + lib_url.as_deref() + .unwrap_or(&maven_fallback), + artifact_path + ), + None, + semaphore.clone(), + ) + .await?; + + // Upload to CAS and get hash + let hash = uploader.upload_cas( + artifact.to_vec(), + Some("application/java-archive".to_string()), + s3_client, + semaphore.clone(), + ).await?; + + Ok::<(String, String), crate::infrastructure::error::Error>((game_version_str, hash)) + } + })) + .await?; + + // Build version_hashes map from results + let version_hashes: HashMap = version_hash_results.into_iter().collect(); + lib.version_hashes = Some(version_hashes); + lib.url = None; + } else { + // Regular library with placeholder: Download ONCE and use single URL + // The dummy game version was only used to fetch the manifest, but this 
library + // itself is version-agnostic (e.g., org.ow2.asm:asm, fabric-loader, etc.) + let artifact_path = lib.name.path(); + + let artifact = download_file( + &format!( + "{}{}", + lib.url.as_deref() + .unwrap_or(&maven_fallback), + artifact_path + ), + None, + semaphore.clone(), + ) + .await?; + + // Upload to CAS and get hash + let hash = uploader.upload_cas( + artifact.to_vec(), + Some("application/java-archive".to_string()), + s3_client, + semaphore.clone(), + ).await?; + + // Store full CAS URL + let base_url = dotenvy::var("BASE_URL").unwrap(); + lib.url = Some(format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &hash[..2], + &hash[2..] + )); + } return Ok(lib); } diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs index 38fec57..a0345d8 100644 --- a/daedalus_client/src/main.rs +++ b/daedalus_client/src/main.rs @@ -14,6 +14,21 @@ use tracing_subscriber::EnvFilter; #[cfg(unix)] use tokio::signal::unix::{signal, SignalKind}; +/// Configuration constants +/// Update interval for fetching new metadata (1 hour) +const UPDATE_INTERVAL_SECS: u64 = 60 * 60; +/// Maximum number of concurrent upload operations +const MAX_CONCURRENT_UPLOADS: usize = 10; +/// Circuit breaker: number of consecutive failures before opening +const CIRCUIT_BREAKER_FAILURE_THRESHOLD: u32 = 5; +/// Circuit breaker: duration to wait before retrying (5 minutes) +const CIRCUIT_BREAKER_RESET_TIMEOUT_SECS: u64 = 300; +/// Maximum number of retry attempts for uploads +const MAX_UPLOAD_RETRIES: usize = 10; +/// Maximum delay between retries (30 minutes) +const MAX_RETRY_DELAY_SECS: u64 = 1800; + +mod common; mod fabric; mod infrastructure; mod forge; @@ -185,8 +200,8 @@ fn main() -> Result<(), crate::infrastructure::error::Error> { )) .unwrap(); - let mut timer = tokio::time::interval(Duration::from_secs(60 * 60)); - let semaphore = Arc::new(Semaphore::new(10)); + let mut timer = tokio::time::interval(Duration::from_secs(UPDATE_INTERVAL_SECS)); + 
let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT_UPLOADS)); { let uploaded_files = Arc::new(Mutex::new(Vec::new())); @@ -570,23 +585,23 @@ static CLIENT: LazyLock = LazyLock::new(|| { }); static MINECRAFT_BREAKER: LazyLock = LazyLock::new(|| { - crate::infrastructure::circuit_breaker::CircuitBreaker::new("minecraft", 5, Duration::from_secs(300)) + crate::infrastructure::circuit_breaker::CircuitBreaker::new("minecraft", CIRCUIT_BREAKER_FAILURE_THRESHOLD, Duration::from_secs(CIRCUIT_BREAKER_RESET_TIMEOUT_SECS)) }); static FORGE_BREAKER: LazyLock = LazyLock::new(|| { - crate::infrastructure::circuit_breaker::CircuitBreaker::new("forge", 5, Duration::from_secs(300)) + crate::infrastructure::circuit_breaker::CircuitBreaker::new("forge", CIRCUIT_BREAKER_FAILURE_THRESHOLD, Duration::from_secs(CIRCUIT_BREAKER_RESET_TIMEOUT_SECS)) }); static FABRIC_BREAKER: LazyLock = LazyLock::new(|| { - crate::infrastructure::circuit_breaker::CircuitBreaker::new("fabric", 5, Duration::from_secs(300)) + crate::infrastructure::circuit_breaker::CircuitBreaker::new("fabric", CIRCUIT_BREAKER_FAILURE_THRESHOLD, Duration::from_secs(CIRCUIT_BREAKER_RESET_TIMEOUT_SECS)) }); static QUILT_BREAKER: LazyLock = LazyLock::new(|| { - crate::infrastructure::circuit_breaker::CircuitBreaker::new("quilt", 5, Duration::from_secs(300)) + crate::infrastructure::circuit_breaker::CircuitBreaker::new("quilt", CIRCUIT_BREAKER_FAILURE_THRESHOLD, Duration::from_secs(CIRCUIT_BREAKER_RESET_TIMEOUT_SECS)) }); static NEOFORGE_BREAKER: LazyLock = LazyLock::new(|| { - crate::infrastructure::circuit_breaker::CircuitBreaker::new("neoforge", 5, Duration::from_secs(300)) + crate::infrastructure::circuit_breaker::CircuitBreaker::new("neoforge", CIRCUIT_BREAKER_FAILURE_THRESHOLD, Duration::from_secs(CIRCUIT_BREAKER_RESET_TIMEOUT_SECS)) }); #[instrument(skip(bytes, uploaded_files, semaphore), fields(size = bytes.len()))] @@ -619,10 +634,10 @@ pub async fn upload_file_to_bucket( match result { Ok(_) => { { - info!(path = 
%path, "Upload completed"); let mut uploaded_files = uploaded_files.lock().await; uploaded_files.push(key); } + info!(path = %path, "Upload completed"); Ok(()) } @@ -634,8 +649,8 @@ pub async fn upload_file_to_bucket( }) .retry( ExponentialBuilder::default() - .with_max_times(10) - .with_max_delay(Duration::from_secs(1800)), + .with_max_times(MAX_UPLOAD_RETRIES) + .with_max_delay(Duration::from_secs(MAX_RETRY_DELAY_SECS)), ) .await } diff --git a/daedalus_client/src/minecraft.rs b/daedalus_client/src/minecraft.rs deleted file mode 100644 index 68c8d76..0000000 --- a/daedalus_client/src/minecraft.rs +++ /dev/null @@ -1,1058 +0,0 @@ -use crate::download_file; -use crate::format_url; -use crate::services::upload::BatchUploader; -use dashmap::DashSet; -use daedalus::minecraft::{ - merge_partial_library, Dependency, DependencyRule, JavaVersion, LWJGLEntry, - Library, LibraryDownload, LibraryDownloads, LibraryGroup, - MinecraftJavaProfile, Os, PartialLibrary, Rule, RuleAction, VersionInfo, - VersionManifest, VersionType, -}; -use daedalus::{get_hash, GradleSpecifier}; -use tracing::{debug, error, info, warn}; -use serde::Deserialize; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::convert::TryFrom; -use std::sync::Arc; -use std::time::Instant; -use tokio::sync::{Mutex, Semaphore}; - -fn patch_library( - patches: &Vec, - mut library: Library, -) -> Vec { - let mut val = Vec::new(); - - let actual_patches = patches - .iter() - .filter(|x| x.match_.contains(&library.name.to_string())) - .collect::>(); - - if !actual_patches.is_empty() { - for patch in actual_patches { - info!( - "patching {} with library patch {}", - library.name, patch._comment - ); - - if let Some(override_) = &patch.override_ { - library = merge_partial_library(override_.clone(), library); - } - - if let Some(additional_libraries) = &patch.additional_libraries { - for additional_library in additional_libraries { - if patch.patch_additional_libraries.unwrap_or(false) { - let mut libs = - 
patch_library(patches, additional_library.clone()); - val.append(&mut libs) - } else { - let mut new_lib = additional_library.clone(); - new_lib.patched = true; - val.push(new_lib); - } - } - } - } - - val.push(library); - } else { - val.push(library); - } - - val -} - -fn process_single_lwjgl_variant( - variant: &LibraryGroup, - patches: &Vec, -) -> Result, crate::infrastructure::error::Error> { - let lwjgl_version = variant.version.clone(); - - info!("Processing LWJGL variant {}", lwjgl_version); - - let mut lwjgl = variant.clone(); - - let mut new_libraries = Vec::new(); - - for library in lwjgl.libraries.clone() { - let mut libs = patch_library(patches, library); - new_libraries.append(&mut libs); - } - lwjgl.libraries = new_libraries; - - let version_path = if lwjgl_version.starts_with("2") { - lwjgl.id = "LWJGL 2".to_string(); - lwjgl.uid = "org.lwjgl2".to_string(); - lwjgl.conflicts = Some(vec![Dependency { - name: "lwjgl".to_string(), - uid: "org.lwjgl3".to_string(), - rule: None, - }]); - - format!( - "minecraft/v{}/libraries/org.lwjgl2/{}.json", - daedalus::minecraft::CURRENT_FORMAT_VERSION, - lwjgl_version - ) - } else if lwjgl_version.starts_with('3') { - lwjgl.id = "LWJGL 3".to_string(); - lwjgl.uid = "org.lwjgl3".to_string(); - lwjgl.conflicts = Some(vec![Dependency { - name: "lwjgl".to_string(), - uid: "org.lwjgl2".to_string(), - rule: None, - }]); - - let unneeded: HashSet<&str> = - vec!["jutils", "jinput"].into_iter().collect(); - let filtered_libs = lwjgl - .libraries - .into_iter() - .filter(|lib| !unneeded.contains(lib.name.artifact.as_str())) - .collect::>(); - lwjgl.libraries = filtered_libs; - - format!( - "minecraft/v{}/libraries/org.lwjgl3/{}.json", - daedalus::minecraft::CURRENT_FORMAT_VERSION, - lwjgl_version - ) - } else { - return Err(crate::infrastructure::error::invalid_input(format!("Unknown LWJGL version {}", lwjgl_version))); - }; - - let mut good = true; - for lib in &lwjgl.libraries { - if lib.patched { - continue; - } - if let 
Some(natives) = &lib.natives { - let checked: HashSet<&Os> = - vec![&Os::Linux, &Os::Windows, &Os::Osx] - .into_iter() - .collect(); - if !checked.is_subset(&natives.clone().keys().collect()) { - warn!("LWJGL variant library missing system classifier: {} {} {:?}", lwjgl.version, lib.name, natives.keys()); - good = false; - break; - } - if lib.downloads.is_some() { - if let Some(classifiers) = &lib - .downloads - .clone() - .expect("Unwrap to be safe inside is_some") - .classifiers - { - for entry in checked { - let baked_entry = natives.get(entry); - if let Some(baked_entry) = baked_entry { - if !classifiers.contains_key(baked_entry) { - warn!("LWJGL variant library missing download for classifier: {} {} {:?} {:?}", lwjgl.version, lib.name, baked_entry, classifiers.keys().collect::>()); - good = false; - break; - } - } - } - } else { - warn!("LWJGL variant library missing downloads classifiers: {} {}", lwjgl.version, lib.name); - good = false; - break; - } - } - } - } - if good { - Ok(Some((version_path, lwjgl))) - } else { - Ok(None) - } -} - -/// Patch CVE-2021-44228, CVE-2021-44832, CVE-2021-45046 -fn map_log4j_artifact( - version: &str, -) -> Result, crate::infrastructure::error::Error> { - debug!("log4j version: {}", version); - let x = lenient_semver::parse(version); - if x <= lenient_semver::parse("2.0") { - debug!("log4j use beta9 patch"); - return Ok(Some(("2.0-beta9-fixed".to_string(), format_url("maven/")))); - } - if x < lenient_semver::parse("2.17.1") { - debug!("bump log4j to 2.17.1"); - return Ok(Some(( - "2.17.1".to_string(), - "https://repo1.maven.org/maven2/".to_string(), - ))); - } - debug!("no log4j match!"); - Ok(None) -} - -pub async fn retrieve_data( - uploader: &BatchUploader, - manifest_builder: &crate::services::cas::ManifestBuilder, - s3_client: &s3::Bucket, - semaphore: Arc, - is_first_run: bool, -) -> Result { - - - info!(is_first_run = is_first_run, "Retrieving Minecraft data"); - - // TODO: Old manifest doesn't take LWJGL meta into 
account - let old_manifest = if is_first_run { - None - } else { - daedalus::minecraft::fetch_version_manifest(Some( - &format_url(&format!( - "minecraft/v{}/manifest.json", - daedalus::minecraft::CURRENT_FORMAT_VERSION - )), - )) - .await - .ok() - }; - - let mut manifest = - daedalus::minecraft::fetch_version_manifest(None).await?; - - let cloned_manifest = Arc::new(Mutex::new(manifest.clone())); - - let patches = get_library_patches().await?; - let cloned_patches = Arc::new(&patches); - - let lwjgl_config = get_lwjgl_config().await?; - - let visited_assets = Arc::new(DashSet::new()); - - let lwjgl_version_variants_mutex: Arc< - Mutex>>, - > = Arc::new(Mutex::new(BTreeMap::new())); - let lwjgl_reject_reasons: HashMap> = lwjgl_config - .reject - .clone() - .into_iter() - .map(|mark| (mark.match_, mark.reason)) - .collect(); - let reject_lwjgl_variants: HashSet = lwjgl_config - .reject - .into_iter() - .map(|mark| mark.match_) - .collect(); - let accept_lwjgl_variants: HashSet = lwjgl_config - .accept - .into_iter() - .map(|mark| mark.match_) - .collect(); - - async fn add_lwjgl_version( - variants_mutex: Arc>>>, - lwjgl: &LibraryGroup, - ) { - let mut lwjgl_copy = lwjgl.clone(); - lwjgl_copy.libraries.sort_by(|x, y| x.name.cmp(&y.name)); - - let entry = LWJGLEntry::from_group(lwjgl_copy); - let current_sha1 = entry.sha1.clone(); - let version = entry.group.version.clone(); - let mut found = false; - - let mut version_variants = variants_mutex.lock().await; - - let variants = version_variants - .entry(version.clone()) - .or_insert_with(Vec::new); - for variant in variants.iter_mut() { - if entry.sha1 == variant.sha1 { - found = true; - if entry.group.release_time > variant.group.release_time { - variant.group.release_time = entry.group.release_time; - } - break; - } - } - - if !found { - info!( - "!! 
New variant for LWJGL version {:?} : {}", - version, current_sha1 - ); - debug!("New LWLGL variant {:?}", &lwjgl); - variants.push(entry); - } - } - - let now = Instant::now(); - - let mut version_futures = Vec::new(); - - for version in manifest.versions.iter_mut().rev() { - version_futures.push(async { - let old_version = if let Some(old_manifest) = &old_manifest { - old_manifest.versions.iter().find(|x| x.id == version.id) - } else { - None - }; - - if let Some(old_version) = old_version { - if old_version.sha1 == version.sha1 { - return Ok(()); - } - } - - let visited_assets = Arc::clone(&visited_assets); - let cloned_manifest_mutex = Arc::clone(&cloned_manifest); - let semaphore = Arc::clone(&semaphore); - let patches = Arc::clone(&cloned_patches); - - let lwjgl_version_variants_mutex = Arc::clone(&lwjgl_version_variants_mutex); - - let assets_hash = - old_version.and_then(|x| x.assets_index_sha1.clone()); - - async move { - let mut version_info = - daedalus::minecraft::fetch_version_info(version).await?; - - fn lib_is_split_natives(lib: &Library) -> bool { - lib.name.identifier.as_ref().is_some_and(|data| data.starts_with("natives-")) - } - - fn version_has_split_natives(ver: &VersionInfo) -> bool { - ver.libraries.iter().any(lib_is_split_natives) - } - - fn is_macos_only(rules: &Option>) -> bool { - let mut allows_osx = false; - let mut allows_all = false; - if let Some(rules) = rules { - for rule in rules { - if rule.action == RuleAction::Allow && rule.os.is_some() && rule.os.clone().expect("Unwrap to be safe with boolean short circuit").name.is_some_and(|os| os == Os::Osx) { - allows_osx = true; - } - if rule.action == RuleAction::Allow && rule.os.is_none() { - allows_all = false; - } - } - - allows_osx && !allows_all - } else { - false - } - } - - let has_split_natives = version_has_split_natives(&version_info); - let mut is_lwjgl_3 = false; - let mut lwjgl_buckets: HashMap>, LibraryGroup> = HashMap::new(); - - - let mut new_libraries = Vec::new(); - 
info!("Processing libraries for version {}", version_info.id); - for library in version_info.libraries.iter_mut() { - - if lib_is_split_natives(library) { - if let Some(identifier) = &library.name.identifier { - info!("Splitting library {} into artifact {}", library.name, identifier); - library.name.artifact = format!("{}-{}", library.name.artifact, identifier); - library.name.identifier = None; - } - } - let spec = &mut library.name; - - if spec.is_lwjgl() { - - let mut rules = None; - let set_version: Option = if has_split_natives { // implies lwjgl3 - is_lwjgl_3 = true; - debug!("lwlgl library {} has split natives, version {}", spec, spec.version); - - Some(spec.version.clone()) - } else { - debug!("lwlgl library {} is not split, package: {} artifact:{} version: {}", spec, spec.package, spec.artifact, spec.version); - rules = library.rules.clone(); - library.rules = None; - if is_macos_only(&rules) { - info!("Candidate library {} is only for macOS and is therefore ignored", spec); - continue; - } - if spec.package == "org.lwjgl.lwjgl" && spec.artifact == "lwjgl" { - Some(spec.version.clone()) - } else if spec.package == "org.lwjgl" && spec.artifact == "lwjgl" { - is_lwjgl_3 = true; - Some(spec.version.clone()) - } else { - None - } - }; - debug!("lwjgl library {} is setting version {:?}", spec, set_version); - - let version_id = &version_info.id; - let version_release_time = version_info.release_time; - - info!("Setting lwjgl bucket {:?} for {} with release {}", &rules, version_id, version_release_time); - let bucket = lwjgl_buckets.entry(rules.clone()).or_insert_with(|| { - LibraryGroup { - id: "LWJGL".to_string(), - version: "undetermined".to_string(), - uid: "org.lwjgl".to_string(), - release_time: version_release_time, - libraries: Vec::new(), - requires: None, - conflicts: None, - type_: VersionType::Release, - has_split_natives: Some(has_split_natives), - } - }); - bucket.has_split_natives = Some(has_split_natives); - - if let Some(version) = set_version { 
- debug!("Setting bucket version {} for {}", version, version_info.id); - bucket.version = version; - } - bucket.libraries.push(library.clone()); - if version_info.release_time > bucket.release_time { - bucket.release_time = version_info.release_time; - } - } else if spec.is_log4j() { - if let Some((version_override, maven_override)) = map_log4j_artifact(&spec.version)? { - let replacement_name = GradleSpecifier { - package: "org.apache.logging.log4j".to_string(), - artifact: spec.artifact.clone(), - identifier: None, - version: version_override.clone(), - extension: "jar".to_string() - }; - let (sha1, size) = match version_override.as_str() { - "2.0-beta9-fixed" => { - match spec.artifact.as_str() { - "log4j-api" => { - Ok(("b61eaf2e64d8b0277e188262a8b771bbfa1502b3", 107347)) - } - "log4j-core" => { - Ok(("677991ea2d7426f76309a73739cecf609679492c", 677588)) - } - _ => { - Err(crate::infrastructure::error::invalid_input(format!("Unhandled log4j artifact {} for overridden version {}", spec.artifact, version_override))) - } - } - } - "2.17.1" => { - match spec.artifact.as_str() { - "log4j-api" => { - Ok(("d771af8e336e372fb5399c99edabe0919aeaf5b2", 301872)) - }, - "log4j-core" => { - Ok(("779f60f3844dadc3ef597976fcb1e5127b1f343d", 1790452)) - }, - "log4j-slf4j18-impl" => { - Ok(("ca499d751f4ddd8afb016ef698c30be0da1d09f7", 21268)) - } - _ => { - Err(crate::infrastructure::error::invalid_input(format!("Unhandled log4j artifact {} for overridden version {}", spec.artifact, version_override))) - } - } - } - _ => { - Err(crate::infrastructure::error::invalid_input(format!("Unhandled log4j version {}", version_override))) - } - }?; - let artifact = LibraryDownload { - path: replacement_name.path(), - sha1: sha1.to_string(), - size, - url: Some(format!("{}{}", maven_override, replacement_name.path())), - - }; - new_libraries.push( - Library { - name: replacement_name, - downloads: Some(LibraryDownloads { artifact: Some(artifact), classifiers: None }), - extract: None, - url: 
None, - natives: None, - rules: None, - checksums: None, - include_in_classpath: library.include_in_classpath, - version_hashes: None, - patched: true, - } - ); - } else { - new_libraries.push(library.clone()) - } - } else { - let mut libs = - patch_library(&patches, library.clone()); - new_libraries.append(&mut libs) - } - } - - if lwjgl_buckets.len() == 1 { - for (key, lwjgl) in lwjgl_buckets.iter_mut() { - lwjgl.libraries.sort_by_key(|lib| lib.name.clone() ); - add_lwjgl_version(lwjgl_version_variants_mutex.clone(), lwjgl).await; - info!("Found only candidate LWJGL {:?} {:?}", lwjgl.version, key); - } - } else { - let common_bucket = lwjgl_buckets.get(&None).cloned(); - for (key, lwjgl) in lwjgl_buckets.iter_mut() { - if key.is_none() { - continue - } - if let Some(mut common_bucket) = common_bucket.clone() { - lwjgl.libraries.append(&mut common_bucket.libraries); - } - lwjgl.libraries.sort_by_key(|lib| lib.name.clone() ); - add_lwjgl_version(lwjgl_version_variants_mutex.clone(), lwjgl).await; - info!("Found candidate LWJGL {:?} {:?}", lwjgl.version, key); - } - lwjgl_buckets.remove(&None); - } - - version_info.libraries = new_libraries; - - let suggested_lwjgl_version = if lwjgl_buckets.len() == 1 { - if is_lwjgl_3 { - Ok(lwjgl_buckets.values().next().expect("Safe to unwrap because there is one item present").version.clone()) - } else { - Ok("2.9.4-nightly-20150209".to_string()) - } - } else { - let bad_versions: HashSet<&str> = vec!["3.1.6", "3.2.1"].into_iter().collect(); - let our_versions: HashSet<&str> = lwjgl_buckets.values().map(|lwjgl| lwjgl.version.as_str()).collect(); - - if our_versions == bad_versions { - info!("Found broken 3.1.6/3.2.1 LWJGL combo in version {} , forcing LWJGL. 
3.2.1", &version_info.id); - Ok("3.2.1".to_string()) - } else { - Err(crate::infrastructure::error::invalid_input(format!("Can not determine a single suggested LWJGL version in version {} from among {:?}", &version_info.id, our_versions))) - } - - }?; - - let lwjgl_dependency = if is_lwjgl_3 { - Dependency { - name: "lwjgl".to_string(), - uid: "org.lwjgl3".to_string(), - rule: Some(DependencyRule::Suggests(suggested_lwjgl_version)), - } - } else { - Dependency { - name: "lwjgl".to_string(), - uid: "org.lwjgl2".to_string(), - rule: Some(DependencyRule::Suggests(suggested_lwjgl_version)), - } - }; - - if version_info.requires.is_none() { - version_info.requires = Some(Vec::new()); - } - version_info.requires.as_mut().expect("Safe to unwrap because we just ensured it's creation").push(lwjgl_dependency); - - // Patch java version - version_info.java_version = { - if let Some(java_version) = &version_info.java_version { - match MinecraftJavaProfile::try_from( - &*java_version.component, - ) { - Ok(java_version) => Some(JavaVersion { - component: java_version.as_str().expect("MinecraftJavaProfile::try_from is not handling unknown variant as error").to_string(), - major_version: 0, - }), - Err(err) => { - #[cfg(feature = "sentry")] - sentry::capture_message( - &format!( - "Unknown java version \"{}\": {}", - java_version.component, err - ), - sentry::Level::Warning, - ); - println!( - "Unknown java version \"{}\": {}", - java_version.component, err - ); - None - } - } - } else { - Some(JavaVersion { - component: MinecraftJavaProfile::JreLegacy - .as_str() - .unwrap() - .to_string(), - major_version: 0, - }) - } - }; - - let version_info_hash = get_hash(bytes::Bytes::from( - serde_json::to_vec(&version_info)?, - )) - .await?; - - let version_path = format!( - "minecraft/v{}/versions/{}.json", - daedalus::minecraft::CURRENT_FORMAT_VERSION, - version.id - ); - let assets_path = format!( - "minecraft/v{}/assets/{}.json", - daedalus::minecraft::CURRENT_FORMAT_VERSION, - 
version_info.asset_index.id - ); - let assets_index_url = version_info.asset_index.url.clone(); - - { - let mut cloned_manifest = - cloned_manifest_mutex.lock().await; - - if let Some(position) = cloned_manifest - .versions - .iter() - .position(|x| version.id == x.id) - { - cloned_manifest.versions[position].url = - format_url(&version_path); - cloned_manifest.versions[position].assets_index_sha1 = - Some(version_info.asset_index.sha1.clone()); - cloned_manifest.versions[position].assets_index_url = - Some(format_url(&assets_path)); - cloned_manifest.versions[position].java_profile = - version_info.java_version.as_ref().map(|x| { - MinecraftJavaProfile::try_from(&*x.component) - .expect("Safe to unwrap since we ensure it's valid in version_json already") - }); - cloned_manifest.versions[position].sha1 = - version_info_hash; - } else { - cloned_manifest.versions.insert( - 0, - daedalus::minecraft::Version { - id: version_info.id.clone(), - type_: version_info.type_.clone(), - url: format_url(&version_path), - time: version_info.time, - release_time: version_info.release_time, - sha1: version_info_hash, - java_profile: version_info.java_version.as_ref().map(|x| { - MinecraftJavaProfile::try_from(&*x.component) - .expect("Safe to unwrap since we ensure it's valid in version_json already") - }), - compliance_level: 1, - assets_index_url: Some(format_url(&assets_path)), - assets_index_sha1: Some( - version_info.asset_index.sha1.clone(), - ), - }, - ) - } - } - - let mut download_assets = false; - - if visited_assets.insert(version_info.asset_index.id.clone()) { - if let Some(assets_hash) = assets_hash { - if version_info.asset_index.sha1 != assets_hash { - download_assets = true; - } - } else { - download_assets = true; - } - } - - if download_assets { - let assets_index = download_file( - &assets_index_url, - Some(&version_info.asset_index.sha1), - semaphore.clone(), - ) - .await?; - - let asset_bytes = assets_index.to_vec(); - let asset_hash = uploader.upload_cas( - 
asset_bytes.clone(), - Some("application/json".to_string()), - s3_client, - semaphore.clone(), - ).await?; - - let base_url = dotenvy::var("BASE_URL").unwrap(); - version_info.asset_index.url = format!( - "{}/v{}/objects/{}/{}", - base_url, - crate::services::cas::CAS_VERSION, - &asset_hash[..2], - &asset_hash[2..] - ); - } - - let version_bytes = serde_json::to_vec(&version_info)?; - let _version_hash = uploader.upload_cas( - version_bytes.clone(), - Some("application/json".to_string()), - s3_client, - semaphore.clone(), - ).await?; - - // NOTE: We don't call manifest_builder.add_version() for minecraft here. - // Instead, we use set_loader_versions() with the full VersionManifest at the end - // to preserve rich metadata (type, url, time, releaseTime, sha1, etc.) - - Ok::<(), crate::infrastructure::error::Error>(()) - } - .await?; - - Ok::<(), crate::infrastructure::error::Error>(()) - }) - } - - { - let mut versions = version_futures.into_iter().peekable(); - let mut chunk_index = 0; - let mut successful = 0; - let mut failed = 0; - - while versions.peek().is_some() { - let now = Instant::now(); - - let chunk: Vec<_> = versions.by_ref().take(100).collect(); - - for future in chunk { - match future.await { - Ok(_) => { - successful += 1; - } - Err(e) => { - warn!("⚠️ Minecraft - Failed to process version: {}", e); - failed += 1; - } - } - } - - chunk_index += 1; - - let elapsed = now.elapsed(); - info!("Chunk {} Elapsed: {:.2?} (✓ {} ✗ {})", chunk_index, elapsed, successful, failed); - } - - info!("📊 Minecraft - Processing complete: {} successful, {} failed", successful, failed); - } - //futures::future::try_join_all(version_futures).await?; - - { - let lwjgl_version_variants = lwjgl_version_variants_mutex.lock().await; - - info!("Processing LWJGL variants"); - for (lwjgl_version_variant, lwjgl_variant_entries) in - lwjgl_version_variants.iter() - { - info!( - "{} variant(s) for LWJGL {}", - lwjgl_variant_entries.len(), - lwjgl_version_variant - ); - - let mut 
decided_variant = None; - let mut accepted_variants = 0; - let mut unknown_variants = 0; - - for variant in lwjgl_variant_entries { - if reject_lwjgl_variants.contains(&variant.sha1) { - let reason = lwjgl_reject_reasons - .get(&variant.sha1) - .expect( - "Unwrap to be safe because sha was present in config", - ) - .clone() - .unwrap_or("unspecified".to_string()); - info!("LWJGL Variant {} for version {} ignored because it was marked as bad. Reason: {}", variant.sha1, lwjgl_version_variant, &reason); - continue; - } - if accept_lwjgl_variants.contains(&variant.sha1) { - info!( - "LWJGL Variant {} for version {} accepted", - variant.sha1, lwjgl_version_variant - ); - decided_variant = Some(variant); - accepted_variants += 1; - continue; - } - - let natives = variant - .group - .libraries - .iter() - .filter_map(|lib| { - lib.natives.as_ref().map(|natives| { - natives.keys().cloned().collect::>() - }) - }) - .collect::>(); - - #[cfg(feature = "sentry")] - sentry::capture_message( - &format!( - "Unmarked LWJGL variant {}, #{} ({}) natives: {:?} Split: {}", - variant.sha1, - lwjgl_version_variant, - variant.group.release_time, - natives, - variant - .group - .has_split_natives - .map_or("unknown".to_string(), |b| b.to_string()), - ), - sentry::Level::Warning, - ); - - warn!( - "Unmarked LWJGL variant {}, #{} ({}) natives: {:?} Split: {}", - variant.sha1, - lwjgl_version_variant, - variant.group.release_time, - natives, - variant - .group - .has_split_natives - .map_or("unknown".to_string(), |b| b.to_string()), - ); - unknown_variants += 1; - } - - let patches = Arc::clone(&cloned_patches); - let semaphore = semaphore.clone(); - let uploader = uploader; - let s3_client = s3_client; - - async move { - - if decided_variant.is_some() - && accepted_variants == 1 - && unknown_variants == 0 - { - if let Some((lwjgl_path, lwjgl)) = process_single_lwjgl_variant(&decided_variant.expect("Unwrap to be safe inside is_some").group, &patches)? 
{ - debug!("Uploading {}", lwjgl_path); - - let lwjgl_bytes = serde_json::to_vec(&lwjgl)?; - let lwjgl_hash = uploader.upload_cas( - lwjgl_bytes.clone(), - Some("application/json".to_string()), - s3_client, - semaphore.clone(), - ).await?; - - let loader = if lwjgl.version.starts_with("2") { - "minecraft-lwjgl2" - } else if lwjgl.version.starts_with("3") { - "minecraft-lwjgl3" - } else { - return Err(crate::infrastructure::error::invalid_input(format!("Unknown LWJGL version {}", lwjgl.version))); - }; - - manifest_builder.add_version( - loader, - lwjgl.version.clone(), - lwjgl_hash, - lwjgl_bytes.len() as u64, - lwjgl.release_time, - ); - - } else { - info!("Skipped LWJGL {}", &decided_variant.expect("Unwrap to be safe inside is_some").group.version); - } - } else { - #[cfg(feature = "sentry")] - sentry::capture_message( - &format!( - "No variant decided for version {} of out {} possible and {} unknown", - lwjgl_version_variant, accepted_variants, unknown_variants - ), - sentry::Level::Warning, - ); - error!("No variant decided for version {} of out {} possible and {} unknown", lwjgl_version_variant, accepted_variants, unknown_variants); - } - - Ok::<(), crate::infrastructure::error::Error>(()) - } - .await? - } - } - - let elapsed = now.elapsed(); - info!("Elapsed: {:.2?}", elapsed); - - // Get the final manifest with all processed versions - let final_manifest = Arc::try_unwrap(cloned_manifest) - .map_err(|err| { - crate::infrastructure::error::invalid_input( - format!("Failed to unwrap Arc>: {:?}", err) - ) - })? - .into_inner(); - - // Set the full Minecraft versions JSON in manifest_builder - // This preserves rich metadata (type, url, time, releaseTime, sha1, complianceLevel, etc.) 
- let versions_json = serde_json::to_value(&final_manifest.versions)?; - manifest_builder.set_loader_versions("minecraft", versions_json); - info!(version_count = final_manifest.versions.len(), "Set Minecraft versions with rich metadata in CAS manifest builder"); - - Ok(final_manifest) -} - -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -/// A version of the fabric loader -struct LibraryPatch { - #[serde(rename = "_comment")] - pub _comment: String, - #[serde(rename = "match")] - pub match_: Vec, - pub additional_libraries: Option>, - #[serde(rename = "override")] - pub override_: Option, - pub patch_additional_libraries: Option, -} - -/// Fetches the list of library patches -async fn get_library_patches() -> Result, crate::infrastructure::error::Error> { - let patches = include_bytes!("../patched-library-patches.json"); - let unprocessed_patches: Vec = - serde_json::from_slice(patches)?; - Ok(unprocessed_patches.iter().map(pre_process_patch).collect()) -} - -fn pre_process_patch(patch: &LibraryPatch) -> LibraryPatch { - fn patch_url(url: &mut String) { - *url = url.replace("${BASE_URL}", &dotenvy::var("BASE_URL").unwrap()); - } - - fn patch_downloads(downloads: &mut LibraryDownloads) { - if let Some(artifact) = downloads.artifact.as_mut() { - if let Some(url) = artifact.url.as_mut() { - patch_url(url); - } - } - if let Some(classifiers) = downloads.classifiers.as_mut() { - for (_, artifact) in classifiers.iter_mut() { - if let Some(url) = artifact.url.as_mut() { - patch_url(url); - } - } - } - } - - let mut patch_copy: LibraryPatch = patch.clone(); - if let Some(libraries) = patch_copy.additional_libraries.as_mut() { - for lib in libraries.iter_mut() { - if let Some(downloads) = lib.downloads.as_mut() { - patch_downloads(downloads); - } - } - } - if let Some(override_) = patch_copy.override_.as_mut() { - if let Some(url) = override_.url.as_mut() { - patch_url(url); - } - if let Some(downloads) = override_.downloads.as_mut() { - 
patch_downloads(downloads); - } - } - patch_copy -} - -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct LWJGLVariantMarker { - #[serde(rename = "match")] - pub match_: String, - #[serde(rename = "_comment")] - pub _comment: String, - pub reason: Option, -} - -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct LWJGLVariantConfig { - pub accept: Vec, - pub reject: Vec, -} - -/// Fetches -async fn get_lwjgl_config() -> Result { - let config = include_bytes!("../lwjgl-config.json"); - Ok(serde_json::from_slice(config)?) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_lenient_semver_comparison() { - // Test basic version comparisons - assert!(lenient_semver::parse("1.0.0") < lenient_semver::parse("2.0.0")); - assert!(lenient_semver::parse("2.0.0") > lenient_semver::parse("1.0.0")); - assert!(lenient_semver::parse("2.0.0") == lenient_semver::parse("2.0.0")); - - // Test beta/pre-release versions (critical for Log4j patching) - assert!(lenient_semver::parse("2.0-beta9") <= lenient_semver::parse("2.0")); - assert!(lenient_semver::parse("2.0-beta9") < lenient_semver::parse("2.1.0")); - assert!(lenient_semver::parse("2.0-rc2") <= lenient_semver::parse("2.0")); - - // Test Log4j security threshold (CVE-2021-44832 fixed in 2.17.1) - assert!(lenient_semver::parse("2.0") <= lenient_semver::parse("2.17.1")); - assert!(lenient_semver::parse("2.15.0") <= lenient_semver::parse("2.17.1")); - assert!(lenient_semver::parse("2.16.0") <= lenient_semver::parse("2.17.1")); - assert!(lenient_semver::parse("2.17.0") <= lenient_semver::parse("2.17.1")); - assert!(lenient_semver::parse("2.17.1") <= lenient_semver::parse("2.17.1")); - assert!(lenient_semver::parse("2.18.0") > lenient_semver::parse("2.17.1")); - - // Test actual Log4j versions that have been patched - assert!(lenient_semver::parse("2.0-beta9") <= lenient_semver::parse("2.0")); - assert!(lenient_semver::parse("2.12.1") <= 
lenient_semver::parse("2.17.1")); - assert!(lenient_semver::parse("2.14.1") <= lenient_semver::parse("2.17.1")); - } - - #[test] - fn test_log4j_artifact_mapping() { - // Test versions below 2.0 (should use beta9 patch) - let result = map_log4j_artifact("1.2.17").unwrap(); - assert!(result.is_some()); - let (version, _url) = result.unwrap(); - assert_eq!(version, "2.0-beta9-fixed"); - - let result = map_log4j_artifact("2.0-beta9").unwrap(); - assert!(result.is_some()); - let (version, _url) = result.unwrap(); - assert_eq!(version, "2.0-beta9-fixed"); - - // Test versions between 2.0 and 2.17.1 (should bump to 2.17.1) - let result = map_log4j_artifact("2.12.1").unwrap(); - assert!(result.is_some()); - let (version, url) = result.unwrap(); - assert_eq!(version, "2.17.1"); - assert_eq!(url, "https://repo1.maven.org/maven2/"); - - let result = map_log4j_artifact("2.15.0").unwrap(); - assert!(result.is_some()); - let (version, _url) = result.unwrap(); - assert_eq!(version, "2.17.1"); - - let result = map_log4j_artifact("2.17.0").unwrap(); - assert!(result.is_some()); - let (version, _url) = result.unwrap(); - assert_eq!(version, "2.17.1"); - - // Test versions at or above 2.17.1 (no patching needed) - let result = map_log4j_artifact("2.17.1").unwrap(); - assert!(result.is_none()); - - let result = map_log4j_artifact("2.18.0").unwrap(); - assert!(result.is_none()); - - let result = map_log4j_artifact("2.19.0").unwrap(); - assert!(result.is_none()); - } -} diff --git a/daedalus_client/src/minecraft/helpers.rs b/daedalus_client/src/minecraft/helpers.rs new file mode 100644 index 0000000..d5e2e66 --- /dev/null +++ b/daedalus_client/src/minecraft/helpers.rs @@ -0,0 +1,72 @@ +//! Helper functions for Minecraft version processing +//! +//! This module contains utility functions used during version processing +//! to check library and version properties. 
+ +use daedalus::minecraft::{Library, Os, Rule, RuleAction, VersionInfo}; + +/// Check if a library uses split natives +/// +/// Split natives are identified by an identifier starting with "natives-" +/// (e.g., "natives-linux", "natives-windows", "natives-osx") +/// +/// # Arguments +/// - `lib`: The library to check +/// +/// # Returns +/// `true` if the library has a split natives identifier, `false` otherwise +pub fn lib_is_split_natives(lib: &Library) -> bool { + lib.name + .identifier + .as_ref() + .is_some_and(|data| data.starts_with("natives-")) +} + +/// Check if a Minecraft version has any libraries with split natives +/// +/// # Arguments +/// - `ver`: The version info to check +/// +/// # Returns +/// `true` if any library in the version has split natives, `false` otherwise +pub fn version_has_split_natives(ver: &VersionInfo) -> bool { + ver.libraries.iter().any(lib_is_split_natives) +} + +/// Check if a library's rules indicate it is macOS-only +/// +/// A library is considered macOS-only if: +/// - It has rules that allow macOS (Os::Osx) +/// - It does NOT have rules that allow all platforms +/// +/// # Arguments +/// - `rules`: The optional rules to check +/// +/// # Returns +/// `true` if the rules indicate macOS-only, `false` otherwise +pub fn is_macos_only(rules: &Option>) -> bool { + let mut allows_osx = false; + let mut allows_all = false; + if let Some(rules) = rules { + for rule in rules { + if rule.action == RuleAction::Allow + && rule.os.is_some() + && rule + .os + .clone() + .expect("Unwrap to be safe with boolean short circuit") + .name + .is_some_and(|os| os == Os::Osx) + { + allows_osx = true; + } + if rule.action == RuleAction::Allow && rule.os.is_none() { + allows_all = false; + } + } + + allows_osx && !allows_all + } else { + false + } +} diff --git a/daedalus_client/src/minecraft/library_patches.rs b/daedalus_client/src/minecraft/library_patches.rs new file mode 100644 index 0000000..8b32e74 --- /dev/null +++ 
b/daedalus_client/src/minecraft/library_patches.rs @@ -0,0 +1,107 @@ +//! Library patching system for Minecraft version processing +//! +//! This module handles loading and applying patches to library definitions, +//! allowing for overrides and additional libraries to be injected. + +use crate::minecraft::types::LibraryPatch; +use daedalus::minecraft::{merge_partial_library, Library, LibraryDownloads}; +use tracing::info; + +/// Apply library patches recursively +/// +/// Patches can: +/// - Override library properties +/// - Add additional libraries +/// - Recursively patch the additional libraries +pub fn patch_library(patches: &[LibraryPatch], mut library: Library) -> Vec { + let mut val = Vec::new(); + + let actual_patches = patches + .iter() + .filter(|x| x.match_.contains(&library.name.to_string())) + .collect::>(); + + if !actual_patches.is_empty() { + for patch in actual_patches { + info!( + "patching {} with library patch {}", + library.name, patch._comment + ); + + if let Some(override_) = &patch.override_ { + library = merge_partial_library(override_.clone(), library); + } + + if let Some(additional_libraries) = &patch.additional_libraries { + for additional_library in additional_libraries { + if patch.patch_additional_libraries.unwrap_or(false) { + // Recursive patching + let mut libs = patch_library(patches, additional_library.clone()); + val.append(&mut libs) + } else { + let mut new_lib = additional_library.clone(); + new_lib.patched = true; + val.push(new_lib); + } + } + } + } + + val.push(library); + } else { + val.push(library); + } + + val +} + +/// Fetch library patches from embedded JSON file +pub async fn get_library_patches( +) -> Result, crate::infrastructure::error::Error> { + let patches = include_bytes!("../../patched-library-patches.json"); + let unprocessed_patches: Vec = serde_json::from_slice(patches)?; + Ok(unprocessed_patches.iter().map(pre_process_patch).collect()) +} + +/// Pre-process a patch by replacing ${BASE_URL} 
placeholders +fn pre_process_patch(patch: &LibraryPatch) -> LibraryPatch { + fn patch_url(url: &mut String) { + *url = url.replace( + "${BASE_URL}", + &dotenvy::var("BASE_URL").expect("BASE_URL must be set"), + ); + } + + fn patch_downloads(downloads: &mut LibraryDownloads) { + if let Some(artifact) = downloads.artifact.as_mut() { + if let Some(url) = artifact.url.as_mut() { + patch_url(url); + } + } + if let Some(classifiers) = downloads.classifiers.as_mut() { + for (_, artifact) in classifiers.iter_mut() { + if let Some(url) = artifact.url.as_mut() { + patch_url(url); + } + } + } + } + + let mut patch_copy: LibraryPatch = patch.clone(); + if let Some(libraries) = patch_copy.additional_libraries.as_mut() { + for lib in libraries.iter_mut() { + if let Some(downloads) = lib.downloads.as_mut() { + patch_downloads(downloads); + } + } + } + if let Some(override_) = patch_copy.override_.as_mut() { + if let Some(url) = override_.url.as_mut() { + patch_url(url); + } + if let Some(downloads) = override_.downloads.as_mut() { + patch_downloads(downloads); + } + } + patch_copy +} diff --git a/daedalus_client/src/minecraft/log4j.rs b/daedalus_client/src/minecraft/log4j.rs new file mode 100644 index 0000000..9a5b27a --- /dev/null +++ b/daedalus_client/src/minecraft/log4j.rs @@ -0,0 +1,261 @@ +//! Log4j security patching for CVE-2021-44228, CVE-2021-44832, CVE-2021-45046 +//! +//! This module handles detection and replacement of vulnerable Log4j versions +//! with patched versions. This is SECURITY-CRITICAL code. 
+ +use crate::format_url; +use daedalus::minecraft::{Library, LibraryDownload, LibraryDownloads}; +use daedalus::GradleSpecifier; +use tracing::debug; + +/// Determine if a Log4j version needs patching and return the replacement version and Maven URL +/// +/// Returns `Some((replacement_version, maven_url))` if patching is needed, `None` otherwise +/// +/// # Security +/// - CVE-2021-44228: Log4Shell vulnerability +/// - CVE-2021-44832: Remote code execution +/// - CVE-2021-45046: Information disclosure +/// +/// All versions < 2.17.1 are vulnerable and should be patched. +pub fn map_log4j_artifact( + version: &str, +) -> Result, crate::infrastructure::error::Error> { + debug!("log4j version: {}", version); + let x = lenient_semver::parse(version); + if x <= lenient_semver::parse("2.0") { + debug!("log4j use beta9 patch"); + return Ok(Some(("2.0-beta9-fixed".to_string(), format_url("maven/")))); + } + if x < lenient_semver::parse("2.17.1") { + debug!("bump log4j to 2.17.1"); + return Ok(Some(( + "2.17.1".to_string(), + "https://repo1.maven.org/maven2/".to_string(), + ))); + } + debug!("no log4j match!"); + Ok(None) +} + +/// Create a replacement Log4j library with patched version +/// +/// # Security +/// The SHA1 hashes and sizes are hardcoded for security verification. +/// These values are for the patched Log4j versions that fix CVEs. 
+/// +/// # Arguments +/// - `artifact_name`: The artifact name (e.g., "log4j-api", "log4j-core") +/// - `version_override`: The patched version to use ("2.0-beta9-fixed" or "2.17.1") +/// - `maven_override`: The Maven repository URL +/// - `include_in_classpath`: Whether to include in classpath (from original library) +/// +/// # Returns +/// A `Library` struct with the replacement Log4j artifact +pub fn create_log4j_replacement_library( + artifact_name: &str, + version_override: &str, + maven_override: &str, + include_in_classpath: bool, +) -> Result { + let replacement_name = GradleSpecifier { + package: "org.apache.logging.log4j".to_string(), + artifact: artifact_name.to_string(), + identifier: None, + version: version_override.to_string(), + extension: "jar".to_string(), + }; + + // Hardcoded SHA1 hashes and sizes for security verification + // DO NOT MODIFY unless you've verified the new hashes + let (sha1, size) = match version_override { + "2.0-beta9-fixed" => match artifact_name { + "log4j-api" => ("b61eaf2e64d8b0277e188262a8b771bbfa1502b3", 107347), + "log4j-core" => ("677991ea2d7426f76309a73739cecf609679492c", 677588), + _ => { + return Err(crate::infrastructure::error::invalid_input(format!( + "Unhandled log4j artifact {} for overridden version {}", + artifact_name, version_override + ))) + } + }, + "2.17.1" => match artifact_name { + "log4j-api" => ("d771af8e336e372fb5399c99edabe0919aeaf5b2", 301872), + "log4j-core" => ("779f60f3844dadc3ef597976fcb1e5127b1f343d", 1790452), + "log4j-slf4j18-impl" => ("ca499d751f4ddd8afb016ef698c30be0da1d09f7", 21268), + _ => { + return Err(crate::infrastructure::error::invalid_input(format!( + "Unhandled log4j artifact {} for overridden version {}", + artifact_name, version_override + ))) + } + }, + _ => { + return Err(crate::infrastructure::error::invalid_input(format!( + "Unhandled log4j version {}", + version_override + ))) + } + }; + + let artifact = LibraryDownload { + path: replacement_name.path(), + sha1: 
sha1.to_string(), + size, + url: Some(format!("{}{}", maven_override, replacement_name.path())), + }; + + Ok(Library { + name: replacement_name, + downloads: Some(LibraryDownloads { + artifact: Some(artifact), + classifiers: None, + }), + extract: None, + url: None, + natives: None, + rules: None, + checksums: None, + include_in_classpath, + version_hashes: None, + patched: true, + }) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_lenient_semver_comparison() { + // Test basic version comparisons + assert!(lenient_semver::parse("1.0.0") < lenient_semver::parse("2.0.0")); + assert!(lenient_semver::parse("2.0.0") > lenient_semver::parse("1.0.0")); + assert!(lenient_semver::parse("2.0.0") == lenient_semver::parse("2.0.0")); + + // Test beta/pre-release versions (critical for Log4j patching) + assert!(lenient_semver::parse("2.0-beta9") <= lenient_semver::parse("2.0")); + assert!(lenient_semver::parse("2.0-beta9") < lenient_semver::parse("2.1.0")); + assert!(lenient_semver::parse("2.0-rc2") <= lenient_semver::parse("2.0")); + + // Test Log4j security threshold (CVE-2021-44832 fixed in 2.17.1) + assert!(lenient_semver::parse("2.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.15.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.16.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.17.0") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.17.1") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.18.0") > lenient_semver::parse("2.17.1")); + + // Test actual Log4j versions that have been patched + assert!(lenient_semver::parse("2.0-beta9") <= lenient_semver::parse("2.0")); + assert!(lenient_semver::parse("2.12.1") <= lenient_semver::parse("2.17.1")); + assert!(lenient_semver::parse("2.14.1") <= lenient_semver::parse("2.17.1")); + } + + #[test] + fn test_log4j_artifact_mapping() { + // Test versions below 2.0 (should use beta9 patch) + 
let result = map_log4j_artifact("1.2.17").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.0-beta9-fixed"); + + let result = map_log4j_artifact("2.0-beta9").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.0-beta9-fixed"); + + // Test versions between 2.0 and 2.17.1 (should bump to 2.17.1) + let result = map_log4j_artifact("2.12.1").unwrap(); + assert!(result.is_some()); + let (version, url) = result.unwrap(); + assert_eq!(version, "2.17.1"); + assert_eq!(url, "https://repo1.maven.org/maven2/"); + + let result = map_log4j_artifact("2.15.0").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.17.1"); + + let result = map_log4j_artifact("2.17.0").unwrap(); + assert!(result.is_some()); + let (version, _url) = result.unwrap(); + assert_eq!(version, "2.17.1"); + + // Test versions at or above 2.17.1 (no patching needed) + let result = map_log4j_artifact("2.17.1").unwrap(); + assert!(result.is_none()); + + let result = map_log4j_artifact("2.18.0").unwrap(); + assert!(result.is_none()); + + let result = map_log4j_artifact("2.19.0").unwrap(); + assert!(result.is_none()); + } + + #[test] + fn test_create_log4j_replacement_library() { + // Test creating a replacement for log4j-api with 2.0-beta9-fixed + let lib = create_log4j_replacement_library( + "log4j-api", + "2.0-beta9-fixed", + "https://test-maven.org/", + true, + ) + .unwrap(); + + assert_eq!(lib.name.package, "org.apache.logging.log4j"); + assert_eq!(lib.name.artifact, "log4j-api"); + assert_eq!(lib.name.version, "2.0-beta9-fixed"); + assert!(lib.patched); + assert_eq!(lib.include_in_classpath, true); + + let downloads = lib.downloads.unwrap(); + let artifact = downloads.artifact.unwrap(); + assert_eq!(artifact.sha1, "b61eaf2e64d8b0277e188262a8b771bbfa1502b3"); + assert_eq!(artifact.size, 107347); + + // Test creating a replacement for log4j-core with 
2.17.1 + let lib2 = create_log4j_replacement_library( + "log4j-core", + "2.17.1", + "https://repo1.maven.org/maven2/", + false, + ) + .unwrap(); + + assert_eq!(lib2.name.version, "2.17.1"); + assert_eq!(lib2.include_in_classpath, false); + + let downloads2 = lib2.downloads.unwrap(); + let artifact2 = downloads2.artifact.unwrap(); + assert_eq!(artifact2.sha1, "779f60f3844dadc3ef597976fcb1e5127b1f343d"); + assert_eq!(artifact2.size, 1790452); + } + + #[test] + fn test_create_log4j_replacement_unknown_artifact() { + // Test that unknown artifacts return an error + let result = create_log4j_replacement_library( + "log4j-unknown", + "2.17.1", + "https://repo1.maven.org/maven2/", + true, + ); + + assert!(result.is_err()); + } + + #[test] + fn test_create_log4j_replacement_unknown_version() { + // Test that unknown versions return an error + let result = create_log4j_replacement_library( + "log4j-api", + "9.9.9", + "https://repo1.maven.org/maven2/", + true, + ); + + assert!(result.is_err()); + } +} diff --git a/daedalus_client/src/minecraft/lwjgl.rs b/daedalus_client/src/minecraft/lwjgl.rs new file mode 100644 index 0000000..206a31f --- /dev/null +++ b/daedalus_client/src/minecraft/lwjgl.rs @@ -0,0 +1,210 @@ +//! LWJGL variant processing and validation system +//! +//! This module handles: +//! - Processing LWJGL variant libraries +//! - Validating native classifiers for different operating systems +//! - Managing LWJGL version variants across Minecraft versions +//! - Filtering and identifying compatible LWJGL configurations + +use crate::minecraft::library_patches::patch_library; +use crate::minecraft::types::{LibraryPatch, LWJGLVariantConfig}; +use daedalus::minecraft::{Dependency, LWJGLEntry, LibraryGroup, Os}; +use std::collections::{BTreeMap, HashSet}; +use std::sync::Arc; +use tokio::sync::Mutex; +use tracing::{debug, info, warn}; + +/// Process a single LWJGL variant and validate its structure +/// +/// This function: +/// 1. 
Applies library patches to the variant +/// 2. Sets up LWJGL 2 vs LWJGL 3 configuration with conflicts +/// 3. Filters out unneeded libraries (jutils, jinput for LWJGL 3) +/// 4. Validates native classifiers for all platforms (Linux, Windows, macOS) +/// +/// # Arguments +/// - `variant`: The LWJGL library group to process +/// - `patches`: Library patches to apply +/// +/// # Returns +/// - `Ok(Some((path, library_group)))` if the variant is valid and should be uploaded +/// - `Ok(None)` if the variant is invalid (missing classifiers or downloads) +/// - `Err` if an unknown LWJGL version is encountered +pub fn process_single_lwjgl_variant( + variant: &LibraryGroup, + patches: &Vec, +) -> Result, crate::infrastructure::error::Error> { + let lwjgl_version = variant.version.clone(); + + info!("Processing LWJGL variant {}", lwjgl_version); + + let mut lwjgl = variant.clone(); + + let mut new_libraries = Vec::new(); + + for library in lwjgl.libraries.clone() { + let mut libs = patch_library(patches, library); + new_libraries.append(&mut libs); + } + lwjgl.libraries = new_libraries; + + let version_path = if lwjgl_version.starts_with("2") { + lwjgl.id = "LWJGL 2".to_string(); + lwjgl.uid = "org.lwjgl2".to_string(); + lwjgl.conflicts = Some(vec![Dependency { + name: "lwjgl".to_string(), + uid: "org.lwjgl3".to_string(), + rule: None, + }]); + + format!( + "minecraft/v{}/libraries/org.lwjgl2/{}.json", + daedalus::minecraft::CURRENT_FORMAT_VERSION, + lwjgl_version + ) + } else if lwjgl_version.starts_with('3') { + lwjgl.id = "LWJGL 3".to_string(); + lwjgl.uid = "org.lwjgl3".to_string(); + lwjgl.conflicts = Some(vec![Dependency { + name: "lwjgl".to_string(), + uid: "org.lwjgl2".to_string(), + rule: None, + }]); + + let unneeded: HashSet<&str> = vec!["jutils", "jinput"].into_iter().collect(); + let filtered_libs = lwjgl + .libraries + .into_iter() + .filter(|lib| !unneeded.contains(lib.name.artifact.as_str())) + .collect::>(); + lwjgl.libraries = filtered_libs; + + format!( + 
"minecraft/v{}/libraries/org.lwjgl3/{}.json", + daedalus::minecraft::CURRENT_FORMAT_VERSION, + lwjgl_version + ) + } else { + return Err(crate::infrastructure::error::invalid_input(format!( + "Unknown LWJGL version {}", + lwjgl_version + ))); + }; + + let mut good = true; + for lib in &lwjgl.libraries { + if lib.patched { + continue; + } + if let Some(natives) = &lib.natives { + let checked: HashSet<&Os> = vec![&Os::Linux, &Os::Windows, &Os::Osx] + .into_iter() + .collect(); + if !checked.is_subset(&natives.clone().keys().collect()) { + warn!( + "LWJGL variant library missing system classifier: {} {} {:?}", + lwjgl.version, + lib.name, + natives.keys() + ); + good = false; + break; + } + if lib.downloads.is_some() { + if let Some(classifiers) = &lib + .downloads + .clone() + .expect("Unwrap to be safe inside is_some") + .classifiers + { + for entry in checked { + let baked_entry = natives.get(entry); + if let Some(baked_entry) = baked_entry { + if !classifiers.contains_key(baked_entry) { + warn!( + "LWJGL variant library missing download for classifier: {} {} {:?} {:?}", + lwjgl.version, + lib.name, + baked_entry, + classifiers.keys().collect::>() + ); + good = false; + break; + } + } + } + } else { + warn!( + "LWJGL variant library missing downloads classifiers: {} {}", + lwjgl.version, lib.name + ); + good = false; + break; + } + } + } + } + if good { + Ok(Some((version_path, lwjgl))) + } else { + Ok(None) + } +} + +/// Add an LWJGL version to the variants collection +/// +/// This function tracks different LWJGL variants (with the same version but different +/// library configurations) and updates release times as newer variants are discovered. 
+/// +/// # Arguments +/// - `variants_mutex`: Shared map of LWJGL version to variant entries +/// - `lwjgl`: The library group to add as a variant +pub async fn add_lwjgl_version( + variants_mutex: Arc>>>, + lwjgl: &LibraryGroup, +) { + let mut lwjgl_copy = lwjgl.clone(); + lwjgl_copy.libraries.sort_by(|x, y| x.name.cmp(&y.name)); + + let entry = LWJGLEntry::from_group(lwjgl_copy); + let current_sha1 = entry.sha1.clone(); + let version = entry.group.version.clone(); + let mut found = false; + + let mut version_variants = variants_mutex.lock().await; + + let variants = version_variants + .entry(version.clone()) + .or_insert_with(Vec::new); + for variant in variants.iter_mut() { + if entry.sha1 == variant.sha1 { + found = true; + if entry.group.release_time > variant.group.release_time { + variant.group.release_time = entry.group.release_time; + } + break; + } + } + + if !found { + info!( + "!! New variant for LWJGL version {:?} : {}", + version, current_sha1 + ); + debug!("New LWLGL variant {:?}", &lwjgl); + variants.push(entry); + } +} + +/// Fetch LWJGL variant configuration from embedded JSON file +/// +/// The configuration contains lists of accepted and rejected LWJGL variant SHA1 hashes, +/// used to filter out known bad variants and only accept validated ones. +/// +/// # Returns +/// The LWJGL variant configuration with accept/reject lists +pub async fn get_lwjgl_config( +) -> Result { + let config = include_bytes!("../../lwjgl-config.json"); + Ok(serde_json::from_slice(config)?) +} diff --git a/daedalus_client/src/minecraft/mod.rs b/daedalus_client/src/minecraft/mod.rs new file mode 100644 index 0000000..9f2784c --- /dev/null +++ b/daedalus_client/src/minecraft/mod.rs @@ -0,0 +1,700 @@ +//! Minecraft version processing and metadata management +//! +//! This module handles the complete Minecraft version processing pipeline: +//! - Fetching and processing vanilla Minecraft versions +//! - LWJGL library variant detection and validation +//! 
- Log4j security patching (CVE-2021-44228, CVE-2021-44832, CVE-2021-45046) +//! - Library patching and dependency management +//! - Split natives handling +//! - Assets index processing and CAS upload +//! +//! # Module Structure +//! +//! - `types`: Type definitions for patches and LWJGL configuration +//! - `log4j`: Security patching for Log4j vulnerabilities +//! - `library_patches`: Library patching system with override and injection +//! - `helpers`: Utility functions for version and library processing +//! - `lwjgl`: LWJGL variant processing and validation +//! +//! # Main Entry Point +//! +//! The primary function is `retrieve_data()` which orchestrates the entire +//! Minecraft version processing pipeline. + +pub mod helpers; +pub mod library_patches; +pub mod log4j; +pub mod lwjgl; +pub mod types; + +// Re-export commonly used types +pub use types::{LibraryPatch, LWJGLVariantConfig, LWJGLVariantMarker}; + +use crate::download_file; +use crate::format_url; +use crate::services::upload::BatchUploader; +use dashmap::DashSet; +use daedalus::minecraft::{ + Dependency, DependencyRule, JavaVersion, LWJGLEntry, Library, LibraryDownload, + LibraryDownloads, LibraryGroup, MinecraftJavaProfile, Os, Rule, RuleAction, VersionInfo, + VersionManifest, VersionType, +}; +use daedalus::{get_hash, GradleSpecifier}; +use std::collections::{BTreeMap, HashMap, HashSet}; +use std::convert::TryFrom; +use std::sync::Arc; +use std::time::Instant; +use tokio::sync::{Mutex, Semaphore}; +use tracing::{debug, error, info, warn}; + +/// Retrieve and process all Minecraft version data +/// +/// This is the main entry point for Minecraft version processing. It: +/// 1. Fetches the Minecraft version manifest +/// 2. Processes each version in parallel (with chunking) +/// 3. Handles LWJGL variant detection and validation +/// 4. Applies Log4j security patches +/// 5. Applies library patches +/// 6. Processes assets and uploads to CAS +/// 7. 
Builds the final manifest with all processed versions +/// +/// # Arguments +/// - `uploader`: Batch uploader for CAS uploads +/// - `manifest_builder`: CAS manifest builder for tracking versions +/// - `s3_client`: S3 bucket client for uploads +/// - `semaphore`: Concurrency control semaphore +/// - `is_first_run`: Whether this is the first run (skips old manifest loading) +/// +/// # Returns +/// The processed Minecraft version manifest with all versions and metadata +pub async fn retrieve_data( + uploader: &BatchUploader, + manifest_builder: &crate::services::cas::ManifestBuilder, + s3_client: &s3::Bucket, + semaphore: Arc, + is_first_run: bool, +) -> Result { + info!(is_first_run = is_first_run, "Retrieving Minecraft data"); + + // TODO: Old manifest doesn't take LWJGL meta into account + let old_manifest = if is_first_run { + None + } else { + daedalus::minecraft::fetch_version_manifest(Some(&format_url(&format!( + "minecraft/v{}/manifest.json", + daedalus::minecraft::CURRENT_FORMAT_VERSION + )))) + .await + .ok() + }; + + let mut manifest = daedalus::minecraft::fetch_version_manifest(None).await?; + + let cloned_manifest = Arc::new(Mutex::new(manifest.clone())); + + let patches = library_patches::get_library_patches().await?; + let cloned_patches = Arc::new(&patches); + + let lwjgl_config = lwjgl::get_lwjgl_config().await?; + + let visited_assets = Arc::new(DashSet::new()); + + let lwjgl_version_variants_mutex: Arc>>> = + Arc::new(Mutex::new(BTreeMap::new())); + let lwjgl_reject_reasons: HashMap> = lwjgl_config + .reject + .clone() + .into_iter() + .map(|mark| (mark.match_, mark.reason)) + .collect(); + let reject_lwjgl_variants: HashSet = lwjgl_config + .reject + .into_iter() + .map(|mark| mark.match_) + .collect(); + let accept_lwjgl_variants: HashSet = lwjgl_config + .accept + .into_iter() + .map(|mark| mark.match_) + .collect(); + + let now = Instant::now(); + + let mut version_futures = Vec::new(); + + for version in manifest.versions.iter_mut().rev() { 
+ version_futures.push(async { + let old_version = if let Some(old_manifest) = &old_manifest { + old_manifest.versions.iter().find(|x| x.id == version.id) + } else { + None + }; + + if let Some(old_version) = old_version { + if old_version.sha1 == version.sha1 { + return Ok(()); + } + } + + let visited_assets = Arc::clone(&visited_assets); + let cloned_manifest_mutex = Arc::clone(&cloned_manifest); + let semaphore = Arc::clone(&semaphore); + let patches = Arc::clone(&cloned_patches); + + let lwjgl_version_variants_mutex = Arc::clone(&lwjgl_version_variants_mutex); + + let assets_hash = old_version.and_then(|x| x.assets_index_sha1.clone()); + + async move { + let mut version_info = daedalus::minecraft::fetch_version_info(version).await?; + + let has_split_natives = helpers::version_has_split_natives(&version_info); + let mut is_lwjgl_3 = false; + let mut lwjgl_buckets: HashMap>, LibraryGroup> = + HashMap::new(); + + let mut new_libraries = Vec::new(); + info!("Processing libraries for version {}", version_info.id); + for library in version_info.libraries.iter_mut() { + if helpers::lib_is_split_natives(library) { + if let Some(identifier) = &library.name.identifier { + info!( + "Splitting library {} into artifact {}", + library.name, identifier + ); + library.name.artifact = + format!("{}-{}", library.name.artifact, identifier); + library.name.identifier = None; + } + } + let spec = &mut library.name; + + if spec.is_lwjgl() { + let mut rules = None; + let set_version: Option = if has_split_natives { + // implies lwjgl3 + is_lwjgl_3 = true; + debug!( + "lwlgl library {} has split natives, version {}", + spec, spec.version + ); + + Some(spec.version.clone()) + } else { + debug!("lwlgl library {} is not split, package: {} artifact:{} version: {}", spec, spec.package, spec.artifact, spec.version); + rules = library.rules.clone(); + library.rules = None; + if helpers::is_macos_only(&rules) { + info!( + "Candidate library {} is only for macOS and is therefore ignored", + 
spec + ); + continue; + } + if spec.package == "org.lwjgl.lwjgl" && spec.artifact == "lwjgl" { + Some(spec.version.clone()) + } else if spec.package == "org.lwjgl" && spec.artifact == "lwjgl" { + is_lwjgl_3 = true; + Some(spec.version.clone()) + } else { + None + } + }; + debug!("lwjgl library {} is setting version {:?}", spec, set_version); + + let version_id = &version_info.id; + let version_release_time = version_info.release_time; + + info!( + "Setting lwjgl bucket {:?} for {} with release {}", + &rules, version_id, version_release_time + ); + let bucket = lwjgl_buckets.entry(rules.clone()).or_insert_with(|| { + LibraryGroup { + id: "LWJGL".to_string(), + version: "undetermined".to_string(), + uid: "org.lwjgl".to_string(), + release_time: version_release_time, + libraries: Vec::new(), + requires: None, + conflicts: None, + type_: VersionType::Release, + has_split_natives: Some(has_split_natives), + } + }); + bucket.has_split_natives = Some(has_split_natives); + + if let Some(version) = set_version { + debug!( + "Setting bucket version {} for {}", + version, version_info.id + ); + bucket.version = version; + } + bucket.libraries.push(library.clone()); + if version_info.release_time > bucket.release_time { + bucket.release_time = version_info.release_time; + } + } else if spec.is_log4j() { + if let Some((version_override, maven_override)) = + log4j::map_log4j_artifact(&spec.version)? 
+ { + let replacement_library = log4j::create_log4j_replacement_library( + &spec.artifact, + &version_override, + &maven_override, + library.include_in_classpath, + )?; + new_libraries.push(replacement_library); + } else { + new_libraries.push(library.clone()) + } + } else { + let mut libs = library_patches::patch_library(&patches, library.clone()); + new_libraries.append(&mut libs) + } + } + + if lwjgl_buckets.len() == 1 { + for (key, lwjgl) in lwjgl_buckets.iter_mut() { + lwjgl.libraries.sort_by_key(|lib| lib.name.clone()); + lwjgl::add_lwjgl_version(lwjgl_version_variants_mutex.clone(), lwjgl) + .await; + info!("Found only candidate LWJGL {:?} {:?}", lwjgl.version, key); + } + } else { + let common_bucket = lwjgl_buckets.get(&None).cloned(); + for (key, lwjgl) in lwjgl_buckets.iter_mut() { + if key.is_none() { + continue; + } + if let Some(mut common_bucket) = common_bucket.clone() { + lwjgl.libraries.append(&mut common_bucket.libraries); + } + lwjgl.libraries.sort_by_key(|lib| lib.name.clone()); + lwjgl::add_lwjgl_version(lwjgl_version_variants_mutex.clone(), lwjgl) + .await; + info!("Found candidate LWJGL {:?} {:?}", lwjgl.version, key); + } + lwjgl_buckets.remove(&None); + } + + version_info.libraries = new_libraries; + + let suggested_lwjgl_version = if lwjgl_buckets.len() == 1 { + if is_lwjgl_3 { + Ok(lwjgl_buckets + .values() + .next() + .expect("Safe to unwrap because there is one item present") + .version + .clone()) + } else { + Ok("2.9.4-nightly-20150209".to_string()) + } + } else { + let bad_versions: HashSet<&str> = + vec!["3.1.6", "3.2.1"].into_iter().collect(); + let our_versions: HashSet<&str> = lwjgl_buckets + .values() + .map(|lwjgl| lwjgl.version.as_str()) + .collect(); + + if our_versions == bad_versions { + info!( + "Found broken 3.1.6/3.2.1 LWJGL combo in version {} , forcing LWJGL. 
3.2.1", + &version_info.id + ); + Ok("3.2.1".to_string()) + } else { + Err(crate::infrastructure::error::invalid_input(format!( + "Can not determine a single suggested LWJGL version in version {} from among {:?}", + &version_info.id, our_versions + ))) + } + }?; + + let lwjgl_dependency = if is_lwjgl_3 { + Dependency { + name: "lwjgl".to_string(), + uid: "org.lwjgl3".to_string(), + rule: Some(DependencyRule::Suggests(suggested_lwjgl_version)), + } + } else { + Dependency { + name: "lwjgl".to_string(), + uid: "org.lwjgl2".to_string(), + rule: Some(DependencyRule::Suggests(suggested_lwjgl_version)), + } + }; + + if version_info.requires.is_none() { + version_info.requires = Some(Vec::new()); + } + version_info + .requires + .as_mut() + .expect("Safe to unwrap because we just ensured it's creation") + .push(lwjgl_dependency); + + // Patch java version + version_info.java_version = { + if let Some(java_version) = &version_info.java_version { + match MinecraftJavaProfile::try_from(&*java_version.component) { + Ok(java_version) => Some(JavaVersion { + component: java_version.as_str().expect("MinecraftJavaProfile::try_from is not handling unknown variant as error").to_string(), + major_version: 0, + }), + Err(err) => { + #[cfg(feature = "sentry")] + sentry::capture_message( + &format!( + "Unknown java version \"{}\": {}", + java_version.component, err + ), + sentry::Level::Warning, + ); + println!( + "Unknown java version \"{}\": {}", + java_version.component, err + ); + None + } + } + } else { + Some(JavaVersion { + component: MinecraftJavaProfile::JreLegacy + .as_str() + .unwrap() + .to_string(), + major_version: 0, + }) + } + }; + + let version_info_hash = + get_hash(bytes::Bytes::from(serde_json::to_vec(&version_info)?)).await?; + + let version_path = format!( + "minecraft/v{}/versions/{}.json", + daedalus::minecraft::CURRENT_FORMAT_VERSION, + version.id + ); + let assets_path = format!( + "minecraft/v{}/assets/{}.json", + daedalus::minecraft::CURRENT_FORMAT_VERSION, 
+ version_info.asset_index.id + ); + let assets_index_url = version_info.asset_index.url.clone(); + + { + let mut cloned_manifest = cloned_manifest_mutex.lock().await; + + if let Some(position) = cloned_manifest + .versions + .iter() + .position(|x| version.id == x.id) + { + cloned_manifest.versions[position].url = format_url(&version_path); + cloned_manifest.versions[position].assets_index_sha1 = + Some(version_info.asset_index.sha1.clone()); + cloned_manifest.versions[position].assets_index_url = + Some(format_url(&assets_path)); + cloned_manifest.versions[position].java_profile = + version_info.java_version.as_ref().map(|x| { + MinecraftJavaProfile::try_from(&*x.component).expect( + "Safe to unwrap since we ensure it's valid in version_json already", + ) + }); + cloned_manifest.versions[position].sha1 = version_info_hash; + } else { + cloned_manifest.versions.insert( + 0, + daedalus::minecraft::Version { + id: version_info.id.clone(), + type_: version_info.type_.clone(), + url: format_url(&version_path), + time: version_info.time, + release_time: version_info.release_time, + sha1: version_info_hash, + java_profile: version_info.java_version.as_ref().map(|x| { + MinecraftJavaProfile::try_from(&*x.component).expect( + "Safe to unwrap since we ensure it's valid in version_json already", + ) + }), + compliance_level: 1, + assets_index_url: Some(format_url(&assets_path)), + assets_index_sha1: Some(version_info.asset_index.sha1.clone()), + }, + ) + } + } + + let mut download_assets = false; + + if visited_assets.insert(version_info.asset_index.id.clone()) { + if let Some(assets_hash) = assets_hash { + if version_info.asset_index.sha1 != assets_hash { + download_assets = true; + } + } else { + download_assets = true; + } + } + + if download_assets { + let assets_index = download_file( + &assets_index_url, + Some(&version_info.asset_index.sha1), + semaphore.clone(), + ) + .await?; + + let asset_bytes = assets_index.to_vec(); + let asset_hash = uploader + .upload_cas( + 
asset_bytes.clone(), + Some("application/json".to_string()), + s3_client, + semaphore.clone(), + ) + .await?; + + let base_url = dotenvy::var("BASE_URL").unwrap(); + version_info.asset_index.url = format!( + "{}/v{}/objects/{}/{}", + base_url, + crate::services::cas::CAS_VERSION, + &asset_hash[..2], + &asset_hash[2..] + ); + } + + let version_bytes = serde_json::to_vec(&version_info)?; + let _version_hash = uploader + .upload_cas( + version_bytes.clone(), + Some("application/json".to_string()), + s3_client, + semaphore.clone(), + ) + .await?; + + // NOTE: We don't call manifest_builder.add_version() for minecraft here. + // Instead, we use set_loader_versions() with the full VersionManifest at the end + // to preserve rich metadata (type, url, time, releaseTime, sha1, etc.) + + Ok::<(), crate::infrastructure::error::Error>(()) + } + .await?; + + Ok::<(), crate::infrastructure::error::Error>(()) + }) + } + + { + let mut versions = version_futures.into_iter().peekable(); + let mut chunk_index = 0; + let mut successful = 0; + let mut failed = 0; + + while versions.peek().is_some() { + let now = Instant::now(); + + let chunk: Vec<_> = versions.by_ref().take(100).collect(); + + for future in chunk { + match future.await { + Ok(_) => { + successful += 1; + } + Err(e) => { + warn!("⚠️ Minecraft - Failed to process version: {}", e); + failed += 1; + } + } + } + + chunk_index += 1; + + let elapsed = now.elapsed(); + info!( + "Chunk {} Elapsed: {:.2?} (✓ {} ✗ {})", + chunk_index, elapsed, successful, failed + ); + } + + info!( + "📊 Minecraft - Processing complete: {} successful, {} failed", + successful, failed + ); + } + + { + let lwjgl_version_variants = lwjgl_version_variants_mutex.lock().await; + + info!("Processing LWJGL variants"); + for (lwjgl_version_variant, lwjgl_variant_entries) in lwjgl_version_variants.iter() { + info!( + "{} variant(s) for LWJGL {}", + lwjgl_variant_entries.len(), + lwjgl_version_variant + ); + + let mut decided_variant = None; + let mut 
accepted_variants = 0; + let mut unknown_variants = 0; + + for variant in lwjgl_variant_entries { + if reject_lwjgl_variants.contains(&variant.sha1) { + let reason = lwjgl_reject_reasons + .get(&variant.sha1) + .expect("Unwrap to be safe because sha was present in config") + .clone() + .unwrap_or("unspecified".to_string()); + info!("LWJGL Variant {} for version {} ignored because it was marked as bad. Reason: {}", variant.sha1, lwjgl_version_variant, &reason); + continue; + } + if accept_lwjgl_variants.contains(&variant.sha1) { + info!( + "LWJGL Variant {} for version {} accepted", + variant.sha1, lwjgl_version_variant + ); + decided_variant = Some(variant); + accepted_variants += 1; + continue; + } + + let natives = variant + .group + .libraries + .iter() + .filter_map(|lib| { + lib.natives + .as_ref() + .map(|natives| natives.keys().cloned().collect::>()) + }) + .collect::>(); + + #[cfg(feature = "sentry")] + sentry::capture_message( + &format!( + "Unmarked LWJGL variant {}, #{} ({}) natives: {:?} Split: {}", + variant.sha1, + lwjgl_version_variant, + variant.group.release_time, + natives, + variant + .group + .has_split_natives + .map_or("unknown".to_string(), |b| b.to_string()), + ), + sentry::Level::Warning, + ); + + warn!( + "Unmarked LWJGL variant {}, #{} ({}) natives: {:?} Split: {}", + variant.sha1, + lwjgl_version_variant, + variant.group.release_time, + natives, + variant + .group + .has_split_natives + .map_or("unknown".to_string(), |b| b.to_string()), + ); + unknown_variants += 1; + } + + let patches = Arc::clone(&cloned_patches); + let semaphore = semaphore.clone(); + + async move { + if decided_variant.is_some() && accepted_variants == 1 && unknown_variants == 0 { + if let Some((lwjgl_path, lwjgl)) = lwjgl::process_single_lwjgl_variant( + &decided_variant + .expect("Unwrap to be safe inside is_some") + .group, + &patches, + )? 
{ + debug!("Uploading {}", lwjgl_path); + + let lwjgl_bytes = serde_json::to_vec(&lwjgl)?; + let lwjgl_hash = uploader + .upload_cas( + lwjgl_bytes.clone(), + Some("application/json".to_string()), + s3_client, + semaphore.clone(), + ) + .await?; + + let loader = if lwjgl.version.starts_with("2") { + "minecraft-lwjgl2" + } else if lwjgl.version.starts_with("3") { + "minecraft-lwjgl3" + } else { + return Err(crate::infrastructure::error::invalid_input(format!( + "Unknown LWJGL version {}", + lwjgl.version + ))); + }; + + manifest_builder.add_version( + loader, + lwjgl.version.clone(), + lwjgl_hash, + lwjgl_bytes.len() as u64, + lwjgl.release_time, + ); + } else { + info!( + "Skipped LWJGL {}", + &decided_variant + .expect("Unwrap to be safe inside is_some") + .group + .version + ); + } + } else { + #[cfg(feature = "sentry")] + sentry::capture_message( + &format!( + "No variant decided for version {} of out {} possible and {} unknown", + lwjgl_version_variant, accepted_variants, unknown_variants + ), + sentry::Level::Warning, + ); + error!("No variant decided for version {} of out {} possible and {} unknown", lwjgl_version_variant, accepted_variants, unknown_variants); + } + + Ok::<(), crate::infrastructure::error::Error>(()) + } + .await? + } + } + + let elapsed = now.elapsed(); + info!("Elapsed: {:.2?}", elapsed); + + // Get the final manifest with all processed versions + let final_manifest = Arc::try_unwrap(cloned_manifest) + .map_err(|err| { + crate::infrastructure::error::invalid_input(format!( + "Failed to unwrap Arc>: {:?}", + err + )) + })? + .into_inner(); + + // Set the full Minecraft versions JSON in manifest_builder + // This preserves rich metadata (type, url, time, releaseTime, sha1, complianceLevel, etc.) 
+ let versions_json = serde_json::to_value(&final_manifest.versions)?; + manifest_builder.set_loader_versions("minecraft", versions_json); + info!( + version_count = final_manifest.versions.len(), + "Set Minecraft versions with rich metadata in CAS manifest builder" + ); + + Ok(final_manifest) +} diff --git a/daedalus_client/src/minecraft/types.rs b/daedalus_client/src/minecraft/types.rs new file mode 100644 index 0000000..e1e0798 --- /dev/null +++ b/daedalus_client/src/minecraft/types.rs @@ -0,0 +1,37 @@ +//! Type definitions for Minecraft version processing + +use daedalus::minecraft::{Library, PartialLibrary}; +use serde::Deserialize; + +/// A library patch configuration for modifying or replacing libraries +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct LibraryPatch { + #[serde(rename = "_comment")] + pub _comment: String, + #[serde(rename = "match")] + pub match_: Vec, + pub additional_libraries: Option>, + #[serde(rename = "override")] + pub override_: Option, + pub patch_additional_libraries: Option, +} + +/// Marker for LWJGL variant acceptance/rejection configuration +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct LWJGLVariantMarker { + #[serde(rename = "match")] + pub match_: String, + #[serde(rename = "_comment")] + pub _comment: String, + pub reason: Option, +} + +/// Configuration for LWJGL variant filtering +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct LWJGLVariantConfig { + pub accept: Vec, + pub reject: Vec, +} diff --git a/daedalus_client/src/neoforge.rs b/daedalus_client/src/neoforge/mod.rs similarity index 81% rename from daedalus_client/src/neoforge.rs rename to daedalus_client/src/neoforge/mod.rs index 4f4d33c..c199fd6 100644 --- a/daedalus_client/src/neoforge.rs +++ b/daedalus_client/src/neoforge/mod.rs @@ -1,5 +1,13 @@ +//! NeoForge loader processing +//! +//! This module handles NeoForge version retrieval and processing, +//! 
using common utilities shared with other loaders. + +pub mod types; + use crate::{download_file, format_url}; use crate::services::upload::BatchUploader; +use crate::common::{change_detection::detect_version_change, manifest_merge::{merge_loader_versions, sort_by_minecraft_order, sort_loaders_by_metadata}}; use dashmap::DashSet; use daedalus::minecraft::{Library, VersionManifest}; use daedalus::modded::{ @@ -16,6 +24,9 @@ use std::sync::{Arc, LazyLock}; use std::time::Instant; use tokio::sync::{Mutex, Semaphore}; +// Re-export types +pub use types::NeoForgeInstallerProfile; + /// Skip list for known broken NeoForge/Forge versions /// These versions have permanent issues (missing files, corrupted archives, etc.) static NEOFORGE_SKIP_LIST: LazyLock> = LazyLock::new(|| { @@ -27,17 +38,6 @@ static NEOFORGE_SKIP_LIST: LazyLock> = LazyLock::new(|| { .collect() }); -fn extract_hash_from_cas_url(url: &str) -> Option { - let parts: Vec<&str> = url.rsplitn(3, '/').collect(); - if parts.len() >= 2 { - let hash_suffix = parts[0]; - let hash_prefix = parts[1]; - Some(format!("{}{}", hash_prefix, hash_suffix)) - } else { - None - } -} - pub async fn retrieve_data( minecraft_versions: &VersionManifest, uploader: &BatchUploader, @@ -87,8 +87,6 @@ pub async fn retrieve_data( let visited_assets = Arc::clone(&visited_assets); let semaphore = Arc::clone(&semaphore); let minecraft_version = minecraft_version.clone(); - let uploader = uploader; - let s3_client = s3_client; async move { // Check skip list first @@ -112,7 +110,7 @@ pub async fn retrieve_data( let mut contents = String::new(); install_profile.read_to_string(&mut contents)?; - Ok::(serde_json::from_str::(&contents)?) + Ok::(serde_json::from_str::(&contents)?) 
}).await??; let mut archive_clone = archive.clone(); @@ -221,8 +219,6 @@ pub async fn retrieve_data( let semaphore = semaphore.clone(); let visited_assets = visited_assets.clone(); let local_libs = local_libs.clone(); - let uploader = uploader; - let s3_client = s3_client; async move { let artifact_path = &lib.name.path(); @@ -288,15 +284,8 @@ pub async fn retrieve_data( semaphore.clone(), ).await?; - // Store full CAS URL - let base_url = dotenvy::var("BASE_URL").unwrap(); - let cas_url = format!( - "{}/v{}/objects/{}/{}", - base_url, - crate::services::cas::CAS_VERSION, - &hash[..2], - &hash[2..] - ); + // Use common CAS URL building + let cas_url = crate::common::cas::build_cas_url(&hash); // Update library URL with CAS URL if let Some(ref mut downloads) = lib.downloads { @@ -340,22 +329,14 @@ pub async fn retrieve_data( .cloned() }; - let should_upload = if let Some(old_version) = &old_loader_version { - if let Some(old_hash) = extract_hash_from_cas_url(&old_version.url) { - if old_hash == new_hash { - info!("✓ NeoForge {} unchanged (hash: {})", loader_version_full, &new_hash[..8]); - false - } else { - info!("↻ NeoForge {} changed (old: {}, new: {})", loader_version_full, &old_hash[..8], &new_hash[..8]); - true - } - } else { - true - } - } else { - info!("+ NeoForge {} is new", loader_version_full); - true - }; + // Use common change detection logic + let change_result = detect_version_change( + "NeoForge", + &loader_version_full, + old_loader_version.as_ref().map(|v| v.url.as_str()), + &new_hash, + ); + let should_upload = change_result.should_upload; let version_hash = if should_upload { uploader.upload_cas( @@ -368,14 +349,8 @@ pub async fn retrieve_data( new_hash.clone() }; - let base_url = dotenvy::var("BASE_URL").unwrap(); - let cas_url = format!( - "{}/v{}/objects/{}/{}", - base_url, - crate::services::cas::CAS_VERSION, - &version_hash[..2], - &version_hash[2..] 
- ); + // Use common CAS URL building + let cas_url = crate::common::cas::build_cas_url(&version_hash); return Ok(Some(LoaderVersion { id: loader_version_full, @@ -490,64 +465,21 @@ pub async fn retrieve_data( Vec::new() }; - // Merge new versions with old ones: keep old versions + add/update new ones - let mut final_versions = old_manifest_versions; - - for new_version in new_versions { - // Find if this Minecraft version already exists - if let Some(existing) = final_versions.iter_mut().find(|v| v.id == new_version.id) { - // Merge loaders: keep old loaders + add/update new ones - for new_loader in new_version.loaders { - if let Some(existing_loader) = existing.loaders.iter_mut().find(|l| l.id == new_loader.id) { - // Update existing loader - let loader_id = new_loader.id.clone(); - *existing_loader = new_loader; - info!("✅ NeoForge - Updated loader: {}/{}", existing.id, loader_id); - } else { - // Add new loader - info!("✅ NeoForge - Added new loader: {}/{}", existing.id, new_loader.id); - existing.loaders.push(new_loader); - } - } - } else { - // Add new Minecraft version - info!("✅ NeoForge - Added new Minecraft version: {}", new_version.id); - final_versions.push(new_version); - } - } + // Use common version merging logic + let mut final_versions = merge_loader_versions( + old_manifest_versions, + new_versions, + "NeoForge" + ); + + // Use common sorting utilities + sort_by_minecraft_order(&mut final_versions, minecraft_versions); - // Sort by Minecraft version order - final_versions.sort_by(|x, y| { - minecraft_versions - .versions - .iter() - .position(|z| x.id == z.id) - .unwrap_or_default() - .cmp( - &minecraft_versions - .versions - .iter() - .position(|z| y.id == z.id) - .unwrap_or_default(), - ) - }); - - // Sort loaders within each version + // Sort loaders within each version using metadata order for version in &mut final_versions { - let loader_versions = maven_metadata.get(&version.id); - if let Some(loader_versions) = loader_versions { - 
version.loaders.sort_by(|x, y| { - loader_versions - .iter() - .position(|z| y.id == z.0) - .unwrap_or_default() - .cmp( - &loader_versions - .iter() - .position(|z| x.id == z.0) - .unwrap_or_default(), - ) - }); + if let Some(loader_versions) = maven_metadata.get(&version.id) { + let loader_order: Vec = loader_versions.iter().map(|(id, _)| id.clone()).collect(); + sort_loaders_by_metadata(version, &loader_order); } } @@ -604,8 +536,8 @@ pub async fn fetch_maven_metadata( let mut map: HashMap> = HashMap::new(); for value in forge_values.versioning.versions.version { - let is_snapshot = value.contains('w') || - value.contains("-pre") || + let is_snapshot = value.contains('w') || + value.contains("-pre") || value.contains("-rc"); if is_snapshot { @@ -623,8 +555,8 @@ pub async fn fetch_maven_metadata( } for value in neo_values.versioning.versions.version { - let is_snapshot = value.contains('w') || - value.contains("-pre") || + let is_snapshot = value.contains('w') || + value.contains("-pre") || value.contains("-rc"); if is_snapshot { @@ -653,16 +585,3 @@ pub async fn fetch_maven_metadata( Ok(map) } - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -struct ForgeInstallerProfileV2 { - pub profile: String, - pub version: String, - pub json: String, - pub path: Option, - pub minecraft: String, - pub data: HashMap, - pub libraries: Vec, - pub processors: Vec, -} diff --git a/daedalus_client/src/neoforge/types.rs b/daedalus_client/src/neoforge/types.rs new file mode 100644 index 0000000..91a6928 --- /dev/null +++ b/daedalus_client/src/neoforge/types.rs @@ -0,0 +1,5 @@ +//! 
Type definitions for NeoForge loader processing + +// NeoForge uses the same installer profile format as Forge V2+ +// Re-export from forge module to avoid duplication +pub use crate::forge::types::ForgeInstallerProfileV2 as NeoForgeInstallerProfile; diff --git a/daedalus_client/src/services/cas.rs b/daedalus_client/src/services/cas.rs index a193ff7..4b303c3 100644 --- a/daedalus_client/src/services/cas.rs +++ b/daedalus_client/src/services/cas.rs @@ -214,7 +214,7 @@ impl ManifestBuilder { /// * `hash` - SHA256 hash of the version's content /// * `size` - Size of the content in bytes /// * `release_time` - When this version was originally released - #[instrument(skip(self))] + #[instrument(skip(self), level = "debug")] pub fn add_version(&self, loader: &str, version_id: String, hash: String, size: u64, release_time: DateTime) { // Get or create the version map for this loader let loader_map = self @@ -224,8 +224,6 @@ impl ManifestBuilder { // Add the version entry loader_map.insert(version_id, (hash, size, release_time)); - - info!(loader = %loader, "Added version to manifest builder"); } /// Set custom versions JSON for a loader (complex mode) @@ -258,10 +256,9 @@ impl ManifestBuilder { /// ]); /// builder.set_loader_versions("minecraft", minecraft_versions); /// ``` - #[instrument(skip(self, versions))] + #[instrument(skip(self, versions), level = "debug")] pub fn set_loader_versions(&self, loader: &str, versions: serde_json::Value) { self.custom_versions.insert(loader.to_string(), versions); - info!(loader = %loader, "Set custom versions JSON for loader"); } /// Build a loader manifest from the tracked versions @@ -352,58 +349,6 @@ impl ManifestBuilder { pub fn loader_count(&self) -> usize { self.versions.len() } - - /// Load old manifest data for comparison - /// - /// This populates the builder with version hashes from an existing manifest, - /// allowing us to detect which versions have changed. 
- /// - /// # Arguments - /// - /// * `manifest` - The old loader manifest to load - #[allow(dead_code)] - pub fn load_old_manifest(&self, manifest: &LoaderManifest) { - let loader_map = self - .versions - .entry(manifest.loader.clone()) - .or_default(); - - // Try to deserialize versions as Vec - // This works for simple loaders (forge, neoforge) that use the standard schema - // For complex loaders (minecraft), this method won't be used - if let Ok(entries) = serde_json::from_value::>(manifest.versions.clone()) { - for entry in entries { - loader_map.insert(entry.id.clone(), (entry.hash.clone(), entry.size, entry.release_time)); - } - } - } - - /// Check if a version's content hash has changed compared to the old manifest - /// - /// Returns true if: - /// - The version doesn't exist in old data (new version) - /// - The version exists but hash is different (content changed) - /// - /// Returns false if: - /// - The version exists with the same hash (no changes) - /// - /// # Arguments - /// - /// * `loader` - Loader name (e.g., "minecraft", "forge") - /// * `version_id` - Version identifier - /// * `new_hash` - New content hash to compare - #[allow(dead_code)] - pub fn has_version_changed(&self, loader: &str, version_id: &str, new_hash: &str) -> bool { - if let Some(loader_map) = self.versions.get(loader) { - if let Some(entry) = loader_map.get(version_id) { - // Version exists, check if hash changed - let (old_hash, _, _) = entry.value(); - return old_hash.as_str() != new_hash; - } - } - // Version doesn't exist, so it's new - true - } } impl Default for ManifestBuilder { From 7a0755d10e96a04b16c77b0bc51b21f1d82fab7d Mon Sep 17 00:00:00 2001 From: Davide Date: Sat, 18 Oct 2025 15:51:09 +0200 Subject: [PATCH 7/7] Bumps CAS version to 5.0.0 Increments the CAS version to 5.0.0. Optimizes processing of Fabric/Quilt mods by only downloading intermediary libraries per game version. 
--- daedalus/Cargo.toml | 2 +- daedalus_client/src/services/cas.rs | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/daedalus/Cargo.toml b/daedalus/Cargo.toml index a8ceb17..d779d63 100644 --- a/daedalus/Cargo.toml +++ b/daedalus/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "daedalus" -version = "0.1.21" +version = "5.0.0" authors = ["Jai A "] edition = "2024" license = "MIT" diff --git a/daedalus_client/src/services/cas.rs b/daedalus_client/src/services/cas.rs index 4b303c3..7fe9056 100644 --- a/daedalus_client/src/services/cas.rs +++ b/daedalus_client/src/services/cas.rs @@ -8,7 +8,11 @@ use tracing::{info, instrument}; /// /// This is the single version entrypoint for all metadata (minecraft, forge, fabric, quilt, neoforge). /// Old versions had individual versioning per loader, but v3+ uses a unified version. -pub const CAS_VERSION: u32 = 4; +/// +/// ## Version History +/// - v4: Previous version +/// - v5: Optimized Fabric/Quilt processing - only intermediary libraries are downloaded per game version +pub const CAS_VERSION: u32 = 5; /// Content-Addressable Storage (CAS) system ///