From c19a1077e85334a3e5ba885a60b03d76409a2b2e Mon Sep 17 00:00:00 2001 From: bigfoot547 Date: Sat, 1 Feb 2025 23:06:37 -0600 Subject: restructure project --- Cargo.lock | 62 +-- Cargo.toml | 33 +- ozone-cli/Cargo.toml | 2 +- ozone-cli/src/main.rs | 6 +- ozone-helpers/Cargo.toml | 9 + ozone-helpers/src/lib.rs | 8 + ozone/Cargo.toml | 30 ++ ozone/src/assets.rs | 86 ++++ ozone/src/auth.rs | 336 +++++++++++++++ ozone/src/auth/mcservices.rs | 92 +++++ ozone/src/auth/msa.rs | 170 ++++++++ ozone/src/auth/types.rs | 130 ++++++ ozone/src/auth/types/property_map.rs | 61 +++ ozone/src/launcher.rs | 765 +++++++++++++++++++++++++++++++++++ ozone/src/launcher/assets.rs | 322 +++++++++++++++ ozone/src/launcher/constants.rs | 18 + ozone/src/launcher/download.rs | 267 ++++++++++++ ozone/src/launcher/extract.rs | 136 +++++++ ozone/src/launcher/jre.rs | 330 +++++++++++++++ ozone/src/launcher/jre/arch.rs | 45 +++ ozone/src/launcher/jre/download.rs | 195 +++++++++ ozone/src/launcher/jre/manifest.rs | 65 +++ ozone/src/launcher/rules.rs | 114 ++++++ ozone/src/launcher/runner.rs | 222 ++++++++++ ozone/src/launcher/settings.rs | 232 +++++++++++ ozone/src/launcher/strsub.rs | 192 +++++++++ ozone/src/launcher/version.rs | 398 ++++++++++++++++++ ozone/src/lib.rs | 6 + ozone/src/util.rs | 334 +++++++++++++++ ozone/src/util/progress.rs | 3 + ozone/src/version.rs | 489 ++++++++++++++++++++++ ozone/src/version/manifest.rs | 91 +++++ src/assets.rs | 86 ---- src/auth.rs | 336 --------------- src/auth/mcservices.rs | 92 ----- src/auth/msa.rs | 170 -------- src/auth/types.rs | 130 ------ src/auth/types/property_map.rs | 61 --- src/launcher.rs | 765 ----------------------------------- src/launcher/assets.rs | 322 --------------- src/launcher/constants.rs | 18 - src/launcher/download.rs | 267 ------------ src/launcher/extract.rs | 136 ------- src/launcher/jre.rs | 330 --------------- src/launcher/jre/arch.rs | 45 --- src/launcher/jre/download.rs | 195 --------- src/launcher/jre/manifest.rs | 65 --- src/launcher/rules.rs | 114 ------ src/launcher/runner.rs | 222 ---------- src/launcher/settings.rs | 232 ----------- src/launcher/strsub.rs | 192 --------- src/launcher/version.rs | 398 ------------------ src/lib.rs | 5 - src/util.rs | 334 --------------- src/util/progress.rs | 3 - src/version.rs | 489 ---------------------- src/version/manifest.rs | 91 ----- 57 files changed, 5184 insertions(+), 5163 deletions(-) create mode 100644 ozone-helpers/Cargo.toml create mode 100644 ozone-helpers/src/lib.rs create mode 100644 ozone/Cargo.toml create mode 100644 ozone/src/assets.rs create mode 100644 ozone/src/auth.rs create mode 100644 ozone/src/auth/mcservices.rs create mode 100644 ozone/src/auth/msa.rs create mode 100644 ozone/src/auth/types.rs create mode 100644 ozone/src/auth/types/property_map.rs create mode 100644 ozone/src/launcher.rs create mode 100644 ozone/src/launcher/assets.rs create mode 100644 ozone/src/launcher/constants.rs create mode 100644 ozone/src/launcher/download.rs create mode 100644 ozone/src/launcher/extract.rs create mode 100644 ozone/src/launcher/jre.rs create mode 100644 ozone/src/launcher/jre/arch.rs create mode 100644 ozone/src/launcher/jre/download.rs create mode 100644 ozone/src/launcher/jre/manifest.rs create mode 100644 ozone/src/launcher/rules.rs create mode 100644 ozone/src/launcher/runner.rs create mode 100644 ozone/src/launcher/settings.rs create mode 100644 ozone/src/launcher/strsub.rs create mode 100644 ozone/src/launcher/version.rs create mode 100644 ozone/src/lib.rs create mode 100644 
ozone/src/util.rs create mode 100644 ozone/src/util/progress.rs create mode 100644 ozone/src/version.rs create mode 100644 ozone/src/version/manifest.rs delete mode 100644 src/assets.rs delete mode 100644 src/auth.rs delete mode 100644 src/auth/mcservices.rs delete mode 100644 src/auth/msa.rs delete mode 100644 src/auth/types.rs delete mode 100644 src/auth/types/property_map.rs delete mode 100644 src/launcher.rs delete mode 100644 src/launcher/assets.rs delete mode 100644 src/launcher/constants.rs delete mode 100644 src/launcher/download.rs delete mode 100644 src/launcher/extract.rs delete mode 100644 src/launcher/jre.rs delete mode 100644 src/launcher/jre/arch.rs delete mode 100644 src/launcher/jre/download.rs delete mode 100644 src/launcher/jre/manifest.rs delete mode 100644 src/launcher/rules.rs delete mode 100644 src/launcher/runner.rs delete mode 100644 src/launcher/settings.rs delete mode 100644 src/launcher/strsub.rs delete mode 100644 src/launcher/version.rs delete mode 100644 src/lib.rs delete mode 100644 src/util.rs delete mode 100644 src/util/progress.rs delete mode 100644 src/version.rs delete mode 100644 src/version/manifest.rs diff --git a/Cargo.lock b/Cargo.lock index 3284e76..fd875c5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2975,34 +2975,6 @@ dependencies = [ "libc", ] -[[package]] -name = "o3launcher" -version = "0.1.0" -dependencies = [ - "cfg-if", - "chrono", - "const_format", - "futures", - "indexmap", - "lazy_static", - "log", - "lzma-rs", - "multimap", - "oauth2", - "regex", - "reqwest", - "serde", - "serde_json", - "sha1_smol", - "simple_logger", - "sysinfo", - "tokio", - "tokio-stream", - "uuid", - "walkdir", - "zip", -] - [[package]] name = "oauth2" version = "5.0.0" @@ -3400,17 +3372,49 @@ dependencies = [ "ttf-parser 0.25.1", ] +[[package]] +name = "ozone" +version = "0.1.0" +dependencies = [ + "cfg-if", + "chrono", + "const_format", + "futures", + "indexmap", + "lazy_static", + "log", + "lzma-rs", + "multimap", + "oauth2", + "regex", + "reqwest", + "serde", + "serde_json", + "sha1_smol", + "simple_logger", + "sysinfo", + "tokio", + "tokio-stream", + "uuid", + "walkdir", + "zip", +] + [[package]] name = "ozone-cli" version = "0.1.0" dependencies = [ "log", - "o3launcher", + "ozone", "simple_logger", "sysinfo", "tokio", ] +[[package]] +name = "ozone-helpers" +version = "0.1.0" + [[package]] name = "ozone-ui" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 6b34197..5360866 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,33 +1,2 @@ -[package] -name = "o3launcher" -version = "0.1.0" -edition = "2021" - -[dependencies] -cfg-if = "1.0.0" -chrono = { version = "0.4.39", default-features = false, features = ["std", "alloc", "clock", "now", "serde"] } -const_format = "0.2.34" -futures = "0.3.31" -indexmap = { version = "2.7.1", features = ["serde"] } -lazy_static = "1.5.0" -log = "0.4.22" -lzma-rs = { version = "0.3.0", features = ["stream"] } -multimap = { version = "0.10.0", features = ["serde"] } -oauth2 = "5.0.0" -regex = "1.11.1" -reqwest = { version = "0.12.12", features = ["json", "stream"] } -serde = { version = "1.0.216", features = ["derive"] } -serde_json = "1.0.133" -sha1_smol = { version = "1.0.1", features = ["alloc", "std", "serde"] } -sysinfo = { version = "0.33.1", features = ["system", "multithread"] } -tokio = { version = "1.42.0", features = ["fs", "io-util", "sync", "rt", "macros"] } -tokio-stream = { version = "0.1.17", features = ["fs"] } -uuid = { version = "1.12.1", features = ["v4", "serde"] } -walkdir = "2.5.0" -zip = { version = 
"2.2.2", default-features = false, features = ["bzip2", "deflate", "deflate64", "lzma", "xz"] } - -[dev-dependencies] -simple_logger = "5.0.0" - [workspace] -members = [ "ozone-cli", "ozone-ui" ] +members = [ "ozone", "ozone-cli", "ozone-helpers", "ozone-ui" ] diff --git a/ozone-cli/Cargo.toml b/ozone-cli/Cargo.toml index bdcec85..61b7808 100644 --- a/ozone-cli/Cargo.toml +++ b/ozone-cli/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] sysinfo = { version = "0.33.1", features = ["system", "multithread"] } -o3launcher = { path = ".." } +ozone = { path = "../ozone" } tokio = { version = "1.43.0", features = ["rt", "rt-multi-thread", "macros"] } simple_logger = { version = "5.0.0", features = ["colors"] } log = "0.4.25" diff --git a/ozone-cli/src/main.rs b/ozone-cli/src/main.rs index e36867a..a27cc98 100644 --- a/ozone-cli/src/main.rs +++ b/ozone-cli/src/main.rs @@ -2,7 +2,7 @@ use std::env::consts::{ARCH, OS}; use std::error::Error; use log::{error, info}; use sysinfo::System; -use o3launcher::launcher::{Launcher, Settings}; +use ozone::launcher::{Launcher, Settings}; #[tokio::main] async fn main() -> Result<(), Box> { @@ -26,8 +26,8 @@ async fn main() -> Result<(), Box> { dbg!(&launch); info!("ok"); - - o3launcher::launcher::run_the_game(&launch)?; + + ozone::launcher::run_the_game(&launch)?; Ok(()) } diff --git a/ozone-helpers/Cargo.toml b/ozone-helpers/Cargo.toml new file mode 100644 index 0000000..d1d4c90 --- /dev/null +++ b/ozone-helpers/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "ozone-helpers" +version = "0.1.0" +edition = "2021" + +[lib] +proc-macro = true + +[dependencies] diff --git a/ozone-helpers/src/lib.rs b/ozone-helpers/src/lib.rs new file mode 100644 index 0000000..4412282 --- /dev/null +++ b/ozone-helpers/src/lib.rs @@ -0,0 +1,8 @@ +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro_attribute] +pub fn ozone_special(attr: TokenStream, item: TokenStream) -> TokenStream { + todo!() +} \ No newline at end of file diff --git a/ozone/Cargo.toml b/ozone/Cargo.toml new file mode 100644 index 0000000..cd2f852 --- /dev/null +++ b/ozone/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "ozone" +version = "0.1.0" +edition = "2021" + +[dependencies] +cfg-if = "1.0.0" +chrono = { version = "0.4.39", default-features = false, features = ["std", "alloc", "clock", "now", "serde"] } +const_format = "0.2.34" +futures = "0.3.31" +indexmap = { version = "2.7.1", features = ["serde"] } +lazy_static = "1.5.0" +log = "0.4.22" +lzma-rs = { version = "0.3.0", features = ["stream"] } +multimap = { version = "0.10.0", features = ["serde"] } +oauth2 = "5.0.0" +regex = "1.11.1" +reqwest = { version = "0.12.12", features = ["json", "stream"] } +serde = { version = "1.0.216", features = ["derive"] } +serde_json = "1.0.133" +sha1_smol = { version = "1.0.1", features = ["alloc", "std", "serde"] } +sysinfo = { version = "0.33.1", features = ["system", "multithread"] } +tokio = { version = "1.42.0", features = ["fs", "io-util", "sync", "rt", "macros"] } +tokio-stream = { version = "0.1.17", features = ["fs"] } +uuid = { version = "1.12.1", features = ["v4", "serde"] } +walkdir = "2.5.0" +zip = { version = "2.2.2", default-features = false, features = ["bzip2", "deflate", "deflate64", "lzma", "xz"] } + +[dev-dependencies] +simple_logger = "5.0.0" diff --git a/ozone/src/assets.rs b/ozone/src/assets.rs new file mode 100644 index 0000000..15087c9 --- /dev/null +++ b/ozone/src/assets.rs @@ -0,0 +1,86 @@ +use std::collections::HashMap; +use std::fmt::Formatter; +use 
std::marker::PhantomData; +use serde::{Deserialize, Deserializer}; +use serde::de::{MapAccess, Visitor}; +use sha1_smol::Digest; + +#[derive(Debug, Deserialize)] +pub struct Asset { + #[serde(skip)] + pub name: String, + pub hash: Digest, + pub size: usize +} + +#[derive(Debug, Deserialize)] +pub struct AssetIndex { + #[serde(rename = "virtual", default)] + pub virtual_assets: bool, + #[serde(default)] + pub map_to_resources: bool, + + #[serde(deserialize_with = "deserialize_assets")] + pub objects: HashMap +} + +trait SetName { + fn set_name(&mut self, name: String); +} + +impl SetName for Asset { + fn set_name(&mut self, name: String) { + self.name = name; + } +} + +fn deserialize_assets<'de, D, T>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, + T: SetName + Deserialize<'de> +{ + struct AssetVisitor(PhantomData); + + impl<'de, T> Visitor<'de> for AssetVisitor + where + T: SetName + Deserialize<'de> + { + type Value = HashMap; + + fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + formatter.write_str("asset objects map") + } + + fn visit_map(self, mut map: A) -> Result + where + A: MapAccess<'de>, + { + let mut out = HashMap::new(); + + while let Some((key, mut asset)) = map.next_entry::()? { + asset.set_name(key.clone()); + out.insert(key, asset); + } + + Ok(out) + } + } + + deserializer.deserialize_any(AssetVisitor(PhantomData)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_it() { + dbg!(serde_json::from_str::(r#"{ + "virtual": true, + "objects": { + "object1": { "hash": "0d000710b71ca9aafabd8f587768431d0b560b32", "size": 100 }, + "object2/abc": { "hash": "0e000710b71ca9aafabd8f587768431d0b560b32", "size": 10000 } + } + }"#).unwrap()); + } +} diff --git a/ozone/src/auth.rs b/ozone/src/auth.rs new file mode 100644 index 0000000..057cceb --- /dev/null +++ b/ozone/src/auth.rs @@ -0,0 +1,336 @@ +mod types; +mod msa; +mod mcservices; + +use std::error::Error; +use std::fmt::{Display, Formatter}; +use std::future::Future; +use std::time::{Duration, SystemTime}; +use chrono::{DateTime, TimeDelta, Utc}; +use log::debug; +use oauth2::{AccessToken, DeviceAuthorizationUrl, DeviceCodeErrorResponse, EndpointNotSet, EndpointSet, HttpClientError, RequestTokenError, Scope, StandardDeviceAuthorizationResponse, StandardRevocableToken, TokenResponse, TokenUrl}; +use oauth2::basic::{BasicErrorResponse, BasicErrorResponseType, BasicRevocationErrorResponse, BasicTokenIntrospectionResponse, BasicTokenResponse}; +use reqwest::{IntoUrl, Method, RequestBuilder}; +pub use types::*; +use crate::auth::msa::{XSTS_RP_MINECRAFT_SERVICES, XSTS_RP_XBOX_LIVE}; +use crate::util::USER_AGENT; + +#[derive(Debug)] +pub enum AuthError { + // An unexpected error happened while performing a request + Request { what: &'static str, error: reqwest::Error }, + OAuthRequestToken { what: &'static str, error: RequestTokenError, BasicErrorResponse> }, + OAuthRequestDeviceCode { what: &'static str, error: RequestTokenError, DeviceCodeErrorResponse> }, + + // Some internal auth error (unrecoverable) + Internal(String), + + // Device code auth was cancelled + Cancel(Option>), + + // Device code auth timed out + Timeout, + + // Requires interactive authentication + RequireInteractive(&'static str), + + // XSTS error + AuthXError { what: &'static str, x_error: u64, message: Option }, + + // You don't own the game! 
+ EntitlementError +} + +impl Display for AuthError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + AuthError::Request { what, error } => write!(f, "auth request error ({}): {}", what, error), + AuthError::OAuthRequestToken { what, error } => write!(f, "oauth error requesting token ({what}): {error}"), + AuthError::OAuthRequestDeviceCode { what, error } => write!(f, "oauth error with device code ({what}): {error}"), + AuthError::Internal(msg) => write!(f, "internal auth error: {}", msg), + AuthError::Cancel(Some(error)) => write!(f, "operation cancelled: {error}"), + AuthError::Cancel(None) => f.write_str("operation cancelled"), + AuthError::Timeout => f.write_str("interactive authentication timed out"), + AuthError::RequireInteractive(why) => write!(f, "user must log in interactively: {why}"), + AuthError::AuthXError { what, x_error, message } => write!(f, "XSTS error: {what} ({x_error} -> {})", message.as_ref().map_or("", |s| s.as_str())), + AuthError::EntitlementError => f.write_str("no minecraft entitlement (do you own the game?)") + } + } +} + +impl Error for AuthError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + AuthError::Request { error, .. } => Some(error), + AuthError::OAuthRequestToken { error, .. } => Some(error), + AuthError::OAuthRequestDeviceCode { error, .. } => Some(error), + AuthError::Cancel(Some(error)) => Some(error.as_ref()), + _ => None + } + } +} + +impl Token { + fn is_expired(&self, now: DateTime) -> bool { + self.expire.is_some_and(|exp| now >= exp) + } +} + +macro_rules! create_oauth_client { + ($is_azure_client_id:expr, $client_id:expr) => { + oauth2::Client::new($client_id) + .set_token_uri(TokenUrl::new(if $is_azure_client_id { AZURE_TOKEN_URL.into() } else { NON_AZURE_TOKEN_URL.into() }).expect("hardcoded url")) + .set_device_authorization_url(DeviceAuthorizationUrl::new(if $is_azure_client_id { AZURE_DEVICE_CODE_URL.into() } else { NON_AZURE_DEVICE_CODE_URL.into() }).expect("hardcoded url")) + as oauth2::Client + } +} + +const AZURE_TOKEN_URL: &str = "https://login.microsoftonline.com/consumers/oauth2/v2.0/token"; +const AZURE_DEVICE_CODE_URL: &str = "https://login.microsoftonline.com/consumers/oauth2/v2.0/devicecode"; +const NON_AZURE_TOKEN_URL: &str = "https://login.live.com/oauth20_token.srf"; +const NON_AZURE_DEVICE_CODE_URL: &str = "https://login.live.com/oauth20_connect.srf"; + +const AZURE_LOGIN_SCOPES: &[&str] = ["XboxLive.signin", "offline_access"].as_slice(); +const NON_AZURE_LOGIN_SCOPES: &[&str] = ["service::user.auth.xboxlive.com::MBI_SSL"].as_slice(); + +fn build_json_request(client: &reqwest::Client, url: impl IntoUrl, method: Method) -> RequestBuilder { + client.request(method, url) + .header(reqwest::header::USER_AGENT, USER_AGENT) + .header(reqwest::header::ACCEPT, "application/json") +} + +impl MsaUser { + pub fn create_client() -> reqwest::Client { + reqwest::ClientBuilder::new() + .redirect(reqwest::redirect::Policy::none()) + .build().expect("building client should succeed") + } + + fn scopes_iter(&self) -> impl Iterator { + let to_scope = |f: &&str| Scope::new(String::from(*f)); + + if self.is_azure_client_id { + AZURE_LOGIN_SCOPES.iter().map(to_scope) + } else { + NON_AZURE_LOGIN_SCOPES.iter().map(to_scope) + } + } + + // uses an access token from, for example, a device code grant logs into xbox live + async fn xbl_login(&mut self, client: &reqwest::Client, token: &AccessToken) -> Result<(), AuthError> { + debug!("Logging into xbox live using access token"); + 
self.xbl_token = Some(msa::xbox_live_login(client, token, self.is_azure_client_id).await?); + + Ok(()) + } + + // logs into xbox live using a refresh token + // (panics if no refresh token present) + async fn xbl_login_refresh(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { + debug!("Using refresh token for XBL login"); + let oauth_client = create_oauth_client!(self.is_azure_client_id, self.client_id.clone()); + let refresh_token = self.refresh_token.as_ref().expect("refresh_access_token called with no refresh token"); + + let tokenres: BasicTokenResponse = oauth_client + .exchange_refresh_token(refresh_token) + .add_scopes(self.scopes_iter()) + .add_extra_param("response_type", "device_code") + .request_async(client) + .await.map_err(|e| AuthError::OAuthRequestToken { what: "refresh", error: e })?; + + self.refresh_token = tokenres.refresh_token().cloned(); + + self.xbl_login(client, tokenres.access_token()).await + } + + pub async fn xbl_login_device(&mut self, client: &reqwest::Client, handle_device: D) -> Result<(), AuthError> + where + D: FnOnce(StandardDeviceAuthorizationResponse) -> DF, + DF: Future + { + debug!("Using device authorization for XBL login"); + let oauth_client = create_oauth_client!(self.is_azure_client_id, self.client_id.clone()); + let device_auth: StandardDeviceAuthorizationResponse = oauth_client.exchange_device_code() + .add_scopes(self.scopes_iter()) + .add_extra_param("response_type", "device_code") + .request_async(client) + .await.map_err(|e| AuthError::OAuthRequestToken { what: "device code", error: e })?; + + handle_device(device_auth.clone()).await; + + let tokenres = oauth_client.exchange_device_access_token(&device_auth) + .set_max_backoff_interval(Duration::from_secs(20u64)) + .request_async(client, tokio::time::sleep, None) + .await.map_err(|e| AuthError::OAuthRequestDeviceCode { what: "device access code", error: e })?; + + self.refresh_token = tokenres.refresh_token().cloned(); + + self.xbl_login(client, tokenres.access_token()).await + } + + // ensure we have an xbox live token for this person + // tasks for this function: + // - check if the XBL token is valid/not expired + // - if it is expired, try to use refresh token to get a new one + // - get rid of auth token if yeah + async fn ensure_xbl(&mut self, client: &reqwest::Client, now: DateTime) -> Result<(), AuthError> { + if self.xbl_token.as_ref().is_some_and(|tok| !tok.is_expired(now)) { + debug!("XBL token valid. Using it."); + return Ok(()) + } + + if self.refresh_token.is_none() { + return Err(AuthError::RequireInteractive("no refresh token")); + } + + debug!("XBL token expired. Trying to refresh it."); + self.xbl_login_refresh(client).await + .map_err(|e| match &e { + AuthError::OAuthRequestToken { error: RequestTokenError::ServerResponse(res), .. } => match res.error() { + BasicErrorResponseType::Extension(s) if s == "interaction_required" || s == "consent_required" => { + AuthError::RequireInteractive("msa requested interactive logon") + }, + _ => e + }, + _ => e + })?; + + self.mc_token = None; + + Ok(()) + } + + // function's tasks: + // - if the minecraft services token invalid/expired/missing, do the following + // - get minecraftservices xsts token + // - use minecraftservices to get mojang token with that xsts token + async fn ensure_mc_token(&mut self, client: &reqwest::Client, now: DateTime) -> Result<(), AuthError> { + if self.mc_token.as_ref().is_some_and(|tok| !tok.is_expired(now)) { + debug!("Mojang token valid. 
Using it."); + return Ok(()) + } + + debug!("Mojang token has expired. Must log in again."); + let xbl_token = self.xbl_token.as_ref().expect("ensure_mc_token requires xbl token").value.as_str(); + let (user_hash, mc_xsts_tok) = match msa::xsts_request(client, xbl_token, XSTS_RP_MINECRAFT_SERVICES).await? { + msa::XSTSAuthResponse::Success(res) => { + let user_hash = res.get_user_hash() + .map_or(Err(AuthError::Internal("malformed response: no user hash".into())), |h| Ok(h.to_owned()))?; + (user_hash, res.into_token()) + }, + msa::XSTSAuthResponse::Error(e) => return Err(e.into()) + }; + + debug!("Got MinecraftServices XSTS, logging in."); + self.mc_token = Some(mcservices::login_with_xbox(client, mc_xsts_tok.as_str(), user_hash.as_str()).await?); + + Ok(()) + } + + async fn load_xbox_info(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { + debug!("Loading Xbox info..."); + let xbl_token = self.xbl_token.as_ref().expect("xbl token missing").value.as_str(); + + let res = match msa::xsts_request(client, xbl_token, XSTS_RP_XBOX_LIVE).await? { + msa::XSTSAuthResponse::Success(res) => res, + msa::XSTSAuthResponse::Error(e) => return Err(e.into()) + }; + + let Some(xuid) = res.get_xuid() else { + return Err(AuthError::Internal("missing xuid for user".into())); + }; + + self.xuid = Some(xuid.to_owned()); + self.gamertag = res.get_gamertag().map(|s| s.to_owned()); + + debug!("Xbox info loaded: (xuid {xuid}, gamertag {})", res.get_gamertag().unwrap_or("")); + + Ok(()) + } + + async fn load_profile(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { + self.load_xbox_info(client).await?; + + let mc_token = self.mc_token.as_ref().expect("minecraft token missing").value.as_str(); + + debug!("Checking if you own the game..."); + if !mcservices::owns_the_game(client, mc_token).await? 
{ + return Err(AuthError::EntitlementError); + } + + debug!("Getting your profile info..."); + let player_info = mcservices::get_player_info(client, mc_token).await?; + let player_profile = mcservices::get_player_profile(client, player_info.id).await + .map_err(|e| AuthError::Request { what: "looking up profile", error: e })?; + + self.player_info = Some(player_info); + self.player_profile = Some(player_profile); + + Ok(()) + } + + pub async fn log_in_silent(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { + let now: DateTime = DateTime::from(SystemTime::now()) + TimeDelta::hours(12); + + self.ensure_xbl(client, now).await?; + self.ensure_mc_token(client, now).await?; + self.load_profile(client).await?; + + Ok(()) + } +} + +#[cfg(test)] +mod test { + use oauth2::ClientId; + use super::*; + + #[tokio::test] + async fn abc() { + simple_logger::SimpleLogger::new().with_colors(true).with_level(log::LevelFilter::Trace).init().unwrap(); + + let mut user = match tokio::fs::read_to_string("test_stuff/test.json").await { + Ok(s) => serde_json::from_str::(&s).unwrap(), + Err(e) if e.kind() == tokio::io::ErrorKind::NotFound => { + MsaUser { + player_profile: None, + xuid: None, + gamertag: None, + player_info: None, + //client_id: ClientId::new("00000000402b5328".into()), + client_id: ClientId::new("60b6cc54-fc07-4bab-bca9-cbe9aa713c80".into()), + is_azure_client_id: true, + mc_token: None, + xbl_token: None, + refresh_token: None + } + }, + Err(e) => panic!("i/o error: {}", e) + }; + + let client = MsaUser::create_client(); + + loop { + match user.log_in_silent(&client).await { + Ok(_) => break, + Err(AuthError::RequireInteractive(s)) => { + debug!("Requires interactive auth: {s}") + }, + Err(e) => { + panic!("{}", e); + } + } + + user.xbl_login_device(&client, |d| async move { + let d = dbg!(d); + debug!("User code: {}", d.user_code().secret()); + }).await.unwrap() + } + + debug!("User: {user:?}"); + + let user_str = serde_json::to_string_pretty(&user).unwrap(); + tokio::fs::write("test_stuff/test.json", user_str.as_str()).await.unwrap(); + } +} diff --git a/ozone/src/auth/mcservices.rs b/ozone/src/auth/mcservices.rs new file mode 100644 index 0000000..45ef795 --- /dev/null +++ b/ozone/src/auth/mcservices.rs @@ -0,0 +1,92 @@ +use std::time::{Duration, SystemTime}; +use chrono::{DateTime, Utc}; +use reqwest::{IntoUrl, Method, RequestBuilder}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; +use super::{AuthError, MinecraftPlayerInfo, PlayerProfile}; +use super::types::Token; + +const MINECRAFT_LOGIN: &str = "https://api.minecraftservices.com/authentication/login_with_xbox"; +const MINECRAFT_ENTITLEMENTS: &str = "https://api.minecraftservices.com/entitlements"; +const MINECRAFT_PROFILE: &str = "https://api.minecraftservices.com/minecraft/profile"; + +const MINECRAFT_SESSION_PROFILE: &str = "https://sessionserver.mojang.com/session/minecraft/profile/"; + +fn build_authenticated(client: &reqwest::Client, url: impl IntoUrl, method: Method, mc_token: &str) -> RequestBuilder { + super::build_json_request(client, url, method) + .bearer_auth(mc_token) +} + +#[derive(Serialize, Debug)] +#[serde(rename_all = "camelCase")] +struct MinecraftXboxLoginRequest<'a> { + identity_token: &'a str, + ensure_legacy_enabled: bool +} + +#[derive(Deserialize, Debug)] +struct MinecraftXboxLoginResponse { + access_token: String, + expires_in: u64 +} + +pub async fn login_with_xbox(client: &reqwest::Client, xsts_token: &str, user_hash: &str) -> Result { + let tok = format!("XBL3.0 
x={user_hash};{xsts_token}"); + let req = MinecraftXboxLoginRequest { + identity_token: tok.as_str(), + ensure_legacy_enabled: true + }; + + let res: MinecraftXboxLoginResponse = super::build_json_request(client, MINECRAFT_LOGIN, Method::POST) + .json(&req).send().await + .and_then(|r| r.error_for_status()) + .map_err(|e| AuthError::Request { what: "minecraft xbox login", error: e })? + .json().await + .map_err(|e| AuthError::Request { what: "minecraft xbox login (decode)", error: e })?; + + let now: DateTime = SystemTime::now().into(); + + Ok(Token { + value: res.access_token, + expire: Some(now + Duration::from_secs(res.expires_in)) + }) +} + +#[derive(Deserialize, Debug)] +struct EntitlementItem { + name: String + // we don't care about the signature +} + +#[derive(Deserialize, Debug)] +struct EntitlementResponse { + #[serde(default)] + items: Vec +} + +pub async fn owns_the_game(client: &reqwest::Client, token: &str) -> Result { + let res: EntitlementResponse = build_authenticated(client, MINECRAFT_ENTITLEMENTS, Method::GET, token) + .send().await + .and_then(|r| r.error_for_status()) + .map_err(|e| AuthError::Request { what: "entitlements", error: e })? + .json().await + .map_err(|e| AuthError::Request { what: "entitlements (receive)", error: e})?; + + Ok(res.items.iter().any(|i| i.name == "game_minecraft" || i.name == "product_minecraft")) +} + +pub async fn get_player_info(client: &reqwest::Client, token: &str) -> Result { + build_authenticated(client, MINECRAFT_PROFILE, Method::GET, token) + .send().await + .and_then(|r| r.error_for_status()) + .map_err(|e| AuthError::Request { what: "player info", error: e })? + .json().await + .map_err(|e| AuthError::Request { what: "player info (receive)", error: e }) +} + +pub async fn get_player_profile(client: &reqwest::Client, uuid: Uuid) -> Result { + super::build_json_request(client, format!("{}{}?unsigned=false", MINECRAFT_SESSION_PROFILE, uuid.as_simple()), Method::GET) + .send().await + .and_then(|r| r.error_for_status())? 
+ .json().await +} diff --git a/ozone/src/auth/msa.rs b/ozone/src/auth/msa.rs new file mode 100644 index 0000000..add345c --- /dev/null +++ b/ozone/src/auth/msa.rs @@ -0,0 +1,170 @@ +use std::borrow::Cow; +use std::collections::HashMap; +use chrono::{DateTime, Utc}; +use log::debug; +use oauth2::AccessToken; +use reqwest::{Method}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; +use crate::auth::AuthError; +use crate::auth::types::Token; + +const XBOX_LIVE_AUTH: &str = "https://user.auth.xboxlive.com/user/authenticate"; +const XBOX_LIVE_XSTS: &str = "https://xsts.auth.xboxlive.com/xsts/authorize"; + +#[derive(Debug, Serialize)] +#[serde(rename_all = "PascalCase")] +struct XboxLiveAuthRequestProperties<'a> { + auth_method: &'a str, + site_name: &'a str, + rps_ticket: &'a str +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "PascalCase")] +struct XboxLiveAuthRequest<'a> { + properties: XboxLiveAuthRequestProperties<'a>, + relying_party: &'a str, + token_type: &'a str +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "PascalCase")] +struct XboxLiveAuthResponse { + token: String, + not_after: DateTime +} + +pub async fn xbox_live_login(client: &reqwest::Client, access_token: &AccessToken, azure: bool) -> Result { + debug!("MSA performing xbox live login ({azure})"); + + let ticket = match azure { + true => Cow::Owned(format!("d={}", access_token.secret())), + _ => Cow::Borrowed(access_token.secret().as_str()) + }; + + let request = XboxLiveAuthRequest { + properties: XboxLiveAuthRequestProperties { + auth_method: "RPS", + site_name: "user.auth.xboxlive.com", + rps_ticket: ticket.as_ref() + }, + relying_party: "http://auth.xboxlive.com", + token_type: "JWT" + }; + + let res: XboxLiveAuthResponse = super::build_json_request(client, XBOX_LIVE_AUTH, Method::POST).json(&request).send().await + .and_then(|r| r.error_for_status()) + .map_err(|e| AuthError::Request { what: "xbox live auth", error: e })? 
+ .json().await.map_err(|e| AuthError::Request { what: "xbox live auth (decode)", error: e })?; + + Ok(Token { + value: res.token, + expire: Some(res.not_after) + }) +} + +#[derive(Serialize, Debug)] +#[serde(rename_all = "PascalCase")] +struct XSTSAuthRequest<'a> { + properties: XSTSAuthRequestProperties<'a>, + relying_party: &'a str, + token_type: &'a str +} + +#[derive(Serialize, Debug)] +#[serde(rename_all = "PascalCase")] +struct XSTSAuthRequestProperties<'a> { + sandbox_id: &'a str, + user_tokens: &'a[&'a str] +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "PascalCase")] +pub(super) struct XSTSAuthSuccessResponse { + token: String, + #[serde(default)] + display_claims: XSTSAuthResponseDisplayClaims +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "PascalCase")] +pub(super) struct XSTSAuthErrorResponse { + x_err: u64, + message: Option +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "PascalCase", untagged)] +pub(super) enum XSTSAuthResponse { + Success(XSTSAuthSuccessResponse), + Error(XSTSAuthErrorResponse) +} + +#[derive(Deserialize, Debug, Default)] +pub(super) struct XSTSAuthResponseDisplayClaims { + xui: Vec> +} + +impl XSTSAuthSuccessResponse { + pub(super) fn into_token(self) -> String { + self.token + } + + fn get_display_claim(&self, name: &str) -> Option<&str> { + self.display_claims.xui.iter().find(|m| m.contains_key(name)).and_then(|f| f.get(name).map(|s| s.as_str())) + } + + pub(super) fn get_user_hash(&self) -> Option<&str> { + self.get_display_claim("uhs") + } + + pub(super) fn get_xuid(&self) -> Option<&str> { + self.get_display_claim("xid") + } + + pub(super) fn get_gamertag(&self) -> Option<&str> { + self.get_display_claim("gtg") + } +} + +#[allow(clippy::from_over_into)] +impl Into for XSTSAuthErrorResponse { + fn into(self) -> AuthError { + AuthError::AuthXError { + // some well-known error values + what: match self.x_err { + 2148916238u64 => "Microsoft account held by a minor outside of a family.", + 2148916233u64 => "Account is not on Xbox.", + _ => "Unknown error." + }, + x_error: self.x_err, + message: self.message + } + } +} + +pub(super) const XSTS_RP_MINECRAFT_SERVICES: &str = "rp://api.minecraftservices.com/"; +pub(super) const XSTS_RP_XBOX_LIVE: &str = "http://xboxlive.com"; + +pub async fn xsts_request(client: &reqwest::Client, xbl_token: &str, relying_party: &str) -> Result { + debug!("Performing XSTS auth {relying_party}"); + + let token_array = [xbl_token]; + let req = XSTSAuthRequest { + properties: XSTSAuthRequestProperties { + sandbox_id: "RETAIL", + user_tokens: token_array.as_slice() + }, + relying_party, + token_type: "JWT" + }; + + let res: XSTSAuthResponse = super::build_json_request(client, XBOX_LIVE_XSTS, Method::POST).json(&req).send().await + .and_then(|r| r.error_for_status()) + .map_err(|e| AuthError::Request { what: "xsts", error: e })? 
+ .json().await + .map_err(|e| AuthError::Request { what: "xsts (decode)", error: e })?; + + Ok(res) +} diff --git a/ozone/src/auth/types.rs b/ozone/src/auth/types.rs new file mode 100644 index 0000000..b9cdaad --- /dev/null +++ b/ozone/src/auth/types.rs @@ -0,0 +1,130 @@ +pub mod property_map; +pub use property_map::PropertyMap; + +use std::fmt::{Debug, Formatter}; +use chrono::{DateTime, Utc}; +use multimap::MultiMap; +use oauth2::RefreshToken; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Property { + pub name: String, + pub value: String, + + #[serde(skip_serializing_if = "Option::is_none")] + pub signature: Option +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct PlayerProfile { + #[serde(with = "uuid::serde::simple")] + pub id: Uuid, + pub name: String, + + #[serde(default, skip_serializing_if = "MultiMap::is_empty", with = "property_map")] + pub properties: PropertyMap +} + +#[derive(Serialize, Deserialize)] +pub(super) struct Token { + pub value: String, + + #[serde(skip_serializing_if = "Option::is_none")] + pub expire: Option> +} + +struct RedactedValue; +impl Debug for RedactedValue { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str("[redacted]") + } +} + +impl Debug for Token { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Token") + .field("value", &RedactedValue) + .field("expire", &self.expire) + .finish() + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "UPPERCASE")] +pub enum SkinState { + Active, + Inactive +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "UPPERCASE")] +pub enum SkinVariant { + Classic, + Slim +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkinInfo { + pub id: Uuid, + pub state: SkinState, + pub url: String, + pub texture_key: Option, + pub variant: Option, + pub alias: Option +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct MinecraftPlayerInfo { + #[serde(with = "uuid::serde::simple")] + pub id: Uuid, + pub name: String, + + #[serde(default)] + pub skins: Vec, + #[serde(default)] + pub capes: Vec, + + #[serde(default)] + pub demo: bool, + + #[serde(default)] + pub legacy: bool, + + // todo: profile actions (idk the format) +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct MsaUser { + #[serde(skip_serializing_if = "Option::is_none")] + pub player_profile: Option, + pub xuid: Option, + pub gamertag: Option, + + #[serde(skip)] // this information is transient + pub player_info: Option, + + pub(super) client_id: oauth2::ClientId, + + #[serde(default, skip_serializing_if = "std::ops::Not::not")] + pub(super) is_azure_client_id: bool, + + pub(super) mc_token: Option, + pub(super) xbl_token: Option, + pub(super) refresh_token: Option +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum User { + Dummy(PlayerProfile), + MSA(Box) +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct AuthenticationDatabase { + pub users: Vec +} diff --git a/ozone/src/auth/types/property_map.rs b/ozone/src/auth/types/property_map.rs new file mode 100644 index 0000000..ddfc9ce --- /dev/null +++ b/ozone/src/auth/types/property_map.rs @@ -0,0 +1,61 @@ +use std::fmt::Formatter; +use multimap::MultiMap; +use serde::de::{SeqAccess, Visitor}; +use serde::{Deserializer, Serializer}; +use crate::auth::Property; + +pub type PropertyMap = MultiMap; + +pub mod legacy { + use serde::Serializer; + use 
super::PropertyMap; + + pub fn serialize(value: &PropertyMap, serializer: S) -> Result + where S: Serializer + { + serializer.collect_map(value.iter_all() + .filter_map(|(k, v)| { + if v.is_empty() { + None + } else { + Some((k, v.iter().map(|p| &p.value).collect::>())) + } + })) + } +} + +pub fn serialize(value: &PropertyMap, serializer: S) -> Result +where + S: Serializer +{ + serializer.collect_seq(value.flat_iter().map(|(_, v)| v)) +} + +pub fn deserialize<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de> +{ + struct PropertyMapVisitor; + + impl<'de> Visitor<'de> for PropertyMapVisitor { + type Value = PropertyMap; + + fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + formatter.write_str("a property map") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + let mut map = MultiMap::new() as PropertyMap; + while let Some(prop) = seq.next_element::()? { + map.insert(prop.name.clone(), prop); + } + + Ok(map) + } + } + + deserializer.deserialize_seq(PropertyMapVisitor) +} diff --git a/ozone/src/launcher.rs b/ozone/src/launcher.rs new file mode 100644 index 0000000..2836db5 --- /dev/null +++ b/ozone/src/launcher.rs @@ -0,0 +1,765 @@ +mod constants; +mod version; +mod strsub; +mod download; +mod rules; +mod assets; +mod extract; +mod settings; +mod runner; +mod jre; + +use std::borrow::Cow; +use std::cmp::min; +use std::env::consts::{ARCH, OS}; +use std::error::Error; +use std::ffi::{OsStr, OsString}; +use std::fmt::{Display, Formatter}; +use std::io::ErrorKind; +use std::io::ErrorKind::AlreadyExists; +use std::path::{Component, Path, PathBuf}; +use std::{env, process}; +use std::env::JoinPathsError; +use std::time::{Instant, SystemTime, UNIX_EPOCH}; +use const_format::formatcp; +use futures::{StreamExt, TryStreamExt}; +use indexmap::IndexMap; +use log::{debug, info, trace, warn}; +use reqwest::Client; +use sysinfo::System; +use tokio::{fs, io}; +use tokio_stream::wrappers::ReadDirStream; +use download::{MultiDownloader, VerifiedDownload}; +use rules::{CompatCheck, IncompatibleError}; +use version::{VersionList, VersionResolveError, VersionResult}; +use crate::version::{Library, OSRestriction, OperatingSystem, DownloadType, LibraryExtractRule, FeatureMatcher, ClientLogging}; + +use assets::{AssetError, AssetRepository}; +use crate::util::{self, AsJavaPath}; + +pub use settings::*; +pub use runner::run_the_game; +pub use crate::util::{EnsureFileError, FileVerifyError, IntegrityError}; +use crate::assets::AssetIndex; +use runner::ArgumentType; +use strsub::SubFunc; +use crate::launcher::download::FileDownload; +use crate::launcher::jre::{JavaRuntimeError, JavaRuntimeRepository}; +use crate::launcher::version::VersionError; +use crate::version::manifest::VersionType; + +#[derive(Debug)] +pub enum LogConfigError { + UnknownType(String), + InvalidId(Option), + MissingURL, + IO{ what: &'static str, error: io::Error }, + Offline, + Download{ url: String, error: reqwest::Error }, + + Integrity(IntegrityError) +} + +impl Display for LogConfigError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + LogConfigError::UnknownType(log_type) => write!(f, "unknown log type {}", log_type), + LogConfigError::InvalidId(oid) => match oid { + Some(id) => write!(f, "invalid log config id: {}", id), + None => f.write_str("missing log config id") + }, + LogConfigError::MissingURL => f.write_str("missing log config download URL"), + LogConfigError::IO { what, error} => write!(f, "i/o error ({}): {}", what, error), + 
LogConfigError::Offline => f.write_str("launcher in offline mode"), + LogConfigError::Download { url, error } => write!(f, "failed to download log config ({}): {}", url, error), + LogConfigError::Integrity(e) => write!(f, "log config verify error: {}", e) + } + } +} + +impl Error for LogConfigError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + LogConfigError::IO { error, .. } => Some(error), + LogConfigError::Download {error, ..} => Some(error), + LogConfigError::Integrity(error) => Some(error), + _ => None + } + } +} + +struct SystemInfo { + os: OperatingSystem, + os_version: String, + arch: String +} + +struct LibraryRepository { + home: PathBuf, + natives: PathBuf +} + +pub struct Launcher { + online: bool, + home: PathBuf, + versions: VersionList, + + system_info: SystemInfo, + + libraries: LibraryRepository, + assets: AssetRepository, + java_runtimes: JavaRuntimeRepository +} + +#[derive(Debug)] +pub enum LaunchError { + UnknownInstance(String), + + // version resolution errors + VersionInit(VersionError), + UnknownVersion(String), + LoadVersion(VersionError), + ResolveVersion(VersionResolveError), + IncompatibleVersion(IncompatibleError), + MissingMainClass, + + // library errors + LibraryDirError(PathBuf, io::Error), + LibraryVerifyError(FileVerifyError), + LibraryDownloadError, + LibraryExtractError(extract::ZipExtractError), + LibraryClasspathError(JoinPathsError), + + // ensure file errors + EnsureFile(EnsureFileError), + IO { what: &'static str, error: io::Error }, + + // log errors + UnknownLogType(String), + InvalidLogId(Option), + + // asset errors + Assets(AssetError), + + // java runtime errors + ResolveJavaRuntime { what: &'static str, error: io::Error }, + MissingJavaRuntime, + JavaRuntimeRepo(JavaRuntimeError) +} + +impl Display for LaunchError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match &self { + LaunchError::UnknownInstance(inst) => write!(f, "unknown instance: {inst}"), + LaunchError::VersionInit(e) => write!(f, "initializing version: {e}"), + LaunchError::UnknownVersion(id) => write!(f, "unknown version id: {id}"), + LaunchError::LoadVersion(e) => write!(f, "error loading remote version: {e}"), + LaunchError::ResolveVersion(e) => write!(f, "error resolving remote version: {e}"), + LaunchError::IncompatibleVersion(e) => e.fmt(f), + LaunchError::MissingMainClass => f.write_str("main class not specified"), + LaunchError::LibraryDirError(path, e) => write!(f, "failed to create library directory {}: {}", path.display(), e), + LaunchError::LibraryVerifyError(e) => write!(f, "failed to verify library: {}", e), + LaunchError::LibraryDownloadError => f.write_str("library download failed (see above logs for details)"), // TODO: booo this sucks + LaunchError::LibraryExtractError(e) => write!(f, "library extract zip error: {e}"), + LaunchError::LibraryClasspathError(e) => write!(f, "error building classpath: {e}"), + LaunchError::IO { what, error } => write!(f, "i/o error ({}): {}", what, error), + LaunchError::EnsureFile(e) => e.fmt(f), + LaunchError::UnknownLogType(t) => write!(f, "unknown log type: {}", t), + LaunchError::InvalidLogId(Some(id)) => write!(f, "invalid log id: {}", id), + LaunchError::InvalidLogId(None) => write!(f, "missing log id"), + LaunchError::Assets(e) => write!(f, "failed to fetch assets: {}", e), + LaunchError::ResolveJavaRuntime { what, error } => write!(f, "failed to find java runtime ({}): {}", what, error), + LaunchError::MissingJavaRuntime => f.write_str("suitable java executable not found"), + 
LaunchError::JavaRuntimeRepo(e) => write!(f, "runtime repository error: {e}") + } + } +} + +impl Error for LaunchError { + fn cause(&self) -> Option<&dyn Error> { + match &self { + LaunchError::VersionInit(e) => Some(e), + LaunchError::LoadVersion(e) => Some(e), + LaunchError::ResolveVersion(e) => Some(e), + LaunchError::IncompatibleVersion(e) => Some(e), + LaunchError::LibraryDirError(_, e) => Some(e), + LaunchError::LibraryVerifyError(e) => Some(e), + LaunchError::LibraryExtractError(e) => Some(e), + LaunchError::LibraryClasspathError(e) => Some(e), + LaunchError::IO { error: e, .. } => Some(e), + LaunchError::EnsureFile(e) => Some(e), + LaunchError::Assets(e) => Some(e), + LaunchError::ResolveJavaRuntime { error: e, .. } => Some(e), + LaunchError::JavaRuntimeRepo(e) => Some(e), + _ => None + } + } +} + +struct LaunchInfo<'l, F: FeatureMatcher> { + launcher: &'l Launcher, + feature_matcher: &'l F, + + asset_index_name: Option, + classpath: String, + virtual_assets_path: Option, + instance_home: PathBuf, + natives_path: PathBuf, + client_jar: Option, + version_id: String, + version_type: Option, + asset_index: Option +} + +#[derive(Debug)] +pub struct Launch { + jvm_args: Vec, + game_args: Vec, + main_class: String, + instance_path: PathBuf, + runtime_path: PathBuf, + runtime_legacy_launch: bool +} + +struct ProfileFeatureMatcher<'prof> { + profile: &'prof Profile +} + +impl FeatureMatcher for ProfileFeatureMatcher<'_> { + fn matches(&self, feature: &str) -> bool { + match feature { + "has_custom_resolution" => self.profile.get_resolution().is_some(), + _ => false + } + } +} + +impl Launcher { + // FIXME: more descriptive error type por favor + pub async fn new(home: impl AsRef, online: bool) -> Result { + match tokio::fs::create_dir_all(home.as_ref()).await { + Err(e) if e.kind() != AlreadyExists => { + warn!("Failed to create launcher home directory: {}", e); + return Err(LaunchError::IO { what: "create launcher home", error: e }); + }, + _ => () + } + + let home = fs::canonicalize(home.as_ref()).await + .map_err(|e| LaunchError::IO { what: "resolve home path", error: e })?; + + let versions_home = home.join("versions"); + + debug!("Version list online?: {online}"); + let versions = if online { + VersionList::online(versions_home.as_ref()).await.map_err(LaunchError::VersionInit)? + } else { + VersionList::offline(versions_home.as_ref()).await.map_err(LaunchError::VersionInit)? 
+ }; + + let assets_path = home.join("assets"); + + let java_runtimes = JavaRuntimeRepository::new(home.join("jre"), online).await.map_err(LaunchError::JavaRuntimeRepo)?; + + Ok(Launcher { + online, + versions, + system_info: SystemInfo::new(), + libraries: LibraryRepository { + home: home.join("libraries"), + natives: home.join("natives") + }, + assets: AssetRepository::new(online, &assets_path).await.map_err(|e| LaunchError::IO { what: "setting up assets", error: e })?, + java_runtimes, + home + }) + } + + fn choose_lib_classifier<'lib>(&self, lib: &'lib Library) -> Option<&'lib str> { + lib.natives.as_ref().and_then(|n| n.get(&self.system_info.os)).map(|s| s.as_str()) + } + + async fn log_config_ensure(&self, config: &ClientLogging) -> Result { + info!("Ensuring log configuration exists and is valid."); + + if config.log_type != "log4j2-xml" { + return Err(LaunchError::UnknownLogType(config.log_type.clone())); + } + + let dlinfo = &config.file; + let Some(id) = dlinfo.id.as_ref() else { + return Err(LaunchError::InvalidLogId(None)); + }; + + let mut path = self.home.join("logging"); + fs::create_dir_all(path.as_path()).await + .map_err(|e| LaunchError::IO{ what: "creating log directory", error: e })?; + + let Some(Component::Normal(filename)) = Path::new(id).components().last() else { + return Err(LaunchError::InvalidLogId(Some(id.clone()))); + }; + + path.push(filename); + + debug!("Logger config {} is at {}", id, path.display()); + + util::ensure_file(&path, dlinfo.url.as_deref(), dlinfo.size, dlinfo.sha1, self.online, false).await + .map_err(LaunchError::EnsureFile)?; + + struct PathSub<'a>(&'a Path); + impl<'a> SubFunc<'a> for PathSub<'a> { + fn substitute(&self, key: &str) -> Option> { + match key { + "path" => Some(self.0.as_java_path().to_string_lossy()), + _ => None + } + } + } + + Ok(strsub::replace_string(config.argument.as_str(), &PathSub(path.as_ref())).to_string()) + } + + /* TODO: + * - launch game using JNI + * - auth + */ + pub async fn prepare_launch(&self, profile: &Profile, instance: &Instance) -> Result { + let start = Instant::now(); + let feature_matcher = ProfileFeatureMatcher { profile }; + let version_id = profile.get_version(); + + let Some(version_id) = self.versions.get_profile_version_id(version_id) else { + // idk how common this use case actually is + warn!("Can't use latest release/snapshot profiles while offline!"); + return Err(LaunchError::UnknownVersion("".into())); + }; + + info!("Preparing launch for \"{}\"...", version_id); + + let inst_home = instance.get_path(&self.home).await.map_err(|e| LaunchError::IO { + what: "resolving instance directory", + error: e + })?; + + fs::create_dir_all(inst_home.as_path()).await.map_err(|e| LaunchError::IO { + what: "creating instance directory", + error: e + })?; + + info!("Launching the game in {}", inst_home.display()); + + let ver_res = self.versions.get_version_lazy(version_id.as_ref()); + let ver = match ver_res { + VersionResult::Remote(mv) => Cow::Owned(self.versions.load_remote_version(mv).await.map_err(LaunchError::LoadVersion)?), + VersionResult::Complete(cv) => Cow::Borrowed(cv), + VersionResult::None => { + return Err(LaunchError::UnknownVersion(version_id.into_owned())) + } + }; + + let ver = self.versions.resolve_version(ver.as_ref()).await.map_err(LaunchError::ResolveVersion)?; + ver.rules_apply(&self.system_info, &feature_matcher).map_err(LaunchError::IncompatibleVersion)?; + + info!("Resolved launch version {}!", ver.id); + + let mut extract_jobs = Vec::new(); + let mut downloads = 
IndexMap::new(); + + for lib in ver.libraries.iter() { + if lib.rules_apply(&self.system_info, &feature_matcher).is_err() { + trace!("Skipping library {}, compatibility rules failed", lib.name); + continue; + } + + let classifier = self.choose_lib_classifier(lib); + + if let Some(dl) = self.libraries.create_download(lib, classifier) { + let canon_name = lib.get_canonical_name(); + if downloads.contains_key(&canon_name) { + debug!("Skipping library {}, we already have another version of that library.", lib.name); + continue; + } + + trace!("Using library {} ({})", lib.name, classifier.unwrap_or("None")); + dl.make_dirs().await.map_err(|e| LaunchError::LibraryDirError(dl.get_path().to_path_buf(), e))?; + + if lib.natives.is_some() { + extract_jobs.push(LibraryExtractJob { + source: dl.get_path().to_owned(), + rule: lib.extract.clone() + }); + } + + downloads.insert(canon_name, dl); + } else { + trace!("Skipping library {} ({}), no download", lib.name, classifier.unwrap_or("None")); + } + } + + if self.online { + info!("Downloading {} libraries...", downloads.len()); + let client = Client::new(); + MultiDownloader::new(downloads.values_mut()).perform(&client).await + .inspect_err(|e| warn!("library download failed: {e}")) + .try_fold((), |_, _| async {Ok(())}) + .await + .map_err(|_| LaunchError::LibraryDownloadError)?; + } else { + info!("Verifying {} libraries...", downloads.len()); + download::verify_files(downloads.values_mut()).await.map_err(|e| { + warn!("A library could not be verified: {}", e); + warn!("Since the launcher is in offline mode, libraries cannot be downloaded. Please try again in online mode."); + LaunchError::LibraryVerifyError(e) + })?; + } + + let log_arg; + if let Some(logging) = ver.logging.as_ref().and_then(|l| l.client.as_ref()) { + log_arg = Some(self.log_config_ensure(logging).await?); + } else { + log_arg = None; + } + + // download assets + + let (asset_idx_name, asset_idx) = + if let Some(idx_download) = ver.asset_index.as_ref() { + let asset_idx_name = idx_download.id.as_ref().or(ver.assets.as_ref()).map(String::as_str); + let asset_idx = self.assets.load_index(idx_download, asset_idx_name).await + .map_err(LaunchError::Assets)?; + + self.assets.ensure_assets(&asset_idx).await.map_err(LaunchError::Assets)?; + + (asset_idx_name, Some(asset_idx)) + } else { + (None, None) + }; + + // download client jar + + let client_jar_path; + if let Some(client) = ver.downloads.get(&DownloadType::Client) { + let mut client_path: PathBuf = [self.home.as_ref(), OsStr::new("versions"), OsStr::new(&ver.id)].iter().collect(); + fs::create_dir_all(&client_path).await.map_err(|e| LaunchError::IO{ what: "creating client download directory", error: e })?; + + client_path.push(format!("{}.jar", ver.id)); + + info!("Downloading client jar {}", client_path.display()); + + util::ensure_file(client_path.as_path(), client.url.as_deref(), client.size, client.sha1, self.online, false).await + .map_err(LaunchError::EnsureFile)?; + + client_jar_path = Some(client_path); + } else { + client_jar_path = None; + } + + // clean up old natives + let nnatives = self.libraries.clean_old_natives().await?; + info!("Cleaned up {} old natives directories.", nnatives); + + // extract natives (execute this function unconditionally because we still need the natives dir to exist) + info!("Extracting natives from libraries"); + let natives_dir = self.libraries.extract_natives(extract_jobs).await?; + + let game_assets = if let Some(asset_idx) = asset_idx.as_ref() { + info!("Reconstructing assets"); + 
self.assets.reconstruct_assets(asset_idx, inst_home.as_path(), asset_idx_name).await + .map_err(LaunchError::Assets)? + } else { + None + }; + + info!("Building classpath"); + let classpath = env::join_paths(downloads.values() + .map(|job| job.get_path().as_java_path()) + .chain(client_jar_path.iter().map(|p| p.as_path().as_java_path()))) + .map_err(LaunchError::LibraryClasspathError)? + .into_string() + .unwrap_or_else(|os| { + warn!("Classpath contains invalid UTF-8. The game may not launch correctly."); + os.to_string_lossy().to_string() + }); + + trace!("Classpath: {classpath}"); + + info!("Resolving java runtime environment path"); + let runtime_path; + + if let Some(ref profile_jre) = profile.get_java_runtime() { + runtime_path = fs::canonicalize(profile_jre).await + .map_err(|e| LaunchError::ResolveJavaRuntime {what: "resolving jre path", error: e})?; + } else { + let Some(ref java_ver) = ver.java_version else { + warn!("Version {} does not specify java version information. You must select a runtime manually.", ver.id); + return Err(LaunchError::MissingJavaRuntime); + }; + + let runtime = self.java_runtimes.choose_runtime(java_ver.component.as_str()).await.map_err(LaunchError::JavaRuntimeRepo)?; + runtime_path = self.java_runtimes.ensure_jre(java_ver.component.as_str(), runtime).await.map_err(LaunchError::JavaRuntimeRepo)?; + } + + let Some(runtime_exe_path) = runner::find_java(runtime_path.as_path(), profile.is_legacy_launch()).await + .map_err(|e| LaunchError::ResolveJavaRuntime {what: "finding java executable", error: e})? else { + return Err(LaunchError::MissingJavaRuntime); + }; + + + debug!("Found runtime exe: {}", runtime_exe_path.display()); + + info!("Deriving launch arguments"); + let info = LaunchInfo { + launcher: self, + feature_matcher: &feature_matcher, + + asset_index_name: asset_idx_name.map(|s| s.to_owned()), + classpath, + virtual_assets_path: game_assets, + instance_home: inst_home.clone(), + natives_path: natives_dir, + client_jar: client_jar_path, + version_id: ver.id.to_string(), + version_type: ver.version_type.clone(), + asset_index: asset_idx + }; + + let Some(ref main_class) = ver.main_class else { + return Err(LaunchError::MissingMainClass); + }; + + // yuck + let jvm_args = profile.iter_arguments().map(OsString::from) + .chain(runner::build_arguments(&info, ver.as_ref(), ArgumentType::Jvm).drain(..)) + .chain(log_arg.iter().map(OsString::from)).collect(); + let game_args = runner::build_arguments(&info, ver.as_ref(), ArgumentType::Game); + + let diff = Instant::now().duration_since(start); + info!("Finished preparing launch for {} in {:.02} seconds!", ver.id, diff.as_secs_f32()); + + Ok(Launch { + jvm_args, + game_args, + main_class: main_class.to_string(), + instance_path: inst_home, + runtime_path: runtime_exe_path, + runtime_legacy_launch: profile.is_legacy_launch() + }) + } +} + +#[derive(Debug)] +enum LibraryError { + InvalidName(String), + IO { what: &'static str, error: io::Error } +} + +impl Display for LibraryError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + LibraryError::InvalidName(name) => write!(f, "invalid name: {name}"), + LibraryError::IO { what, error } => write!(f, "library i/o error ({what}): {error}"), + } + } +} + +impl Error for LibraryError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + LibraryError::IO { error, .. 
} => Some(error), + _ => None + } + } +} + +#[derive(Debug)] +struct LibraryExtractJob { + source: PathBuf, + rule: Option +} + +const ARCH_BITS: &str = formatcp!("{}", usize::BITS); + +impl LibraryRepository { + fn get_artifact_base_dir(name: &str) -> Option { + let end_of_gid = name.find(':')?; + + Some(name[..end_of_gid].split('.').chain(name.split(':').skip(1).take(2)).collect()) + } + + fn get_artifact_filename(name: &str, classifier: Option<&str>) -> Option { + let n: Vec<&str> = name.splitn(4, ':').skip(1).collect(); + + struct LibReplace; + impl SubFunc<'static> for LibReplace { + fn substitute(&self, key: &str) -> Option> { + match key { + "arch" => Some(Cow::Borrowed(ARCH_BITS)), + _ => None + } + } + } + + if let Some(classifier) = classifier { + match n.len() { + 2 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}-{}.jar", n[0], n[1], classifier).as_str(), &LibReplace).as_ref())), + 3 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}-{}-{}.jar", n[0], n[1], classifier, n[2]).as_str(), &LibReplace).as_ref())), + _ => None + } + } else { + match n.len() { + 2 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}.jar", n[0], n[1]).as_str(), &LibReplace).as_ref())), + 3 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}-{}.jar", n[0], n[1], n[2]).as_str(), &LibReplace).as_ref())), + _ => None + } + } + } + + fn get_artifact_path(name: &str, classifier: Option<&str>) -> Option { + let mut p = Self::get_artifact_base_dir(name)?; + + p.push(Self::get_artifact_filename(name, classifier)?); + Some(p) + } + + fn create_download(&self, lib: &Library, classifier: Option<&str>) -> Option { + if let Some(ref url) = lib.url { + let path = Self::get_artifact_path(lib.name.as_str(), classifier)?; + let url = [url.as_str(), path.to_string_lossy().as_ref()].into_iter().collect::(); + Some(VerifiedDownload::new(url.as_ref(), self.home.join(path).as_path(), lib.size, lib.sha1)) // TODO: could download sha1 + } else if let Some(ref downloads) = lib.downloads { + let dlinfo = downloads.get_download_info(classifier)?; + // drinking game: take a shot once per heap allocation + let path = self.home.join(dlinfo.path.as_ref().map(PathBuf::from).or_else(|| Self::get_artifact_path(lib.name.as_str(), classifier))?); + + Some(VerifiedDownload::new(dlinfo.url.as_ref()?, path.as_path(), dlinfo.size, dlinfo.sha1)) + } else { + let path = Self::get_artifact_path(lib.name.as_str(), classifier)?; + let url = ["https://libraries.minecraft.net/", path.to_string_lossy().as_ref()].into_iter().collect::(); + Some(VerifiedDownload::new(url.as_ref(), self.home.join(path).as_path(), lib.size, lib.sha1)) // TODO: could download sha1 + } + } + + async fn clean_old_natives(&self) -> Result { + info!("Cleaning up old natives folders..."); + + let boot_time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() - min(System::uptime(), 7u64*24*60*60); + + let readdir = match fs::read_dir(&self.natives).await { + Ok(readdir) => readdir, + Err(e) if e.kind() == ErrorKind::NotFound => return Ok(0), + Err(e) => return Err(LaunchError::IO { what: "reading natives directory", error: e }) + }; + + ReadDirStream::new(readdir) + .map(|entry| Ok(async move { + let entry = entry.map_err(|e| LaunchError::IO { what: "reading natives entry", error: e })?; + let ftype = entry.file_type().await.map_err(|e| LaunchError::IO { what: "'stat'ing natives entry", error: e })?; + + if !ftype.is_dir() { return Ok(false); } + + let Some(ftime) = entry.file_name().to_str() + .and_then(|s| 
constants::NATIVES_DIR_PATTERN.captures(s)) + .and_then(|c| c.get(1)) + .and_then(|cap| cap.as_str().parse::().ok()) else { + return Ok(false); + }; + + if ftime < boot_time { + let path = entry.path(); + info!("Deleting old natives directory {}", path.display()); + + fs::remove_dir_all(&path).await.map_err(|e| LaunchError::IO { + what: "reading natives entry", + error: e + })?; + + return Ok(true); + } + + Ok(false) + })) + .try_buffer_unordered(32) + .try_fold(0usize, |accum, res| async move { + match res { + true => Ok(accum + 1), + _ => Ok(accum) + } + }).await + } + + async fn extract_natives(&self, libs: Vec) -> Result { + fs::create_dir_all(&self.natives).await.map_err(|e| LaunchError::IO { + what: "creating natives directory", + error: e + })?; + + let time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + let natives_dir = self.natives.join(format!("{}{}-{}", constants::NATIVES_PREFIX, time, process::id())); + + // create_dir_all suppresses "AlreadyExists", but this is a fatal error here. + fs::create_dir(&natives_dir).await.map_err(|e| LaunchError::IO { + what: "creating natives directory", + error: e + })?; + + let (path_again, extracted) = tokio::task::spawn_blocking(move || { + let mut tally = 0usize; + + for job in libs { + debug!("Extracting natives for {}", job.source.display()); + tally += extract::extract_zip(&job.source, &natives_dir, |name| + job.rule.as_ref().is_none_or(|rules| + rules.exclude.iter().any(|ex| + name.starts_with(ex.as_str()))))?; + } + + Ok((natives_dir, tally)) + }).await.unwrap().map_err(LaunchError::LibraryExtractError)?; + + info!("Done extracting natives! Copied {} files.", extracted); + + Ok(path_again) + } +} + +impl SystemInfo { + fn new() -> SystemInfo { + let os = match OS { + "windows" => OperatingSystem::Windows, + "macos" => OperatingSystem::MacOS, + "linux" => OperatingSystem::Linux, + _ => OperatingSystem::Unknown // could probably consider "hurd" and "*bsd" to be linux... + }; + + let mut os_version = System::os_version().unwrap_or_default(); + if os == OperatingSystem::Windows && (os_version.starts_with("10") || os_version.starts_with("11")) { + os_version.replace_range(..2, "10.0"); // minecraft expects this funny business... + } + + let mut arch = ARCH.to_owned(); + if arch == "x86_64" { + // this nomenclature is preferred, since some versions expect the arch containing "x86" to mean 32-bit. 
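+            // e.g. a version rule whose arch regex is "x86" is meant to match 32-bit hosts only; since Regex::is_match would also hit "x86_64", reporting "amd64" avoids that (see the OSRestriction handling in applies() below)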
+ arch.replace_range(.., "amd64"); + } + + SystemInfo { + os, + os_version, + arch + } + } + + fn is_our_os(&self, os: OperatingSystem) -> bool { + if self.os == OperatingSystem::Unknown { + return false; + } + + self.os == os + } + + fn applies(&self, restriction: &OSRestriction) -> bool { + restriction.os.is_none_or(|os| self.is_our_os(os)) + && restriction.version.as_deref().is_none_or(|pat| pat.is_match(&self.os_version)) + && restriction.arch.as_deref().is_none_or(|pat| pat.is_match(&self.arch)) + } +} diff --git a/ozone/src/launcher/assets.rs b/ozone/src/launcher/assets.rs new file mode 100644 index 0000000..aa7d42e --- /dev/null +++ b/ozone/src/launcher/assets.rs @@ -0,0 +1,322 @@ +use std::error::Error; +use std::ffi::OsStr; +use std::fmt::{Display, Formatter}; +use std::io::ErrorKind; +use std::path::{Path, PathBuf}; +use std::path::Component::Normal; +use futures::{stream, TryStreamExt}; +use log::{debug, info, warn}; +use reqwest::Client; +use sha1_smol::Sha1; +use tokio::{fs, io}; +use tokio::fs::File; +use crate::assets::{Asset, AssetIndex}; +use super::download::{MultiDownloader, VerifiedDownload}; +use crate::util; +use crate::util::{FileVerifyError, IntegrityError}; +use crate::version::DownloadInfo; + +const INDEX_PATH: &str = "indexes"; +const OBJECT_PATH: &str = "objects"; + +pub struct AssetRepository { + online: bool, + home: PathBuf +} + +#[derive(Debug)] +pub enum AssetError { + InvalidId(Option), + IO { what: &'static str, error: io::Error }, + IndexParse(serde_json::Error), + Offline, + MissingURL, + DownloadIndex(reqwest::Error), + Integrity(IntegrityError), + AssetObjectDownload, + AssetVerifyError(FileVerifyError), + AssetNameError(&'static str) +} + +impl Display for AssetError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + AssetError::InvalidId(None) => f.write_str("missing asset index id"), + AssetError::InvalidId(Some(id)) => write!(f, "invalid asset index id: {}", id), + AssetError::IO { what, error } => write!(f, "i/o error ({}): {}", what, error), + AssetError::IndexParse(error) => write!(f, "error parsing asset index: {}", error), + AssetError::Offline => f.write_str("cannot download asset index while offline"), + AssetError::MissingURL => f.write_str("missing asset index URL"), + AssetError::DownloadIndex(e) => write!(f, "error downloading asset index: {}", e), + AssetError::Integrity(e) => write!(f, "asset index integrity error: {}", e), + AssetError::AssetObjectDownload => f.write_str("asset object download failed"), + AssetError::AssetVerifyError(e) => write!(f, "error verifying asset object: {e}"), + AssetError::AssetNameError(e) => write!(f, "invalid asset name: {e}") + } + } +} + +impl Error for AssetError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + AssetError::IO { error, .. 
} => Some(error), + AssetError::IndexParse(error) => Some(error), + AssetError::DownloadIndex(error) => Some(error), + AssetError::Integrity(error) => Some(error), + AssetError::AssetVerifyError(error) => Some(error), + _ => None + } + } +} + +impl From<(&'static str, io::Error)> for AssetError { + fn from((what, error): (&'static str, io::Error)) -> Self { + AssetError::IO { what, error } + } +} + +impl AssetRepository { + pub async fn new(online: bool, home: impl AsRef) -> Result { + let home = home.as_ref().to_owned(); + + match fs::create_dir_all(&home).await { + Ok(_) => (), + Err(e) => match e.kind() { + ErrorKind::AlreadyExists => (), + _ => return Err(e) + } + }; + + Ok(AssetRepository { + online, + home + }) + } + + pub fn get_home(&self) -> &Path { + self.home.as_path() + } + + fn get_index_path(&self, id: &str) -> Result { + let mut indexes_path: PathBuf = [self.home.as_ref(), OsStr::new(INDEX_PATH)].iter().collect(); + let Some(Normal(path)) = Path::new(id).components().last() else { + return Err(AssetError::InvalidId(Some(id.into()))); + }; + + let path = path.to_str().ok_or(AssetError::InvalidId(Some(path.to_string_lossy().into())))?; + + // FIXME: change this once "add_extension" is stabilized + indexes_path.push(format!("{}.json", path)); + + Ok(indexes_path) + } + + pub async fn load_index(&self, index: &DownloadInfo, id: Option<&str>) -> Result { + let Some(id) = id else { + return Err(AssetError::InvalidId(None)); + }; + + info!("Loading asset index {}", id); + + let path = self.get_index_path(id)?; + debug!("Asset index {} is located at {}", id, path.display()); + + match util::verify_file(&path, index.size, index.sha1).await { + Ok(_) => { + debug!("Asset index {} verified on disk. Loading it.", id); + let idx_data = fs::read_to_string(&path).await.map_err(|e| AssetError::IO { + what: "reading asset index", + error: e + })?; + + return serde_json::from_str(&idx_data).map_err(AssetError::IndexParse); + }, + Err(FileVerifyError::Open(_, e)) => match e.kind() { + ErrorKind::NotFound => { + debug!("Asset index {} not found on disk. Must download it.", id); + }, + _ => return Err(("opening asset index", e).into()) + }, + Err(FileVerifyError::Integrity(_, e)) => { + info!("Asset index {} has mismatched integrity: {}, must download it.", id, e); + let _ = fs::remove_file(&path).await.map_err(|e| warn!("Error deleting modified index {}: {} (ignored)", id, e)); + }, + Err(FileVerifyError::Read(_, e)) => return Err(("reading asset index", e).into()) + } + + if !self.online { + warn!("Must download asset index {}, but the launcher is in offline mode. Please try again in online mode.", id); + return Err(AssetError::Offline); + } + + let Some(url) = index.url.as_ref() else { + return Err(AssetError::MissingURL); + }; + + debug!("Downloading asset index {} from {}", id, url); + + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).await.map_err(|e| AssetError::IO { + what: "creating asset index folder", + error: e + })?; + } + + let idx_text = reqwest::get(url).await + .map_err(AssetError::DownloadIndex)? 
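+            // buffer the entire index body so its size and sha1 can be checked below before it is written to disk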
+ .text().await + .map_err(AssetError::DownloadIndex)?; + + if index.size.is_some_and(|s| s != idx_text.len()) { + return Err(AssetError::Integrity(IntegrityError::SizeMismatch { + expect: index.size.unwrap(), + actual: idx_text.len() + })); + } + + if let Some(expect) = index.sha1 { + let actual = Sha1::from(&idx_text).digest(); + + if actual != expect { + return Err(AssetError::Integrity(IntegrityError::Sha1Mismatch { expect, actual })); + } + } + + debug!("Saving downloaded asset index to {}", path.display()); + fs::write(&path, &idx_text).await.map_err(|e| AssetError::IO { + what: "writing asset index", + error: e + })?; + + serde_json::from_str(&idx_text).map_err(AssetError::IndexParse) + } + + fn get_object_url(obj: &Asset) -> String { + format!("{}{:02x}/{}", super::constants::URL_RESOURCE_BASE, obj.hash.bytes()[0], obj.hash) + } + + pub fn get_object_path(&self, obj: &Asset) -> PathBuf { + let hex_digest = obj.hash.to_string(); + [self.home.as_ref(), OsStr::new(OBJECT_PATH), OsStr::new(&hex_digest[..2]), OsStr::new(&hex_digest)].iter().collect() + } + + async fn ensure_dir(path: impl AsRef) -> Result<(), io::Error> { + match fs::create_dir(path).await { + Ok(_) => Ok(()), + Err(e) if e.kind() == ErrorKind::AlreadyExists => Ok(()), + Err(e) => Err(e) + } + } + + pub async fn ensure_assets(&self, index: &AssetIndex) -> Result<(), AssetError> { + let mut downloads = Vec::new(); + let objects_path = [self.home.as_ref(), OsStr::new(OBJECT_PATH)].iter().collect::(); + + Self::ensure_dir(&objects_path).await.map_err(|e| AssetError::IO { + what: "creating objects directory", + error: e + })?; + + for object in index.objects.values() { + let path = self.get_object_path(object); + + Self::ensure_dir(path.parent().unwrap()).await.map_err(|error| AssetError::IO { error, what: "creating directory for object" })?; + + downloads.push(VerifiedDownload::new(&Self::get_object_url(object), &path, Some(object.size), Some(object.hash))); + } + + if self.online { + info!("Downloading {} asset objects...", downloads.len()); + let client = Client::new(); + MultiDownloader::with_concurrent(downloads.iter_mut(), 32).perform(&client).await + .inspect_err(|e| warn!("asset download failed: {e}")) + .try_fold((), |_, _| async {Ok(())}) + .await + .map_err(|_| AssetError::AssetObjectDownload)?; + } else { + info!("Verifying {} asset objects...", downloads.len()); + super::download::verify_files(downloads.iter_mut()).await.map_err(AssetError::AssetVerifyError)?; + } + + Ok(()) + } + + pub async fn reconstruct_assets(&self, index: &AssetIndex, instance_path: &Path, index_id: Option<&str>) -> Result, AssetError> { + let target_path: PathBuf; + let Some(index_id) = index_id else { + return Err(AssetError::InvalidId(None)); + }; + + if index.virtual_assets { + target_path = [self.home.as_ref(), OsStr::new("virtual"), OsStr::new(index_id)].iter().collect(); + } else if index.map_to_resources { + target_path = [instance_path, Path::new("resources")].iter().collect(); + } else { + info!("This asset index does not request a virtual assets folder. 
Nothing to be done."); + return Ok(None); + } + + info!("Reconstructing virtual assets for {}", index_id); + + fs::create_dir_all(&target_path).await.map_err(|e| AssetError::from(("creating virtual assets directory", e)))?; + + stream::iter(index.objects.values() + .map(|object| { + let obj_path = util::check_path(object.name.as_str()).map_err(AssetError::AssetNameError)?; + let obj_path = target_path.join(obj_path); + + Ok((object, obj_path)) + })) + .try_filter_map(|(object, obj_path)| async move { + match util::verify_file(&obj_path, Some(object.size), Some(object.hash)).await { + Ok(_) => { + debug!("Not copying asset {}, integrity matches.", object.name); + Ok(None) + } + Err(FileVerifyError::Open(_, e)) if e.kind() == ErrorKind::NotFound => { + debug!("Copying asset {}, file does not exist.", object.name); + Ok(Some((object, obj_path))) + }, + Err(FileVerifyError::Integrity(_, e)) => { + debug!("Copying asset {}: {}", object.name, e); + Ok(Some((object, obj_path))) + }, + Err(e) => { + debug!("Error while reconstructing assets: {e}"); + Err(AssetError::AssetVerifyError(e)) + } + } + }) + .try_for_each_concurrent(32, |(object, obj_path)| async move { + if let Some(parent) = obj_path.parent() { + fs::create_dir_all(parent).await + .inspect_err(|e| debug!("Error creating directory for asset object {}: {e}", object.name)) + .map_err(|e| AssetError::from(("creating asset object directory", e)))?; + } + + let mut fromfile = File::open(self.get_object_path(object)).await + .map_err(|e| AssetError::from(("opening source object", e)))?; + let mut tofile = File::create(&obj_path).await + .map_err(|e| AssetError::from(("creating target object", e)))?; + + io::copy(&mut fromfile, &mut tofile).await.map_err(|e| AssetError::from(("copying asset object", e)))?; + debug!("Copied object {} to {}.", object.name, obj_path.display()); + Ok(()) + }).await.map(|_| Some(target_path)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_it() { + let digest_str = "ad1115931887a73cd596300f2c93f84adf39521d"; + assert_eq!(AssetRepository::get_object_url(&Asset { + name: String::from("test"), + hash: digest_str.parse().unwrap(), + size: 0usize + }), "https://resources.download.minecraft.net/ad/ad1115931887a73cd596300f2c93f84adf39521d"); + } +} diff --git a/ozone/src/launcher/constants.rs b/ozone/src/launcher/constants.rs new file mode 100644 index 0000000..4506ab5 --- /dev/null +++ b/ozone/src/launcher/constants.rs @@ -0,0 +1,18 @@ +use lazy_static::lazy_static; +use regex::Regex; + +pub const URL_VERSION_MANIFEST: &str = "https://piston-meta.mojang.com/mc/game/version_manifest_v2.json"; +pub const URL_RESOURCE_BASE: &str = "https://resources.download.minecraft.net/"; +pub const URL_JRE_MANIFEST: &str = "https://piston-meta.mojang.com/v1/products/java-runtime/2ec0cc96c44e5a76b9c8b7c39df7210883d12871/all.json"; + +pub const NATIVES_PREFIX: &str = "natives-"; + +pub const DEF_INSTANCE_NAME: &str = "default"; +pub const DEF_PROFILE_NAME: &str = "default"; + +// https://github.com/unmojang/FjordLauncher/pull/14/files +// https://login.live.com/oauth20_authorize.srf?client_id=00000000402b5328&redirect_uri=ms-xal-00000000402b5328://auth&response_type=token&display=touch&scope=service::user.auth.xboxlive.com::MBI_SSL%20offline_access&prompt=select_account + +lazy_static! 
{ + pub static ref NATIVES_DIR_PATTERN: Regex = Regex::new("^natives-(\\d+)").unwrap(); +} diff --git a/ozone/src/launcher/download.rs b/ozone/src/launcher/download.rs new file mode 100644 index 0000000..132cd7f --- /dev/null +++ b/ozone/src/launcher/download.rs @@ -0,0 +1,267 @@ +use std::error::Error; +use std::fmt::{Debug, Display, Formatter}; +use std::path::{Path, PathBuf}; +use futures::{stream, StreamExt, TryStream, TryStreamExt}; +use log::debug; +use reqwest::{Client, Method, RequestBuilder}; +use sha1_smol::{Digest, Sha1}; +use tokio::fs; +use tokio::fs::File; +use tokio::io::{self, AsyncWriteExt}; +use crate::util; +use crate::util::{FileVerifyError, IntegrityError, USER_AGENT}; + +pub trait Download: Debug + Display { + // return Ok(None) to skip downloading this file + async fn prepare(&mut self, client: &Client) -> Result, Box>; + async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box>; + async fn finish(&mut self) -> Result<(), Box>; +} + +pub trait FileDownload: Download { + fn get_path(&self) -> &Path; +} + +pub struct MultiDownloader<'j, T: Download + 'j, I: Iterator> { + jobs: I, + nconcurrent: usize +} + +#[derive(Debug, Clone, Copy)] +pub enum Phase { + Prepare, + Send, + Receive, + HandleChunk, + Finish +} + +impl Display for Phase { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + /* an error occurred while (present participle) ... */ + Self::Prepare => f.write_str("preparing the request"), + Self::Send => f.write_str("sending the request"), + Self::Receive => f.write_str("receiving response data"), + Self::HandleChunk => f.write_str("handling response data"), + Self::Finish => f.write_str("finishing the request"), + } + } +} + +pub struct PhaseDownloadError<'j, T: Download> { + phase: Phase, + inner: Box, + job: &'j T +} + +impl Debug for PhaseDownloadError<'_, T> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("PhaseDownloadError") + .field("phase", &self.phase) + .field("inner", &self.inner) + .field("job", &self.job) + .finish() + } +} + +impl Display for PhaseDownloadError<'_, T> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "error while {} ({}): {}", self.phase, self.job, self.inner) + } +} + +impl Error for PhaseDownloadError<'_, T> { + fn source(&self) -> Option<&(dyn Error + 'static)> { + Some(&*self.inner) + } +} + +impl<'j, T: Download> PhaseDownloadError<'j, T> { + fn new(phase: Phase, inner: Box, job: &'j T) -> Self { + PhaseDownloadError { + phase, inner, job + } + } +} + +impl<'j, T: Download + 'j, I: Iterator> MultiDownloader<'j, T, I> { + pub fn new(jobs: I) -> MultiDownloader<'j, T, I> { + Self::with_concurrent(jobs, 24) + } + + pub fn with_concurrent(jobs: I, n: usize) -> MultiDownloader<'j, T, I> { + assert!(n > 0); + + MultiDownloader { + jobs, + nconcurrent: n + } + } + + pub async fn perform(self, client: &'j Client) -> impl TryStream> { + stream::iter(self.jobs).map(move |job| Ok(async move { + macro_rules! 
map_err { + ($result:expr, $phase:expr, $job:expr) => { + match $result { + Ok(v) => v, + Err(e) => return Err(PhaseDownloadError::new($phase, e.into(), $job)) + } + } + } + + let Some(rq) = map_err!(job.prepare(client).await, Phase::Prepare, job) else { + return Ok(()) + }; + + let rq = rq.header(reqwest::header::USER_AGENT, USER_AGENT); + + let mut data = map_err!(map_err!(rq.send().await, Phase::Send, job).error_for_status(), Phase::Send, job).bytes_stream(); + + while let Some(bytes) = data.next().await { + let bytes = map_err!(bytes, Phase::Receive, job); + + map_err!(job.handle_chunk(bytes.as_ref()).await, Phase::HandleChunk, job); + } + + job.finish().await.map_err(|e| PhaseDownloadError::new(Phase::Finish, e, job))?; + + Ok(()) + })).try_buffer_unordered(self.nconcurrent) + } +} + +pub struct VerifiedDownload { + url: String, + expect_size: Option, + expect_sha1: Option, + + path: PathBuf, + file: Option, + sha1: Sha1, + tally: usize +} + +impl Debug for VerifiedDownload { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("VerifiedDownload") + .field("url", &self.url) + .field("expect_size", &self.expect_size) + .field("expect_sha1", &self.expect_sha1) + .field("path", &self.path).finish() + } +} + +impl Display for VerifiedDownload { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "downloading {} to {}", self.url, self.path.display()) + } +} + +impl VerifiedDownload { + pub fn new(url: &str, path: &Path, expect_size: Option, expect_sha1: Option) -> VerifiedDownload { + VerifiedDownload { + url: url.to_owned(), + path: path.to_owned(), + + expect_size, + expect_sha1, + + file: None, + sha1: Sha1::new(), + tally: 0 + } + } + + pub fn with_size(mut self, expect: usize) -> VerifiedDownload { + self.expect_size = Some(expect); + self + } + + pub fn with_sha1(mut self, expect: Digest) -> VerifiedDownload { + self.expect_sha1.replace(expect); + self + } + + pub fn get_url(&self) -> &str { + &self.url + } + + pub fn get_expect_size(&self) -> Option { + self.expect_size + } + + pub fn get_expect_sha1(&self) -> Option { + self.expect_sha1 + } + + pub async fn make_dirs(&self) -> Result<(), io::Error> { + fs::create_dir_all(self.path.parent().expect("download created with no containing directory (?)")).await + } + + async fn open_output(&mut self) -> Result<(), io::Error> { + self.file.replace(File::create(&self.path).await?); + Ok(()) + } +} + +impl Download for VerifiedDownload { + async fn prepare(&mut self, client: &Client) -> Result, Box> { + if !util::should_download(&self.path, self.expect_size, self.expect_sha1).await? { + return Ok(None) + } + + // potentially racy to close the file and reopen it... 
:/ + self.open_output().await?; + + Ok(Some(client.request(Method::GET, &self.url))) + } + + async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box> { + self.file.as_mut().unwrap().write_all(chunk).await?; + self.tally += chunk.len(); + self.sha1.update(chunk); + + Ok(()) + } + + async fn finish(&mut self) -> Result<(), Box> { + let digest = self.sha1.digest(); + + if let Some(d) = self.expect_sha1 { + if d != digest { + debug!("Could not download {}: sha1 mismatch (exp {}, got {}).", self.path.display(), d, digest); + return Err(IntegrityError::Sha1Mismatch { expect: d, actual: digest }.into()); + } + } else if let Some(s) = self.expect_size { + if s != self.tally { + debug!("Could not download {}: size mismatch (exp {}, got {}).", self.path.display(), s, self.tally); + return Err(IntegrityError::SizeMismatch { expect: s, actual: self.tally }.into()); + } + } + + debug!("Successfully downloaded {} ({} bytes).", self.path.display(), self.tally); + + // release the file descriptor (don't want to wait until it's dropped automatically because idk when that would be) + drop(self.file.take().unwrap()); + + Ok(()) + } +} + +impl FileDownload for VerifiedDownload { + fn get_path(&self) -> &Path { + &self.path + } +} + +pub async fn verify_files(files: impl Iterator) -> Result<(), FileVerifyError> { + stream::iter(files) + .map(|dl| Ok(async move { + debug!("Verifying library {}", dl.get_path().display()); + util::verify_file(dl.get_path(), dl.get_expect_size(), dl.get_expect_sha1()).await + })) + .try_buffer_unordered(32) + .try_fold((), |_, _| async {Ok(())}) + .await +} diff --git a/ozone/src/launcher/extract.rs b/ozone/src/launcher/extract.rs new file mode 100644 index 0000000..8c5f2b8 --- /dev/null +++ b/ozone/src/launcher/extract.rs @@ -0,0 +1,136 @@ +use std::error::Error; +use std::fmt::{Display, Formatter}; +use std::{fs, io, os}; +use std::fs::File; +use std::io::{BufReader, Error as IOError, Read}; +use std::path::{Path, PathBuf}; +use log::{debug, trace}; +use zip::result::ZipError; +use zip::ZipArchive; +use crate::util; + +#[derive(Debug)] +pub enum ZipExtractError { + IO { what: &'static str, error: IOError }, + Zip { what: &'static str, error: ZipError }, + InvalidEntry { why: &'static str, name: String } +} + +impl From<(&'static str, IOError)> for ZipExtractError { + fn from((what, error): (&'static str, IOError)) -> Self { + ZipExtractError::IO { what, error } + } +} + +impl From<(&'static str, ZipError)> for ZipExtractError { + fn from((what, error): (&'static str, ZipError)) -> Self { + ZipExtractError::Zip { what, error } + } +} + +impl Display for ZipExtractError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + ZipExtractError::IO { what, error } => write!(f, "i/o error ({what}): {error}"), + ZipExtractError::Zip { what, error } => write!(f, "zip error ({what}): {error}"), + ZipExtractError::InvalidEntry { why, name } => write!(f, "invalid entry in zip file ({why}): {name}") + } + } +} + +impl Error for ZipExtractError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + ZipExtractError::IO { error, .. } => Some(error), + ZipExtractError::Zip { error, .. 
} => Some(error), + _ => None + } + } +} + +fn check_entry_path(name: &str) -> Result<&Path, ZipExtractError> { + util::check_path(name).map_err(|e| ZipExtractError::InvalidEntry { + why: e, + name: name.to_owned() + }) +} + +#[cfg(unix)] +fn extract_symlink(path: impl AsRef, target: &str) -> io::Result<()> { + os::unix::fs::symlink(target, path) +} + +#[cfg(windows)] +fn extract_symlink(path: impl AsRef, target: &str) -> io::Result<()> { + os::windows::fs::symlink_file(target, path) +} + +#[cfg(not(any(unix, windows)))] +fn extract_symlink(path: impl AsRef, _target: &str) -> io::Result<()> { + warn!("Refusing to extract symbolic link to {}. I don't know how to do it on this platform!", path.as_ref().display()); + Ok(()) +} + +pub fn extract_zip(zip_path: impl AsRef, extract_root: impl AsRef, condition: F) -> Result +where + F: Fn(&str) -> bool +{ + debug!("Extracting zip file {} into {}", zip_path.as_ref().display(), extract_root.as_ref().display()); + + fs::create_dir_all(&extract_root).map_err(|e| ZipExtractError::from(("create extract root", e)))?; + + let mut extracted = 0usize; + + let file = File::open(&zip_path).map_err(|e| ZipExtractError::from(("extract zip file (open)", e)))?; + let read = BufReader::new(file); + + let mut archive = ZipArchive::new(read).map_err(|e| ZipExtractError::from(("read zip archive", e)))?; + + // create directories + for n in 0..archive.len() { + let entry = archive.by_index(n).map_err(|e| ZipExtractError::from(("read zip entry (1)", e)))?; + if !entry.is_dir() { continue; } + + let name = entry.name(); + if !condition(name) { + continue; + } + + let entry_path = check_entry_path(name)?; + let entry_path: PathBuf = [extract_root.as_ref(), entry_path].iter().collect(); + + trace!("Extracting directory {} from {}", entry.name(), zip_path.as_ref().display()); + fs::create_dir_all(entry_path).map_err(|e| ZipExtractError::from(("extract directory", e)))?; + } + + // extract the files + for n in 0..archive.len() { + let mut entry = archive.by_index(n).map_err(|e| ZipExtractError::from(("read zip entry (2)", e)))?; + let name = entry.name(); + + if entry.is_dir() { continue; } + + if !condition(name) { + continue; + } + + let entry_path = check_entry_path(name)?; + let entry_path: PathBuf = [extract_root.as_ref(), entry_path].iter().collect(); + + if entry.is_symlink() { + let mut target = String::new(); + entry.read_to_string(&mut target).map_err(|e| ZipExtractError::from(("read to symlink target", e)))?; + + trace!("Extracting symbolic link {} -> {} from {}", entry.name(), target, zip_path.as_ref().display()); + extract_symlink(entry_path.as_path(), target.as_str()).map_err(|e| ZipExtractError::from(("extract symlink", e)))?; + } else if entry.is_file() { + let mut outfile = File::create(&entry_path).map_err(|e| ZipExtractError::from(("extract zip entry (open)", e)))?; + + trace!("Extracting file {} from {}", entry.name(), zip_path.as_ref().display()); + io::copy(&mut entry, &mut outfile).map_err(|e| ZipExtractError::from(("extract zip entry (write)", e)))?; + extracted += 1; + } + } + + Ok(extracted) +} diff --git a/ozone/src/launcher/jre.rs b/ozone/src/launcher/jre.rs new file mode 100644 index 0000000..a5cc166 --- /dev/null +++ b/ozone/src/launcher/jre.rs @@ -0,0 +1,330 @@ +use std::error::Error; +use std::fmt::{Debug, Display, Formatter}; +use std::path::{Component, Path, PathBuf}; +use std::sync::Arc; +use futures::{stream, StreamExt, TryStreamExt}; +use log::{debug, info, warn}; +use reqwest::Client; +use tokio::{fs, io, io::ErrorKind}; + +mod arch; 
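+// manifest: serde models for the java-runtime "all.json" and per-runtime file manifests; download: LZMA-aware file download jobs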
+mod manifest; +mod download; + +use arch::JRE_ARCH; +use manifest::JavaRuntimesManifest; +use manifest::JavaRuntimeManifest; +use super::download::MultiDownloader; +use super::jre::download::{LzmaDownloadError, LzmaDownloadJob}; +use super::jre::manifest::JavaRuntimeFile; +use crate::util; +use crate::util::{EnsureFileError, IntegrityError}; +use crate::version::DownloadInfo; +use super::constants; + +pub struct JavaRuntimeRepository { + online: bool, + home: PathBuf, + manifest: JavaRuntimesManifest +} + +impl JavaRuntimeRepository { + pub async fn new(home: impl AsRef, online: bool) -> Result { + info!("Java runtime architecture is \"{}\".", JRE_ARCH); + + fs::create_dir_all(&home).await.map_err(|e| JavaRuntimeError::IO { what: "creating home directory", error: e })?; + + let manifest_path = home.as_ref().join("manifest.json"); + match util::ensure_file(manifest_path.as_path(), Some(constants::URL_JRE_MANIFEST), None, None, online, true).await { + Ok(_) => (), + Err(EnsureFileError::Offline) => { + info!("Launcher is offline, cannot download runtime manifest."); + }, + Err(e) => return Err(JavaRuntimeError::EnsureFile(e)) + }; + + let manifest_file = fs::read_to_string(&manifest_path).await + .map_err(|e| JavaRuntimeError::IO { what: "reading runtimes manifest", error: e })?; + + Ok(JavaRuntimeRepository { + online, + home: home.as_ref().to_path_buf(), + manifest: serde_json::from_str(&manifest_file).map_err(|e| JavaRuntimeError::Deserialize { what: "runtimes manifest", error: e })?, + }) + } + + fn get_component_dir(&self, component: &str) -> PathBuf { + [self.home.as_path(), Path::new(JRE_ARCH), Path::new(component)].into_iter().collect() + } + + async fn load_runtime_manifest(&self, component: &str, info: &DownloadInfo) -> Result { + let comp_dir = self.get_component_dir(component); + let manifest_path = comp_dir.join("manifest.json"); + + debug!("Ensuring manifest for runtime {JRE_ARCH}.{component}"); + + fs::create_dir_all(comp_dir.as_path()).await + .inspect_err(|e| warn!("Failed to create directory for JRE component {}: {}", component, e)) + .map_err(|e| JavaRuntimeError::IO { what: "creating component directory", error: e })?; + + util::ensure_file(&manifest_path, info.url.as_deref(), info.size, info.sha1, self.online, false).await + .map_err(JavaRuntimeError::EnsureFile)?; + + let manifest_file = fs::read_to_string(&manifest_path).await + .map_err(|e| JavaRuntimeError::IO { what: "reading runtimes manifest", error: e })?; + + serde_json::from_str(&manifest_file).map_err(|e| JavaRuntimeError::Deserialize { what: "runtime manifest", error: e }) + } + + // not very descriptive function name + pub async fn choose_runtime(&self, component: &str) -> Result { + let Some(runtime_components) = self.manifest.get(JRE_ARCH) else { + return Err(JavaRuntimeError::UnsupportedArch(JRE_ARCH)); + }; + + let Some(runtime_component) = runtime_components.get(component) else { + return Err(JavaRuntimeError::UnsupportedComponent { arch: JRE_ARCH, component: component.to_owned() }); + }; + + let Some(runtime) = runtime_component.iter().find(|r| r.availability.progress == 100) else { + if !runtime_components.is_empty() { + warn!("Weird: the only java runtimes in {JRE_ARCH}.{component} has a progress of less than 100!"); + } + + return Err(JavaRuntimeError::UnsupportedComponent { arch: JRE_ARCH, component: component.to_owned() }); + }; + + self.load_runtime_manifest(component, &runtime.manifest).await + } + + fn clean_up_runtime_sync(path: &Path, manifest: Arc) -> Result<(), io::Error> { + for entry in 
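+        // contents_first(true) yields children before their parent, so extraneous files inside an extraneous directory are deleted before remove_dir() reaches the directory itself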
walkdir::WalkDir::new(path).contents_first(true) { + let entry = entry?; + let rel_path = entry.path().strip_prefix(path).expect("walkdir escaped root (???)"); + + if !rel_path.components().any(|c| !matches!(&c, Component::CurDir)) { + // if this path is trivial (points at the root), ignore it + continue; + } + + let rel_path_str = if std::path::MAIN_SEPARATOR != '/' { + rel_path.to_str().map(|s| s.replace(std::path::MAIN_SEPARATOR, "/")) + } else { + rel_path.to_str().map(String::from) + }; + + if !rel_path_str.as_ref().is_some_and(|s| manifest.files.get(s) + .is_some_and(|f| (f.is_file() == entry.file_type().is_file()) + || (f.is_directory() == entry.file_type().is_dir()) + || (f.is_link() == entry.file_type().is_symlink()))) { + // path is invalid utf-8, extraneous, or of the wrong type + debug!("File {} is extraneous or of wrong type ({:?}). Deleting it.", entry.path().display(), entry.file_type()); + + if entry.file_type().is_dir() { + std::fs::remove_dir(entry.path())?; + } else { + std::fs::remove_file(entry.path())?; + } + } + } + + Ok(()) + } + + async fn clean_up_runtime(path: &Path, manifest: Arc) -> Result<(), io::Error> { + let (tx, rx) = tokio::sync::oneshot::channel(); + + let path = path.to_owned(); + let manifest = manifest.clone(); + + tokio::task::spawn_blocking(move || { + let res = Self::clean_up_runtime_sync(&path, manifest); + let _ = tx.send(res); + }).await.expect("clean_up_runtime_sync panicked"); + + rx.await.expect("clean_up_runtime_sync hung up") + } + + async fn ensure_jre_dirs(&self, path: &Path, manifest: &JavaRuntimeManifest) -> Result<(), JavaRuntimeError> { + stream::iter(manifest.files.iter().filter(|(_, f)| f.is_directory())) + .map::, _>(|(name, _)| Ok(name)) + .try_for_each(|name| async move { + let ent_path = util::check_path(name).map_err(JavaRuntimeError::MalformedManifest)?; + let ent_path = [path, ent_path].into_iter().collect::(); + + match fs::metadata(&ent_path).await { + Ok(meta) => { + if !meta.is_dir() { + debug!("Deleting misplaced file at {}", ent_path.display()); + fs::remove_file(&ent_path).await.map_err(|e| JavaRuntimeError::IO { + what: "deleting misplaced file", + error: e + })?; + } + }, + Err(e) if e.kind() == ErrorKind::NotFound => (), + Err(e) => return Err(JavaRuntimeError::IO { what: "'stat'ing directory", error: e }) + } + + match fs::create_dir(&ent_path).await { + Ok(_) => { + debug!("Created directory at {}", ent_path.display()); + Ok(()) + }, + Err(e) if e.kind() == ErrorKind::AlreadyExists => Ok(()), + Err(e) => { + warn!("Could not create directory {} for JRE!", ent_path.display()); + Err(JavaRuntimeError::IO { what: "creating directory", error: e }) + } + } + }).await + } + + async fn ensure_jre_files(path: &Path, manifest: &JavaRuntimeManifest) -> Result<(), JavaRuntimeError> { + let mut downloads = Vec::new(); + for (name, file) in manifest.files.iter().filter(|(_, f)| f.is_file()) { + let file_path = util::check_path(name).map_err(JavaRuntimeError::MalformedManifest)?; + let file_path = [path, file_path].into_iter().collect::(); + + downloads.push(LzmaDownloadJob::try_from((file, file_path)).map_err(|e| { + match e { + LzmaDownloadError::MissingURL => JavaRuntimeError::MalformedManifest("runtime manifest missing URL"), + LzmaDownloadError::NotAFile => unreachable!("we just made sure this was a file") + } + })?); + } + + let dl = MultiDownloader::new(downloads.iter_mut()); + let client = Client::new(); + + dl.perform(&client).await + .inspect_err(|e| warn!("jre file download failed: {e}")) + .try_fold((), |_, _| async 
{ Ok(()) }) + .await + .map_err(|_| JavaRuntimeError::MultiDownloadError) + } + + async fn ensure_links(root_path: &Path, manifest: &JavaRuntimeManifest) -> Result<(), JavaRuntimeError> { + stream::iter(manifest.files.iter().filter(|(_, f)| f.is_link())) + .map::, _>(|(name, file)| Ok(async move { + let JavaRuntimeFile::Link { target } = file else { + unreachable!(); + }; + + let target_exp = PathBuf::from(target); + + let path = util::check_path(name.as_str()).map_err(JavaRuntimeError::MalformedManifest)?; + let link_path = [root_path, path].into_iter().collect::(); + + match fs::read_link(&link_path).await { + Ok(target_path) => { + if target_path == target_exp { + debug!("Symbolic link at {} matches! Nothing to be done.", link_path.display()); + return Ok(()) + } + + debug!("Symbolic link at {} does not match (exp {}, got {}). Recreating it.", link_path.display(), target_exp.display(), target_path.display()); + fs::remove_file(&link_path).await.map_err(|e| JavaRuntimeError::IO { + what: "deleting bad symlink", + error: e + })?; + } + Err(e) if e.kind() == ErrorKind::NotFound => (), + Err(e) => return Err(JavaRuntimeError::IO { what: "reading jre symlink", error: e }) + } + + debug!("Creating symbolic link at {} to {}", link_path.display(), target_exp.display()); + + let symlink; + #[cfg(unix)] + { + symlink = |targ, path| async { fs::symlink(targ, path).await }; + } + + #[cfg(windows)] + { + symlink = |targ, path| async { fs::symlink_file(targ, path).await }; + } + + #[cfg(not(any(unix, windows)))] + { + symlink = |_, _| async { Ok(()) }; + } + + symlink(target_exp, link_path).await.map_err(|e| JavaRuntimeError::IO { + what: "creating symlink", + error: e + })?; + + Ok(()) + })) + .try_buffer_unordered(32) + .try_fold((), |_, _| async { Ok(()) }).await + } + + pub async fn ensure_jre(&self, component: &str, manifest: JavaRuntimeManifest) -> Result { + let runtime_path = self.get_component_dir(component); + let runtime_path = runtime_path.join("runtime"); + let manifest = Arc::new(manifest); + + fs::create_dir_all(&runtime_path).await + .map_err(|e| JavaRuntimeError::IO { what: "creating runtime directory", error: e })?; + + debug!("Cleaning up JRE directory for {component}"); + Self::clean_up_runtime(runtime_path.as_path(), manifest.clone()).await + .map_err(|e| JavaRuntimeError::IO { what: "cleaning up runtime directory", error: e })?; + + debug!("Building directory structure for {component}"); + self.ensure_jre_dirs(&runtime_path, manifest.as_ref()).await?; + + debug!("Downloading JRE files for {component}"); + Self::ensure_jre_files(&runtime_path, manifest.as_ref()).await?; + + debug!("Ensuring symbolic links for {component}"); + Self::ensure_links(&runtime_path, manifest.as_ref()).await?; + + Ok(runtime_path) + } +} + +#[derive(Debug)] +pub enum JavaRuntimeError { + EnsureFile(EnsureFileError), + IO { what: &'static str, error: io::Error }, + Download { what: &'static str, error: reqwest::Error }, + Deserialize { what: &'static str, error: serde_json::Error }, + UnsupportedArch(&'static str), + UnsupportedComponent { arch: &'static str, component: String }, + MalformedManifest(&'static str), + Integrity(IntegrityError), + MultiDownloadError +} + +impl Display for JavaRuntimeError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + JavaRuntimeError::EnsureFile(e) => std::fmt::Display::fmt(e, f), + JavaRuntimeError::IO { what, error } => write!(f, "i/o error ({}): {}", what, error), + JavaRuntimeError::Download { what, error } => write!(f, "error 
downloading {}: {}", what, error), + JavaRuntimeError::Deserialize { what, error } => write!(f, "error deserializing ({what}): {error}"), + JavaRuntimeError::UnsupportedArch(arch) => write!(f, r#"unsupported architecture "{arch}""#), + JavaRuntimeError::UnsupportedComponent { arch, component } => write!(f, r#"unsupported component "{component}" for architecture "{arch}""#), + JavaRuntimeError::MalformedManifest(what) => write!(f, "malformed runtime manifest: {what} (launcher bug?)"), + JavaRuntimeError::Integrity(e) => std::fmt::Display::fmt(e, f), + JavaRuntimeError::MultiDownloadError => f.write_str("error in multi downloader (see logs for more details)") + } + } +} + +impl Error for JavaRuntimeError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + JavaRuntimeError::EnsureFile(error) => Some(error), + JavaRuntimeError::IO { error, .. } => Some(error), + JavaRuntimeError::Download { error, .. } => Some(error), + JavaRuntimeError::Deserialize { error, .. } => Some(error), + JavaRuntimeError::Integrity(error) => Some(error), + _ => None + } + } +} diff --git a/ozone/src/launcher/jre/arch.rs b/ozone/src/launcher/jre/arch.rs new file mode 100644 index 0000000..e984171 --- /dev/null +++ b/ozone/src/launcher/jre/arch.rs @@ -0,0 +1,45 @@ +use cfg_if::cfg_if; + +macro_rules! define_arch { + ($arch:expr) => { + pub const JRE_ARCH: &str = $arch; + } +} + +cfg_if! { + if #[cfg(target_os = "windows")] { + cfg_if! { + if #[cfg(target_arch = "x86_64")] { + define_arch!("windows-x64"); + } else if #[cfg(target_arch = "x86")] { + define_arch!("windows-x86"); + } else if #[cfg(target_arch = "aarch64")] { + define_arch!("windows-arm64"); + } else { + define_arch!("gamecore"); + } + } + } else if #[cfg(target_os = "linux")] { + cfg_if! { + if #[cfg(target_arch = "x86_64")] { + define_arch!("linux"); + } else if #[cfg(target_arch = "x86")] { + define_arch!("linux-i386"); + } else { + define_arch!("gamecore"); + } + } + } else if #[cfg(target_os = "macos")] { + cfg_if! 
{ + if #[cfg(target_arch = "aarch64")] { + define_arch!("mac-os-arm64"); + } else if #[cfg(target_arch = "x86_64")] { + define_arch!("mac-os"); + } else { + define_arch!("gamecore"); + } + } + } else { + define_arch!("gamecore"); + } +} diff --git a/ozone/src/launcher/jre/download.rs b/ozone/src/launcher/jre/download.rs new file mode 100644 index 0000000..ddf1ff6 --- /dev/null +++ b/ozone/src/launcher/jre/download.rs @@ -0,0 +1,195 @@ +use std::error::Error; +use std::fmt::{Debug, Display, Formatter}; +use std::io::Write; +use std::path::{PathBuf}; +use log::debug; +use lzma_rs::decompress; +use reqwest::{Client, RequestBuilder}; +use sha1_smol::{Digest, Sha1}; +use tokio::io::AsyncWriteExt; +use tokio::fs::File; +use crate::launcher::download::Download; +use crate::launcher::jre::manifest::JavaRuntimeFile; +use crate::util; +use crate::util::IntegrityError; +use crate::version::DownloadInfo; + +pub enum LzmaDownloadError { + NotAFile, + MissingURL +} + +pub struct LzmaDownloadJob { + url: String, + path: PathBuf, + inflate: bool, + executable: bool, + + raw_size: Option, + raw_sha1: Option, + + raw_sha1_st: Sha1, + raw_tally: usize, + + stream: Option>>, + out_file: Option +} + +impl LzmaDownloadJob { + fn new_inflate(raw: &DownloadInfo, lzma: &DownloadInfo, exe: bool, path: PathBuf) -> Result { + Ok(LzmaDownloadJob { + url: lzma.url.as_ref().map_or_else(|| Err(LzmaDownloadError::MissingURL), |u| Ok(u.to_owned()))?, + path, + inflate: true, + executable: exe, + + raw_size: raw.size, + raw_sha1: raw.sha1, + + raw_sha1_st: Sha1::new(), + raw_tally: 0, + + stream: Some(decompress::Stream::new(Vec::new())), + out_file: None + }) + } + + fn new_raw(raw: &DownloadInfo, exe: bool, path: PathBuf) -> Result { + Ok(LzmaDownloadJob { + url: raw.url.as_ref().map_or_else(|| Err(LzmaDownloadError::MissingURL), |u| Ok(u.to_owned()))?, + path, + inflate: false, + executable: exe, + + raw_size: raw.size, + raw_sha1: raw.sha1, + + raw_sha1_st: Sha1::new(), + raw_tally: 0, + + stream: None, + out_file: None + }) + } +} + +impl TryFrom<(&JavaRuntimeFile, PathBuf)> for LzmaDownloadJob { + type Error = LzmaDownloadError; + + fn try_from((file, path): (&JavaRuntimeFile, PathBuf)) -> Result { + if !file.is_file() { + return Err(LzmaDownloadError::NotAFile); + } + + let JavaRuntimeFile::File { executable, downloads } = file else { + unreachable!("we just made sure this was a file"); + }; + + match downloads.lzma.as_ref() { + Some(lzma) => LzmaDownloadJob::new_inflate(&downloads.raw, lzma, *executable, path), + None => LzmaDownloadJob::new_raw(&downloads.raw, *executable, path) + } + } +} + +impl Debug for LzmaDownloadJob { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("LzmaDownloadJob") + .field("url", &self.url) + .field("path", &self.path) + .field("inflate", &self.inflate) + .finish() + } +} + +impl Display for LzmaDownloadJob { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + if self.inflate { + write!(f, "download and inflate {} to {}", &self.url, self.path.display()) + } else { + write!(f, "download {} to {}", &self.url, self.path.display()) + } + } +} + +impl Download for LzmaDownloadJob { + async fn prepare(&mut self, client: &Client) -> Result, Box> { + if !util::should_download(&self.path, self.raw_size, self.raw_sha1).await? 
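+        // as in VerifiedDownload::prepare, returning Ok(None) skips this job (util::should_download presumably found a file on disk that already matches the expected size/sha1)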
{ + return Ok(None) + } + + let mut options = File::options(); + + #[cfg(unix)] + { + options.mode(match self.executable { + true => 0o775, + _ => 0o664 + }); + } + + let file = options.create(true).write(true).truncate(true).open(&self.path).await?; + self.out_file = Some(file); + + Ok(Some(client.get(&self.url))) + } + + async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box> { + let out_file = self.out_file.as_mut().expect("output file gone"); + + if let Some(ref mut stream) = self.stream { + stream.write_all(chunk)?; + let buf = stream.get_output_mut().expect("stream output missing before finish()"); + + out_file.write_all(buf.as_slice()).await?; + + self.raw_sha1_st.update(buf.as_slice()); + self.raw_tally += buf.len(); + + buf.truncate(0); + } else { + out_file.write_all(chunk).await?; + + self.raw_sha1_st.update(chunk); + self.raw_tally += chunk.len(); + } + + Ok(()) + } + + async fn finish(&mut self) -> Result<(), Box> { + let mut out_file = self.out_file.take().expect("output file gone"); + + if let Some(stream) = self.stream.take() { + let buf = stream.finish()?; + + out_file.write_all(buf.as_slice()).await?; + + self.raw_sha1_st.update(buf.as_slice()); + self.raw_tally += buf.len(); + } + + let inf_digest = self.raw_sha1_st.digest(); + if let Some(sha1) = self.raw_sha1 { + if inf_digest != sha1 { + debug!("Could not download {}: sha1 mismatch (exp {}, got {}).", self.path.display(), sha1, inf_digest); + return Err(IntegrityError::Sha1Mismatch { + expect: sha1, + actual: inf_digest + }.into()); + } + } + + if let Some(size) = self.raw_size { + if self.raw_tally != size { + debug!("Could not download {}: size mismatch (exp {}, got {}).", self.path.display(), size, self.raw_tally); + return Err(IntegrityError::SizeMismatch { + expect: size, + actual: self.raw_tally + }.into()); + } + } + + Ok(()) + } +} diff --git a/ozone/src/launcher/jre/manifest.rs b/ozone/src/launcher/jre/manifest.rs new file mode 100644 index 0000000..3fd6484 --- /dev/null +++ b/ozone/src/launcher/jre/manifest.rs @@ -0,0 +1,65 @@ +use std::collections::HashMap; +use indexmap::IndexMap; +use serde::Deserialize; +use crate::version::DownloadInfo; + +#[derive(Debug, Deserialize)] +pub struct Availability { + pub group: u32, // unknown meaning + pub progress: u32 // unknown meaning +} + +#[derive(Debug, Deserialize)] +pub struct Version { + pub name: String, + pub version: String +} + +#[derive(Debug, Deserialize)] +pub struct JavaRuntimeInfo { + // I don't see how half of this information is useful with how the JRE system currently functions -figboot + pub availability: Availability, + pub manifest: DownloadInfo, + //pub version: Version +} + +pub type JavaRuntimesManifest = HashMap>>; + +#[derive(Debug, Deserialize)] +pub struct FileDownloads { + pub lzma: Option, + pub raw: DownloadInfo +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "lowercase", tag = "type")] +pub enum JavaRuntimeFile { + File { + #[serde(default)] + executable: bool, + downloads: Box + }, + Directory, + Link { + target: String + } +} + +impl JavaRuntimeFile { + pub fn is_file(&self) -> bool { + matches!(*self, JavaRuntimeFile::File { .. }) + } + + pub fn is_directory(&self) -> bool { + matches!(*self, JavaRuntimeFile::Directory) + } + + pub fn is_link(&self) -> bool { + matches!(*self, JavaRuntimeFile::Link { .. 
}) + } +} + +#[derive(Debug, Deserialize)] +pub struct JavaRuntimeManifest { + pub files: IndexMap +} diff --git a/ozone/src/launcher/rules.rs b/ozone/src/launcher/rules.rs new file mode 100644 index 0000000..29a36d1 --- /dev/null +++ b/ozone/src/launcher/rules.rs @@ -0,0 +1,114 @@ +use std::error::Error; +use std::fmt::Display; +use crate::version::{Argument, CompatibilityRule, CompleteVersion, FeatureMatcher, Library, OSRestriction, RuleAction}; +use super::SystemInfo; + +#[derive(Debug)] +pub struct IncompatibleError { + what: &'static str, + reason: Option +} + +impl Display for IncompatibleError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(reason) = self.reason.as_ref() { + write!(f, "{} incompatible: {}", self.what, reason) + } else { + write!(f, "{} incompatible", self.what) + } + } +} + +impl Error for IncompatibleError {} + +mod seal { + pub trait CompatCheckInner { + const WHAT: &'static str; + + fn get_rules(&self) -> Option>; + fn get_incompatibility_reason(&self) -> Option<&str>; + } +} + +pub trait CompatCheck: seal::CompatCheckInner { + fn rules_apply(&self, system: &SystemInfo, feature_matcher: &impl FeatureMatcher) -> Result<(), IncompatibleError> { + let Some(rules) = self.get_rules() else { return Ok(()) }; + let mut action = RuleAction::Disallow; + + fn match_os(os: &OSRestriction, system: &SystemInfo) -> bool { + os.os.is_none_or(|o| system.is_our_os(o)) + && os.version.as_ref().is_none_or(|v| v.is_match(system.os_version.as_str())) + && os.arch.as_ref().is_none_or(|a| a.is_match(system.arch.as_str())) + } + + for rule in rules { + if rule.os.as_ref().is_none_or(|o| match_os(o, system)) + && rule.features_match(feature_matcher) { + action = rule.action; + } + } + + if action == RuleAction::Disallow { + Err(IncompatibleError { + what: Self::WHAT, + reason: self.get_incompatibility_reason().map(|s| s.to_owned()) + }) + } else { + Ok(()) + } + } +} + +// trivial +impl seal::CompatCheckInner for CompatibilityRule { + const WHAT: &'static str = "rule"; + + fn get_rules(&self) -> Option> { + Some(Some(self)) + } + + fn get_incompatibility_reason(&self) -> Option<&str> { + None + } +} + +impl seal::CompatCheckInner for CompleteVersion { + const WHAT: &'static str = "version"; + + fn get_rules(&self) -> Option> { + self.compatibility_rules.as_ref() + } + + fn get_incompatibility_reason(&self) -> Option<&str> { + self.incompatibility_reason.as_deref() + } +} + +impl seal::CompatCheckInner for Library { + const WHAT: &'static str = "library"; + + fn get_rules(&self) -> Option> { + self.rules.as_ref() + } + + fn get_incompatibility_reason(&self) -> Option<&str> { + None + } +} + +impl seal::CompatCheckInner for Argument { + const WHAT: &'static str = "argument"; + + fn get_rules(&self) -> Option> { + self.rules.as_ref() + } + + fn get_incompatibility_reason(&self) -> Option<&str> { + None + } +} + +impl CompatCheck for CompatibilityRule {} +impl CompatCheck for CompleteVersion {} +impl CompatCheck for Library {} +impl CompatCheck for Argument {} \ No newline at end of file diff --git a/ozone/src/launcher/runner.rs b/ozone/src/launcher/runner.rs new file mode 100644 index 0000000..afdfc7f --- /dev/null +++ b/ozone/src/launcher/runner.rs @@ -0,0 +1,222 @@ +use std::borrow::Cow; +use std::ffi::{OsStr, OsString}; +use std::iter; +use std::path::{Path, PathBuf}; +use std::process::Command; +use log::{debug, warn}; +use tokio::{fs, io}; +use crate::util::AsJavaPath; +use crate::version::{CompleteVersion, FeatureMatcher, OperatingSystem}; +use 
super::rules::CompatCheck; +use super::strsub::{self, SubFunc}; +use super::{Launch, LaunchInfo}; + +#[derive(Clone, Copy)] +struct LaunchArgSub<'a, 'l, F: FeatureMatcher>(&'a LaunchInfo<'l, F>); + +// FIXME: this is not correct +#[cfg(windows)] +const PATH_SEP: &str = ";"; + +#[cfg(not(windows))] +const PATH_SEP: &str = ":"; + +impl<'rep, F: FeatureMatcher> SubFunc<'rep> for LaunchArgSub<'rep, '_, F> { + fn substitute(&self, key: &str) -> Option> { + match key { + "assets_index_name" => self.0.asset_index_name.as_ref().map(|s| Cow::Borrowed(s.as_str())), + "assets_root" => Some(self.0.launcher.assets.get_home().as_java_path().to_string_lossy()), + "auth_access_token" => Some(Cow::Borrowed("-")), // TODO + "auth_player_name" => Some(Cow::Borrowed("Player")), // TODO + "auth_session" => Some(Cow::Borrowed("-")), // TODO + "auth_uuid" => Some(Cow::Borrowed("00000000-0000-0000-0000-000000000000")), // TODO + "auth_xuid" => Some(Cow::Borrowed("00000000-0000-0000-0000-000000000000")), // TODO + "classpath" => Some(Cow::Borrowed(self.0.classpath.as_str())), + "classpath_separator" => Some(Cow::Borrowed(PATH_SEP)), + "game_assets" => self.0.virtual_assets_path.as_ref() + .map(|s| s.as_path().as_java_path().to_string_lossy()), + "game_directory" => Some(self.0.instance_home.as_java_path().to_string_lossy()), + "language" => Some(Cow::Borrowed("en-us")), // ??? + "launcher_name" => Some(Cow::Borrowed("ozone (olauncher 3)")), // TODO + "launcher_version" => Some(Cow::Borrowed("yeah")), // TODO + "library_directory" => Some(self.0.launcher.libraries.home.as_java_path().to_string_lossy()), + "natives_directory" => Some(self.0.natives_path.as_java_path().to_string_lossy()), + "primary_jar" => self.0.client_jar.as_ref().map(|p| p.as_path().as_java_path().to_string_lossy()), + "quickPlayMultiplayer" => None, // TODO + "quickPlayPath" => None, // TODO + "quickPlayRealms" => None, // TODO + "quickPlaySingleplayer" => None, // TODO + "resolution_height" => None, // TODO + "resolution_width" => None, // TODO + "user_properties" => Some(Cow::Borrowed("{}")), // TODO + "user_property_map" => Some(Cow::Borrowed("[]")), // TODO + "user_type" => Some(Cow::Borrowed("legacy")), // TODO + "version_name" => Some(Cow::Borrowed(self.0.version_id.as_ref())), + "version_type" => self.0.version_type.as_ref().map(|s| Cow::Borrowed(s.to_str())), + _ => { + if let Some(asset_key) = key.strip_prefix("asset=") { + return self.0.asset_index.as_ref().and_then(|idx| idx.objects.get(asset_key)) + .map(|obj| Cow::Owned(self.0.launcher.assets.get_object_path(obj).as_java_path().to_string_lossy().into_owned())) + } + + None + } + } + } +} + +#[derive(Clone, Copy)] +pub enum ArgumentType { + Jvm, + Game +} + +pub fn build_arguments(launch: &LaunchInfo<'_, F>, version: &CompleteVersion, arg_type: ArgumentType) -> Vec { + let sub = LaunchArgSub(launch); + let system_info = &launch.launcher.system_info; + + if let Some(arguments) = version.arguments.as_ref().and_then(|args| match arg_type { + ArgumentType::Jvm => args.jvm.as_ref(), + ArgumentType::Game => args.game.as_ref() + }) { + arguments.iter() + .filter(|wa| wa.rules_apply(system_info, launch.feature_matcher).is_ok()) + .flat_map(|wa| &wa.value) + .map(|s| OsString::from(strsub::replace_string(s, &sub).into_owned())).collect() + } else if let Some(arguments) = version.minecraft_arguments.as_ref() { + match arg_type { + ArgumentType::Jvm => { + [ + "-Djava.library.path=${natives_directory}", + "-Dminecraft.launcher.brand=${launcher_name}", + 
"-Dminecraft.launcher.version=${launcher_version}", + "-Dminecraft.client.jar=${primary_jar}", + "-cp", + "${classpath}" + ].into_iter() + .chain(iter::once("-XX:HeapDumpPath=MojangTricksIntelDriversForPerformance_javaw.exe_minecraft.exe.heapdump") + .take_while(|_| system_info.os == OperatingSystem::Windows)) + .chain(iter::once(["-Dos.name=Windows 10", "-Dos.version=10.0"]) + .take_while(|_| launch.feature_matcher.matches("__ozone_win10_hack")) + .flatten()) + .chain(iter::once(["-Xdock:icon=${asset=icons/minecraft.icns}", "-Xdock:name=Minecraft"]) + .take_while(|_| system_info.os == OperatingSystem::MacOS) + .flatten()) + .map(|s| OsString::from(strsub::replace_string(s, &sub).into_owned())) + .collect() + }, + ArgumentType::Game => { + arguments.split(' ') + .chain(iter::once("--demo") + .take_while(|_| launch.feature_matcher.matches("is_demo_user"))) + .chain(iter::once(["--width", "${resolution_width}", "--height", "${resolution_height}"]) + .take_while(|_| launch.feature_matcher.matches("has_custom_resolution")) + .flatten()) + .map(|s| OsString::from(strsub::replace_string(s, &sub).into_owned())) + .collect() + } + } + } else { + Vec::default() + } +} + +pub fn run_the_game(launch: &Launch) -> Result<(), Box> { + if launch.runtime_legacy_launch { + Command::new(launch.runtime_path.as_path().as_java_path()) + .args(launch.jvm_args.iter() + .map(|o| o.as_os_str()) + .chain(iter::once(OsStr::new(launch.main_class.as_str()))) + .chain(launch.game_args.iter().map(|o| o.as_os_str()))) + .current_dir(launch.instance_path.as_path().as_java_path()).spawn()?.wait()?; + } else { + todo!("jni launch not supported :(") + } + + Ok(()) +} + +#[allow(dead_code)] +mod windows { + pub const JNI_SEARCH_PATH: Option<&str> = Some("server/jvm.dll"); + pub const JAVA_SEARCH_PATH: Option<&str> = Some("bin/java.exe"); + pub const JRE_PLATFORM_KNOWN: bool = true; +} + +#[allow(dead_code)] +mod linux { + pub const JNI_SEARCH_PATH: Option<&str> = Some("server/libjvm.so"); + pub const JAVA_SEARCH_PATH: Option<&str> = Some("bin/java"); + pub const JRE_PLATFORM_KNOWN: bool = true; +} + +#[allow(dead_code)] +mod macos { + pub const JNI_SEARCH_PATH: Option<&str> = Some("server/libjvm.dylib"); + pub const JAVA_SEARCH_PATH: Option<&str> = Some("bin/java"); + pub const JRE_PLATFORM_KNOWN: bool = true; +} + +#[allow(dead_code)] +mod unknown { + pub const JNI_SEARCH_PATH: Option<&str> = None; + pub const JAVA_SEARCH_PATH: Option<&str> = None; + pub const JRE_PLATFORM_KNOWN: bool = false; +} + +#[cfg(target_os = "windows")] +use self::windows::*; +#[cfg(target_os = "linux")] +use self::linux::*; +#[cfg(target_os = "macos")] +use self::macos::*; +#[cfg(not(any(target_os = "windows", target_os = "linux", target_os = "macos")))] +use self::unknown::*; + +fn search_java_sync(base: impl AsRef, legacy: bool) -> Result, io::Error> { + assert!(JRE_PLATFORM_KNOWN); + let search_path = Path::new(match legacy { + true => JAVA_SEARCH_PATH, + _ => JNI_SEARCH_PATH + }.unwrap()); + + let walker = walkdir::WalkDir::new(base.as_ref()).into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.file_type().is_dir()); + + for entry in walker { + let check_path = [base.as_ref(), entry.path(), Path::new(search_path)].into_iter().collect::(); + match std::fs::metadata(check_path.as_path()) { + Err(e) if e.kind() == io::ErrorKind::NotFound => (), + Err(e) => return Err(e), + Ok(meta) if meta.is_file() => return Ok(Some(check_path)), + _ => () + } + } + + Ok(None) // not found (sadface) +} + +//noinspection RsConstantConditionIf +pub async fn 
+    let meta = fs::metadata(&base).await?;
+    if meta.is_dir() { // do search
+        if !JRE_PLATFORM_KNOWN {
+            warn!("Unknown platform! Cannot search for java executable in {}. Please specify the executable file manually.", base.as_ref().display());
+            return Ok(None);
+        }
+
+        let (tx, rx) = tokio::sync::oneshot::channel();
+        let base = base.as_ref().to_path_buf(); // idc
+
+        tokio::task::spawn_blocking(move || {
+            let res = search_java_sync(base, legacy);
+            let _ = tx.send(res); // I really don't care if the reader hung up
+        }).await.expect("jre search panicked");
+
+        rx.await.expect("jre search didn't send us a result")
+    } else { // we are pointed directly at a file. assume it's what we want
+        debug!("JRE path {} is a file ({}). Assuming it's what we want.", base.as_ref().display(), legacy);
+        Ok(Some(base.as_ref().to_path_buf()))
+    }
+}
diff --git a/ozone/src/launcher/settings.rs b/ozone/src/launcher/settings.rs
new file mode 100644
index 0000000..8453653
--- /dev/null
+++ b/ozone/src/launcher/settings.rs
@@ -0,0 +1,232 @@
+use std::collections::HashMap;
+use std::error::Error;
+use std::fmt::{Display, Formatter};
+use std::io::ErrorKind;
+use std::path::{Path, PathBuf};
+use log::warn;
+use serde::{Deserialize, Serialize};
+use tokio::{fs, io};
+use tokio::fs::File;
+use tokio::io::AsyncWriteExt;
+use super::constants;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+struct SettingsInner {
+    profiles: HashMap<String, Profile>,
+    instances: HashMap<String, Instance>
+}
+
+pub struct Settings {
+    path: Option<PathBuf>,
+    inner: SettingsInner
+}
+
+#[derive(Debug)]
+pub enum SettingsError {
+    IO { what: &'static str, error: io::Error },
+    Format(serde_json::Error),
+    Inconsistent(String)
+}
+
+impl Display for SettingsError {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        match self {
+            SettingsError::IO { what, error } => write!(f, "settings i/o error ({}): {}", what, error),
+            SettingsError::Format(err) => write!(f, "settings format error: {}", err),
+            SettingsError::Inconsistent(err) => write!(f, "inconsistent settings: {}", err),
+        }
+    }
+}
+
+impl Error for SettingsError {
+    fn source(&self) -> Option<&(dyn Error + 'static)> {
+        match self {
+            SettingsError::IO { error: err, .. } => Some(err),
+            SettingsError::Format(err) => Some(err),
+            _ => None
+        }
+    }
+}
+
+impl Default for SettingsInner {
+    fn default() -> Self {
+        SettingsInner {
+            instances: [(String::from(constants::DEF_INSTANCE_NAME), PathBuf::from(constants::DEF_INSTANCE_NAME).into())].into_iter().collect(),
+            profiles: [(String::from(constants::DEF_PROFILE_NAME), Profile::new(constants::DEF_INSTANCE_NAME))].into_iter().collect()
+        }
+    }
+}
+
+impl Settings {
+    async fn load_inner(path: impl AsRef<Path>) -> Result<SettingsInner, SettingsError> {
+        match fs::read_to_string(&path).await {
+            Ok(data) => serde_json::from_str(data.as_str()).map_err(SettingsError::Format),
+            Err(e) if e.kind() == ErrorKind::NotFound => Ok(SettingsInner::default()),
+            Err(e) => Err(SettingsError::IO { what: "loading settings", error: e })
+        }
+    }
+
+    fn check_consistent(mut inner: SettingsInner, path: Option<impl AsRef<Path>>) -> Result<Settings, SettingsError> {
+        inner.profiles.retain(|name, profile| {
+            if !inner.instances.contains_key(&profile.instance) {
+                warn!("Settings inconsistency: profile {} refers to instance {}, which does not exist. Ignoring this profile.", name, profile.instance);
+                false
+            } else {
+                true
+            }
+        });
+
+        // there will be more checks later maybe
+
+        Ok(Settings {
+            path: path.map(|p| p.as_ref().to_owned()),
+            inner
+        })
+    }
+
+    pub async fn load(path: impl AsRef<Path>) -> Result<Settings, SettingsError> {
+        Self::check_consistent(Self::load_inner(&path).await?, Some(path))
+    }
+
+    pub fn get_path(&self) -> Option<&Path> {
+        self.path.as_deref()
+    }
+
+    pub async fn save_to(&self, path: impl AsRef<Path>) -> Result<(), SettingsError> {
+        let path = path.as_ref();
+
+        if let Some(parent) = path.parent() {
+            fs::create_dir_all(parent).await
+                .map_err(|e| SettingsError::IO { what: "saving settings (creating directory)", error: e })?;
+        }
+
+        let mut file = File::create(path).await
+            .map_err(|e| SettingsError::IO { what: "saving settings (open)", error: e })?;
+
+        file.write_all(serde_json::to_string_pretty(&self.inner).map_err(SettingsError::Format)?.as_bytes()).await
+            .map_err(|e| SettingsError::IO { what: "saving settings (write)", error: e })?;
+
+        Ok(())
+    }
+
+    pub async fn save(&self) -> Result<(), SettingsError> {
+        self.save_to(self.path.as_ref().expect("save() called on Settings instance not loaded from file")).await
+    }
+
+    pub fn get_instance(&self, name: &str) -> Option<&Instance> {
+        self.inner.instances.get(name)
+    }
+
+    pub fn get_profile(&self, name: &str) -> Option<&Profile> {
+        self.inner.profiles.get(name)
+    }
+
+    pub fn get_instance_for(&self, profile: &Profile) -> &Instance {
+        self.inner.instances.get(&profile.instance).unwrap()
+    }
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+pub struct Instance {
+    path: PathBuf // relative to launcher home (or absolute)
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+pub enum ProfileVersion {
+    LatestSnapshot,
+    LatestRelease,
+    #[serde(untagged)]
+    Specific(String)
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone, Copy)]
+pub struct Resolution {
+    width: u32,
+    height: u32
+}
+
+impl Default for Resolution {
+    fn default() -> Self {
+        Resolution { width: 864, height: 480 }
+    }
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+pub struct Profile {
+    game_version: ProfileVersion,
+    java_runtime: Option<String>,
+    instance: String,
+
+    #[serde(default)]
+    jvm_arguments: Vec<String>,
+    #[serde(default)]
+    legacy_launch: bool,
+
+    resolution: Option<Resolution>
+}
+
+impl<P: AsRef<Path>> From<P> for Instance {
+    fn from(path: P) -> Self {
+        Self { path: path.as_ref().into() }
+    }
+}
+
+impl Instance {
+    pub async fn get_path(&self, home: impl AsRef<Path>) -> Result<PathBuf, io::Error> {
+        let path = self.path.as_path();
+
+        if path.is_relative() {
+            Ok([home.as_ref(), Path::new("instances"), path].iter().collect::<PathBuf>())
+        } else {
+            fs::canonicalize(path).await
+        }
+    }
+}
+
+const DEF_JVM_ARGUMENTS: [&str; 7] = [
+    "-Xmx2G",
+    "-XX:+UnlockExperimentalVMOptions",
+    "-XX:+UseG1GC",
+    "-XX:G1NewSizePercent=20",
+    "-XX:G1ReservePercent=20",
+    "-XX:MaxGCPauseMillis=50",
+    "-XX:G1HeapRegionSize=32M"
+];
+
+impl Profile {
+    fn new(instance_name: &str) -> Self {
+        Self {
+            game_version: ProfileVersion::LatestRelease,
+            java_runtime: None,
+            instance: instance_name.into(),
+            jvm_arguments: DEF_JVM_ARGUMENTS.iter().map(|s| String::from(*s)).collect(),
+            legacy_launch: false,
+            resolution: None
+        }
+    }
+
+    pub fn get_version(&self) -> &ProfileVersion {
+        &self.game_version
+    }
+
+    pub fn get_instance_name(&self) -> &str {
+        &self.instance
+    }
+
+    pub fn iter_arguments(&self) -> impl Iterator<Item = &String> {
+        self.jvm_arguments.iter()
+    }
+
+    pub fn get_resolution(&self) -> Option<Resolution> {
+        self.resolution
+    }
+
+    pub fn get_java_runtime(&self) -> Option<&String> {
+        self.java_runtime.as_ref()
+    }
+
+    pub fn is_legacy_launch(&self) -> bool {
+        self.legacy_launch
+    }
+}
diff --git a/ozone/src/launcher/strsub.rs b/ozone/src/launcher/strsub.rs
new file mode 100644
index 0000000..5764405
--- /dev/null
+++ b/ozone/src/launcher/strsub.rs
@@ -0,0 +1,192 @@
+// a cheap-o implementation of StrSubstitutor from apache commons
+// (does not need to support recursive evaluation or preserving escapes, it was never enabled in
+
+use std::borrow::Cow;
+
+const ESCAPE: char = '$';
+const VAR_BEGIN: &str = "${";
+const VAR_END: &str = "}";
+const VAR_DEFAULT: &str = ":-";
+
+pub trait SubFunc<'rep> {
+    fn substitute(&self, key: &str) -> Option<Cow<'rep, str>>;
+}
+
+/* NOTE: the in-place implementation has been replaced for the following reasons:
+ * - it was annoying to get lifetimes to work, so you could only either pass a trait implementation
+ *   or a closure
+ * - it was probably slower than doing it out-of-place anyway, since you keep having to copy the
+ *   tail of the string for each replacement
+ */
+
+// handles ${replacements} on this string IN-PLACE. Calls the "sub" function for each key it receives.
+// if "sub" returns None, it will use a default value or ignore the ${substitution}.
+// There are no "invalid inputs" and this function should never panic unless "sub" panics.
+/*pub fn replace_string(input: &mut String, sub: impl SubFunc) {
+    let mut cursor = input.len();
+    while let Some(idx) = input[..cursor].rfind(VAR_BEGIN) {
+        // note: for some reason, apache processes escapes BEFORE checking if it's even a valid
+        // replacement expression. strange behavior IMO.
+        if let Some((pidx, ESCAPE)) = prev_char(input.as_ref(), idx) {
+            // this "replacement" is escaped. remove the escape marker and continue.
+            input.remove(pidx);
+            cursor = pidx;
+            continue;
+        }
+
+        let Some(endidx) = input[idx..cursor].find(VAR_END).map(|v| v + idx) else {
+            // unclosed replacement expression. ignore.
+ cursor = idx; + continue; + }; + + let spec = &input[(idx + VAR_BEGIN.len())..endidx]; + let name; + let def_opt; + + if let Some(def) = spec.find(VAR_DEFAULT) { + name = &spec[..def]; + def_opt = Some(&spec[(def + VAR_DEFAULT.len())..]); + } else { + name = spec; + def_opt = None; + } + + if let Some(sub_val) = sub.substitute(name).map_or_else(|| def_opt.map(|d| Cow::Owned(d.to_owned())), |v| Some(v)) { + input.replace_range(idx..(endidx + VAR_END.len()), sub_val.as_ref()); + } + + cursor = idx; + } +}*/ + +pub fn replace_string<'inp, 'rep>(input: &'inp str, sub: &impl SubFunc<'rep>) -> Cow<'inp, str> { + let mut ret: Option = None; + let mut cursor = 0usize; + + while let Some(idx) = input[cursor..].find(VAR_BEGIN) { + let idx = idx + cursor; // make idx an absolute index into 'input' + let spec_start = idx + VAR_BEGIN.len(); // the start of the "spec" (area inside {}) + + // first, check if this is escaped + if let Some((prev_idx, ESCAPE)) = input[..idx].char_indices().next_back() { + let s = ret.get_or_insert_default(); + s.push_str(&input[cursor..prev_idx]); + + // advance past this so we don't match it again + s.push_str(&input[idx..spec_start]); + cursor = spec_start; + continue; + } + + // now, find the closing tag + let Some(spec_end) = input[spec_start..].find(VAR_END).map(|v| v + spec_start) else { + break; // reached the end of the string + }; + + let full_spec = &input[spec_start..spec_end]; + + // check for a default argument + let (name, def) = if let Some(defidx) = full_spec.find(VAR_DEFAULT) { + (&full_spec[..defidx], Some(&full_spec[(defidx + VAR_DEFAULT.len())..])) + } else { + (full_spec, None) + }; + + let after = spec_end + VAR_END.len(); + if let Some(subst) = sub.substitute(name).map_or_else(|| def.map(Cow::Borrowed), Some) { + let s = ret.get_or_insert_default(); + s.push_str(&input[cursor..idx]); + s.push_str(subst.as_ref()); + } else { + ret.get_or_insert_default().push_str(&input[cursor..after]); + } + + cursor = after; + } + + if let Some(ret) = ret.as_mut() { + ret.push_str(&input[cursor..]); + } + + ret.map_or(Cow::Borrowed(input), Cow::Owned) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[derive(Clone, Copy)] + struct TestSub; + impl SubFunc<'static> for TestSub { + fn substitute(&self, key: &str) -> Option> { + match key { + "exists" => Some(Cow::Borrowed("value123")), + "empty" => None, + "borger" => Some(Cow::Borrowed("\u{1f354}")), + _ => panic!("replace_fun called with unexpected key: {}", key) + } + } + } + + #[test] + fn test_standard_replace() { + assert_eq!(replace_string("this has ${exists} and more", &TestSub), "this has value123 and more"); + assert_eq!(replace_string("multiple ${exists} repl${exists}ace", &TestSub), "multiple value123 replvalue123ace"); + assert_eq!(replace_string("${exists}${exists}", &TestSub), "value123value123"); + } + + #[test] + fn test_empty_replace() { + assert_eq!(replace_string("this has ${empty} and more", &TestSub), "this has ${empty} and more"); + assert_eq!(replace_string("multiple ${empty} repl${empty}ace", &TestSub), "multiple ${empty} repl${empty}ace"); + assert_eq!(replace_string("${empty}${empty}", &TestSub), "${empty}${empty}"); + } + + #[test] + fn test_homogenous_replace() { + assert_eq!(replace_string("some ${exists} and ${empty} ...", &TestSub), "some value123 and ${empty} ..."); + assert_eq!(replace_string("some ${empty} and ${exists} ...", &TestSub), "some ${empty} and value123 ..."); + assert_eq!(replace_string("${exists}${empty}", &TestSub), "value123${empty}"); + 
assert_eq!(replace_string("${empty}${exists}", &TestSub), "${empty}value123"); + } + + #[test] + fn test_default_replace() { + assert_eq!(replace_string("some ${exists:-def1} and ${empty:-def2} ...", &TestSub), "some value123 and def2 ..."); + assert_eq!(replace_string("some ${empty:-def1} and ${exists:-def2} ...", &TestSub), "some def1 and value123 ..."); + assert_eq!(replace_string("abc${empty:-}def", &TestSub), "abcdef"); + assert_eq!(replace_string("${empty:-}${empty:-}", &TestSub), ""); + } + + #[test] + fn test_escape() { + assert_eq!(replace_string("an $${escaped} replacement (${exists})", &TestSub), "an ${escaped} replacement (value123)"); + assert_eq!(replace_string("${exists}$${escaped}${exists}", &TestSub), "value123${escaped}value123"); + + // make sure this weird behavior is preserved... (the original code seemed to show it) + assert_eq!(replace_string("some $${ else", &TestSub), "some ${ else"); + } + + #[test] + fn test_weird() { + assert_eq!(replace_string("${exists}", &TestSub), "value123"); + assert_eq!(replace_string("$${empty}", &TestSub), "${empty}"); + assert_eq!(replace_string("${empty:-a}", &TestSub), "a"); + assert_eq!(replace_string("${empty:-}", &TestSub), ""); + } + + // these make sure it doesn't chop up multibyte characters illegally + #[test] + fn test_multibyte_surround() { + assert_eq!(replace_string("\u{1f354}$${}\u{1f354}", &TestSub), "\u{1f354}${}\u{1f354}"); + assert_eq!(replace_string("\u{1f354}${exists}\u{1f354}${empty:-}\u{1f354}", &TestSub), "\u{1f354}value123\u{1f354}\u{1f354}"); + } + + #[test] + fn test_multibyte_replace() { + assert_eq!(replace_string("borger ${borger}", &TestSub), "borger \u{1f354}"); + assert_eq!(replace_string("${exists:-\u{1f354}}${empty:-\u{1f354}}", &TestSub), "value123\u{1f354}"); + assert_eq!(replace_string("${borger}$${}${borger}", &TestSub), "\u{1f354}${}\u{1f354}"); + } +} diff --git a/ozone/src/launcher/version.rs b/ozone/src/launcher/version.rs new file mode 100644 index 0000000..f6cdd58 --- /dev/null +++ b/ozone/src/launcher/version.rs @@ -0,0 +1,398 @@ +use std::{collections::{BTreeMap, HashMap}, error::Error, io::ErrorKind}; +use std::borrow::Cow; +use std::collections::HashSet; +use std::fmt::Display; +use std::path::{Path, PathBuf}; + +use log::{debug, info, warn}; +use sha1_smol::Digest; +use tokio::{fs, io}; +use super::settings::ProfileVersion; +use crate::util; +use crate::version::{*, manifest::*}; + +use super::constants::*; + +#[derive(Debug)] +pub enum VersionError { + IO { what: String, error: io::Error }, + Request { what: String, error: reqwest::Error }, + MalformedObject { what: String, error: serde_json::Error }, + VersionIntegrity { id: String, expect: Digest, got: Digest } +} + +impl Display for VersionError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + VersionError::IO { what, error } => write!(f, "i/o error ({what}): {error}"), + VersionError::Request { what, error } => write!(f, "request error ({what}): {error}"), + VersionError::MalformedObject { what, error } => write!(f, "malformed {what}: {error}"), + VersionError::VersionIntegrity { id, expect, got } => write!(f, "version {id} integrity mismatch (expect {expect}, got {got})") + } + } +} + +impl Error for VersionError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + VersionError::IO { error, .. } => Some(error), + VersionError::Request { error, .. } => Some(error), + VersionError::MalformedObject { error, .. 
} => Some(error),
+            _ => None
+        }
+    }
+}
+
+struct RemoteVersionList {
+    versions: HashMap<String, VersionManifestVersion>,
+    latest: LatestVersions
+}
+
+impl RemoteVersionList {
+    async fn new() -> Result<RemoteVersionList, VersionError> {
+        debug!("Looking up remote version manifest.");
+        let text = reqwest::get(URL_VERSION_MANIFEST).await
+            .and_then(|r| r.error_for_status())
+            .map_err(|e| VersionError::Request { what: "download version manifest".into(), error: e })?
+            .text().await.map_err(|e| VersionError::Request { what: "download version manifest (decode)".into(), error: e })?;
+
+        debug!("Parsing version manifest.");
+        let manifest: VersionManifest = serde_json::from_str(text.as_str()).map_err(|e| VersionError::MalformedObject { what: "version manifest".into(), error: e })?;
+
+        let mut versions = HashMap::new();
+        for v in manifest.versions {
+            versions.insert(v.id.clone(), v);
+        }
+
+        debug!("Done loading remote versions!");
+        Ok(RemoteVersionList {
+            versions,
+            latest: manifest.latest
+        })
+    }
+
+    async fn download_version(&self, ver: &VersionManifestVersion, path: &Path) -> Result<CompleteVersion, VersionError> {
+        // ensure parent directory exists
+        info!("Downloading version {}.", ver.id);
+        tokio::fs::create_dir_all(path.parent().expect("version .json has no parent (impossible)")).await
+            .inspect_err(|e| warn!("failed to create {} parent dirs: {e}", path.display()))
+            .map_err(|e| VersionError::IO { what: format!("creating version directory for {}", path.display()), error: e })?;
+
+        // download it
+        let ver_text = reqwest::get(ver.url.as_str()).await
+            .and_then(|r| r.error_for_status())
+            .map_err(|e| VersionError::Request { what: format!("download version {} from {}", ver.id, ver.url), error: e })?
+            .text().await.map_err(|e| VersionError::Request { what: format!("download version {} from {} (receive)", ver.id, ver.url), error: e })?;
+
+        debug!("Validating downloaded {}...", ver.id);
+        // make sure it's valid
+        util::verify_sha1(ver.sha1, ver_text.as_str())
+            .map_err(|e| VersionError::VersionIntegrity {
+                id: ver.id.clone(),
+                expect: ver.sha1,
+                got: e
+            })?;
+
+        // make sure it's well-formed
+        let cver: CompleteVersion = serde_json::from_str(ver_text.as_str()).map_err(|e| VersionError::MalformedObject { what: format!("complete version {}", ver.id), error: e })?;
+
+        debug!("Saving version {}...", ver.id);
+
+        // write it out
+        tokio::fs::write(path, ver_text).await
+            .inspect_err(|e| warn!("Failed to save version {}: {}", ver.id, e))
+            .map_err(|e| VersionError::IO { what: format!("writing version file at {}", path.display()), error: e })?;
+
+        info!("Done downloading and verifying {}!", ver.id);
+
+        Ok(cver)
+    }
+}
+
+struct LocalVersionList {
+    versions: BTreeMap<String, CompleteVersion>
+}
+
+#[derive(Debug)]
+enum LocalVersionError {
+    Sha1Mismatch { exp: Digest, got: Digest },
+    VersionMismatch { fname: String, json: String },
+    Unknown(Box<dyn Error>)
+}
+
+impl Display for LocalVersionError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            LocalVersionError::Sha1Mismatch { exp, got } => {
+                write!(f, "sha1 mismatch (exp {exp}, got {got})")
+            },
+            LocalVersionError::VersionMismatch { fname, json } => {
+                write!(f, "version ID mismatch (filename {fname}, json {json})")
+            },
+            LocalVersionError::Unknown(err) => {
+                write!(f, "unknown version error: {err}")
+            }
+        }
+    }
+}
+
+impl Error for LocalVersionError {}
+
+impl LocalVersionList {
+    async fn load_version(path: &Path, sha1: Option<Digest>) -> Result<CompleteVersion, LocalVersionError> {
+        // grumble grumble I don't like reading in the whole file at once
+        info!("Loading local version at {}.", path.display());
+        let ver = tokio::fs::read_to_string(path).await.map_err(|e| LocalVersionError::Unknown(Box::new(e)))?;
+        if let Some(digest_exp) = sha1 {
+            debug!("Verifying local version {}.", path.display());
+            util::verify_sha1(digest_exp, ver.as_str())
+                .map_err(|got| {
+                    warn!("Local version sha1 mismatch: {} (exp: {}, got: {})", path.display(), digest_exp, got);
+                    LocalVersionError::Sha1Mismatch { exp: digest_exp.to_owned(), got }
+                })?;
+        }
+
+        let ver: CompleteVersion = serde_json::from_str(ver.as_str()).map_err(|e| {
+            warn!("Invalid version JSON {}: {}", path.display(), e);
+            LocalVersionError::Unknown(Box::new(e))
+        })?;
+
+        let fname_id = path.file_stem()
+            .expect("tried to load a local version with no path") // should be impossible
+            .to_str()
+            .expect("tried to load a local version with invalid UTF-8 filename"); // we already checked if the filename is valid UTF-8 at this point
+
+        if fname_id == ver.id.as_str() {
+            info!("Loaded local version {}.", ver.id);
+            Ok(ver)
+        } else {
+            warn!("Local version {} has a version ID conflict (filename: {}, json: {})!", path.display(), fname_id, ver.id);
+            Err(LocalVersionError::VersionMismatch { fname: fname_id.to_owned(), json: ver.id })
+        }
+    }
+
+    async fn load_versions(home: &Path, skip: impl Fn(&str) -> bool) -> Result<LocalVersionList, VersionError> {
+        info!("Loading local versions.");
+        let mut rd = tokio::fs::read_dir(home).await.map_err(|e| VersionError::IO { what: format!("open local versions directory {}", home.display()), error: e })?;
+        let mut versions = BTreeMap::new();
+
+        while let Some(ent) = rd.next_entry().await.map_err(|e| VersionError::IO { what: format!("read local versions directory {}", home.display()), error: e })? {
+            if !ent.file_type().await.map_err(|e| VersionError::IO { what: format!("version entry metadata {}", ent.path().display()), error: e} )?.is_dir() { continue; }
+
+            // when the code is fugly
+            let path = match ent.file_name().to_str() {
+                Some(s) => {
+                    if skip(s) {
+                        debug!("Skipping local version {s} because (I assume) it is remotely tracked.");
+                        continue
+                    }
+
+                    /* FIXME: once https://github.com/rust-lang/rust/issues/127292 is closed,
+                     * use add_extension to avoid extra heap allocations (they hurt my feelings) */
+                    let mut path = ent.path();
+
+                    // can't use set_extension since s might contain a . (like 1.8.9)
+                    path.push(format!("{s}.json"));
+                    path
+                },
+
+                /* We just ignore directories with names that contain invalid unicode. Unfortunately, the launcher
+                 * will not be supporting such custom versions. Name your version something sensible please. */
+                None => {
+                    warn!("Ignoring a local version {} because its id contains invalid unicode.", ent.file_name().to_string_lossy());
+                    continue
+                }
+            };
+
+            match Self::load_version(&path, None).await {
+                Ok(v) => {
+                    versions.insert(v.id.clone(), v);
+                },
+                Err(e) => {
+                    // FIXME: just display the filename without to_string_lossy when https://github.com/rust-lang/rust/issues/120048 is closed
+                    warn!("Ignoring local version {}: {e}", ent.file_name().to_string_lossy());
+                }
+            }
+        }
+
+        info!("Loaded {} local version(s).", versions.len());
+        Ok(LocalVersionList { versions })
+    }
+}
+
+pub struct VersionList {
+    remote: Option<RemoteVersionList>,
+    local: LocalVersionList,
+    home: PathBuf
+}
+
+pub enum VersionResult<'a> {
+    Complete(&'a CompleteVersion),
+    Remote(&'a VersionManifestVersion),
+    None
+}
+
+impl<'a> From<&'a CompleteVersion> for VersionResult<'a> {
+    fn from(value: &'a CompleteVersion) -> Self {
+        Self::Complete(value)
+    }
+}
+
+impl<'a> From<&'a VersionManifestVersion> for VersionResult<'a> {
+    fn from(value: &'a VersionManifestVersion) -> Self {
+        Self::Remote(value)
+    }
+}
+
+impl<'a, T: Into<VersionResult<'a>>> From<Option<T>> for VersionResult<'a> {
+    fn from(value: Option<T>) -> Self {
+        value.map_or(VersionResult::None, |v| v.into())
+    }
+}
+
+#[derive(Debug)]
+pub enum VersionResolveError {
+    InheritanceLoop(String),
+    MissingVersion(String),
+    VersionLoad(VersionError)
+}
+
+impl Display for VersionResolveError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            VersionResolveError::InheritanceLoop(s) => write!(f, "inheritance loop (saw {s} twice)"),
+            VersionResolveError::MissingVersion(s) => write!(f, "unknown version {s}"),
+            VersionResolveError::VersionLoad(err) => write!(f, "version load error: {err}")
+        }
+    }
+}
+
+impl Error for VersionResolveError {}
+
+impl VersionList {
+    async fn create_dir_for(home: &Path) -> Result<(), io::Error> {
+        debug!("Creating versions directory.");
+        match fs::create_dir(home).await {
+            Ok(_) => Ok(()),
+            Err(e) if e.kind() == ErrorKind::AlreadyExists => Ok(()),
+            Err(e) => {
+                debug!("failed to create version home: {}", e);
+                Err(e)
+            }
+        }
+    }
+
+    pub async fn online(home: &Path) -> Result<VersionList, VersionError> {
+        Self::create_dir_for(home).await.map_err(|e| VersionError::IO { what: format!("create version directory {}", home.display()), error: e })?;
+
+        let remote = RemoteVersionList::new().await?;
+        let local = LocalVersionList::load_versions(home, |s| remote.versions.contains_key(s)).await?;
+
+        Ok(VersionList {
+            remote: Some(remote),
+            local,
+            home: home.to_path_buf()
+        })
+    }
+
+    pub async fn offline(home: &Path) -> Result<VersionList, VersionError> {
+        Self::create_dir_for(home).await.map_err(|e| VersionError::IO { what: format!("create version directory {}", home.display()), error: e })?;
+
+        let local = LocalVersionList::load_versions(home, |_| false).await?;
+
+        Ok(VersionList {
+            remote: None,
+            local,
+            home: home.to_path_buf()
+        })
+    }
+
+    pub fn is_online(&self) -> bool {
+        self.remote.is_some()
+    }
+
+    pub fn get_version_lazy(&self, id: &str) -> VersionResult {
+        self.remote.as_ref().and_then(|r| r.versions.get(id).map(VersionResult::from))
+            .or_else(|| self.local.versions.get(id).map(VersionResult::from))
+            .unwrap_or(VersionResult::None)
+    }
+
+    pub fn get_profile_version_id<'v>(&self, ver: &'v ProfileVersion) -> Option<Cow<'v, str>> {
+        match ver {
+            ProfileVersion::LatestRelease => self.remote.as_ref().map(|r| Cow::Owned(r.latest.release.clone())),
+            ProfileVersion::LatestSnapshot => self.remote.as_ref().map(|r| Cow::Owned(r.latest.snapshot.clone())),
+            ProfileVersion::Specific(ver) => Some(Cow::Borrowed(ver))
+        }
+    }
+
+    pub fn get_remote_version(&self, id: &str) -> Option<&VersionManifestVersion> {
+        let remote = self.remote.as_ref().expect("get_remote_version called in offline mode!");
+
+        remote.versions.get(id)
+    }
+
+    pub async fn load_remote_version(&self, ver: &VersionManifestVersion) -> Result<CompleteVersion, VersionError> {
+        let remote = self.remote.as_ref().expect("load_remote_version called in offline mode!");
+
+        let id = ver.id.as_str();
+        let mut ver_path = self.home.join(id);
+        ver_path.push(format!("{id}.json"));
+
+        debug!("Loading local copy of remote version {}", ver.id);
+
+        match LocalVersionList::load_version(ver_path.as_path(), Some(ver.sha1)).await {
+            Ok(v) => return Ok(v),
+            Err(e) => {
+                info!("Redownloading {id}, since the local copy could not be loaded: {e}");
+            }
+        }
+
+        remote.download_version(ver, ver_path.as_path()).await
+    }
+
+    pub async fn resolve_version<'v>(&self, ver: &'v CompleteVersion) -> Result<Cow<'v, CompleteVersion>, VersionResolveError> {
+        let mut seen: HashSet<String> = HashSet::new();
+        seen.insert(ver.id.clone());
+
+        let Some(inherit) = ver.inherits_from.as_ref() else {
+            return Ok(Cow::Borrowed(ver));
+        };
+
+        if *inherit == ver.id {
+            warn!("Version {} directly inherits from itself!", ver.id);
+            return Err(VersionResolveError::InheritanceLoop(ver.id.clone()));
+        }
+
+        debug!("Resolving version inheritance: {} (inherits from {})", ver.id, inherit);
+
+        let mut ver = ver.clone();
+        let mut inherit = inherit.clone();
+
+        loop {
+            if !seen.insert(inherit.clone()) {
+                warn!("Version inheritance loop detected in {}: {} transitively inherits from itself.", ver.id, inherit);
+                return Err(VersionResolveError::InheritanceLoop(inherit));
+            }
+
+            let inherited_ver = match self.get_version_lazy(inherit.as_str()) {
+                VersionResult::Complete(v) => Cow::Borrowed(v),
+                VersionResult::Remote(v) =>
+                    Cow::Owned(self.load_remote_version(v).await.map_err(VersionResolveError::VersionLoad)?),
+                VersionResult::None => {
+                    warn!("Cannot resolve version {}, it inherits an unknown version {inherit}", ver.id);
+                    return Err(VersionResolveError::MissingVersion(inherit));
+                }
+            };
+
+            ver.apply_child(inherited_ver.as_ref());
+
+            let Some(new_inherit) = inherited_ver.inherits_from.as_ref() else {
+                break
+            };
+
+            inherit.replace_range(.., new_inherit.as_str());
+        }
+
+        Ok(Cow::Owned(ver))
+    }
+}
diff --git a/ozone/src/lib.rs b/ozone/src/lib.rs
new file mode 100644
index 0000000..a523091
--- /dev/null
+++ b/ozone/src/lib.rs
@@ -0,0 +1,6 @@
+mod util;
+pub mod version;
+pub mod assets;
+pub mod launcher;
+pub mod auth;
+// temporarily public
diff --git a/ozone/src/util.rs b/ozone/src/util.rs
new file mode 100644
index 0000000..7510a33
--- /dev/null
+++ b/ozone/src/util.rs
@@ -0,0 +1,334 @@
+mod progress;
+
+use std::error::Error;
+use std::fmt::{Display, Formatter};
+use std::io::ErrorKind;
+use std::path::{Component, Path, PathBuf};
+use const_format::formatcp;
+use log::{debug, info, warn};
+use sha1_smol::{Digest, Sha1};
+use tokio::fs::File;
+use tokio::{fs, io};
+use tokio::io::{AsyncReadExt, AsyncWriteExt};
+
+const PKG_NAME: &str = env!("CARGO_PKG_NAME");
+const PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
+const CRATE_NAME: &str = env!("CARGO_CRATE_NAME");
+
+pub const USER_AGENT: &str = formatcp!("{PKG_NAME}/{PKG_VERSION} (in {CRATE_NAME})");
+
+#[derive(Debug)]
+pub enum IntegrityError {
+    SizeMismatch{ expect: usize, actual: usize },
+    Sha1Mismatch{ expect: Digest, actual: Digest }
+}
+
+impl Display for IntegrityError {
+    fn fmt(&self, f: &mut Formatter<'_>) ->
std::fmt::Result { + match self { + IntegrityError::SizeMismatch{ expect, actual } => + write!(f, "size mismatch (expect {expect} bytes, got {actual} bytes)"), + IntegrityError::Sha1Mismatch {expect, actual} => + write!(f, "sha1 mismatch (expect {expect}, got {actual})") + } + } +} + +impl Error for IntegrityError {} + +pub fn verify_sha1(expect: Digest, s: &str) -> Result<(), Digest> { + let dig = Sha1::from(s).digest(); + + if dig == expect { + return Ok(()); + } + + Err(dig) +} + +#[derive(Debug)] +pub enum FileVerifyError { + Integrity(PathBuf, IntegrityError), + Open(PathBuf, tokio::io::Error), + Read(PathBuf, tokio::io::Error), +} + +impl Display for FileVerifyError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + FileVerifyError::Integrity(path, e) => write!(f, "file integrity error {}: {}", path.display(), e), + FileVerifyError::Open(path, e) => write!(f, "error opening file {}: {}", path.display(), e), + FileVerifyError::Read(path, e) => write!(f, "error reading file {}: {}", path.display(), e) + } + } +} + +impl Error for FileVerifyError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + FileVerifyError::Integrity(_, e) => Some(e), + FileVerifyError::Open(_, e) => Some(e), + FileVerifyError::Read(_, e) => Some(e) + } + } +} + +pub async fn verify_file(path: impl AsRef, expect_size: Option, expect_sha1: Option) -> Result<(), FileVerifyError> { + let path = path.as_ref(); + + if expect_size.is_none() && expect_sha1.is_none() { + return match fs::metadata(path).await { + Ok(_) => { + debug!("No size or sha1 for {}, have to assume it's good.", path.display()); + Ok(()) + }, + Err(e) => { + Err(FileVerifyError::Open(path.to_path_buf(), e)) + } + } + } + + let mut file = File::open(path).await.map_err(|e| FileVerifyError::Open(path.to_owned(), e))?; + + let mut tally = 0usize; + let mut st = Sha1::new(); + let mut buf = [0u8; 4096]; + + loop { + let n = match file.read(&mut buf).await { + Ok(n) => n, + Err(e) => match e.kind() { + ErrorKind::Interrupted => continue, + _ => return Err(FileVerifyError::Read(path.to_owned(), e)) + } + }; + + if n == 0 { + break; + } + + st.update(&buf[..n]); + tally += n; + } + + let dig = st.digest(); + + if expect_size.is_some_and(|sz| sz != tally) { + return Err(FileVerifyError::Integrity(path.to_owned(), IntegrityError::SizeMismatch { + expect: expect_size.unwrap(), + actual: tally + })); + } else if expect_sha1.is_some_and(|exp_dig| exp_dig != dig) { + return Err(FileVerifyError::Integrity(path.to_owned(), IntegrityError::Sha1Mismatch { + expect: expect_sha1.unwrap(), + actual: dig + })); + } + + Ok(()) +} + +#[derive(Debug)] +pub enum EnsureFileError { + IO { what: &'static str, error: io::Error }, + Download { url: String, error: reqwest::Error }, + Integrity(IntegrityError), + Offline, + MissingURL +} + +impl Display for EnsureFileError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + EnsureFileError::IO { what, error } => write!(f, "i/o error ensuring file ({what}): {error}"), + EnsureFileError::Download { url, error } => write!(f, "error downloading file ({url}): {error}"), + EnsureFileError::Integrity(e) => write!(f, "integrity error for downloaded file: {e}"), + EnsureFileError::Offline => f.write_str("unable to download file while offline"), + EnsureFileError::MissingURL => f.write_str("missing url"), + } + } +} + +impl Error for EnsureFileError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + EnsureFileError::IO { error, .. 
} => Some(error),
+            EnsureFileError::Download { error, .. } => Some(error),
+            EnsureFileError::Integrity(error) => Some(error),
+            _ => None
+        }
+    }
+}
+
+pub async fn should_download(path: impl AsRef<Path>, expect_size: Option<usize>, expect_sha1: Option<Digest>) -> Result<bool, io::Error> {
+    let path = path.as_ref();
+
+    match verify_file(path, expect_size, expect_sha1).await {
+        Ok(()) => {
+            debug!("Skipping download for file {}, integrity matches.", path.display());
+            Ok(false)
+        },
+        Err(FileVerifyError::Open(_, e)) if e.kind() == ErrorKind::NotFound => {
+            debug!("File {} is missing, downloading it.", path.display());
+            Ok(true)
+        },
+        Err(FileVerifyError::Integrity(p, e)) => {
+            warn!("Integrity error on file {}: {}", p.display(), e);
+
+            // try to delete the file since it's bad
+            let _ = fs::remove_file(path).await
+                .map_err(|e| warn!("Error deleting corrupted/modified file {} (ignoring): {}", path.display(), e));
+            Ok(true)
+        }
+        Err(FileVerifyError::Open(_, e) | FileVerifyError::Read(_, e)) => {
+            warn!("Error verifying file {} on disk: {}", path.display(), e);
+            Err(e)
+        }
+    }
+}
+
+pub async fn ensure_file(path: impl AsRef<Path>, url: Option<&str>, expect_size: Option<usize>, expect_sha1: Option<Digest>, online: bool, force_download: bool) -> Result<bool, EnsureFileError> {
+    let path = path.as_ref();
+
+    if !force_download && !should_download(path, expect_size, expect_sha1).await
+        .map_err(|e| EnsureFileError::IO { what: "verifying file on disk", error: e })? {
+
+        return Ok(false);
+    }
+
+    if !online {
+        warn!("Cannot download {} to {} while offline!", url.unwrap_or("(no url)"), path.display());
+        return Err(EnsureFileError::Offline);
+    }
+
+    // download the file
+    let Some(url) = url else {
+        return Err(EnsureFileError::MissingURL);
+    };
+
+    let mut file = File::create(path).await.map_err(|e| EnsureFileError::IO {
+        what: "save downloaded file (open)",
+        error: e
+    })?;
+
+    debug!("File {} must be downloaded ({}).", path.display(), url);
+
+    let mut response = reqwest::get(url).await.map_err(|e| EnsureFileError::Download { url: url.to_owned(), error: e })?;
+    let mut tally = 0usize;
+    let mut sha1 = Sha1::new();
+
+    while let Some(chunk) = response.chunk().await.map_err(|e| EnsureFileError::Download { url: url.to_owned(), error: e })?
{ + let slice = chunk.as_ref(); + + file.write_all(slice).await.map_err(|e| EnsureFileError::IO { + what: "save downloaded file (write)", + error: e + })?; + + tally += slice.len(); + sha1.update(slice); + } + + drop(file); // manually close file + + let del_file_silent = || async { + debug!("Deleting downloaded file {} since its integrity doesn't match :(", path.display()); + let _ = fs::remove_file(path).await.map_err(|e| warn!("failed to delete invalid downloaded file: {}", e)); + () + }; + + if expect_size.is_some_and(|s| s != tally) { + del_file_silent().await; + + return Err(EnsureFileError::Integrity(IntegrityError::SizeMismatch { + expect: expect_size.unwrap(), + actual: tally + })); + } + + let digest = sha1.digest(); + + if expect_sha1.is_some_and(|exp_dig| exp_dig != digest) { + del_file_silent().await; + + return Err(EnsureFileError::Integrity(IntegrityError::Sha1Mismatch { + expect: expect_sha1.unwrap(), + actual: digest + })); + } + + info!("File {} downloaded successfully.", path.display()); + Ok(true) +} + +pub fn check_path(name: &str) -> Result<&Path, &'static str> { + let entry_path: &Path = Path::new(name); + + let mut depth = 0usize; + for component in entry_path.components() { + depth = match component { + Component::Prefix(_) | Component::RootDir => + return Err("root path component in entry"), + Component::ParentDir => depth.checked_sub(1) + .map_or_else(|| Err("entry path escapes"), |s| Ok(s))?, + Component::Normal(_) => depth + 1, + _ => depth + } + } + + Ok(entry_path) +} + +#[cfg(windows)] +pub fn strip_verbatim(path: &Path) -> &Path { + let Some(Component::Prefix(p)) = path.components().next() else { + return path; + }; + + use std::path::Prefix; + use std::ffi::OsStr; + + match p.kind() { + Prefix::VerbatimDisk(_) => + Path::new(unsafe { OsStr::from_encoded_bytes_unchecked(&path.as_os_str().as_encoded_bytes()[4..]) }), + _ => path + } +} + +#[cfg(not(windows))] +pub fn strip_verbatim(path: &Path) -> &Path { + path +} + +pub trait AsJavaPath { + fn as_java_path(&self) -> &Path; +} + +impl AsJavaPath for Path { + fn as_java_path(&self) -> &Path { + strip_verbatim(self) + } +} + +#[cfg(test)] +mod tests { + #[allow(unused_imports)] + use super::*; + use std::path::Prefix; + + #[test] + #[cfg(windows)] + fn test_strip_verbatim() { + let path = Path::new(r"\\?\C:\Some\Verbatim\Path"); + match path.components().next().unwrap() { + Component::Prefix(p) => assert!(matches!(p.kind(), Prefix::VerbatimDisk(_)), "(TEST BUG) path does not start with verbatim disk"), + _ => panic!("(TEST BUG) path does not start with prefix") + } + + let path2 = path.as_java_path(); + match path2.components().next().unwrap() { + Component::Prefix(p) => assert!(matches!(p.kind(), Prefix::Disk(_))), + _ => panic!("path does not begin with prefix") + } + } +} diff --git a/ozone/src/util/progress.rs b/ozone/src/util/progress.rs new file mode 100644 index 0000000..46983c9 --- /dev/null +++ b/ozone/src/util/progress.rs @@ -0,0 +1,3 @@ +struct Progress { + +} diff --git a/ozone/src/version.rs b/ozone/src/version.rs new file mode 100644 index 0000000..ed5f95f --- /dev/null +++ b/ozone/src/version.rs @@ -0,0 +1,489 @@ +use core::fmt; +use std::{collections::BTreeMap, convert::Infallible, marker::PhantomData, ops::Deref, str::FromStr}; +use chrono::{DateTime, NaiveDateTime, Utc}; +use chrono::format::ParseErrorKind; +use regex::Regex; +use serde::{de::{self, Visitor}, Deserialize, Deserializer}; +use serde::de::{Error, SeqAccess}; +use sha1_smol::Digest; + +pub mod manifest; +use manifest::*; + 
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
+#[serde(rename_all = "lowercase")]
+pub enum RuleAction {
+    Allow,
+    Disallow
+}
+
+// must derive an order on this because it's used as a key for a btreemap
+#[derive(Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
+#[serde(rename_all = "lowercase")]
+pub enum OperatingSystem {
+    Linux, // "linux"
+    Windows, // "windows"
+
+    #[serde(alias = "osx")] // not technically correct but it works
+    MacOS, // "osx"
+
+    #[serde(other)]
+    Unknown // (not used in official jsons)
+}
+
+#[derive(Debug, Clone)]
+pub struct WrappedRegex(Regex);
+
+impl Deref for WrappedRegex {
+    type Target = Regex;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+struct RegexVisitor;
+impl Visitor<'_> for RegexVisitor {
+    type Value = WrappedRegex;
+
+    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        formatter.write_str("a valid regular expression")
+    }
+
+    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+    where
+        E: Error, {
+        Regex::new(v).map_err(Error::custom).map(WrappedRegex)
+    }
+}
+
+impl<'de> Deserialize<'de> for WrappedRegex {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de> {
+        deserializer.deserialize_any(RegexVisitor)
+    }
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct OSRestriction {
+    #[serde(rename = "name")]
+    pub os: Option<OperatingSystem>,
+
+    pub version: Option<WrappedRegex>,
+    pub arch: Option<WrappedRegex>
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct CompatibilityRule {
+    pub action: RuleAction,
+    pub features: Option<BTreeMap<String, bool>>,
+    pub os: Option<OSRestriction>
+}
+
+pub trait FeatureMatcher {
+    fn matches(&self, feature: &str) -> bool;
+}
+
+impl CompatibilityRule {
+    pub fn features_match(&self, checker: &impl FeatureMatcher) -> bool {
+        if let Some(m) = self.features.as_ref() {
+            for (feat, expect) in m {
+                if checker.matches(feat) != *expect {
+                    return false;
+                }
+            }
+        }
+
+        true
+    }
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct Argument {
+    #[serde(default)]
+    pub rules: Option<Vec<CompatibilityRule>>,
+
+    #[serde(default)]
+    #[serde(deserialize_with = "string_or_array")]
+    pub value: Vec<String>
+}
+
+#[derive(Debug, Clone)]
+pub struct WrappedArgument(Argument);
+
+impl FromStr for Argument {
+    type Err = Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(Argument {
+            value: vec![s.to_owned()],
+            rules: None
+        })
+    }
+}
+
+impl Deref for WrappedArgument {
+    type Target = Argument;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'de> Deserialize<'de> for WrappedArgument {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de> {
+        Ok(WrappedArgument(string_or_struct(deserializer)?))
+    }
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct Arguments {
+    pub game: Option<Vec<WrappedArgument>>,
+    pub jvm: Option<Vec<WrappedArgument>>
+}
+
+impl Arguments {
+    fn apply_child(&mut self, other: &Arguments) {
+        if let Some(game) = other.game.as_ref() {
+            self.game.get_or_insert_default().splice(0..0, game.iter().cloned());
+        }
+
+        if let Some(jvm) = other.jvm.as_ref() {
+            self.jvm.get_or_insert_default().splice(0..0, jvm.iter().cloned());
+        }
+    }
+}
+
+#[derive(Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
+#[serde(rename_all = "snake_case")]
+pub enum DownloadType {
+    Client,
+    ClientMappings,
+    Server,
+    ServerMappings,
+    WindowsServer
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct DownloadInfo {
+    pub sha1: Option<Digest>,
+    pub size: Option<usize>,
+    pub total_size: Option<usize>, // available for asset index
+    pub url: Option<String>, // may not be present for libraries
+    pub id: Option<String>,
+    pub path: Option<String>
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")] +pub struct JavaVersionInfo { + pub component: String, + pub major_version: u32 +} + +#[derive(Deserialize, Debug, Clone)] +pub struct LibraryDownloads { + pub artifact: Option, + pub classifiers: Option> +} + +#[derive(Deserialize, Debug, Clone)] +pub struct LibraryExtractRule { + #[serde(default)] + pub exclude: Vec +} + +#[derive(Deserialize, Debug, Clone)] +pub struct Library { + pub downloads: Option, + pub name: String, + pub extract: Option, + pub natives: Option>, + pub rules: Option>, + + // old format + pub url: Option, + pub size: Option, + pub sha1: Option +} + +impl Library { + pub fn get_canonical_name(&self) -> String { + canonicalize_library_name(self.name.as_str(), self.natives.as_ref().map(|_| "__ozone_natives")) + } +} + +impl LibraryDownloads { + pub fn get_download_info(&self, classifier: Option<&str>) -> Option<&DownloadInfo> { + if let Some(classifier) = classifier { + self.classifiers.as_ref()?.get(classifier) + } else { + self.artifact.as_ref() + } + } +} + +#[derive(Deserialize, Debug, Clone)] +pub struct ClientLogging { + pub argument: String, + + #[serde(rename = "type")] + pub log_type: String, + pub file: DownloadInfo +} + +#[derive(Deserialize, Debug, Clone)] +pub struct Logging { + pub client: Option // other fields unknown +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct CompleteVersion { + pub arguments: Option, + pub minecraft_arguments: Option, + + pub asset_index: Option, + pub assets: Option, + + pub compliance_level: Option, + + pub java_version: Option, + + #[serde(default)] + pub downloads: BTreeMap, + + #[serde(default)] + pub libraries: Vec, + + pub id: String, + pub jar: Option, // used as the jar filename if specified? (no longer used officially) + + pub logging: Option, + + pub main_class: Option, + pub minimum_launcher_version: Option, + + #[serde(deserialize_with = "deserialize_datetime_lenient")] + pub release_time: Option>, + #[serde(deserialize_with = "deserialize_datetime_lenient")] + pub time: Option>, + + #[serde(rename = "type")] + pub version_type: Option, + + pub compatibility_rules: Option>, // + pub incompatibility_reason: Option, // message shown when compatibility rules fail for this version + + pub inherits_from: Option + + /* omitting field `savableVersion' because it seems like a vestigial part from old launcher versions + * (also it isn't even a string that is present in modern liblauncher.so, so I assume it will never be used.) + */ +} + +impl CompleteVersion { + pub fn get_jar(&self) -> &String { + self.jar.as_ref().unwrap_or(&self.id) + } + + pub fn apply_child(&mut self, other: &CompleteVersion) { + macro_rules! 
replace_missing { + ($name:ident) => { + if self.$name.is_none() { + if let Some($name) = other.$name.as_ref() { + self.$name.replace($name.to_owned()); + } + } + }; + } + + if let Some(arguments) = other.arguments.as_ref() { + if let Some(my_args) = self.arguments.as_mut() { + my_args.apply_child(arguments); + } else { + self.arguments.replace(arguments.to_owned()); + } + } + + replace_missing!(minecraft_arguments); + replace_missing!(asset_index); + replace_missing!(assets); + replace_missing!(compliance_level); + replace_missing!(java_version); + + for (dltype, dl) in other.downloads.iter().by_ref() { + self.downloads.entry(*dltype).or_insert_with(|| dl.clone()); + } + + // we use extend here instead of splice for library resolution priority reasons + // (libraries earlier in the list will override libraries later in the list) + self.libraries.extend(other.libraries.iter().cloned()); + + replace_missing!(logging); + replace_missing!(main_class); + replace_missing!(minimum_launcher_version); + replace_missing!(release_time); + replace_missing!(time); + replace_missing!(version_type); + + if let Some(rules) = other.compatibility_rules.as_ref() { + if let Some(my_rules) = self.compatibility_rules.as_mut() { + my_rules.splice(0..0, rules.iter().cloned()); + } else { + self.compatibility_rules.replace(rules.to_owned()); + } + } + + replace_missing!(incompatibility_reason); + } +} + +fn canonicalize_library_name(name: &str, suffix: Option<&str>) -> String { + name.split(':') + .enumerate() + .filter(|(i, _)| *i != 2) + .map(|(_, s)| s.to_ascii_lowercase()) + .chain(suffix.into_iter().map(|s| s.to_owned())) + .collect::>() + .join(":") +} + +fn deserialize_datetime_lenient<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de> +{ + struct DateTimeVisitor; + + impl Visitor<'_> for DateTimeVisitor { + type Value = Option>; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a valid datetime") + } + + fn visit_str(self, value: &str) -> Result + where + E: Error + { + match value.parse::>() { + Ok(dt) => Ok(Some(dt)), + Err(e) if e.kind() == ParseErrorKind::TooShort => { + // this probably just doesn't have an offset for some reason + match value.parse::() { + Ok(ndt) => Ok(Some(ndt.and_utc())), + Err(e) => Err(Error::custom(e)) + } + }, + Err(e) => Err(Error::custom(e)) + } + } + } + + deserializer.deserialize_str(DateTimeVisitor) +} + +// https://serde.rs/string-or-struct.html +fn string_or_struct<'de, T, D>(deserializer: D) -> Result +where + T: Deserialize<'de> + FromStr, + D: Deserializer<'de>, +{ + struct StringOrStruct(PhantomData T>); + + impl<'de, T> Visitor<'de> for StringOrStruct + where + T: Deserialize<'de> + FromStr, + { + type Value = T; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("string or map") + } + + fn visit_str(self, v: &str) -> Result + where + E: Error, { + Ok(FromStr::from_str(v).unwrap()) + } + + fn visit_map(self, map: A) -> Result + where + A: de::MapAccess<'de>, { + // wizardry (check comment in link) + Deserialize::deserialize(de::value::MapAccessDeserializer::new(map)) + } + } + + deserializer.deserialize_any(StringOrStruct(PhantomData)) +} + +// adapted from above +fn string_or_array<'de, T, D>(deserializer: D) -> Result, D::Error> +where + T: Deserialize<'de> + FromStr, + D: Deserializer<'de>, +{ + struct StringOrVec(PhantomData T>); + + impl<'de, T> Visitor<'de> for StringOrVec + where + T: Deserialize<'de> + FromStr, + { + type Value = Vec; + + fn 
expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("string or array") + } + + fn visit_str(self, v: &str) -> Result + where + E: Error, { + Ok(vec![FromStr::from_str(v).unwrap()]) + } + + fn visit_seq(self, seq: A) -> Result + where + A: SeqAccess<'de>, { + Deserialize::deserialize(de::value::SeqAccessDeserializer::new(seq)) + } + } + + deserializer.deserialize_any(StringOrVec(PhantomData)) +} + +#[cfg(test)] +mod tests { + use std::fs; + + use super::*; + + #[test] + fn test_it() { + let s = fs::read_to_string("test_stuff/versions/1.7.10.json"); + + let arg: CompleteVersion = serde_json::from_str(s.unwrap().as_str()).unwrap(); + dbg!(arg); + } + + #[test] + fn test_it2() { + let s = fs::read_to_string("test_stuff/version_manifest_v2.json"); + + let arg: VersionManifest = serde_json::from_str(s.unwrap().as_str()).unwrap(); + dbg!(arg); + } + + #[test] + fn test_it3() { + assert_eq!(canonicalize_library_name("group:artifact:version", None), String::from("group:artifact")); + assert_eq!(canonicalize_library_name("group:artifact:version:specifier", None), String::from("group:artifact:specifier")); + assert_eq!(canonicalize_library_name("not_enough:fields", None), String::from("not_enough:fields")); + assert_eq!(canonicalize_library_name("word", None), String::from("word")); + assert_eq!(canonicalize_library_name("", None), String::from("")); + assert_eq!(canonicalize_library_name("group:artifact:version", Some("suffix")), String::from("group:artifact:suffix")); + } +} diff --git a/ozone/src/version/manifest.rs b/ozone/src/version/manifest.rs new file mode 100644 index 0000000..b2b8524 --- /dev/null +++ b/ozone/src/version/manifest.rs @@ -0,0 +1,91 @@ +use core::fmt; +use std::convert::Infallible; +use std::str::FromStr; +use chrono::{DateTime, Utc}; +use serde::{de::Visitor, Deserialize}; +use sha1_smol::Digest; + +#[derive(Deserialize, Debug)] +pub struct LatestVersions { + pub release: String, + pub snapshot: String +} + +#[derive(Debug, Clone)] +pub enum VersionType { + Snapshot, + Release, + OldBeta, + OldAlpha, + Other(String) +} + +impl FromStr for VersionType { + type Err = Infallible; + + fn from_str(s: &str) -> Result { + match s { + "snapshot" => Ok(Self::Snapshot), + "release" => Ok(Self::Release), + "old_beta" => Ok(Self::OldBeta), + "old_alpha" => Ok(Self::OldAlpha), + _ => Ok(Self::Other(s.to_owned())) + } + } +} + +impl VersionType { + pub fn to_str(&self) -> &str { + match self { + Self::Snapshot => "snapshot", + Self::Release => "release", + Self::OldBeta => "old_beta", + Self::OldAlpha => "old_alpha", + Self::Other(s) => s + } + } +} + +struct VersionTypeVisitor; + +impl Visitor<'_> for VersionTypeVisitor { + type Value = VersionType; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a Minecraft release type") + } + + fn visit_str(self, v: &str) -> Result + where + E: serde::de::Error, { + Ok(VersionType::from_str(v).unwrap(/* infallible */)) + } +} + +impl<'de> Deserialize<'de> for VersionType { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de> { + deserializer.deserialize_string(VersionTypeVisitor) + } +} + +// https://piston-meta.mojang.com/mc/game/version_manifest_v2.json +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct VersionManifestVersion { + pub id: String, + #[serde(rename = "type")] + pub version_type: VersionType, + pub url: String, + pub time: DateTime, + pub release_time: DateTime, + pub sha1: Digest, + pub 
compliance_level: u32 +} + +#[derive(Deserialize, Debug)] +pub struct VersionManifest { + pub latest: LatestVersions, + pub versions: Vec +} diff --git a/src/assets.rs b/src/assets.rs deleted file mode 100644 index 15087c9..0000000 --- a/src/assets.rs +++ /dev/null @@ -1,86 +0,0 @@ -use std::collections::HashMap; -use std::fmt::Formatter; -use std::marker::PhantomData; -use serde::{Deserialize, Deserializer}; -use serde::de::{MapAccess, Visitor}; -use sha1_smol::Digest; - -#[derive(Debug, Deserialize)] -pub struct Asset { - #[serde(skip)] - pub name: String, - pub hash: Digest, - pub size: usize -} - -#[derive(Debug, Deserialize)] -pub struct AssetIndex { - #[serde(rename = "virtual", default)] - pub virtual_assets: bool, - #[serde(default)] - pub map_to_resources: bool, - - #[serde(deserialize_with = "deserialize_assets")] - pub objects: HashMap -} - -trait SetName { - fn set_name(&mut self, name: String); -} - -impl SetName for Asset { - fn set_name(&mut self, name: String) { - self.name = name; - } -} - -fn deserialize_assets<'de, D, T>(deserializer: D) -> Result, D::Error> -where - D: Deserializer<'de>, - T: SetName + Deserialize<'de> -{ - struct AssetVisitor(PhantomData); - - impl<'de, T> Visitor<'de> for AssetVisitor - where - T: SetName + Deserialize<'de> - { - type Value = HashMap; - - fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { - formatter.write_str("asset objects map") - } - - fn visit_map(self, mut map: A) -> Result - where - A: MapAccess<'de>, - { - let mut out = HashMap::new(); - - while let Some((key, mut asset)) = map.next_entry::()? { - asset.set_name(key.clone()); - out.insert(key, asset); - } - - Ok(out) - } - } - - deserializer.deserialize_any(AssetVisitor(PhantomData)) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_it() { - dbg!(serde_json::from_str::(r#"{ - "virtual": true, - "objects": { - "object1": { "hash": "0d000710b71ca9aafabd8f587768431d0b560b32", "size": 100 }, - "object2/abc": { "hash": "0e000710b71ca9aafabd8f587768431d0b560b32", "size": 10000 } - } - }"#).unwrap()); - } -} diff --git a/src/auth.rs b/src/auth.rs deleted file mode 100644 index 057cceb..0000000 --- a/src/auth.rs +++ /dev/null @@ -1,336 +0,0 @@ -mod types; -mod msa; -mod mcservices; - -use std::error::Error; -use std::fmt::{Display, Formatter}; -use std::future::Future; -use std::time::{Duration, SystemTime}; -use chrono::{DateTime, TimeDelta, Utc}; -use log::debug; -use oauth2::{AccessToken, DeviceAuthorizationUrl, DeviceCodeErrorResponse, EndpointNotSet, EndpointSet, HttpClientError, RequestTokenError, Scope, StandardDeviceAuthorizationResponse, StandardRevocableToken, TokenResponse, TokenUrl}; -use oauth2::basic::{BasicErrorResponse, BasicErrorResponseType, BasicRevocationErrorResponse, BasicTokenIntrospectionResponse, BasicTokenResponse}; -use reqwest::{IntoUrl, Method, RequestBuilder}; -pub use types::*; -use crate::auth::msa::{XSTS_RP_MINECRAFT_SERVICES, XSTS_RP_XBOX_LIVE}; -use crate::util::USER_AGENT; - -#[derive(Debug)] -pub enum AuthError { - // An unexpected error happened while performing a request - Request { what: &'static str, error: reqwest::Error }, - OAuthRequestToken { what: &'static str, error: RequestTokenError, BasicErrorResponse> }, - OAuthRequestDeviceCode { what: &'static str, error: RequestTokenError, DeviceCodeErrorResponse> }, - - // Some internal auth error (unrecoverable) - Internal(String), - - // Device code auth was cancelled - Cancel(Option>), - - // Device code auth timed out - Timeout, - - // Requires interactive 
authentication - RequireInteractive(&'static str), - - // XSTS error - AuthXError { what: &'static str, x_error: u64, message: Option }, - - // You don't own the game! - EntitlementError -} - -impl Display for AuthError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - AuthError::Request { what, error } => write!(f, "auth request error ({}): {}", what, error), - AuthError::OAuthRequestToken { what, error } => write!(f, "oauth error requesting token ({what}): {error}"), - AuthError::OAuthRequestDeviceCode { what, error } => write!(f, "oauth error with device code ({what}): {error}"), - AuthError::Internal(msg) => write!(f, "internal auth error: {}", msg), - AuthError::Cancel(Some(error)) => write!(f, "operation cancelled: {error}"), - AuthError::Cancel(None) => f.write_str("operation cancelled"), - AuthError::Timeout => f.write_str("interactive authentication timed out"), - AuthError::RequireInteractive(why) => write!(f, "user must log in interactively: {why}"), - AuthError::AuthXError { what, x_error, message } => write!(f, "XSTS error: {what} ({x_error} -> {})", message.as_ref().map_or("", |s| s.as_str())), - AuthError::EntitlementError => f.write_str("no minecraft entitlement (do you own the game?)") - } - } -} - -impl Error for AuthError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - AuthError::Request { error, .. } => Some(error), - AuthError::OAuthRequestToken { error, .. } => Some(error), - AuthError::OAuthRequestDeviceCode { error, .. } => Some(error), - AuthError::Cancel(Some(error)) => Some(error.as_ref()), - _ => None - } - } -} - -impl Token { - fn is_expired(&self, now: DateTime) -> bool { - self.expire.is_some_and(|exp| now >= exp) - } -} - -macro_rules! create_oauth_client { - ($is_azure_client_id:expr, $client_id:expr) => { - oauth2::Client::new($client_id) - .set_token_uri(TokenUrl::new(if $is_azure_client_id { AZURE_TOKEN_URL.into() } else { NON_AZURE_TOKEN_URL.into() }).expect("hardcoded url")) - .set_device_authorization_url(DeviceAuthorizationUrl::new(if $is_azure_client_id { AZURE_DEVICE_CODE_URL.into() } else { NON_AZURE_DEVICE_CODE_URL.into() }).expect("hardcoded url")) - as oauth2::Client - } -} - -const AZURE_TOKEN_URL: &str = "https://login.microsoftonline.com/consumers/oauth2/v2.0/token"; -const AZURE_DEVICE_CODE_URL: &str = "https://login.microsoftonline.com/consumers/oauth2/v2.0/devicecode"; -const NON_AZURE_TOKEN_URL: &str = "https://login.live.com/oauth20_token.srf"; -const NON_AZURE_DEVICE_CODE_URL: &str = "https://login.live.com/oauth20_connect.srf"; - -const AZURE_LOGIN_SCOPES: &[&str] = ["XboxLive.signin", "offline_access"].as_slice(); -const NON_AZURE_LOGIN_SCOPES: &[&str] = ["service::user.auth.xboxlive.com::MBI_SSL"].as_slice(); - -fn build_json_request(client: &reqwest::Client, url: impl IntoUrl, method: Method) -> RequestBuilder { - client.request(method, url) - .header(reqwest::header::USER_AGENT, USER_AGENT) - .header(reqwest::header::ACCEPT, "application/json") -} - -impl MsaUser { - pub fn create_client() -> reqwest::Client { - reqwest::ClientBuilder::new() - .redirect(reqwest::redirect::Policy::none()) - .build().expect("building client should succeed") - } - - fn scopes_iter(&self) -> impl Iterator { - let to_scope = |f: &&str| Scope::new(String::from(*f)); - - if self.is_azure_client_id { - AZURE_LOGIN_SCOPES.iter().map(to_scope) - } else { - NON_AZURE_LOGIN_SCOPES.iter().map(to_scope) - } - } - - // uses an access token from, for example, a device code grant logs into xbox live - async 
fn xbl_login(&mut self, client: &reqwest::Client, token: &AccessToken) -> Result<(), AuthError> { - debug!("Logging into xbox live using access token"); - self.xbl_token = Some(msa::xbox_live_login(client, token, self.is_azure_client_id).await?); - - Ok(()) - } - - // logs into xbox live using a refresh token - // (panics if no refresh token present) - async fn xbl_login_refresh(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { - debug!("Using refresh token for XBL login"); - let oauth_client = create_oauth_client!(self.is_azure_client_id, self.client_id.clone()); - let refresh_token = self.refresh_token.as_ref().expect("refresh_access_token called with no refresh token"); - - let tokenres: BasicTokenResponse = oauth_client - .exchange_refresh_token(refresh_token) - .add_scopes(self.scopes_iter()) - .add_extra_param("response_type", "device_code") - .request_async(client) - .await.map_err(|e| AuthError::OAuthRequestToken { what: "refresh", error: e })?; - - self.refresh_token = tokenres.refresh_token().cloned(); - - self.xbl_login(client, tokenres.access_token()).await - } - - pub async fn xbl_login_device(&mut self, client: &reqwest::Client, handle_device: D) -> Result<(), AuthError> - where - D: FnOnce(StandardDeviceAuthorizationResponse) -> DF, - DF: Future - { - debug!("Using device authorization for XBL login"); - let oauth_client = create_oauth_client!(self.is_azure_client_id, self.client_id.clone()); - let device_auth: StandardDeviceAuthorizationResponse = oauth_client.exchange_device_code() - .add_scopes(self.scopes_iter()) - .add_extra_param("response_type", "device_code") - .request_async(client) - .await.map_err(|e| AuthError::OAuthRequestToken { what: "device code", error: e })?; - - handle_device(device_auth.clone()).await; - - let tokenres = oauth_client.exchange_device_access_token(&device_auth) - .set_max_backoff_interval(Duration::from_secs(20u64)) - .request_async(client, tokio::time::sleep, None) - .await.map_err(|e| AuthError::OAuthRequestDeviceCode { what: "device access code", error: e })?; - - self.refresh_token = tokenres.refresh_token().cloned(); - - self.xbl_login(client, tokenres.access_token()).await - } - - // ensure we have an xbox live token for this person - // tasks for this function: - // - check if the XBL token is valid/not expired - // - if it is expired, try to use refresh token to get a new one - // - get rid of auth token if yeah - async fn ensure_xbl(&mut self, client: &reqwest::Client, now: DateTime) -> Result<(), AuthError> { - if self.xbl_token.as_ref().is_some_and(|tok| !tok.is_expired(now)) { - debug!("XBL token valid. Using it."); - return Ok(()) - } - - if self.refresh_token.is_none() { - return Err(AuthError::RequireInteractive("no refresh token")); - } - - debug!("XBL token expired. Trying to refresh it."); - self.xbl_login_refresh(client).await - .map_err(|e| match &e { - AuthError::OAuthRequestToken { error: RequestTokenError::ServerResponse(res), .. 
} => match res.error() { - BasicErrorResponseType::Extension(s) if s == "interaction_required" || s == "consent_required" => { - AuthError::RequireInteractive("msa requested interactive logon") - }, - _ => e - }, - _ => e - })?; - - self.mc_token = None; - - Ok(()) - } - - // function's tasks: - // - if the minecraft services token invalid/expired/missing, do the following - // - get minecraftservices xsts token - // - use minecraftservices to get mojang token with that xsts token - async fn ensure_mc_token(&mut self, client: &reqwest::Client, now: DateTime) -> Result<(), AuthError> { - if self.mc_token.as_ref().is_some_and(|tok| !tok.is_expired(now)) { - debug!("Mojang token valid. Using it."); - return Ok(()) - } - - debug!("Mojang token has expired. Must log in again."); - let xbl_token = self.xbl_token.as_ref().expect("ensure_mc_token requires xbl token").value.as_str(); - let (user_hash, mc_xsts_tok) = match msa::xsts_request(client, xbl_token, XSTS_RP_MINECRAFT_SERVICES).await? { - msa::XSTSAuthResponse::Success(res) => { - let user_hash = res.get_user_hash() - .map_or(Err(AuthError::Internal("malformed response: no user hash".into())), |h| Ok(h.to_owned()))?; - (user_hash, res.into_token()) - }, - msa::XSTSAuthResponse::Error(e) => return Err(e.into()) - }; - - debug!("Got MinecraftServices XSTS, logging in."); - self.mc_token = Some(mcservices::login_with_xbox(client, mc_xsts_tok.as_str(), user_hash.as_str()).await?); - - Ok(()) - } - - async fn load_xbox_info(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { - debug!("Loading Xbox info..."); - let xbl_token = self.xbl_token.as_ref().expect("xbl token missing").value.as_str(); - - let res = match msa::xsts_request(client, xbl_token, XSTS_RP_XBOX_LIVE).await? { - msa::XSTSAuthResponse::Success(res) => res, - msa::XSTSAuthResponse::Error(e) => return Err(e.into()) - }; - - let Some(xuid) = res.get_xuid() else { - return Err(AuthError::Internal("missing xuid for user".into())); - }; - - self.xuid = Some(xuid.to_owned()); - self.gamertag = res.get_gamertag().map(|s| s.to_owned()); - - debug!("Xbox info loaded: (xuid {xuid}, gamertag {})", res.get_gamertag().unwrap_or("")); - - Ok(()) - } - - async fn load_profile(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { - self.load_xbox_info(client).await?; - - let mc_token = self.mc_token.as_ref().expect("minecraft token missing").value.as_str(); - - debug!("Checking if you own the game..."); - if !mcservices::owns_the_game(client, mc_token).await? 
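// For reference, a compact sketch of the chain that log_in_silent (below) drives;
// the method names mirror the functions in this module, and everything else is
// illustrative commentary rather than exact code:
//
//     // "now" is padded by 12 hours so tokens about to lapse are treated as
//     // already expired and refreshed up front.
//     let now = DateTime::<Utc>::from(SystemTime::now()) + TimeDelta::hours(12);
//     user.ensure_xbl(&client, now).await?;      // MSA refresh token -> Xbox Live user token
//     user.ensure_mc_token(&client, now).await?; // XBL token -> XSTS (minecraftservices RP) -> Mojang token
//     user.load_profile(&client).await?;         // XSTS (xboxlive RP) for xuid/gamertag, entitlement, profile
//
// A step that cannot proceed without the user surfaces as
// AuthError::RequireInteractive, which callers are expected to answer by running
// the device-code flow (xbl_login_device) and retrying.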
{ - return Err(AuthError::EntitlementError); - } - - debug!("Getting your profile info..."); - let player_info = mcservices::get_player_info(client, mc_token).await?; - let player_profile = mcservices::get_player_profile(client, player_info.id).await - .map_err(|e| AuthError::Request { what: "looking up profile", error: e })?; - - self.player_info = Some(player_info); - self.player_profile = Some(player_profile); - - Ok(()) - } - - pub async fn log_in_silent(&mut self, client: &reqwest::Client) -> Result<(), AuthError> { - let now: DateTime = DateTime::from(SystemTime::now()) + TimeDelta::hours(12); - - self.ensure_xbl(client, now).await?; - self.ensure_mc_token(client, now).await?; - self.load_profile(client).await?; - - Ok(()) - } -} - -#[cfg(test)] -mod test { - use oauth2::ClientId; - use super::*; - - #[tokio::test] - async fn abc() { - simple_logger::SimpleLogger::new().with_colors(true).with_level(log::LevelFilter::Trace).init().unwrap(); - - let mut user = match tokio::fs::read_to_string("test_stuff/test.json").await { - Ok(s) => serde_json::from_str::(&s).unwrap(), - Err(e) if e.kind() == tokio::io::ErrorKind::NotFound => { - MsaUser { - player_profile: None, - xuid: None, - gamertag: None, - player_info: None, - //client_id: ClientId::new("00000000402b5328".into()), - client_id: ClientId::new("60b6cc54-fc07-4bab-bca9-cbe9aa713c80".into()), - is_azure_client_id: true, - mc_token: None, - xbl_token: None, - refresh_token: None - } - }, - Err(e) => panic!("i/o error: {}", e) - }; - - let client = MsaUser::create_client(); - - loop { - match user.log_in_silent(&client).await { - Ok(_) => break, - Err(AuthError::RequireInteractive(s)) => { - debug!("Requires interactive auth: {s}") - }, - Err(e) => { - panic!("{}", e); - } - } - - user.xbl_login_device(&client, |d| async move { - let d = dbg!(d); - debug!("User code: {}", d.user_code().secret()); - }).await.unwrap() - } - - debug!("User: {user:?}"); - - let user_str = serde_json::to_string_pretty(&user).unwrap(); - tokio::fs::write("test_stuff/test.json", user_str.as_str()).await.unwrap(); - } -} diff --git a/src/auth/mcservices.rs b/src/auth/mcservices.rs deleted file mode 100644 index 45ef795..0000000 --- a/src/auth/mcservices.rs +++ /dev/null @@ -1,92 +0,0 @@ -use std::time::{Duration, SystemTime}; -use chrono::{DateTime, Utc}; -use reqwest::{IntoUrl, Method, RequestBuilder}; -use serde::{Deserialize, Serialize}; -use uuid::Uuid; -use super::{AuthError, MinecraftPlayerInfo, PlayerProfile}; -use super::types::Token; - -const MINECRAFT_LOGIN: &str = "https://api.minecraftservices.com/authentication/login_with_xbox"; -const MINECRAFT_ENTITLEMENTS: &str = "https://api.minecraftservices.com/entitlements"; -const MINECRAFT_PROFILE: &str = "https://api.minecraftservices.com/minecraft/profile"; - -const MINECRAFT_SESSION_PROFILE: &str = "https://sessionserver.mojang.com/session/minecraft/profile/"; - -fn build_authenticated(client: &reqwest::Client, url: impl IntoUrl, method: Method, mc_token: &str) -> RequestBuilder { - super::build_json_request(client, url, method) - .bearer_auth(mc_token) -} - -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] -struct MinecraftXboxLoginRequest<'a> { - identity_token: &'a str, - ensure_legacy_enabled: bool -} - -#[derive(Deserialize, Debug)] -struct MinecraftXboxLoginResponse { - access_token: String, - expires_in: u64 -} - -pub async fn login_with_xbox(client: &reqwest::Client, xsts_token: &str, user_hash: &str) -> Result { - let tok = format!("XBL3.0 x={user_hash};{xsts_token}"); - let 
req = MinecraftXboxLoginRequest { - identity_token: tok.as_str(), - ensure_legacy_enabled: true - }; - - let res: MinecraftXboxLoginResponse = super::build_json_request(client, MINECRAFT_LOGIN, Method::POST) - .json(&req).send().await - .and_then(|r| r.error_for_status()) - .map_err(|e| AuthError::Request { what: "minecraft xbox login", error: e })? - .json().await - .map_err(|e| AuthError::Request { what: "minecraft xbox login (decode)", error: e })?; - - let now: DateTime<Utc> = SystemTime::now().into(); - - Ok(Token { - value: res.access_token, - expire: Some(now + Duration::from_secs(res.expires_in)) - }) -} - -#[derive(Deserialize, Debug)] -struct EntitlementItem { - name: String - // we don't care about the signature -} - -#[derive(Deserialize, Debug)] -struct EntitlementResponse { - #[serde(default)] - items: Vec<EntitlementItem> -} - -pub async fn owns_the_game(client: &reqwest::Client, token: &str) -> Result<bool, AuthError> { - let res: EntitlementResponse = build_authenticated(client, MINECRAFT_ENTITLEMENTS, Method::GET, token) - .send().await - .and_then(|r| r.error_for_status()) - .map_err(|e| AuthError::Request { what: "entitlements", error: e })? - .json().await - .map_err(|e| AuthError::Request { what: "entitlements (receive)", error: e})?; - - Ok(res.items.iter().any(|i| i.name == "game_minecraft" || i.name == "product_minecraft")) -} - -pub async fn get_player_info(client: &reqwest::Client, token: &str) -> Result<MinecraftPlayerInfo, AuthError> { - build_authenticated(client, MINECRAFT_PROFILE, Method::GET, token) - .send().await - .and_then(|r| r.error_for_status()) - .map_err(|e| AuthError::Request { what: "player info", error: e })? - .json().await - .map_err(|e| AuthError::Request { what: "player info (receive)", error: e }) -} - -pub async fn get_player_profile(client: &reqwest::Client, uuid: Uuid) -> Result<PlayerProfile, reqwest::Error> { - super::build_json_request(client, format!("{}{}?unsigned=false", MINECRAFT_SESSION_PROFILE, uuid.as_simple()), Method::GET) - .send().await - .and_then(|r| r.error_for_status())?
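// The entitlement check above only inspects item names. As an illustration, a
// response shaped like the following (values made up) counts as owning the game
// because "product_minecraft" is present:
//
//     { "items": [ { "name": "product_minecraft", "signature": "eyJ..." },
//                  { "name": "game_minecraft",    "signature": "eyJ..." } ] }
//
// An empty "items" array (or any list without those names) is reported as
// AuthError::EntitlementError by load_profile.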
- .json().await -} diff --git a/src/auth/msa.rs b/src/auth/msa.rs deleted file mode 100644 index add345c..0000000 --- a/src/auth/msa.rs +++ /dev/null @@ -1,170 +0,0 @@ -use std::borrow::Cow; -use std::collections::HashMap; -use chrono::{DateTime, Utc}; -use log::debug; -use oauth2::AccessToken; -use reqwest::{Method}; -use serde::{Deserialize, Serialize}; -use uuid::Uuid; -use crate::auth::AuthError; -use crate::auth::types::Token; - -const XBOX_LIVE_AUTH: &str = "https://user.auth.xboxlive.com/user/authenticate"; -const XBOX_LIVE_XSTS: &str = "https://xsts.auth.xboxlive.com/xsts/authorize"; - -#[derive(Debug, Serialize)] -#[serde(rename_all = "PascalCase")] -struct XboxLiveAuthRequestProperties<'a> { - auth_method: &'a str, - site_name: &'a str, - rps_ticket: &'a str -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "PascalCase")] -struct XboxLiveAuthRequest<'a> { - properties: XboxLiveAuthRequestProperties<'a>, - relying_party: &'a str, - token_type: &'a str -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "PascalCase")] -struct XboxLiveAuthResponse { - token: String, - not_after: DateTime -} - -pub async fn xbox_live_login(client: &reqwest::Client, access_token: &AccessToken, azure: bool) -> Result { - debug!("MSA performing xbox live login ({azure})"); - - let ticket = match azure { - true => Cow::Owned(format!("d={}", access_token.secret())), - _ => Cow::Borrowed(access_token.secret().as_str()) - }; - - let request = XboxLiveAuthRequest { - properties: XboxLiveAuthRequestProperties { - auth_method: "RPS", - site_name: "user.auth.xboxlive.com", - rps_ticket: ticket.as_ref() - }, - relying_party: "http://auth.xboxlive.com", - token_type: "JWT" - }; - - let res: XboxLiveAuthResponse = super::build_json_request(client, XBOX_LIVE_AUTH, Method::POST).json(&request).send().await - .and_then(|r| r.error_for_status()) - .map_err(|e| AuthError::Request { what: "xbox live auth", error: e })? 
- .json().await.map_err(|e| AuthError::Request { what: "xbox live auth (decode)", error: e })?; - - Ok(Token { - value: res.token, - expire: Some(res.not_after) - }) -} - -#[derive(Serialize, Debug)] -#[serde(rename_all = "PascalCase")] -struct XSTSAuthRequest<'a> { - properties: XSTSAuthRequestProperties<'a>, - relying_party: &'a str, - token_type: &'a str -} - -#[derive(Serialize, Debug)] -#[serde(rename_all = "PascalCase")] -struct XSTSAuthRequestProperties<'a> { - sandbox_id: &'a str, - user_tokens: &'a[&'a str] -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "PascalCase")] -pub(super) struct XSTSAuthSuccessResponse { - token: String, - #[serde(default)] - display_claims: XSTSAuthResponseDisplayClaims -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "PascalCase")] -pub(super) struct XSTSAuthErrorResponse { - x_err: u64, - message: Option -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "PascalCase", untagged)] -pub(super) enum XSTSAuthResponse { - Success(XSTSAuthSuccessResponse), - Error(XSTSAuthErrorResponse) -} - -#[derive(Deserialize, Debug, Default)] -pub(super) struct XSTSAuthResponseDisplayClaims { - xui: Vec> -} - -impl XSTSAuthSuccessResponse { - pub(super) fn into_token(self) -> String { - self.token - } - - fn get_display_claim(&self, name: &str) -> Option<&str> { - self.display_claims.xui.iter().find(|m| m.contains_key(name)).and_then(|f| f.get(name).map(|s| s.as_str())) - } - - pub(super) fn get_user_hash(&self) -> Option<&str> { - self.get_display_claim("uhs") - } - - pub(super) fn get_xuid(&self) -> Option<&str> { - self.get_display_claim("xid") - } - - pub(super) fn get_gamertag(&self) -> Option<&str> { - self.get_display_claim("gtg") - } -} - -#[allow(clippy::from_over_into)] -impl Into for XSTSAuthErrorResponse { - fn into(self) -> AuthError { - AuthError::AuthXError { - // some well-known error values - what: match self.x_err { - 2148916238u64 => "Microsoft account held by a minor outside of a family.", - 2148916233u64 => "Account is not on Xbox.", - _ => "Unknown error." - }, - x_error: self.x_err, - message: self.message - } - } -} - -pub(super) const XSTS_RP_MINECRAFT_SERVICES: &str = "rp://api.minecraftservices.com/"; -pub(super) const XSTS_RP_XBOX_LIVE: &str = "http://xboxlive.com"; - -pub async fn xsts_request(client: &reqwest::Client, xbl_token: &str, relying_party: &str) -> Result { - debug!("Performing XSTS auth {relying_party}"); - - let token_array = [xbl_token]; - let req = XSTSAuthRequest { - properties: XSTSAuthRequestProperties { - sandbox_id: "RETAIL", - user_tokens: token_array.as_slice() - }, - relying_party, - token_type: "JWT" - }; - - let res: XSTSAuthResponse = super::build_json_request(client, XBOX_LIVE_XSTS, Method::POST).json(&req).send().await - .and_then(|r| r.error_for_status()) - .map_err(|e| AuthError::Request { what: "xsts", error: e })? 
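// The DisplayClaims handling above assumes the usual XSTS response shape, roughly
// (illustrative values):
//
//     { "Token": "eyJ...",
//       "DisplayClaims": { "xui": [ { "uhs": "1234...", "xid": "2535...", "gtg": "SomeGamertag" } ] } }
//
// get_display_claim scans the "xui" array for the first object carrying the
// requested key, so "uhs" yields the user hash that login_with_xbox later folds
// into the "XBL3.0 x={user_hash};{xsts_token}" authorization value.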
- .json().await - .map_err(|e| AuthError::Request { what: "xsts (decode)", error: e })?; - - Ok(res) -} diff --git a/src/auth/types.rs b/src/auth/types.rs deleted file mode 100644 index b9cdaad..0000000 --- a/src/auth/types.rs +++ /dev/null @@ -1,130 +0,0 @@ -pub mod property_map; -pub use property_map::PropertyMap; - -use std::fmt::{Debug, Formatter}; -use chrono::{DateTime, Utc}; -use multimap::MultiMap; -use oauth2::RefreshToken; -use serde::{Deserialize, Serialize}; -use uuid::Uuid; - -#[derive(Debug, Serialize, Deserialize)] -pub struct Property { - pub name: String, - pub value: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub signature: Option -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct PlayerProfile { - #[serde(with = "uuid::serde::simple")] - pub id: Uuid, - pub name: String, - - #[serde(default, skip_serializing_if = "MultiMap::is_empty", with = "property_map")] - pub properties: PropertyMap -} - -#[derive(Serialize, Deserialize)] -pub(super) struct Token { - pub value: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub expire: Option> -} - -struct RedactedValue; -impl Debug for RedactedValue { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.write_str("[redacted]") - } -} - -impl Debug for Token { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Token") - .field("value", &RedactedValue) - .field("expire", &self.expire) - .finish() - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "UPPERCASE")] -pub enum SkinState { - Active, - Inactive -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "UPPERCASE")] -pub enum SkinVariant { - Classic, - Slim -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SkinInfo { - pub id: Uuid, - pub state: SkinState, - pub url: String, - pub texture_key: Option, - pub variant: Option, - pub alias: Option -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct MinecraftPlayerInfo { - #[serde(with = "uuid::serde::simple")] - pub id: Uuid, - pub name: String, - - #[serde(default)] - pub skins: Vec, - #[serde(default)] - pub capes: Vec, - - #[serde(default)] - pub demo: bool, - - #[serde(default)] - pub legacy: bool, - - // todo: profile actions (idk the format) -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct MsaUser { - #[serde(skip_serializing_if = "Option::is_none")] - pub player_profile: Option, - pub xuid: Option, - pub gamertag: Option, - - #[serde(skip)] // this information is transient - pub player_info: Option, - - pub(super) client_id: oauth2::ClientId, - - #[serde(default, skip_serializing_if = "std::ops::Not::not")] - pub(super) is_azure_client_id: bool, - - pub(super) mc_token: Option, - pub(super) xbl_token: Option, - pub(super) refresh_token: Option -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "type", rename_all = "lowercase")] -pub enum User { - Dummy(PlayerProfile), - MSA(Box) -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct AuthenticationDatabase { - pub users: Vec -} diff --git a/src/auth/types/property_map.rs b/src/auth/types/property_map.rs deleted file mode 100644 index ddfc9ce..0000000 --- a/src/auth/types/property_map.rs +++ /dev/null @@ -1,61 +0,0 @@ -use std::fmt::Formatter; -use multimap::MultiMap; -use serde::de::{SeqAccess, Visitor}; -use serde::{Deserializer, Serializer}; -use crate::auth::Property; - -pub type PropertyMap = MultiMap; - -pub mod legacy { - use serde::Serializer; - use super::PropertyMap; - - pub fn 
serialize(value: &PropertyMap, serializer: S) -> Result - where S: Serializer - { - serializer.collect_map(value.iter_all() - .filter_map(|(k, v)| { - if v.is_empty() { - None - } else { - Some((k, v.iter().map(|p| &p.value).collect::>())) - } - })) - } -} - -pub fn serialize(value: &PropertyMap, serializer: S) -> Result -where - S: Serializer -{ - serializer.collect_seq(value.flat_iter().map(|(_, v)| v)) -} - -pub fn deserialize<'de, D>(deserializer: D) -> Result -where - D: Deserializer<'de> -{ - struct PropertyMapVisitor; - - impl<'de> Visitor<'de> for PropertyMapVisitor { - type Value = PropertyMap; - - fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { - formatter.write_str("a property map") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let mut map = MultiMap::new() as PropertyMap; - while let Some(prop) = seq.next_element::()? { - map.insert(prop.name.clone(), prop); - } - - Ok(map) - } - } - - deserializer.deserialize_seq(PropertyMapVisitor) -} diff --git a/src/launcher.rs b/src/launcher.rs deleted file mode 100644 index 2836db5..0000000 --- a/src/launcher.rs +++ /dev/null @@ -1,765 +0,0 @@ -mod constants; -mod version; -mod strsub; -mod download; -mod rules; -mod assets; -mod extract; -mod settings; -mod runner; -mod jre; - -use std::borrow::Cow; -use std::cmp::min; -use std::env::consts::{ARCH, OS}; -use std::error::Error; -use std::ffi::{OsStr, OsString}; -use std::fmt::{Display, Formatter}; -use std::io::ErrorKind; -use std::io::ErrorKind::AlreadyExists; -use std::path::{Component, Path, PathBuf}; -use std::{env, process}; -use std::env::JoinPathsError; -use std::time::{Instant, SystemTime, UNIX_EPOCH}; -use const_format::formatcp; -use futures::{StreamExt, TryStreamExt}; -use indexmap::IndexMap; -use log::{debug, info, trace, warn}; -use reqwest::Client; -use sysinfo::System; -use tokio::{fs, io}; -use tokio_stream::wrappers::ReadDirStream; -use download::{MultiDownloader, VerifiedDownload}; -use rules::{CompatCheck, IncompatibleError}; -use version::{VersionList, VersionResolveError, VersionResult}; -use crate::version::{Library, OSRestriction, OperatingSystem, DownloadType, LibraryExtractRule, FeatureMatcher, ClientLogging}; - -use assets::{AssetError, AssetRepository}; -use crate::util::{self, AsJavaPath}; - -pub use settings::*; -pub use runner::run_the_game; -pub use crate::util::{EnsureFileError, FileVerifyError, IntegrityError}; -use crate::assets::AssetIndex; -use runner::ArgumentType; -use strsub::SubFunc; -use crate::launcher::download::FileDownload; -use crate::launcher::jre::{JavaRuntimeError, JavaRuntimeRepository}; -use crate::launcher::version::VersionError; -use crate::version::manifest::VersionType; - -#[derive(Debug)] -pub enum LogConfigError { - UnknownType(String), - InvalidId(Option), - MissingURL, - IO{ what: &'static str, error: io::Error }, - Offline, - Download{ url: String, error: reqwest::Error }, - - Integrity(IntegrityError) -} - -impl Display for LogConfigError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - LogConfigError::UnknownType(log_type) => write!(f, "unknown log type {}", log_type), - LogConfigError::InvalidId(oid) => match oid { - Some(id) => write!(f, "invalid log config id: {}", id), - None => f.write_str("missing log config id") - }, - LogConfigError::MissingURL => f.write_str("missing log config download URL"), - LogConfigError::IO { what, error} => write!(f, "i/o error ({}): {}", what, error), - LogConfigError::Offline => f.write_str("launcher in 
offline mode"), - LogConfigError::Download { url, error } => write!(f, "failed to download log config ({}): {}", url, error), - LogConfigError::Integrity(e) => write!(f, "log config verify error: {}", e) - } - } -} - -impl Error for LogConfigError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - LogConfigError::IO { error, .. } => Some(error), - LogConfigError::Download {error, ..} => Some(error), - LogConfigError::Integrity(error) => Some(error), - _ => None - } - } -} - -struct SystemInfo { - os: OperatingSystem, - os_version: String, - arch: String -} - -struct LibraryRepository { - home: PathBuf, - natives: PathBuf -} - -pub struct Launcher { - online: bool, - home: PathBuf, - versions: VersionList, - - system_info: SystemInfo, - - libraries: LibraryRepository, - assets: AssetRepository, - java_runtimes: JavaRuntimeRepository -} - -#[derive(Debug)] -pub enum LaunchError { - UnknownInstance(String), - - // version resolution errors - VersionInit(VersionError), - UnknownVersion(String), - LoadVersion(VersionError), - ResolveVersion(VersionResolveError), - IncompatibleVersion(IncompatibleError), - MissingMainClass, - - // library errors - LibraryDirError(PathBuf, io::Error), - LibraryVerifyError(FileVerifyError), - LibraryDownloadError, - LibraryExtractError(extract::ZipExtractError), - LibraryClasspathError(JoinPathsError), - - // ensure file errors - EnsureFile(EnsureFileError), - IO { what: &'static str, error: io::Error }, - - // log errors - UnknownLogType(String), - InvalidLogId(Option), - - // asset errors - Assets(AssetError), - - // java runtime errors - ResolveJavaRuntime { what: &'static str, error: io::Error }, - MissingJavaRuntime, - JavaRuntimeRepo(JavaRuntimeError) -} - -impl Display for LaunchError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match &self { - LaunchError::UnknownInstance(inst) => write!(f, "unknown instance: {inst}"), - LaunchError::VersionInit(e) => write!(f, "initializing version: {e}"), - LaunchError::UnknownVersion(id) => write!(f, "unknown version id: {id}"), - LaunchError::LoadVersion(e) => write!(f, "error loading remote version: {e}"), - LaunchError::ResolveVersion(e) => write!(f, "error resolving remote version: {e}"), - LaunchError::IncompatibleVersion(e) => e.fmt(f), - LaunchError::MissingMainClass => f.write_str("main class not specified"), - LaunchError::LibraryDirError(path, e) => write!(f, "failed to create library directory {}: {}", path.display(), e), - LaunchError::LibraryVerifyError(e) => write!(f, "failed to verify library: {}", e), - LaunchError::LibraryDownloadError => f.write_str("library download failed (see above logs for details)"), // TODO: booo this sucks - LaunchError::LibraryExtractError(e) => write!(f, "library extract zip error: {e}"), - LaunchError::LibraryClasspathError(e) => write!(f, "error building classpath: {e}"), - LaunchError::IO { what, error } => write!(f, "i/o error ({}): {}", what, error), - LaunchError::EnsureFile(e) => e.fmt(f), - LaunchError::UnknownLogType(t) => write!(f, "unknown log type: {}", t), - LaunchError::InvalidLogId(Some(id)) => write!(f, "invalid log id: {}", id), - LaunchError::InvalidLogId(None) => write!(f, "missing log id"), - LaunchError::Assets(e) => write!(f, "failed to fetch assets: {}", e), - LaunchError::ResolveJavaRuntime { what, error } => write!(f, "failed to find java runtime ({}): {}", what, error), - LaunchError::MissingJavaRuntime => f.write_str("suitable java executable not found"), - LaunchError::JavaRuntimeRepo(e) => write!(f, 
"runtime repository error: {e}") - } - } -} - -impl Error for LaunchError { - fn cause(&self) -> Option<&dyn Error> { - match &self { - LaunchError::VersionInit(e) => Some(e), - LaunchError::LoadVersion(e) => Some(e), - LaunchError::ResolveVersion(e) => Some(e), - LaunchError::IncompatibleVersion(e) => Some(e), - LaunchError::LibraryDirError(_, e) => Some(e), - LaunchError::LibraryVerifyError(e) => Some(e), - LaunchError::LibraryExtractError(e) => Some(e), - LaunchError::LibraryClasspathError(e) => Some(e), - LaunchError::IO { error: e, .. } => Some(e), - LaunchError::EnsureFile(e) => Some(e), - LaunchError::Assets(e) => Some(e), - LaunchError::ResolveJavaRuntime { error: e, .. } => Some(e), - LaunchError::JavaRuntimeRepo(e) => Some(e), - _ => None - } - } -} - -struct LaunchInfo<'l, F: FeatureMatcher> { - launcher: &'l Launcher, - feature_matcher: &'l F, - - asset_index_name: Option, - classpath: String, - virtual_assets_path: Option, - instance_home: PathBuf, - natives_path: PathBuf, - client_jar: Option, - version_id: String, - version_type: Option, - asset_index: Option -} - -#[derive(Debug)] -pub struct Launch { - jvm_args: Vec, - game_args: Vec, - main_class: String, - instance_path: PathBuf, - runtime_path: PathBuf, - runtime_legacy_launch: bool -} - -struct ProfileFeatureMatcher<'prof> { - profile: &'prof Profile -} - -impl FeatureMatcher for ProfileFeatureMatcher<'_> { - fn matches(&self, feature: &str) -> bool { - match feature { - "has_custom_resolution" => self.profile.get_resolution().is_some(), - _ => false - } - } -} - -impl Launcher { - // FIXME: more descriptive error type por favor - pub async fn new(home: impl AsRef, online: bool) -> Result { - match tokio::fs::create_dir_all(home.as_ref()).await { - Err(e) if e.kind() != AlreadyExists => { - warn!("Failed to create launcher home directory: {}", e); - return Err(LaunchError::IO { what: "create launcher home", error: e }); - }, - _ => () - } - - let home = fs::canonicalize(home.as_ref()).await - .map_err(|e| LaunchError::IO { what: "resolve home path", error: e })?; - - let versions_home = home.join("versions"); - - debug!("Version list online?: {online}"); - let versions = if online { - VersionList::online(versions_home.as_ref()).await.map_err(LaunchError::VersionInit)? - } else { - VersionList::offline(versions_home.as_ref()).await.map_err(LaunchError::VersionInit)? 
- }; - - let assets_path = home.join("assets"); - - let java_runtimes = JavaRuntimeRepository::new(home.join("jre"), online).await.map_err(LaunchError::JavaRuntimeRepo)?; - - Ok(Launcher { - online, - versions, - system_info: SystemInfo::new(), - libraries: LibraryRepository { - home: home.join("libraries"), - natives: home.join("natives") - }, - assets: AssetRepository::new(online, &assets_path).await.map_err(|e| LaunchError::IO { what: "setting up assets", error: e })?, - java_runtimes, - home - }) - } - - fn choose_lib_classifier<'lib>(&self, lib: &'lib Library) -> Option<&'lib str> { - lib.natives.as_ref().and_then(|n| n.get(&self.system_info.os)).map(|s| s.as_str()) - } - - async fn log_config_ensure(&self, config: &ClientLogging) -> Result { - info!("Ensuring log configuration exists and is valid."); - - if config.log_type != "log4j2-xml" { - return Err(LaunchError::UnknownLogType(config.log_type.clone())); - } - - let dlinfo = &config.file; - let Some(id) = dlinfo.id.as_ref() else { - return Err(LaunchError::InvalidLogId(None)); - }; - - let mut path = self.home.join("logging"); - fs::create_dir_all(path.as_path()).await - .map_err(|e| LaunchError::IO{ what: "creating log directory", error: e })?; - - let Some(Component::Normal(filename)) = Path::new(id).components().last() else { - return Err(LaunchError::InvalidLogId(Some(id.clone()))); - }; - - path.push(filename); - - debug!("Logger config {} is at {}", id, path.display()); - - util::ensure_file(&path, dlinfo.url.as_deref(), dlinfo.size, dlinfo.sha1, self.online, false).await - .map_err(LaunchError::EnsureFile)?; - - struct PathSub<'a>(&'a Path); - impl<'a> SubFunc<'a> for PathSub<'a> { - fn substitute(&self, key: &str) -> Option> { - match key { - "path" => Some(self.0.as_java_path().to_string_lossy()), - _ => None - } - } - } - - Ok(strsub::replace_string(config.argument.as_str(), &PathSub(path.as_ref())).to_string()) - } - - /* TODO: - * - launch game using JNI - * - auth - */ - pub async fn prepare_launch(&self, profile: &Profile, instance: &Instance) -> Result { - let start = Instant::now(); - let feature_matcher = ProfileFeatureMatcher { profile }; - let version_id = profile.get_version(); - - let Some(version_id) = self.versions.get_profile_version_id(version_id) else { - // idk how common this use case actually is - warn!("Can't use latest release/snapshot profiles while offline!"); - return Err(LaunchError::UnknownVersion("".into())); - }; - - info!("Preparing launch for \"{}\"...", version_id); - - let inst_home = instance.get_path(&self.home).await.map_err(|e| LaunchError::IO { - what: "resolving instance directory", - error: e - })?; - - fs::create_dir_all(inst_home.as_path()).await.map_err(|e| LaunchError::IO { - what: "creating instance directory", - error: e - })?; - - info!("Launching the game in {}", inst_home.display()); - - let ver_res = self.versions.get_version_lazy(version_id.as_ref()); - let ver = match ver_res { - VersionResult::Remote(mv) => Cow::Owned(self.versions.load_remote_version(mv).await.map_err(LaunchError::LoadVersion)?), - VersionResult::Complete(cv) => Cow::Borrowed(cv), - VersionResult::None => { - return Err(LaunchError::UnknownVersion(version_id.into_owned())) - } - }; - - let ver = self.versions.resolve_version(ver.as_ref()).await.map_err(LaunchError::ResolveVersion)?; - ver.rules_apply(&self.system_info, &feature_matcher).map_err(LaunchError::IncompatibleVersion)?; - - info!("Resolved launch version {}!", ver.id); - - let mut extract_jobs = Vec::new(); - let mut downloads = 
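// The log-config argument assembled in log_config_ensure above is a plain string
// substitution: Mojang's client metadata typically ships something like
// "-Dlog4j.configurationFile=${path}" (the exact placeholder syntax is whatever
// strsub::replace_string understands), and PathSub swaps the "path" key for the
// downloaded XML's on-disk location, e.g. (illustrative path)
//
//     -Dlog4j.configurationFile=/home/user/.ozone/logging/client-1.12.xml
//
// where the directory is <launcher home>/logging and the file name is taken from
// the config id.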
IndexMap::new(); - - for lib in ver.libraries.iter() { - if lib.rules_apply(&self.system_info, &feature_matcher).is_err() { - trace!("Skipping library {}, compatibility rules failed", lib.name); - continue; - } - - let classifier = self.choose_lib_classifier(lib); - - if let Some(dl) = self.libraries.create_download(lib, classifier) { - let canon_name = lib.get_canonical_name(); - if downloads.contains_key(&canon_name) { - debug!("Skipping library {}, we already have another version of that library.", lib.name); - continue; - } - - trace!("Using library {} ({})", lib.name, classifier.unwrap_or("None")); - dl.make_dirs().await.map_err(|e| LaunchError::LibraryDirError(dl.get_path().to_path_buf(), e))?; - - if lib.natives.is_some() { - extract_jobs.push(LibraryExtractJob { - source: dl.get_path().to_owned(), - rule: lib.extract.clone() - }); - } - - downloads.insert(canon_name, dl); - } else { - trace!("Skipping library {} ({}), no download", lib.name, classifier.unwrap_or("None")); - } - } - - if self.online { - info!("Downloading {} libraries...", downloads.len()); - let client = Client::new(); - MultiDownloader::new(downloads.values_mut()).perform(&client).await - .inspect_err(|e| warn!("library download failed: {e}")) - .try_fold((), |_, _| async {Ok(())}) - .await - .map_err(|_| LaunchError::LibraryDownloadError)?; - } else { - info!("Verifying {} libraries...", downloads.len()); - download::verify_files(downloads.values_mut()).await.map_err(|e| { - warn!("A library could not be verified: {}", e); - warn!("Since the launcher is in offline mode, libraries cannot be downloaded. Please try again in online mode."); - LaunchError::LibraryVerifyError(e) - })?; - } - - let log_arg; - if let Some(logging) = ver.logging.as_ref().and_then(|l| l.client.as_ref()) { - log_arg = Some(self.log_config_ensure(logging).await?); - } else { - log_arg = None; - } - - // download assets - - let (asset_idx_name, asset_idx) = - if let Some(idx_download) = ver.asset_index.as_ref() { - let asset_idx_name = idx_download.id.as_ref().or(ver.assets.as_ref()).map(String::as_str); - let asset_idx = self.assets.load_index(idx_download, asset_idx_name).await - .map_err(LaunchError::Assets)?; - - self.assets.ensure_assets(&asset_idx).await.map_err(LaunchError::Assets)?; - - (asset_idx_name, Some(asset_idx)) - } else { - (None, None) - }; - - // download client jar - - let client_jar_path; - if let Some(client) = ver.downloads.get(&DownloadType::Client) { - let mut client_path: PathBuf = [self.home.as_ref(), OsStr::new("versions"), OsStr::new(&ver.id)].iter().collect(); - fs::create_dir_all(&client_path).await.map_err(|e| LaunchError::IO{ what: "creating client download directory", error: e })?; - - client_path.push(format!("{}.jar", ver.id)); - - info!("Downloading client jar {}", client_path.display()); - - util::ensure_file(client_path.as_path(), client.url.as_deref(), client.size, client.sha1, self.online, false).await - .map_err(LaunchError::EnsureFile)?; - - client_jar_path = Some(client_path); - } else { - client_jar_path = None; - } - - // clean up old natives - let nnatives = self.libraries.clean_old_natives().await?; - info!("Cleaned up {} old natives directories.", nnatives); - - // extract natives (execute this function unconditionally because we still need the natives dir to exist) - info!("Extracting natives from libraries"); - let natives_dir = self.libraries.extract_natives(extract_jobs).await?; - - let game_assets = if let Some(asset_idx) = asset_idx.as_ref() { - info!("Reconstructing assets"); - 
self.assets.reconstruct_assets(asset_idx, inst_home.as_path(), asset_idx_name).await - .map_err(LaunchError::Assets)? - } else { - None - }; - - info!("Building classpath"); - let classpath = env::join_paths(downloads.values() - .map(|job| job.get_path().as_java_path()) - .chain(client_jar_path.iter().map(|p| p.as_path().as_java_path()))) - .map_err(LaunchError::LibraryClasspathError)? - .into_string() - .unwrap_or_else(|os| { - warn!("Classpath contains invalid UTF-8. The game may not launch correctly."); - os.to_string_lossy().to_string() - }); - - trace!("Classpath: {classpath}"); - - info!("Resolving java runtime environment path"); - let runtime_path; - - if let Some(ref profile_jre) = profile.get_java_runtime() { - runtime_path = fs::canonicalize(profile_jre).await - .map_err(|e| LaunchError::ResolveJavaRuntime {what: "resolving jre path", error: e})?; - } else { - let Some(ref java_ver) = ver.java_version else { - warn!("Version {} does not specify java version information. You must select a runtime manually.", ver.id); - return Err(LaunchError::MissingJavaRuntime); - }; - - let runtime = self.java_runtimes.choose_runtime(java_ver.component.as_str()).await.map_err(LaunchError::JavaRuntimeRepo)?; - runtime_path = self.java_runtimes.ensure_jre(java_ver.component.as_str(), runtime).await.map_err(LaunchError::JavaRuntimeRepo)?; - } - - let Some(runtime_exe_path) = runner::find_java(runtime_path.as_path(), profile.is_legacy_launch()).await - .map_err(|e| LaunchError::ResolveJavaRuntime {what: "finding java executable", error: e})? else { - return Err(LaunchError::MissingJavaRuntime); - }; - - - debug!("Found runtime exe: {}", runtime_exe_path.display()); - - info!("Deriving launch arguments"); - let info = LaunchInfo { - launcher: self, - feature_matcher: &feature_matcher, - - asset_index_name: asset_idx_name.map(|s| s.to_owned()), - classpath, - virtual_assets_path: game_assets, - instance_home: inst_home.clone(), - natives_path: natives_dir, - client_jar: client_jar_path, - version_id: ver.id.to_string(), - version_type: ver.version_type.clone(), - asset_index: asset_idx - }; - - let Some(ref main_class) = ver.main_class else { - return Err(LaunchError::MissingMainClass); - }; - - // yuck - let jvm_args = profile.iter_arguments().map(OsString::from) - .chain(runner::build_arguments(&info, ver.as_ref(), ArgumentType::Jvm).drain(..)) - .chain(log_arg.iter().map(OsString::from)).collect(); - let game_args = runner::build_arguments(&info, ver.as_ref(), ArgumentType::Game); - - let diff = Instant::now().duration_since(start); - info!("Finished preparing launch for {} in {:.02} seconds!", ver.id, diff.as_secs_f32()); - - Ok(Launch { - jvm_args, - game_args, - main_class: main_class.to_string(), - instance_path: inst_home, - runtime_path: runtime_exe_path, - runtime_legacy_launch: profile.is_legacy_launch() - }) - } -} - -#[derive(Debug)] -enum LibraryError { - InvalidName(String), - IO { what: &'static str, error: io::Error } -} - -impl Display for LibraryError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - LibraryError::InvalidName(name) => write!(f, "invalid name: {name}"), - LibraryError::IO { what, error } => write!(f, "library i/o error ({what}): {error}"), - } - } -} - -impl Error for LibraryError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - LibraryError::IO { error, .. 
} => Some(error), - _ => None - } - } -} - -#[derive(Debug)] -struct LibraryExtractJob { - source: PathBuf, - rule: Option -} - -const ARCH_BITS: &str = formatcp!("{}", usize::BITS); - -impl LibraryRepository { - fn get_artifact_base_dir(name: &str) -> Option { - let end_of_gid = name.find(':')?; - - Some(name[..end_of_gid].split('.').chain(name.split(':').skip(1).take(2)).collect()) - } - - fn get_artifact_filename(name: &str, classifier: Option<&str>) -> Option { - let n: Vec<&str> = name.splitn(4, ':').skip(1).collect(); - - struct LibReplace; - impl SubFunc<'static> for LibReplace { - fn substitute(&self, key: &str) -> Option> { - match key { - "arch" => Some(Cow::Borrowed(ARCH_BITS)), - _ => None - } - } - } - - if let Some(classifier) = classifier { - match n.len() { - 2 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}-{}.jar", n[0], n[1], classifier).as_str(), &LibReplace).as_ref())), - 3 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}-{}-{}.jar", n[0], n[1], classifier, n[2]).as_str(), &LibReplace).as_ref())), - _ => None - } - } else { - match n.len() { - 2 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}.jar", n[0], n[1]).as_str(), &LibReplace).as_ref())), - 3 => Some(PathBuf::from(strsub::replace_string(format!("{}-{}-{}.jar", n[0], n[1], n[2]).as_str(), &LibReplace).as_ref())), - _ => None - } - } - } - - fn get_artifact_path(name: &str, classifier: Option<&str>) -> Option { - let mut p = Self::get_artifact_base_dir(name)?; - - p.push(Self::get_artifact_filename(name, classifier)?); - Some(p) - } - - fn create_download(&self, lib: &Library, classifier: Option<&str>) -> Option { - if let Some(ref url) = lib.url { - let path = Self::get_artifact_path(lib.name.as_str(), classifier)?; - let url = [url.as_str(), path.to_string_lossy().as_ref()].into_iter().collect::(); - Some(VerifiedDownload::new(url.as_ref(), self.home.join(path).as_path(), lib.size, lib.sha1)) // TODO: could download sha1 - } else if let Some(ref downloads) = lib.downloads { - let dlinfo = downloads.get_download_info(classifier)?; - // drinking game: take a shot once per heap allocation - let path = self.home.join(dlinfo.path.as_ref().map(PathBuf::from).or_else(|| Self::get_artifact_path(lib.name.as_str(), classifier))?); - - Some(VerifiedDownload::new(dlinfo.url.as_ref()?, path.as_path(), dlinfo.size, dlinfo.sha1)) - } else { - let path = Self::get_artifact_path(lib.name.as_str(), classifier)?; - let url = ["https://libraries.minecraft.net/", path.to_string_lossy().as_ref()].into_iter().collect::(); - Some(VerifiedDownload::new(url.as_ref(), self.home.join(path).as_path(), lib.size, lib.sha1)) // TODO: could download sha1 - } - } - - async fn clean_old_natives(&self) -> Result { - info!("Cleaning up old natives folders..."); - - let boot_time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() - min(System::uptime(), 7u64*24*60*60); - - let readdir = match fs::read_dir(&self.natives).await { - Ok(readdir) => readdir, - Err(e) if e.kind() == ErrorKind::NotFound => return Ok(0), - Err(e) => return Err(LaunchError::IO { what: "reading natives directory", error: e }) - }; - - ReadDirStream::new(readdir) - .map(|entry| Ok(async move { - let entry = entry.map_err(|e| LaunchError::IO { what: "reading natives entry", error: e })?; - let ftype = entry.file_type().await.map_err(|e| LaunchError::IO { what: "'stat'ing natives entry", error: e })?; - - if !ftype.is_dir() { return Ok(false); } - - let Some(ftime) = entry.file_name().to_str() - .and_then(|s| 
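// To make the coordinate handling above concrete: a library name such as
// "org.lwjgl:lwjgl:3.3.3" with classifier "natives-linux" (example values)
// resolves through get_artifact_base_dir/get_artifact_filename to
//
//     org/lwjgl/lwjgl/3.3.3/lwjgl-3.3.3-natives-linux.jar
//
// and, when the library carries neither a direct "url" nor explicit download
// info, that relative path is appended to https://libraries.minecraft.net/ to
// form the download URL. The "arch" placeholder some legacy native classifiers
// contain is substituted with ARCH_BITS ("32"/"64") by LibReplace.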
constants::NATIVES_DIR_PATTERN.captures(s)) - .and_then(|c| c.get(1)) - .and_then(|cap| cap.as_str().parse::().ok()) else { - return Ok(false); - }; - - if ftime < boot_time { - let path = entry.path(); - info!("Deleting old natives directory {}", path.display()); - - fs::remove_dir_all(&path).await.map_err(|e| LaunchError::IO { - what: "reading natives entry", - error: e - })?; - - return Ok(true); - } - - Ok(false) - })) - .try_buffer_unordered(32) - .try_fold(0usize, |accum, res| async move { - match res { - true => Ok(accum + 1), - _ => Ok(accum) - } - }).await - } - - async fn extract_natives(&self, libs: Vec) -> Result { - fs::create_dir_all(&self.natives).await.map_err(|e| LaunchError::IO { - what: "creating natives directory", - error: e - })?; - - let time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); - let natives_dir = self.natives.join(format!("{}{}-{}", constants::NATIVES_PREFIX, time, process::id())); - - // create_dir_all suppresses "AlreadyExists", but this is a fatal error here. - fs::create_dir(&natives_dir).await.map_err(|e| LaunchError::IO { - what: "creating natives directory", - error: e - })?; - - let (path_again, extracted) = tokio::task::spawn_blocking(move || { - let mut tally = 0usize; - - for job in libs { - debug!("Extracting natives for {}", job.source.display()); - tally += extract::extract_zip(&job.source, &natives_dir, |name| - job.rule.as_ref().is_none_or(|rules| - rules.exclude.iter().any(|ex| - name.starts_with(ex.as_str()))))?; - } - - Ok((natives_dir, tally)) - }).await.unwrap().map_err(LaunchError::LibraryExtractError)?; - - info!("Done extracting natives! Copied {} files.", extracted); - - Ok(path_again) - } -} - -impl SystemInfo { - fn new() -> SystemInfo { - let os = match OS { - "windows" => OperatingSystem::Windows, - "macos" => OperatingSystem::MacOS, - "linux" => OperatingSystem::Linux, - _ => OperatingSystem::Unknown // could probably consider "hurd" and "*bsd" to be linux... - }; - - let mut os_version = System::os_version().unwrap_or_default(); - if os == OperatingSystem::Windows && (os_version.starts_with("10") || os_version.starts_with("11")) { - os_version.replace_range(..2, "10.0"); // minecraft expects this funny business... - } - - let mut arch = ARCH.to_owned(); - if arch == "x86_64" { - // this nomenclature is preferred, since some versions expect the arch containing "x86" to mean 32-bit. 
- arch.replace_range(.., "amd64"); - } - - SystemInfo { - os, - os_version, - arch - } - } - - fn is_our_os(&self, os: OperatingSystem) -> bool { - if self.os == OperatingSystem::Unknown { - return false; - } - - self.os == os - } - - fn applies(&self, restriction: &OSRestriction) -> bool { - restriction.os.is_none_or(|os| self.is_our_os(os)) - && restriction.version.as_deref().is_none_or(|pat| pat.is_match(&self.os_version)) - && restriction.arch.as_deref().is_none_or(|pat| pat.is_match(&self.arch)) - } -} diff --git a/src/launcher/assets.rs b/src/launcher/assets.rs deleted file mode 100644 index 7c5dcf3..0000000 --- a/src/launcher/assets.rs +++ /dev/null @@ -1,322 +0,0 @@ -use std::error::Error; -use std::ffi::OsStr; -use std::fmt::{Display, Formatter}; -use std::io::ErrorKind; -use std::path::{Path, PathBuf}; -use std::path::Component::Normal; -use futures::{stream, TryStreamExt}; -use log::{debug, info, warn}; -use reqwest::Client; -use sha1_smol::Sha1; -use tokio::{fs, io}; -use tokio::fs::File; -use crate::assets::{Asset, AssetIndex}; -use crate::launcher::download::{MultiDownloader, VerifiedDownload}; -use crate::util; -use crate::util::{FileVerifyError, IntegrityError}; -use crate::version::DownloadInfo; - -const INDEX_PATH: &str = "indexes"; -const OBJECT_PATH: &str = "objects"; - -pub struct AssetRepository { - online: bool, - home: PathBuf -} - -#[derive(Debug)] -pub enum AssetError { - InvalidId(Option), - IO { what: &'static str, error: io::Error }, - IndexParse(serde_json::Error), - Offline, - MissingURL, - DownloadIndex(reqwest::Error), - Integrity(IntegrityError), - AssetObjectDownload, - AssetVerifyError(FileVerifyError), - AssetNameError(&'static str) -} - -impl Display for AssetError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - AssetError::InvalidId(None) => f.write_str("missing asset index id"), - AssetError::InvalidId(Some(id)) => write!(f, "invalid asset index id: {}", id), - AssetError::IO { what, error } => write!(f, "i/o error ({}): {}", what, error), - AssetError::IndexParse(error) => write!(f, "error parsing asset index: {}", error), - AssetError::Offline => f.write_str("cannot download asset index while offline"), - AssetError::MissingURL => f.write_str("missing asset index URL"), - AssetError::DownloadIndex(e) => write!(f, "error downloading asset index: {}", e), - AssetError::Integrity(e) => write!(f, "asset index integrity error: {}", e), - AssetError::AssetObjectDownload => f.write_str("asset object download failed"), - AssetError::AssetVerifyError(e) => write!(f, "error verifying asset object: {e}"), - AssetError::AssetNameError(e) => write!(f, "invalid asset name: {e}") - } - } -} - -impl Error for AssetError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - AssetError::IO { error, .. 
} => Some(error), - AssetError::IndexParse(error) => Some(error), - AssetError::DownloadIndex(error) => Some(error), - AssetError::Integrity(error) => Some(error), - AssetError::AssetVerifyError(error) => Some(error), - _ => None - } - } -} - -impl From<(&'static str, io::Error)> for AssetError { - fn from((what, error): (&'static str, io::Error)) -> Self { - AssetError::IO { what, error } - } -} - -impl AssetRepository { - pub async fn new(online: bool, home: impl AsRef) -> Result { - let home = home.as_ref().to_owned(); - - match fs::create_dir_all(&home).await { - Ok(_) => (), - Err(e) => match e.kind() { - ErrorKind::AlreadyExists => (), - _ => return Err(e) - } - }; - - Ok(AssetRepository { - online, - home - }) - } - - pub fn get_home(&self) -> &Path { - self.home.as_path() - } - - fn get_index_path(&self, id: &str) -> Result { - let mut indexes_path: PathBuf = [self.home.as_ref(), OsStr::new(INDEX_PATH)].iter().collect(); - let Some(Normal(path)) = Path::new(id).components().last() else { - return Err(AssetError::InvalidId(Some(id.into()))); - }; - - let path = path.to_str().ok_or(AssetError::InvalidId(Some(path.to_string_lossy().into())))?; - - // FIXME: change this once "add_extension" is stabilized - indexes_path.push(format!("{}.json", path)); - - Ok(indexes_path) - } - - pub async fn load_index(&self, index: &DownloadInfo, id: Option<&str>) -> Result { - let Some(id) = id else { - return Err(AssetError::InvalidId(None)); - }; - - info!("Loading asset index {}", id); - - let path = self.get_index_path(id)?; - debug!("Asset index {} is located at {}", id, path.display()); - - match util::verify_file(&path, index.size, index.sha1).await { - Ok(_) => { - debug!("Asset index {} verified on disk. Loading it.", id); - let idx_data = fs::read_to_string(&path).await.map_err(|e| AssetError::IO { - what: "reading asset index", - error: e - })?; - - return serde_json::from_str(&idx_data).map_err(AssetError::IndexParse); - }, - Err(FileVerifyError::Open(_, e)) => match e.kind() { - ErrorKind::NotFound => { - debug!("Asset index {} not found on disk. Must download it.", id); - }, - _ => return Err(("opening asset index", e).into()) - }, - Err(FileVerifyError::Integrity(_, e)) => { - info!("Asset index {} has mismatched integrity: {}, must download it.", id, e); - let _ = fs::remove_file(&path).await.map_err(|e| warn!("Error deleting modified index {}: {} (ignored)", id, e)); - }, - Err(FileVerifyError::Read(_, e)) => return Err(("reading asset index", e).into()) - } - - if !self.online { - warn!("Must download asset index {}, but the launcher is in offline mode. Please try again in online mode.", id); - return Err(AssetError::Offline); - } - - let Some(url) = index.url.as_ref() else { - return Err(AssetError::MissingURL); - }; - - debug!("Downloading asset index {} from {}", id, url); - - if let Some(parent) = path.parent() { - fs::create_dir_all(parent).await.map_err(|e| AssetError::IO { - what: "creating asset index folder", - error: e - })?; - } - - let idx_text = reqwest::get(url).await - .map_err(AssetError::DownloadIndex)? 
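// For context, an asset index parsed into AssetIndex is roughly shaped like the
// following (abbreviated; the size is an illustrative value and the hash is the
// one used by the unit test at the bottom of this file):
//
//     { "objects": {
//         "icons/icon_16x16.png": { "hash": "ad1115931887a73cd596300f2c93f84adf39521d", "size": 3665 }
//     } }
//
// Objects are content-addressed: each one is stored under
// objects/<first two hex chars of hash>/<hash> and fetched from
// URL_RESOURCE_BASE with the same two-character prefix, which is what
// get_object_path/get_object_url below compute.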
- .text().await - .map_err(AssetError::DownloadIndex)?; - - if index.size.is_some_and(|s| s != idx_text.len()) { - return Err(AssetError::Integrity(IntegrityError::SizeMismatch { - expect: index.size.unwrap(), - actual: idx_text.len() - })); - } - - if let Some(expect) = index.sha1 { - let actual = Sha1::from(&idx_text).digest(); - - if actual != expect { - return Err(AssetError::Integrity(IntegrityError::Sha1Mismatch { expect, actual })); - } - } - - debug!("Saving downloaded asset index to {}", path.display()); - fs::write(&path, &idx_text).await.map_err(|e| AssetError::IO { - what: "writing asset index", - error: e - })?; - - serde_json::from_str(&idx_text).map_err(AssetError::IndexParse) - } - - fn get_object_url(obj: &Asset) -> String { - format!("{}{:02x}/{}", super::constants::URL_RESOURCE_BASE, obj.hash.bytes()[0], obj.hash) - } - - pub fn get_object_path(&self, obj: &Asset) -> PathBuf { - let hex_digest = obj.hash.to_string(); - [self.home.as_ref(), OsStr::new(OBJECT_PATH), OsStr::new(&hex_digest[..2]), OsStr::new(&hex_digest)].iter().collect() - } - - async fn ensure_dir(path: impl AsRef) -> Result<(), io::Error> { - match fs::create_dir(path).await { - Ok(_) => Ok(()), - Err(e) if e.kind() == ErrorKind::AlreadyExists => Ok(()), - Err(e) => Err(e) - } - } - - pub async fn ensure_assets(&self, index: &AssetIndex) -> Result<(), AssetError> { - let mut downloads = Vec::new(); - let objects_path = [self.home.as_ref(), OsStr::new(OBJECT_PATH)].iter().collect::(); - - Self::ensure_dir(&objects_path).await.map_err(|e| AssetError::IO { - what: "creating objects directory", - error: e - })?; - - for object in index.objects.values() { - let path = self.get_object_path(object); - - Self::ensure_dir(path.parent().unwrap()).await.map_err(|error| AssetError::IO { error, what: "creating directory for object" })?; - - downloads.push(VerifiedDownload::new(&Self::get_object_url(object), &path, Some(object.size), Some(object.hash))); - } - - if self.online { - info!("Downloading {} asset objects...", downloads.len()); - let client = Client::new(); - MultiDownloader::with_concurrent(downloads.iter_mut(), 32).perform(&client).await - .inspect_err(|e| warn!("asset download failed: {e}")) - .try_fold((), |_, _| async {Ok(())}) - .await - .map_err(|_| AssetError::AssetObjectDownload)?; - } else { - info!("Verifying {} asset objects...", downloads.len()); - super::download::verify_files(downloads.iter_mut()).await.map_err(AssetError::AssetVerifyError)?; - } - - Ok(()) - } - - pub async fn reconstruct_assets(&self, index: &AssetIndex, instance_path: &Path, index_id: Option<&str>) -> Result, AssetError> { - let target_path: PathBuf; - let Some(index_id) = index_id else { - return Err(AssetError::InvalidId(None)); - }; - - if index.virtual_assets { - target_path = [self.home.as_ref(), OsStr::new("virtual"), OsStr::new(index_id)].iter().collect(); - } else if index.map_to_resources { - target_path = [instance_path, Path::new("resources")].iter().collect(); - } else { - info!("This asset index does not request a virtual assets folder. 
Nothing to be done."); - return Ok(None); - } - - info!("Reconstructing virtual assets for {}", index_id); - - fs::create_dir_all(&target_path).await.map_err(|e| AssetError::from(("creating virtual assets directory", e)))?; - - stream::iter(index.objects.values() - .map(|object| { - let obj_path = util::check_path(object.name.as_str()).map_err(AssetError::AssetNameError)?; - let obj_path = target_path.join(obj_path); - - Ok((object, obj_path)) - })) - .try_filter_map(|(object, obj_path)| async move { - match util::verify_file(&obj_path, Some(object.size), Some(object.hash)).await { - Ok(_) => { - debug!("Not copying asset {}, integrity matches.", object.name); - Ok(None) - } - Err(FileVerifyError::Open(_, e)) if e.kind() == ErrorKind::NotFound => { - debug!("Copying asset {}, file does not exist.", object.name); - Ok(Some((object, obj_path))) - }, - Err(FileVerifyError::Integrity(_, e)) => { - debug!("Copying asset {}: {}", object.name, e); - Ok(Some((object, obj_path))) - }, - Err(e) => { - debug!("Error while reconstructing assets: {e}"); - Err(AssetError::AssetVerifyError(e)) - } - } - }) - .try_for_each_concurrent(32, |(object, obj_path)| async move { - if let Some(parent) = obj_path.parent() { - fs::create_dir_all(parent).await - .inspect_err(|e| debug!("Error creating directory for asset object {}: {e}", object.name)) - .map_err(|e| AssetError::from(("creating asset object directory", e)))?; - } - - let mut fromfile = File::open(self.get_object_path(object)).await - .map_err(|e| AssetError::from(("opening source object", e)))?; - let mut tofile = File::create(&obj_path).await - .map_err(|e| AssetError::from(("creating target object", e)))?; - - io::copy(&mut fromfile, &mut tofile).await.map_err(|e| AssetError::from(("copying asset object", e)))?; - debug!("Copied object {} to {}.", object.name, obj_path.display()); - Ok(()) - }).await.map(|_| Some(target_path)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_it() { - let digest_str = "ad1115931887a73cd596300f2c93f84adf39521d"; - assert_eq!(AssetRepository::get_object_url(&Asset { - name: String::from("test"), - hash: digest_str.parse().unwrap(), - size: 0usize - }), "https://resources.download.minecraft.net/ad/ad1115931887a73cd596300f2c93f84adf39521d"); - } -} diff --git a/src/launcher/constants.rs b/src/launcher/constants.rs deleted file mode 100644 index 4506ab5..0000000 --- a/src/launcher/constants.rs +++ /dev/null @@ -1,18 +0,0 @@ -use lazy_static::lazy_static; -use regex::Regex; - -pub const URL_VERSION_MANIFEST: &str = "https://piston-meta.mojang.com/mc/game/version_manifest_v2.json"; -pub const URL_RESOURCE_BASE: &str = "https://resources.download.minecraft.net/"; -pub const URL_JRE_MANIFEST: &str = "https://piston-meta.mojang.com/v1/products/java-runtime/2ec0cc96c44e5a76b9c8b7c39df7210883d12871/all.json"; - -pub const NATIVES_PREFIX: &str = "natives-"; - -pub const DEF_INSTANCE_NAME: &str = "default"; -pub const DEF_PROFILE_NAME: &str = "default"; - -// https://github.com/unmojang/FjordLauncher/pull/14/files -// https://login.live.com/oauth20_authorize.srf?client_id=00000000402b5328&redirect_uri=ms-xal-00000000402b5328://auth&response_type=token&display=touch&scope=service::user.auth.xboxlive.com::MBI_SSL%20offline_access&prompt=select_account - -lazy_static! 
{ - pub static ref NATIVES_DIR_PATTERN: Regex = Regex::new("^natives-(\\d+)").unwrap(); -} diff --git a/src/launcher/download.rs b/src/launcher/download.rs deleted file mode 100644 index 132cd7f..0000000 --- a/src/launcher/download.rs +++ /dev/null @@ -1,267 +0,0 @@ -use std::error::Error; -use std::fmt::{Debug, Display, Formatter}; -use std::path::{Path, PathBuf}; -use futures::{stream, StreamExt, TryStream, TryStreamExt}; -use log::debug; -use reqwest::{Client, Method, RequestBuilder}; -use sha1_smol::{Digest, Sha1}; -use tokio::fs; -use tokio::fs::File; -use tokio::io::{self, AsyncWriteExt}; -use crate::util; -use crate::util::{FileVerifyError, IntegrityError, USER_AGENT}; - -pub trait Download: Debug + Display { - // return Ok(None) to skip downloading this file - async fn prepare(&mut self, client: &Client) -> Result, Box>; - async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box>; - async fn finish(&mut self) -> Result<(), Box>; -} - -pub trait FileDownload: Download { - fn get_path(&self) -> &Path; -} - -pub struct MultiDownloader<'j, T: Download + 'j, I: Iterator> { - jobs: I, - nconcurrent: usize -} - -#[derive(Debug, Clone, Copy)] -pub enum Phase { - Prepare, - Send, - Receive, - HandleChunk, - Finish -} - -impl Display for Phase { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - /* an error occurred while (present participle) ... */ - Self::Prepare => f.write_str("preparing the request"), - Self::Send => f.write_str("sending the request"), - Self::Receive => f.write_str("receiving response data"), - Self::HandleChunk => f.write_str("handling response data"), - Self::Finish => f.write_str("finishing the request"), - } - } -} - -pub struct PhaseDownloadError<'j, T: Download> { - phase: Phase, - inner: Box, - job: &'j T -} - -impl Debug for PhaseDownloadError<'_, T> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("PhaseDownloadError") - .field("phase", &self.phase) - .field("inner", &self.inner) - .field("job", &self.job) - .finish() - } -} - -impl Display for PhaseDownloadError<'_, T> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "error while {} ({}): {}", self.phase, self.job, self.inner) - } -} - -impl Error for PhaseDownloadError<'_, T> { - fn source(&self) -> Option<&(dyn Error + 'static)> { - Some(&*self.inner) - } -} - -impl<'j, T: Download> PhaseDownloadError<'j, T> { - fn new(phase: Phase, inner: Box, job: &'j T) -> Self { - PhaseDownloadError { - phase, inner, job - } - } -} - -impl<'j, T: Download + 'j, I: Iterator> MultiDownloader<'j, T, I> { - pub fn new(jobs: I) -> MultiDownloader<'j, T, I> { - Self::with_concurrent(jobs, 24) - } - - pub fn with_concurrent(jobs: I, n: usize) -> MultiDownloader<'j, T, I> { - assert!(n > 0); - - MultiDownloader { - jobs, - nconcurrent: n - } - } - - pub async fn perform(self, client: &'j Client) -> impl TryStream> { - stream::iter(self.jobs).map(move |job| Ok(async move { - macro_rules! 
map_err { - ($result:expr, $phase:expr, $job:expr) => { - match $result { - Ok(v) => v, - Err(e) => return Err(PhaseDownloadError::new($phase, e.into(), $job)) - } - } - } - - let Some(rq) = map_err!(job.prepare(client).await, Phase::Prepare, job) else { - return Ok(()) - }; - - let rq = rq.header(reqwest::header::USER_AGENT, USER_AGENT); - - let mut data = map_err!(map_err!(rq.send().await, Phase::Send, job).error_for_status(), Phase::Send, job).bytes_stream(); - - while let Some(bytes) = data.next().await { - let bytes = map_err!(bytes, Phase::Receive, job); - - map_err!(job.handle_chunk(bytes.as_ref()).await, Phase::HandleChunk, job); - } - - job.finish().await.map_err(|e| PhaseDownloadError::new(Phase::Finish, e, job))?; - - Ok(()) - })).try_buffer_unordered(self.nconcurrent) - } -} - -pub struct VerifiedDownload { - url: String, - expect_size: Option, - expect_sha1: Option, - - path: PathBuf, - file: Option, - sha1: Sha1, - tally: usize -} - -impl Debug for VerifiedDownload { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("VerifiedDownload") - .field("url", &self.url) - .field("expect_size", &self.expect_size) - .field("expect_sha1", &self.expect_sha1) - .field("path", &self.path).finish() - } -} - -impl Display for VerifiedDownload { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "downloading {} to {}", self.url, self.path.display()) - } -} - -impl VerifiedDownload { - pub fn new(url: &str, path: &Path, expect_size: Option, expect_sha1: Option) -> VerifiedDownload { - VerifiedDownload { - url: url.to_owned(), - path: path.to_owned(), - - expect_size, - expect_sha1, - - file: None, - sha1: Sha1::new(), - tally: 0 - } - } - - pub fn with_size(mut self, expect: usize) -> VerifiedDownload { - self.expect_size = Some(expect); - self - } - - pub fn with_sha1(mut self, expect: Digest) -> VerifiedDownload { - self.expect_sha1.replace(expect); - self - } - - pub fn get_url(&self) -> &str { - &self.url - } - - pub fn get_expect_size(&self) -> Option { - self.expect_size - } - - pub fn get_expect_sha1(&self) -> Option { - self.expect_sha1 - } - - pub async fn make_dirs(&self) -> Result<(), io::Error> { - fs::create_dir_all(self.path.parent().expect("download created with no containing directory (?)")).await - } - - async fn open_output(&mut self) -> Result<(), io::Error> { - self.file.replace(File::create(&self.path).await?); - Ok(()) - } -} - -impl Download for VerifiedDownload { - async fn prepare(&mut self, client: &Client) -> Result, Box> { - if !util::should_download(&self.path, self.expect_size, self.expect_sha1).await? { - return Ok(None) - } - - // potentially racy to close the file and reopen it... 
:/ - self.open_output().await?; - - Ok(Some(client.request(Method::GET, &self.url))) - } - - async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box> { - self.file.as_mut().unwrap().write_all(chunk).await?; - self.tally += chunk.len(); - self.sha1.update(chunk); - - Ok(()) - } - - async fn finish(&mut self) -> Result<(), Box> { - let digest = self.sha1.digest(); - - if let Some(d) = self.expect_sha1 { - if d != digest { - debug!("Could not download {}: sha1 mismatch (exp {}, got {}).", self.path.display(), d, digest); - return Err(IntegrityError::Sha1Mismatch { expect: d, actual: digest }.into()); - } - } else if let Some(s) = self.expect_size { - if s != self.tally { - debug!("Could not download {}: size mismatch (exp {}, got {}).", self.path.display(), s, self.tally); - return Err(IntegrityError::SizeMismatch { expect: s, actual: self.tally }.into()); - } - } - - debug!("Successfully downloaded {} ({} bytes).", self.path.display(), self.tally); - - // release the file descriptor (don't want to wait until it's dropped automatically because idk when that would be) - drop(self.file.take().unwrap()); - - Ok(()) - } -} - -impl FileDownload for VerifiedDownload { - fn get_path(&self) -> &Path { - &self.path - } -} - -pub async fn verify_files(files: impl Iterator) -> Result<(), FileVerifyError> { - stream::iter(files) - .map(|dl| Ok(async move { - debug!("Verifying library {}", dl.get_path().display()); - util::verify_file(dl.get_path(), dl.get_expect_size(), dl.get_expect_sha1()).await - })) - .try_buffer_unordered(32) - .try_fold((), |_, _| async {Ok(())}) - .await -} diff --git a/src/launcher/extract.rs b/src/launcher/extract.rs deleted file mode 100644 index 8c5f2b8..0000000 --- a/src/launcher/extract.rs +++ /dev/null @@ -1,136 +0,0 @@ -use std::error::Error; -use std::fmt::{Display, Formatter}; -use std::{fs, io, os}; -use std::fs::File; -use std::io::{BufReader, Error as IOError, Read}; -use std::path::{Path, PathBuf}; -use log::{debug, trace}; -use zip::result::ZipError; -use zip::ZipArchive; -use crate::util; - -#[derive(Debug)] -pub enum ZipExtractError { - IO { what: &'static str, error: IOError }, - Zip { what: &'static str, error: ZipError }, - InvalidEntry { why: &'static str, name: String } -} - -impl From<(&'static str, IOError)> for ZipExtractError { - fn from((what, error): (&'static str, IOError)) -> Self { - ZipExtractError::IO { what, error } - } -} - -impl From<(&'static str, ZipError)> for ZipExtractError { - fn from((what, error): (&'static str, ZipError)) -> Self { - ZipExtractError::Zip { what, error } - } -} - -impl Display for ZipExtractError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - ZipExtractError::IO { what, error } => write!(f, "i/o error ({what}): {error}"), - ZipExtractError::Zip { what, error } => write!(f, "zip error ({what}): {error}"), - ZipExtractError::InvalidEntry { why, name } => write!(f, "invalid entry in zip file ({why}): {name}") - } - } -} - -impl Error for ZipExtractError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - ZipExtractError::IO { error, .. } => Some(error), - ZipExtractError::Zip { error, .. 
} => Some(error), - _ => None - } - } -} - -fn check_entry_path(name: &str) -> Result<&Path, ZipExtractError> { - util::check_path(name).map_err(|e| ZipExtractError::InvalidEntry { - why: e, - name: name.to_owned() - }) -} - -#[cfg(unix)] -fn extract_symlink(path: impl AsRef, target: &str) -> io::Result<()> { - os::unix::fs::symlink(target, path) -} - -#[cfg(windows)] -fn extract_symlink(path: impl AsRef, target: &str) -> io::Result<()> { - os::windows::fs::symlink_file(target, path) -} - -#[cfg(not(any(unix, windows)))] -fn extract_symlink(path: impl AsRef, _target: &str) -> io::Result<()> { - warn!("Refusing to extract symbolic link to {}. I don't know how to do it on this platform!", path.as_ref().display()); - Ok(()) -} - -pub fn extract_zip(zip_path: impl AsRef, extract_root: impl AsRef, condition: F) -> Result -where - F: Fn(&str) -> bool -{ - debug!("Extracting zip file {} into {}", zip_path.as_ref().display(), extract_root.as_ref().display()); - - fs::create_dir_all(&extract_root).map_err(|e| ZipExtractError::from(("create extract root", e)))?; - - let mut extracted = 0usize; - - let file = File::open(&zip_path).map_err(|e| ZipExtractError::from(("extract zip file (open)", e)))?; - let read = BufReader::new(file); - - let mut archive = ZipArchive::new(read).map_err(|e| ZipExtractError::from(("read zip archive", e)))?; - - // create directories - for n in 0..archive.len() { - let entry = archive.by_index(n).map_err(|e| ZipExtractError::from(("read zip entry (1)", e)))?; - if !entry.is_dir() { continue; } - - let name = entry.name(); - if !condition(name) { - continue; - } - - let entry_path = check_entry_path(name)?; - let entry_path: PathBuf = [extract_root.as_ref(), entry_path].iter().collect(); - - trace!("Extracting directory {} from {}", entry.name(), zip_path.as_ref().display()); - fs::create_dir_all(entry_path).map_err(|e| ZipExtractError::from(("extract directory", e)))?; - } - - // extract the files - for n in 0..archive.len() { - let mut entry = archive.by_index(n).map_err(|e| ZipExtractError::from(("read zip entry (2)", e)))?; - let name = entry.name(); - - if entry.is_dir() { continue; } - - if !condition(name) { - continue; - } - - let entry_path = check_entry_path(name)?; - let entry_path: PathBuf = [extract_root.as_ref(), entry_path].iter().collect(); - - if entry.is_symlink() { - let mut target = String::new(); - entry.read_to_string(&mut target).map_err(|e| ZipExtractError::from(("read to symlink target", e)))?; - - trace!("Extracting symbolic link {} -> {} from {}", entry.name(), target, zip_path.as_ref().display()); - extract_symlink(entry_path.as_path(), target.as_str()).map_err(|e| ZipExtractError::from(("extract symlink", e)))?; - } else if entry.is_file() { - let mut outfile = File::create(&entry_path).map_err(|e| ZipExtractError::from(("extract zip entry (open)", e)))?; - - trace!("Extracting file {} from {}", entry.name(), zip_path.as_ref().display()); - io::copy(&mut entry, &mut outfile).map_err(|e| ZipExtractError::from(("extract zip entry (write)", e)))?; - extracted += 1; - } - } - - Ok(extracted) -} diff --git a/src/launcher/jre.rs b/src/launcher/jre.rs deleted file mode 100644 index 31034b5..0000000 --- a/src/launcher/jre.rs +++ /dev/null @@ -1,330 +0,0 @@ -use std::error::Error; -use std::fmt::{Debug, Display, Formatter}; -use std::path::{Component, Path, PathBuf}; -use std::sync::Arc; -use futures::{stream, StreamExt, TryStreamExt}; -use log::{debug, info, warn}; -use reqwest::Client; -use tokio::{fs, io, io::ErrorKind}; - -mod arch; -mod manifest; 
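// Submodule roles: `arch` picks the Mojang runtime architecture string (JRE_ARCH) for the
// current build target, `manifest` holds the serde models for the runtime manifests, and
// `download` implements the LZMA-aware download jobs used to fetch individual runtime files.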
-mod download; - -use arch::JRE_ARCH; -use manifest::JavaRuntimesManifest; -use manifest::JavaRuntimeManifest; -use crate::launcher::download::MultiDownloader; -use crate::launcher::jre::download::{LzmaDownloadError, LzmaDownloadJob}; -use crate::launcher::jre::manifest::JavaRuntimeFile; -use crate::util; -use crate::util::{EnsureFileError, IntegrityError}; -use crate::version::DownloadInfo; -use super::constants; - -pub struct JavaRuntimeRepository { - online: bool, - home: PathBuf, - manifest: JavaRuntimesManifest -} - -impl JavaRuntimeRepository { - pub async fn new(home: impl AsRef, online: bool) -> Result { - info!("Java runtime architecture is \"{}\".", JRE_ARCH); - - fs::create_dir_all(&home).await.map_err(|e| JavaRuntimeError::IO { what: "creating home directory", error: e })?; - - let manifest_path = home.as_ref().join("manifest.json"); - match util::ensure_file(manifest_path.as_path(), Some(constants::URL_JRE_MANIFEST), None, None, online, true).await { - Ok(_) => (), - Err(EnsureFileError::Offline) => { - info!("Launcher is offline, cannot download runtime manifest."); - }, - Err(e) => return Err(JavaRuntimeError::EnsureFile(e)) - }; - - let manifest_file = fs::read_to_string(&manifest_path).await - .map_err(|e| JavaRuntimeError::IO { what: "reading runtimes manifest", error: e })?; - - Ok(JavaRuntimeRepository { - online, - home: home.as_ref().to_path_buf(), - manifest: serde_json::from_str(&manifest_file).map_err(|e| JavaRuntimeError::Deserialize { what: "runtimes manifest", error: e })?, - }) - } - - fn get_component_dir(&self, component: &str) -> PathBuf { - [self.home.as_path(), Path::new(JRE_ARCH), Path::new(component)].into_iter().collect() - } - - async fn load_runtime_manifest(&self, component: &str, info: &DownloadInfo) -> Result { - let comp_dir = self.get_component_dir(component); - let manifest_path = comp_dir.join("manifest.json"); - - debug!("Ensuring manifest for runtime {JRE_ARCH}.{component}"); - - fs::create_dir_all(comp_dir.as_path()).await - .inspect_err(|e| warn!("Failed to create directory for JRE component {}: {}", component, e)) - .map_err(|e| JavaRuntimeError::IO { what: "creating component directory", error: e })?; - - util::ensure_file(&manifest_path, info.url.as_deref(), info.size, info.sha1, self.online, false).await - .map_err(JavaRuntimeError::EnsureFile)?; - - let manifest_file = fs::read_to_string(&manifest_path).await - .map_err(|e| JavaRuntimeError::IO { what: "reading runtimes manifest", error: e })?; - - serde_json::from_str(&manifest_file).map_err(|e| JavaRuntimeError::Deserialize { what: "runtime manifest", error: e }) - } - - // not very descriptive function name - pub async fn choose_runtime(&self, component: &str) -> Result { - let Some(runtime_components) = self.manifest.get(JRE_ARCH) else { - return Err(JavaRuntimeError::UnsupportedArch(JRE_ARCH)); - }; - - let Some(runtime_component) = runtime_components.get(component) else { - return Err(JavaRuntimeError::UnsupportedComponent { arch: JRE_ARCH, component: component.to_owned() }); - }; - - let Some(runtime) = runtime_component.iter().find(|r| r.availability.progress == 100) else { - if !runtime_components.is_empty() { - warn!("Weird: the only java runtimes in {JRE_ARCH}.{component} has a progress of less than 100!"); - } - - return Err(JavaRuntimeError::UnsupportedComponent { arch: JRE_ARCH, component: component.to_owned() }); - }; - - self.load_runtime_manifest(component, &runtime.manifest).await - } - - fn clean_up_runtime_sync(path: &Path, manifest: Arc) -> Result<(), io::Error> { 
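// Walks the runtime directory contents-first so directories are visited after their
// children, rewrites each relative path with forward slashes to match the manifest keys,
// and deletes any entry that is extraneous or whose type does not line up with what the
// manifest declares for that path.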
- for entry in walkdir::WalkDir::new(path).contents_first(true) { - let entry = entry?; - let rel_path = entry.path().strip_prefix(path).expect("walkdir escaped root (???)"); - - if !rel_path.components().any(|c| !matches!(&c, Component::CurDir)) { - // if this path is trivial (points at the root), ignore it - continue; - } - - let rel_path_str = if std::path::MAIN_SEPARATOR != '/' { - rel_path.to_str().map(|s| s.replace(std::path::MAIN_SEPARATOR, "/")) - } else { - rel_path.to_str().map(String::from) - }; - - if !rel_path_str.as_ref().is_some_and(|s| manifest.files.get(s) - .is_some_and(|f| (f.is_file() == entry.file_type().is_file()) - || (f.is_directory() == entry.file_type().is_dir()) - || (f.is_link() == entry.file_type().is_symlink()))) { - // path is invalid utf-8, extraneous, or of the wrong type - debug!("File {} is extraneous or of wrong type ({:?}). Deleting it.", entry.path().display(), entry.file_type()); - - if entry.file_type().is_dir() { - std::fs::remove_dir(entry.path())?; - } else { - std::fs::remove_file(entry.path())?; - } - } - } - - Ok(()) - } - - async fn clean_up_runtime(path: &Path, manifest: Arc) -> Result<(), io::Error> { - let (tx, rx) = tokio::sync::oneshot::channel(); - - let path = path.to_owned(); - let manifest = manifest.clone(); - - tokio::task::spawn_blocking(move || { - let res = Self::clean_up_runtime_sync(&path, manifest); - let _ = tx.send(res); - }).await.expect("clean_up_runtime_sync panicked"); - - rx.await.expect("clean_up_runtime_sync hung up") - } - - async fn ensure_jre_dirs(&self, path: &Path, manifest: &JavaRuntimeManifest) -> Result<(), JavaRuntimeError> { - stream::iter(manifest.files.iter().filter(|(_, f)| f.is_directory())) - .map::, _>(|(name, _)| Ok(name)) - .try_for_each(|name| async move { - let ent_path = util::check_path(name).map_err(JavaRuntimeError::MalformedManifest)?; - let ent_path = [path, ent_path].into_iter().collect::(); - - match fs::metadata(&ent_path).await { - Ok(meta) => { - if !meta.is_dir() { - debug!("Deleting misplaced file at {}", ent_path.display()); - fs::remove_file(&ent_path).await.map_err(|e| JavaRuntimeError::IO { - what: "deleting misplaced file", - error: e - })?; - } - }, - Err(e) if e.kind() == ErrorKind::NotFound => (), - Err(e) => return Err(JavaRuntimeError::IO { what: "'stat'ing directory", error: e }) - } - - match fs::create_dir(&ent_path).await { - Ok(_) => { - debug!("Created directory at {}", ent_path.display()); - Ok(()) - }, - Err(e) if e.kind() == ErrorKind::AlreadyExists => Ok(()), - Err(e) => { - warn!("Could not create directory {} for JRE!", ent_path.display()); - Err(JavaRuntimeError::IO { what: "creating directory", error: e }) - } - } - }).await - } - - async fn ensure_jre_files(path: &Path, manifest: &JavaRuntimeManifest) -> Result<(), JavaRuntimeError> { - let mut downloads = Vec::new(); - for (name, file) in manifest.files.iter().filter(|(_, f)| f.is_file()) { - let file_path = util::check_path(name).map_err(JavaRuntimeError::MalformedManifest)?; - let file_path = [path, file_path].into_iter().collect::(); - - downloads.push(LzmaDownloadJob::try_from((file, file_path)).map_err(|e| { - match e { - LzmaDownloadError::MissingURL => JavaRuntimeError::MalformedManifest("runtime manifest missing URL"), - LzmaDownloadError::NotAFile => unreachable!("we just made sure this was a file") - } - })?); - } - - let dl = MultiDownloader::new(downloads.iter_mut()); - let client = Client::new(); - - dl.perform(&client).await - .inspect_err(|e| warn!("jre file download failed: {e}")) - 
.try_fold((), |_, _| async { Ok(()) }) - .await - .map_err(|_| JavaRuntimeError::MultiDownloadError) - } - - async fn ensure_links(root_path: &Path, manifest: &JavaRuntimeManifest) -> Result<(), JavaRuntimeError> { - stream::iter(manifest.files.iter().filter(|(_, f)| f.is_link())) - .map::, _>(|(name, file)| Ok(async move { - let JavaRuntimeFile::Link { target } = file else { - unreachable!(); - }; - - let target_exp = PathBuf::from(target); - - let path = util::check_path(name.as_str()).map_err(JavaRuntimeError::MalformedManifest)?; - let link_path = [root_path, path].into_iter().collect::(); - - match fs::read_link(&link_path).await { - Ok(target_path) => { - if target_path == target_exp { - debug!("Symbolic link at {} matches! Nothing to be done.", link_path.display()); - return Ok(()) - } - - debug!("Symbolic link at {} does not match (exp {}, got {}). Recreating it.", link_path.display(), target_exp.display(), target_path.display()); - fs::remove_file(&link_path).await.map_err(|e| JavaRuntimeError::IO { - what: "deleting bad symlink", - error: e - })?; - } - Err(e) if e.kind() == ErrorKind::NotFound => (), - Err(e) => return Err(JavaRuntimeError::IO { what: "reading jre symlink", error: e }) - } - - debug!("Creating symbolic link at {} to {}", link_path.display(), target_exp.display()); - - let symlink; - #[cfg(unix)] - { - symlink = |targ, path| async { fs::symlink(targ, path).await }; - } - - #[cfg(windows)] - { - symlink = |targ, path| async { fs::symlink_file(targ, path).await }; - } - - #[cfg(not(any(unix, windows)))] - { - symlink = |_, _| async { Ok(()) }; - } - - symlink(target_exp, link_path).await.map_err(|e| JavaRuntimeError::IO { - what: "creating symlink", - error: e - })?; - - Ok(()) - })) - .try_buffer_unordered(32) - .try_fold((), |_, _| async { Ok(()) }).await - } - - pub async fn ensure_jre(&self, component: &str, manifest: JavaRuntimeManifest) -> Result { - let runtime_path = self.get_component_dir(component); - let runtime_path = runtime_path.join("runtime"); - let manifest = Arc::new(manifest); - - fs::create_dir_all(&runtime_path).await - .map_err(|e| JavaRuntimeError::IO { what: "creating runtime directory", error: e })?; - - debug!("Cleaning up JRE directory for {component}"); - Self::clean_up_runtime(runtime_path.as_path(), manifest.clone()).await - .map_err(|e| JavaRuntimeError::IO { what: "cleaning up runtime directory", error: e })?; - - debug!("Building directory structure for {component}"); - self.ensure_jre_dirs(&runtime_path, manifest.as_ref()).await?; - - debug!("Downloading JRE files for {component}"); - Self::ensure_jre_files(&runtime_path, manifest.as_ref()).await?; - - debug!("Ensuring symbolic links for {component}"); - Self::ensure_links(&runtime_path, manifest.as_ref()).await?; - - Ok(runtime_path) - } -} - -#[derive(Debug)] -pub enum JavaRuntimeError { - EnsureFile(EnsureFileError), - IO { what: &'static str, error: io::Error }, - Download { what: &'static str, error: reqwest::Error }, - Deserialize { what: &'static str, error: serde_json::Error }, - UnsupportedArch(&'static str), - UnsupportedComponent { arch: &'static str, component: String }, - MalformedManifest(&'static str), - Integrity(IntegrityError), - MultiDownloadError -} - -impl Display for JavaRuntimeError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - JavaRuntimeError::EnsureFile(e) => std::fmt::Display::fmt(e, f), - JavaRuntimeError::IO { what, error } => write!(f, "i/o error ({}): {}", what, error), - JavaRuntimeError::Download { what, error } => 
write!(f, "error downloading {}: {}", what, error), - JavaRuntimeError::Deserialize { what, error } => write!(f, "error deserializing ({what}): {error}"), - JavaRuntimeError::UnsupportedArch(arch) => write!(f, r#"unsupported architecture "{arch}""#), - JavaRuntimeError::UnsupportedComponent { arch, component } => write!(f, r#"unsupported component "{component}" for architecture "{arch}""#), - JavaRuntimeError::MalformedManifest(what) => write!(f, "malformed runtime manifest: {what} (launcher bug?)"), - JavaRuntimeError::Integrity(e) => std::fmt::Display::fmt(e, f), - JavaRuntimeError::MultiDownloadError => f.write_str("error in multi downloader (see logs for more details)") - } - } -} - -impl Error for JavaRuntimeError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - JavaRuntimeError::EnsureFile(error) => Some(error), - JavaRuntimeError::IO { error, .. } => Some(error), - JavaRuntimeError::Download { error, .. } => Some(error), - JavaRuntimeError::Deserialize { error, .. } => Some(error), - JavaRuntimeError::Integrity(error) => Some(error), - _ => None - } - } -} diff --git a/src/launcher/jre/arch.rs b/src/launcher/jre/arch.rs deleted file mode 100644 index e984171..0000000 --- a/src/launcher/jre/arch.rs +++ /dev/null @@ -1,45 +0,0 @@ -use cfg_if::cfg_if; - -macro_rules! define_arch { - ($arch:expr) => { - pub const JRE_ARCH: &str = $arch; - } -} - -cfg_if! { - if #[cfg(target_os = "windows")] { - cfg_if! { - if #[cfg(target_arch = "x86_64")] { - define_arch!("windows-x64"); - } else if #[cfg(target_arch = "x86")] { - define_arch!("windows-x86"); - } else if #[cfg(target_arch = "aarch64")] { - define_arch!("windows-arm64"); - } else { - define_arch!("gamecore"); - } - } - } else if #[cfg(target_os = "linux")] { - cfg_if! { - if #[cfg(target_arch = "x86_64")] { - define_arch!("linux"); - } else if #[cfg(target_arch = "x86")] { - define_arch!("linux-i386"); - } else { - define_arch!("gamecore"); - } - } - } else if #[cfg(target_os = "macos")] { - cfg_if! 
{ - if #[cfg(target_arch = "aarch64")] { - define_arch!("mac-os-arm64"); - } else if #[cfg(target_arch = "x86_64")] { - define_arch!("mac-os"); - } else { - define_arch!("gamecore"); - } - } - } else { - define_arch!("gamecore"); - } -} diff --git a/src/launcher/jre/download.rs b/src/launcher/jre/download.rs deleted file mode 100644 index ddf1ff6..0000000 --- a/src/launcher/jre/download.rs +++ /dev/null @@ -1,195 +0,0 @@ -use std::error::Error; -use std::fmt::{Debug, Display, Formatter}; -use std::io::Write; -use std::path::{PathBuf}; -use log::debug; -use lzma_rs::decompress; -use reqwest::{Client, RequestBuilder}; -use sha1_smol::{Digest, Sha1}; -use tokio::io::AsyncWriteExt; -use tokio::fs::File; -use crate::launcher::download::Download; -use crate::launcher::jre::manifest::JavaRuntimeFile; -use crate::util; -use crate::util::IntegrityError; -use crate::version::DownloadInfo; - -pub enum LzmaDownloadError { - NotAFile, - MissingURL -} - -pub struct LzmaDownloadJob { - url: String, - path: PathBuf, - inflate: bool, - executable: bool, - - raw_size: Option, - raw_sha1: Option, - - raw_sha1_st: Sha1, - raw_tally: usize, - - stream: Option>>, - out_file: Option -} - -impl LzmaDownloadJob { - fn new_inflate(raw: &DownloadInfo, lzma: &DownloadInfo, exe: bool, path: PathBuf) -> Result { - Ok(LzmaDownloadJob { - url: lzma.url.as_ref().map_or_else(|| Err(LzmaDownloadError::MissingURL), |u| Ok(u.to_owned()))?, - path, - inflate: true, - executable: exe, - - raw_size: raw.size, - raw_sha1: raw.sha1, - - raw_sha1_st: Sha1::new(), - raw_tally: 0, - - stream: Some(decompress::Stream::new(Vec::new())), - out_file: None - }) - } - - fn new_raw(raw: &DownloadInfo, exe: bool, path: PathBuf) -> Result { - Ok(LzmaDownloadJob { - url: raw.url.as_ref().map_or_else(|| Err(LzmaDownloadError::MissingURL), |u| Ok(u.to_owned()))?, - path, - inflate: false, - executable: exe, - - raw_size: raw.size, - raw_sha1: raw.sha1, - - raw_sha1_st: Sha1::new(), - raw_tally: 0, - - stream: None, - out_file: None - }) - } -} - -impl TryFrom<(&JavaRuntimeFile, PathBuf)> for LzmaDownloadJob { - type Error = LzmaDownloadError; - - fn try_from((file, path): (&JavaRuntimeFile, PathBuf)) -> Result { - if !file.is_file() { - return Err(LzmaDownloadError::NotAFile); - } - - let JavaRuntimeFile::File { executable, downloads } = file else { - unreachable!("we just made sure this was a file"); - }; - - match downloads.lzma.as_ref() { - Some(lzma) => LzmaDownloadJob::new_inflate(&downloads.raw, lzma, *executable, path), - None => LzmaDownloadJob::new_raw(&downloads.raw, *executable, path) - } - } -} - -impl Debug for LzmaDownloadJob { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("LzmaDownloadJob") - .field("url", &self.url) - .field("path", &self.path) - .field("inflate", &self.inflate) - .finish() - } -} - -impl Display for LzmaDownloadJob { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - if self.inflate { - write!(f, "download and inflate {} to {}", &self.url, self.path.display()) - } else { - write!(f, "download {} to {}", &self.url, self.path.display()) - } - } -} - -impl Download for LzmaDownloadJob { - async fn prepare(&mut self, client: &Client) -> Result, Box> { - if !util::should_download(&self.path, self.raw_size, self.raw_sha1).await? 
{ - return Ok(None) - } - - let mut options = File::options(); - - #[cfg(unix)] - { - options.mode(match self.executable { - true => 0o775, - _ => 0o664 - }); - } - - let file = options.create(true).write(true).truncate(true).open(&self.path).await?; - self.out_file = Some(file); - - Ok(Some(client.get(&self.url))) - } - - async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box> { - let out_file = self.out_file.as_mut().expect("output file gone"); - - if let Some(ref mut stream) = self.stream { - stream.write_all(chunk)?; - let buf = stream.get_output_mut().expect("stream output missing before finish()"); - - out_file.write_all(buf.as_slice()).await?; - - self.raw_sha1_st.update(buf.as_slice()); - self.raw_tally += buf.len(); - - buf.truncate(0); - } else { - out_file.write_all(chunk).await?; - - self.raw_sha1_st.update(chunk); - self.raw_tally += chunk.len(); - } - - Ok(()) - } - - async fn finish(&mut self) -> Result<(), Box> { - let mut out_file = self.out_file.take().expect("output file gone"); - - if let Some(stream) = self.stream.take() { - let buf = stream.finish()?; - - out_file.write_all(buf.as_slice()).await?; - - self.raw_sha1_st.update(buf.as_slice()); - self.raw_tally += buf.len(); - } - - let inf_digest = self.raw_sha1_st.digest(); - if let Some(sha1) = self.raw_sha1 { - if inf_digest != sha1 { - debug!("Could not download {}: sha1 mismatch (exp {}, got {}).", self.path.display(), sha1, inf_digest); - return Err(IntegrityError::Sha1Mismatch { - expect: sha1, - actual: inf_digest - }.into()); - } - } - - if let Some(size) = self.raw_size { - if self.raw_tally != size { - debug!("Could not download {}: size mismatch (exp {}, got {}).", self.path.display(), size, self.raw_tally); - return Err(IntegrityError::SizeMismatch { - expect: size, - actual: self.raw_tally - }.into()); - } - } - - Ok(()) - } -} diff --git a/src/launcher/jre/manifest.rs b/src/launcher/jre/manifest.rs deleted file mode 100644 index 3fd6484..0000000 --- a/src/launcher/jre/manifest.rs +++ /dev/null @@ -1,65 +0,0 @@ -use std::collections::HashMap; -use indexmap::IndexMap; -use serde::Deserialize; -use crate::version::DownloadInfo; - -#[derive(Debug, Deserialize)] -pub struct Availability { - pub group: u32, // unknown meaning - pub progress: u32 // unknown meaning -} - -#[derive(Debug, Deserialize)] -pub struct Version { - pub name: String, - pub version: String -} - -#[derive(Debug, Deserialize)] -pub struct JavaRuntimeInfo { - // I don't see how half of this information is useful with how the JRE system currently functions -figboot - pub availability: Availability, - pub manifest: DownloadInfo, - //pub version: Version -} - -pub type JavaRuntimesManifest = HashMap>>; - -#[derive(Debug, Deserialize)] -pub struct FileDownloads { - pub lzma: Option, - pub raw: DownloadInfo -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "lowercase", tag = "type")] -pub enum JavaRuntimeFile { - File { - #[serde(default)] - executable: bool, - downloads: Box - }, - Directory, - Link { - target: String - } -} - -impl JavaRuntimeFile { - pub fn is_file(&self) -> bool { - matches!(*self, JavaRuntimeFile::File { .. }) - } - - pub fn is_directory(&self) -> bool { - matches!(*self, JavaRuntimeFile::Directory) - } - - pub fn is_link(&self) -> bool { - matches!(*self, JavaRuntimeFile::Link { .. 
}) - } -} - -#[derive(Debug, Deserialize)] -pub struct JavaRuntimeManifest { - pub files: IndexMap -} diff --git a/src/launcher/rules.rs b/src/launcher/rules.rs deleted file mode 100644 index 29a36d1..0000000 --- a/src/launcher/rules.rs +++ /dev/null @@ -1,114 +0,0 @@ -use std::error::Error; -use std::fmt::Display; -use crate::version::{Argument, CompatibilityRule, CompleteVersion, FeatureMatcher, Library, OSRestriction, RuleAction}; -use super::SystemInfo; - -#[derive(Debug)] -pub struct IncompatibleError { - what: &'static str, - reason: Option -} - -impl Display for IncompatibleError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if let Some(reason) = self.reason.as_ref() { - write!(f, "{} incompatible: {}", self.what, reason) - } else { - write!(f, "{} incompatible", self.what) - } - } -} - -impl Error for IncompatibleError {} - -mod seal { - pub trait CompatCheckInner { - const WHAT: &'static str; - - fn get_rules(&self) -> Option>; - fn get_incompatibility_reason(&self) -> Option<&str>; - } -} - -pub trait CompatCheck: seal::CompatCheckInner { - fn rules_apply(&self, system: &SystemInfo, feature_matcher: &impl FeatureMatcher) -> Result<(), IncompatibleError> { - let Some(rules) = self.get_rules() else { return Ok(()) }; - let mut action = RuleAction::Disallow; - - fn match_os(os: &OSRestriction, system: &SystemInfo) -> bool { - os.os.is_none_or(|o| system.is_our_os(o)) - && os.version.as_ref().is_none_or(|v| v.is_match(system.os_version.as_str())) - && os.arch.as_ref().is_none_or(|a| a.is_match(system.arch.as_str())) - } - - for rule in rules { - if rule.os.as_ref().is_none_or(|o| match_os(o, system)) - && rule.features_match(feature_matcher) { - action = rule.action; - } - } - - if action == RuleAction::Disallow { - Err(IncompatibleError { - what: Self::WHAT, - reason: self.get_incompatibility_reason().map(|s| s.to_owned()) - }) - } else { - Ok(()) - } - } -} - -// trivial -impl seal::CompatCheckInner for CompatibilityRule { - const WHAT: &'static str = "rule"; - - fn get_rules(&self) -> Option> { - Some(Some(self)) - } - - fn get_incompatibility_reason(&self) -> Option<&str> { - None - } -} - -impl seal::CompatCheckInner for CompleteVersion { - const WHAT: &'static str = "version"; - - fn get_rules(&self) -> Option> { - self.compatibility_rules.as_ref() - } - - fn get_incompatibility_reason(&self) -> Option<&str> { - self.incompatibility_reason.as_deref() - } -} - -impl seal::CompatCheckInner for Library { - const WHAT: &'static str = "library"; - - fn get_rules(&self) -> Option> { - self.rules.as_ref() - } - - fn get_incompatibility_reason(&self) -> Option<&str> { - None - } -} - -impl seal::CompatCheckInner for Argument { - const WHAT: &'static str = "argument"; - - fn get_rules(&self) -> Option> { - self.rules.as_ref() - } - - fn get_incompatibility_reason(&self) -> Option<&str> { - None - } -} - -impl CompatCheck for CompatibilityRule {} -impl CompatCheck for CompleteVersion {} -impl CompatCheck for Library {} -impl CompatCheck for Argument {} \ No newline at end of file diff --git a/src/launcher/runner.rs b/src/launcher/runner.rs deleted file mode 100644 index afdfc7f..0000000 --- a/src/launcher/runner.rs +++ /dev/null @@ -1,222 +0,0 @@ -use std::borrow::Cow; -use std::ffi::{OsStr, OsString}; -use std::iter; -use std::path::{Path, PathBuf}; -use std::process::Command; -use log::{debug, warn}; -use tokio::{fs, io}; -use crate::util::AsJavaPath; -use crate::version::{CompleteVersion, FeatureMatcher, OperatingSystem}; -use super::rules::CompatCheck; 
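// CompatCheck::rules_apply (rules.rs above) starts from RuleAction::Disallow and lets every
// rule whose OS restriction and feature predicates match overwrite the action, so the last
// matching rule wins; a final Disallow is reported as an IncompatibleError. A hedged usage
// sketch, assuming some `system: SystemInfo` and `features: impl FeatureMatcher` are in scope:
//
//     if library.rules_apply(&system, &features).is_err() {
//         // skip this library on the current platform
//     }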
-use super::strsub::{self, SubFunc}; -use super::{Launch, LaunchInfo}; - -#[derive(Clone, Copy)] -struct LaunchArgSub<'a, 'l, F: FeatureMatcher>(&'a LaunchInfo<'l, F>); - -// FIXME: this is not correct -#[cfg(windows)] -const PATH_SEP: &str = ";"; - -#[cfg(not(windows))] -const PATH_SEP: &str = ":"; - -impl<'rep, F: FeatureMatcher> SubFunc<'rep> for LaunchArgSub<'rep, '_, F> { - fn substitute(&self, key: &str) -> Option> { - match key { - "assets_index_name" => self.0.asset_index_name.as_ref().map(|s| Cow::Borrowed(s.as_str())), - "assets_root" => Some(self.0.launcher.assets.get_home().as_java_path().to_string_lossy()), - "auth_access_token" => Some(Cow::Borrowed("-")), // TODO - "auth_player_name" => Some(Cow::Borrowed("Player")), // TODO - "auth_session" => Some(Cow::Borrowed("-")), // TODO - "auth_uuid" => Some(Cow::Borrowed("00000000-0000-0000-0000-000000000000")), // TODO - "auth_xuid" => Some(Cow::Borrowed("00000000-0000-0000-0000-000000000000")), // TODO - "classpath" => Some(Cow::Borrowed(self.0.classpath.as_str())), - "classpath_separator" => Some(Cow::Borrowed(PATH_SEP)), - "game_assets" => self.0.virtual_assets_path.as_ref() - .map(|s| s.as_path().as_java_path().to_string_lossy()), - "game_directory" => Some(self.0.instance_home.as_java_path().to_string_lossy()), - "language" => Some(Cow::Borrowed("en-us")), // ??? - "launcher_name" => Some(Cow::Borrowed("ozone (olauncher 3)")), // TODO - "launcher_version" => Some(Cow::Borrowed("yeah")), // TODO - "library_directory" => Some(self.0.launcher.libraries.home.as_java_path().to_string_lossy()), - "natives_directory" => Some(self.0.natives_path.as_java_path().to_string_lossy()), - "primary_jar" => self.0.client_jar.as_ref().map(|p| p.as_path().as_java_path().to_string_lossy()), - "quickPlayMultiplayer" => None, // TODO - "quickPlayPath" => None, // TODO - "quickPlayRealms" => None, // TODO - "quickPlaySingleplayer" => None, // TODO - "resolution_height" => None, // TODO - "resolution_width" => None, // TODO - "user_properties" => Some(Cow::Borrowed("{}")), // TODO - "user_property_map" => Some(Cow::Borrowed("[]")), // TODO - "user_type" => Some(Cow::Borrowed("legacy")), // TODO - "version_name" => Some(Cow::Borrowed(self.0.version_id.as_ref())), - "version_type" => self.0.version_type.as_ref().map(|s| Cow::Borrowed(s.to_str())), - _ => { - if let Some(asset_key) = key.strip_prefix("asset=") { - return self.0.asset_index.as_ref().and_then(|idx| idx.objects.get(asset_key)) - .map(|obj| Cow::Owned(self.0.launcher.assets.get_object_path(obj).as_java_path().to_string_lossy().into_owned())) - } - - None - } - } - } -} - -#[derive(Clone, Copy)] -pub enum ArgumentType { - Jvm, - Game -} - -pub fn build_arguments(launch: &LaunchInfo<'_, F>, version: &CompleteVersion, arg_type: ArgumentType) -> Vec { - let sub = LaunchArgSub(launch); - let system_info = &launch.launcher.system_info; - - if let Some(arguments) = version.arguments.as_ref().and_then(|args| match arg_type { - ArgumentType::Jvm => args.jvm.as_ref(), - ArgumentType::Game => args.game.as_ref() - }) { - arguments.iter() - .filter(|wa| wa.rules_apply(system_info, launch.feature_matcher).is_ok()) - .flat_map(|wa| &wa.value) - .map(|s| OsString::from(strsub::replace_string(s, &sub).into_owned())).collect() - } else if let Some(arguments) = version.minecraft_arguments.as_ref() { - match arg_type { - ArgumentType::Jvm => { - [ - "-Djava.library.path=${natives_directory}", - "-Dminecraft.launcher.brand=${launcher_name}", - "-Dminecraft.launcher.version=${launcher_version}", - 
"-Dminecraft.client.jar=${primary_jar}", - "-cp", - "${classpath}" - ].into_iter() - .chain(iter::once("-XX:HeapDumpPath=MojangTricksIntelDriversForPerformance_javaw.exe_minecraft.exe.heapdump") - .take_while(|_| system_info.os == OperatingSystem::Windows)) - .chain(iter::once(["-Dos.name=Windows 10", "-Dos.version=10.0"]) - .take_while(|_| launch.feature_matcher.matches("__ozone_win10_hack")) - .flatten()) - .chain(iter::once(["-Xdock:icon=${asset=icons/minecraft.icns}", "-Xdock:name=Minecraft"]) - .take_while(|_| system_info.os == OperatingSystem::MacOS) - .flatten()) - .map(|s| OsString::from(strsub::replace_string(s, &sub).into_owned())) - .collect() - }, - ArgumentType::Game => { - arguments.split(' ') - .chain(iter::once("--demo") - .take_while(|_| launch.feature_matcher.matches("is_demo_user"))) - .chain(iter::once(["--width", "${resolution_width}", "--height", "${resolution_height}"]) - .take_while(|_| launch.feature_matcher.matches("has_custom_resolution")) - .flatten()) - .map(|s| OsString::from(strsub::replace_string(s, &sub).into_owned())) - .collect() - } - } - } else { - Vec::default() - } -} - -pub fn run_the_game(launch: &Launch) -> Result<(), Box> { - if launch.runtime_legacy_launch { - Command::new(launch.runtime_path.as_path().as_java_path()) - .args(launch.jvm_args.iter() - .map(|o| o.as_os_str()) - .chain(iter::once(OsStr::new(launch.main_class.as_str()))) - .chain(launch.game_args.iter().map(|o| o.as_os_str()))) - .current_dir(launch.instance_path.as_path().as_java_path()).spawn()?.wait()?; - } else { - todo!("jni launch not supported :(") - } - - Ok(()) -} - -#[allow(dead_code)] -mod windows { - pub const JNI_SEARCH_PATH: Option<&str> = Some("server/jvm.dll"); - pub const JAVA_SEARCH_PATH: Option<&str> = Some("bin/java.exe"); - pub const JRE_PLATFORM_KNOWN: bool = true; -} - -#[allow(dead_code)] -mod linux { - pub const JNI_SEARCH_PATH: Option<&str> = Some("server/libjvm.so"); - pub const JAVA_SEARCH_PATH: Option<&str> = Some("bin/java"); - pub const JRE_PLATFORM_KNOWN: bool = true; -} - -#[allow(dead_code)] -mod macos { - pub const JNI_SEARCH_PATH: Option<&str> = Some("server/libjvm.dylib"); - pub const JAVA_SEARCH_PATH: Option<&str> = Some("bin/java"); - pub const JRE_PLATFORM_KNOWN: bool = true; -} - -#[allow(dead_code)] -mod unknown { - pub const JNI_SEARCH_PATH: Option<&str> = None; - pub const JAVA_SEARCH_PATH: Option<&str> = None; - pub const JRE_PLATFORM_KNOWN: bool = false; -} - -#[cfg(target_os = "windows")] -use self::windows::*; -#[cfg(target_os = "linux")] -use self::linux::*; -#[cfg(target_os = "macos")] -use self::macos::*; -#[cfg(not(any(target_os = "windows", target_os = "linux", target_os = "macos")))] -use self::unknown::*; - -fn search_java_sync(base: impl AsRef, legacy: bool) -> Result, io::Error> { - assert!(JRE_PLATFORM_KNOWN); - let search_path = Path::new(match legacy { - true => JAVA_SEARCH_PATH, - _ => JNI_SEARCH_PATH - }.unwrap()); - - let walker = walkdir::WalkDir::new(base.as_ref()).into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.file_type().is_dir()); - - for entry in walker { - let check_path = [base.as_ref(), entry.path(), Path::new(search_path)].into_iter().collect::(); - match std::fs::metadata(check_path.as_path()) { - Err(e) if e.kind() == io::ErrorKind::NotFound => (), - Err(e) => return Err(e), - Ok(meta) if meta.is_file() => return Ok(Some(check_path)), - _ => () - } - } - - Ok(None) // not found (sadface) -} - -//noinspection RsConstantConditionIf -pub async fn find_java(base: impl AsRef, legacy: bool) -> Result, 
io::Error> { - let meta = fs::metadata(&base).await?; - if meta.is_dir() { // do search - if !JRE_PLATFORM_KNOWN { - warn!("Unknown platform! Cannot search for java executable in {}. Please specify the executable file manually.", base.as_ref().display()); - return Ok(None); - } - - let (tx, rx) = tokio::sync::oneshot::channel(); - let base = base.as_ref().to_path_buf(); // idc - - tokio::task::spawn_blocking(move || { - let res = search_java_sync(base, legacy); - let _ = tx.send(res); // I really don't care if the reader hung up - }).await.expect("jre search panicked"); - - rx.await.expect("jre search didn't send us a result") - } else { // we are pointed directly at a file. assume it's what we want - debug!("JRE path {} is a file ({}). Assuming it's what we want.", base.as_ref().display(), legacy); - Ok(Some(base.as_ref().to_path_buf())) - } -} diff --git a/src/launcher/settings.rs b/src/launcher/settings.rs deleted file mode 100644 index 8453653..0000000 --- a/src/launcher/settings.rs +++ /dev/null @@ -1,232 +0,0 @@ -use std::collections::HashMap; -use std::error::Error; -use std::fmt::{Display, Formatter}; -use std::io::ErrorKind; -use std::path::{Path, PathBuf}; -use log::warn; -use serde::{Deserialize, Serialize}; -use tokio::{fs, io}; -use tokio::fs::File; -use tokio::io::AsyncWriteExt; -use super::constants; - -#[derive(Debug, Clone, Serialize, Deserialize)] -struct SettingsInner { - profiles: HashMap, - instances: HashMap -} - -pub struct Settings { - path: Option, - inner: SettingsInner -} - -#[derive(Debug)] -pub enum SettingsError { - IO { what: &'static str, error: io::Error }, - Format(serde_json::Error), - Inconsistent(String) -} - -impl Display for SettingsError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - SettingsError::IO { what, error } => write!(f, "settings i/o error ({}): {}", what, error), - SettingsError::Format(err) => write!(f, "settings format error: {}", err), - SettingsError::Inconsistent(err) => write!(f, "inconsistent settings: {}", err), - } - } -} - -impl Error for SettingsError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - SettingsError::IO { error: err, .. } => Some(err), - SettingsError::Format(err) => Some(err), - _ => None - } - } -} - -impl Default for SettingsInner { - fn default() -> Self { - SettingsInner { - instances: [(String::from(constants::DEF_INSTANCE_NAME), PathBuf::from(constants::DEF_INSTANCE_NAME).into())].into_iter().collect(), - profiles: [(String::from(constants::DEF_PROFILE_NAME), Profile::new(constants::DEF_INSTANCE_NAME))].into_iter().collect() - } - } -} - -impl Settings { - async fn load_inner(path: impl AsRef) -> Result { - match fs::read_to_string(&path).await { - Ok(data) => serde_json::from_str(data.as_str()).map_err(SettingsError::Format), - Err(e) if e.kind() == ErrorKind::NotFound => Ok(SettingsInner::default()), - Err(e) => Err(SettingsError::IO { what: "loading settings", error: e }) - } - } - - fn check_consistent(mut inner: SettingsInner, path: Option>) -> Result { - inner.profiles.retain(|name, profile| { - if !inner.instances.contains_key(&profile.instance) { - warn!("Settings inconsistency: profile {} refers to instance {}, which does not exist. 
Ignoring this profile.", name, profile.instance); - false - } else { - true - } - }); - - // there will be more checks later maybe - - Ok(Settings { - path: path.map(|p| p.as_ref().to_owned()), - inner - }) - } - - pub async fn load(path: impl AsRef) -> Result { - Self::check_consistent(Self::load_inner(&path).await?, Some(path)) - } - - pub fn get_path(&self) -> Option<&Path> { - self.path.as_deref() - } - - pub async fn save_to(&self, path: impl AsRef) -> Result<(), SettingsError> { - let path = path.as_ref(); - - if let Some(parent) = path.parent() { - fs::create_dir_all(parent).await - .map_err(|e| SettingsError::IO { what: "saving settings (creating directory)", error: e })?; - } - - let mut file = File::create(path).await - .map_err(|e| SettingsError::IO { what: "saving settings (open)", error: e })?; - - file.write_all(serde_json::to_string_pretty(&self.inner).map_err(SettingsError::Format)?.as_bytes()).await - .map_err(|e| SettingsError::IO { what: "saving settings (write)", error: e })?; - - Ok(()) - } - - pub async fn save(&self) -> Result<(), SettingsError> { - self.save_to(self.path.as_ref().expect("save() called on Settings instance not loaded from file")).await - } - - pub fn get_instance(&self, name: &str) -> Option<&Instance> { - self.inner.instances.get(name) - } - - pub fn get_profile(&self, name: &str) -> Option<&Profile> { - self.inner.profiles.get(name) - } - - pub fn get_instance_for(&self, profile: &Profile) -> &Instance { - self.inner.instances.get(&profile.instance).unwrap() - } -} - -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct Instance { - path: PathBuf // relative to launcher home (or absolute) -} - -#[derive(Deserialize, Serialize, Debug, Clone)] -#[serde(rename_all = "snake_case")] -pub enum ProfileVersion { - LatestSnapshot, - LatestRelease, - #[serde(untagged)] - Specific(String) -} - -#[derive(Deserialize, Serialize, Debug, Clone, Copy)] -pub struct Resolution { - width: u32, - height: u32 -} - -impl Default for Resolution { - fn default() -> Self { - Resolution { width: 864, height: 480 } - } -} - -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct Profile { - game_version: ProfileVersion, - java_runtime: Option, - instance: String, - - #[serde(default)] - jvm_arguments: Vec, - #[serde(default)] - legacy_launch: bool, - - resolution: Option -} - -impl> From
for Instance { - fn from(path: P) -> Self { - Self { path: path.as_ref().into() } - } -} - -impl Instance { - pub async fn get_path(&self, home: impl AsRef) -> Result { - let path = self.path.as_path(); - - if path.is_relative() { - Ok([home.as_ref(), Path::new("instances"), path].iter().collect::()) - } else { - fs::canonicalize(path).await - } - } -} - -const DEF_JVM_ARGUMENTS: [&str; 7] = [ - "-Xmx2G", - "-XX:+UnlockExperimentalVMOptions", - "-XX:+UseG1GC", - "-XX:G1NewSizePercent=20", - "-XX:G1ReservePercent=20", - "-XX:MaxGCPauseMillis=50", - "-XX:G1HeapRegionSize=32M" -]; - -impl Profile { - fn new(instance_name: &str) -> Self { - Self { - game_version: ProfileVersion::LatestRelease, - java_runtime: None, - instance: instance_name.into(), - jvm_arguments: DEF_JVM_ARGUMENTS.iter().map(|s| String::from(*s)).collect(), - legacy_launch: false, - resolution: None - } - } - - pub fn get_version(&self) -> &ProfileVersion { - &self.game_version - } - - pub fn get_instance_name(&self) -> &str { - &self.instance - } - - pub fn iter_arguments(&self) -> impl Iterator { - self.jvm_arguments.iter() - } - - pub fn get_resolution(&self) -> Option { - self.resolution - } - - pub fn get_java_runtime(&self) -> Option<&String> { - self.java_runtime.as_ref() - } - - pub fn is_legacy_launch(&self) -> bool { - self.legacy_launch - } -} diff --git a/src/launcher/strsub.rs b/src/launcher/strsub.rs deleted file mode 100644 index 5764405..0000000 --- a/src/launcher/strsub.rs +++ /dev/null @@ -1,192 +0,0 @@ -// a cheap-o implementation of StrSubstitutor from apache commons -// (does not need to support recursive evaluation or preserving escapes, it was never enabled in - -use std::borrow::Cow; - -const ESCAPE: char = '$'; -const VAR_BEGIN: &str = "${"; -const VAR_END: &str = "}"; -const VAR_DEFAULT: &str = ":-"; - -pub trait SubFunc<'rep> { - fn substitute(&self, key: &str) -> Option>; -} - -/* NOTE: the in-place implementation has been replaced for the following reasons: - * - it was annoying to get lifetimes to work, so you could only either pass a trait implementation - * or a closure - * - it was probably slower than doing it out-of-place anyway, since you keep having to copy the - * tail of the string for each replacement - */ - -// handles ${replacements} on this string IN-PLACE. Calls the "sub" function for each key it receives. -// if "sub" returns None, it will use a default value or ignore the ${substitution}. -// There are no "invalid inputs" and this function should never panic unless "sub" panics. -/*pub fn replace_string(input: &mut String, sub: impl SubFunc) { - let mut cursor = input.len(); - while let Some(idx) = input[..cursor].rfind(VAR_BEGIN) { - // note: for some reason, apache processes escapes BEFORE checking if it's even a valid - // replacement expression. strange behavior IMO. - if let Some((pidx, ESCAPE)) = prev_char(input.as_ref(), idx) { - // this "replacement" is escaped. remove the escape marker and continue. - input.remove(pidx); - cursor = pidx; - continue; - } - - let Some(endidx) = input[idx..cursor].find(VAR_END).map(|v| v + idx) else { - // unclosed replacement expression. ignore. 
- cursor = idx; - continue; - }; - - let spec = &input[(idx + VAR_BEGIN.len())..endidx]; - let name; - let def_opt; - - if let Some(def) = spec.find(VAR_DEFAULT) { - name = &spec[..def]; - def_opt = Some(&spec[(def + VAR_DEFAULT.len())..]); - } else { - name = spec; - def_opt = None; - } - - if let Some(sub_val) = sub.substitute(name).map_or_else(|| def_opt.map(|d| Cow::Owned(d.to_owned())), |v| Some(v)) { - input.replace_range(idx..(endidx + VAR_END.len()), sub_val.as_ref()); - } - - cursor = idx; - } -}*/ - -pub fn replace_string<'inp, 'rep>(input: &'inp str, sub: &impl SubFunc<'rep>) -> Cow<'inp, str> { - let mut ret: Option = None; - let mut cursor = 0usize; - - while let Some(idx) = input[cursor..].find(VAR_BEGIN) { - let idx = idx + cursor; // make idx an absolute index into 'input' - let spec_start = idx + VAR_BEGIN.len(); // the start of the "spec" (area inside {}) - - // first, check if this is escaped - if let Some((prev_idx, ESCAPE)) = input[..idx].char_indices().next_back() { - let s = ret.get_or_insert_default(); - s.push_str(&input[cursor..prev_idx]); - - // advance past this so we don't match it again - s.push_str(&input[idx..spec_start]); - cursor = spec_start; - continue; - } - - // now, find the closing tag - let Some(spec_end) = input[spec_start..].find(VAR_END).map(|v| v + spec_start) else { - break; // reached the end of the string - }; - - let full_spec = &input[spec_start..spec_end]; - - // check for a default argument - let (name, def) = if let Some(defidx) = full_spec.find(VAR_DEFAULT) { - (&full_spec[..defidx], Some(&full_spec[(defidx + VAR_DEFAULT.len())..])) - } else { - (full_spec, None) - }; - - let after = spec_end + VAR_END.len(); - if let Some(subst) = sub.substitute(name).map_or_else(|| def.map(Cow::Borrowed), Some) { - let s = ret.get_or_insert_default(); - s.push_str(&input[cursor..idx]); - s.push_str(subst.as_ref()); - } else { - ret.get_or_insert_default().push_str(&input[cursor..after]); - } - - cursor = after; - } - - if let Some(ret) = ret.as_mut() { - ret.push_str(&input[cursor..]); - } - - ret.map_or(Cow::Borrowed(input), Cow::Owned) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[derive(Clone, Copy)] - struct TestSub; - impl SubFunc<'static> for TestSub { - fn substitute(&self, key: &str) -> Option> { - match key { - "exists" => Some(Cow::Borrowed("value123")), - "empty" => None, - "borger" => Some(Cow::Borrowed("\u{1f354}")), - _ => panic!("replace_fun called with unexpected key: {}", key) - } - } - } - - #[test] - fn test_standard_replace() { - assert_eq!(replace_string("this has ${exists} and more", &TestSub), "this has value123 and more"); - assert_eq!(replace_string("multiple ${exists} repl${exists}ace", &TestSub), "multiple value123 replvalue123ace"); - assert_eq!(replace_string("${exists}${exists}", &TestSub), "value123value123"); - } - - #[test] - fn test_empty_replace() { - assert_eq!(replace_string("this has ${empty} and more", &TestSub), "this has ${empty} and more"); - assert_eq!(replace_string("multiple ${empty} repl${empty}ace", &TestSub), "multiple ${empty} repl${empty}ace"); - assert_eq!(replace_string("${empty}${empty}", &TestSub), "${empty}${empty}"); - } - - #[test] - fn test_homogenous_replace() { - assert_eq!(replace_string("some ${exists} and ${empty} ...", &TestSub), "some value123 and ${empty} ..."); - assert_eq!(replace_string("some ${empty} and ${exists} ...", &TestSub), "some ${empty} and value123 ..."); - assert_eq!(replace_string("${exists}${empty}", &TestSub), "value123${empty}"); - 
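// Grammar exercised by these tests: ${key} expands through SubFunc::substitute,
// ${key:-default} falls back to the literal default when substitute returns None, an
// unresolved ${key} without a default is kept verbatim, and a leading extra '$' escapes
// the whole ${...} expression.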
assert_eq!(replace_string("${empty}${exists}", &TestSub), "${empty}value123"); - } - - #[test] - fn test_default_replace() { - assert_eq!(replace_string("some ${exists:-def1} and ${empty:-def2} ...", &TestSub), "some value123 and def2 ..."); - assert_eq!(replace_string("some ${empty:-def1} and ${exists:-def2} ...", &TestSub), "some def1 and value123 ..."); - assert_eq!(replace_string("abc${empty:-}def", &TestSub), "abcdef"); - assert_eq!(replace_string("${empty:-}${empty:-}", &TestSub), ""); - } - - #[test] - fn test_escape() { - assert_eq!(replace_string("an $${escaped} replacement (${exists})", &TestSub), "an ${escaped} replacement (value123)"); - assert_eq!(replace_string("${exists}$${escaped}${exists}", &TestSub), "value123${escaped}value123"); - - // make sure this weird behavior is preserved... (the original code seemed to show it) - assert_eq!(replace_string("some $${ else", &TestSub), "some ${ else"); - } - - #[test] - fn test_weird() { - assert_eq!(replace_string("${exists}", &TestSub), "value123"); - assert_eq!(replace_string("$${empty}", &TestSub), "${empty}"); - assert_eq!(replace_string("${empty:-a}", &TestSub), "a"); - assert_eq!(replace_string("${empty:-}", &TestSub), ""); - } - - // these make sure it doesn't chop up multibyte characters illegally - #[test] - fn test_multibyte_surround() { - assert_eq!(replace_string("\u{1f354}$${}\u{1f354}", &TestSub), "\u{1f354}${}\u{1f354}"); - assert_eq!(replace_string("\u{1f354}${exists}\u{1f354}${empty:-}\u{1f354}", &TestSub), "\u{1f354}value123\u{1f354}\u{1f354}"); - } - - #[test] - fn test_multibyte_replace() { - assert_eq!(replace_string("borger ${borger}", &TestSub), "borger \u{1f354}"); - assert_eq!(replace_string("${exists:-\u{1f354}}${empty:-\u{1f354}}", &TestSub), "value123\u{1f354}"); - assert_eq!(replace_string("${borger}$${}${borger}", &TestSub), "\u{1f354}${}\u{1f354}"); - } -} diff --git a/src/launcher/version.rs b/src/launcher/version.rs deleted file mode 100644 index 0f55223..0000000 --- a/src/launcher/version.rs +++ /dev/null @@ -1,398 +0,0 @@ -use std::{collections::{BTreeMap, HashMap}, error::Error, io::ErrorKind}; -use std::borrow::Cow; -use std::collections::HashSet; -use std::fmt::Display; -use std::path::{Path, PathBuf}; - -use log::{debug, info, warn}; -use sha1_smol::Digest; -use tokio::{fs, io}; -use crate::launcher::settings::ProfileVersion; -use crate::util; -use crate::version::{*, manifest::*}; - -use super::constants::*; - -#[derive(Debug)] -pub enum VersionError { - IO { what: String, error: io::Error }, - Request { what: String, error: reqwest::Error }, - MalformedObject { what: String, error: serde_json::Error }, - VersionIntegrity { id: String, expect: Digest, got: Digest } -} - -impl Display for VersionError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - VersionError::IO { what, error } => write!(f, "i/o error ({what}): {error}"), - VersionError::Request { what, error } => write!(f, "request error ({what}): {error}"), - VersionError::MalformedObject { what, error } => write!(f, "malformed {what}: {error}"), - VersionError::VersionIntegrity { id, expect, got } => write!(f, "version {id} integrity mismatch (expect {expect}, got {got})") - } - } -} - -impl Error for VersionError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - VersionError::IO { error, .. } => Some(error), - VersionError::Request { error, .. } => Some(error), - VersionError::MalformedObject { error, .. 
} => Some(error), - _ => None - } - } -} - -struct RemoteVersionList { - versions: HashMap<String, VersionManifestVersion>, - latest: LatestVersions -} - -impl RemoteVersionList { - async fn new() -> Result<Self, VersionError> { - debug!("Looking up remote version manifest."); - let text = reqwest::get(URL_VERSION_MANIFEST).await - .and_then(|r| r.error_for_status()) - .map_err(|e| VersionError::Request { what: "download version manifest".into(), error: e })? - .text().await.map_err(|e| VersionError::Request { what: "download version manifest (decode)".into(), error: e })?; - - debug!("Parsing version manifest."); - let manifest: VersionManifest = serde_json::from_str(text.as_str()).map_err(|e| VersionError::MalformedObject { what: "version manifest".into(), error: e })?; - - let mut versions = HashMap::new(); - for v in manifest.versions { - versions.insert(v.id.clone(), v); - } - - debug!("Done loading remote versions!"); - Ok(RemoteVersionList { - versions, - latest: manifest.latest - }) - } - - async fn download_version(&self, ver: &VersionManifestVersion, path: &Path) -> Result<CompleteVersion, VersionError> { - // ensure parent directory exists - info!("Downloading version {}.", ver.id); - tokio::fs::create_dir_all(path.parent().expect("version .json has no parent (impossible)")).await - .inspect_err(|e| warn!("failed to create {} parent dirs: {e}", path.display())) - .map_err(|e| VersionError::IO { what: format!("creating version directory for {}", path.display()), error: e })?; - - // download it - let ver_text = reqwest::get(ver.url.as_str()).await - .and_then(|r| r.error_for_status()) - .map_err(|e| VersionError::Request { what: format!("download version {} from {}", ver.id, ver.url), error: e })? - .text().await.map_err(|e| VersionError::Request { what: format!("download version {} from {} (receive)", ver.id, ver.url), error: e })?; - - debug!("Validating downloaded {}...", ver.id); - // make sure it's valid - util::verify_sha1(ver.sha1, ver_text.as_str()) - .map_err(|e| VersionError::VersionIntegrity { - id: ver.id.clone(), - expect: ver.sha1, - got: e - })?; - - // make sure it's well-formed - let cver: CompleteVersion = serde_json::from_str(ver_text.as_str()).map_err(|e| VersionError::MalformedObject { what: format!("complete version {}", ver.id), error: e })?; - - debug!("Saving version {}...", ver.id); - - // write it out - tokio::fs::write(path, ver_text).await - .inspect_err(|e| warn!("Failed to save version {}: {}", ver.id, e)) - .map_err(|e| VersionError::IO { what: format!("writing version file at {}", path.display()), error: e })?; - - info!("Done downloading and verifying {}!", ver.id); - - Ok(cver) - } -} - -struct LocalVersionList { - versions: BTreeMap<String, CompleteVersion> -} - -#[derive(Debug)] -enum LocalVersionError { - Sha1Mismatch { exp: Digest, got: Digest }, - VersionMismatch { fname: String, json: String }, - Unknown(Box<dyn Error>) -} - -impl Display for LocalVersionError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - LocalVersionError::Sha1Mismatch { exp, got } => { - write!(f, "sha1 mismatch (exp {exp}, got {got})") - }, - LocalVersionError::VersionMismatch { fname, json } => { - write!(f, "version ID mismatch (filename {fname}, json {json})") - }, - LocalVersionError::Unknown(err) => { - write!(f, "unknown version error: {err}") - } - } - } -} - -impl Error for LocalVersionError {} - -impl LocalVersionList { - async fn load_version(path: &Path, sha1: Option<Digest>) -> Result<CompleteVersion, LocalVersionError> { - // grumble grumble I don't like reading in the whole file at once - info!("Loading local version at {}.", path.display()); - let ver =
tokio::fs::read_to_string(path).await.map_err(|e| LocalVersionError::Unknown(Box::new(e)))?; - if let Some(digest_exp) = sha1 { - debug!("Verifying local version {}.", path.display()); - util::verify_sha1(digest_exp, ver.as_str()) - .map_err(|got| { - warn!("Local version sha1 mismatch: {} (exp: {}, got: {})", path.display(), digest_exp, got); - LocalVersionError::Sha1Mismatch { exp: digest_exp.to_owned(), got } - })?; - } - - let ver: CompleteVersion = serde_json::from_str(ver.as_str()).map_err(|e| { - warn!("Invalid version JSON {}: {}", path.display(), e); - LocalVersionError::Unknown(Box::new(e)) - })?; - - let fname_id = path.file_stem() - .expect("tried to load a local version with no path") // should be impossible - .to_str() - .expect("tried to load a local version with invalid UTF-8 filename"); // we already checked if the filename is valid UTF-8 at this point - - if fname_id == ver.id.as_str() { - info!("Loaded local version {}.", ver.id); - Ok(ver) - } else { - warn!("Local version {} has a version ID conflict (filename: {}, json: {})!", path.display(), fname_id, ver.id); - Err(LocalVersionError::VersionMismatch { fname: fname_id.to_owned(), json: ver.id }) - } - } - - async fn load_versions(home: &Path, skip: impl Fn(&str) -> bool) -> Result<Self, VersionError> { - info!("Loading local versions."); - let mut rd = tokio::fs::read_dir(home).await.map_err(|e| VersionError::IO { what: format!("open local versions directory {}", home.display()), error: e })?; - let mut versions = BTreeMap::new(); - - while let Some(ent) = rd.next_entry().await.map_err(|e| VersionError::IO { what: format!("read local versions directory {}", home.display()), error: e })? { - if !ent.file_type().await.map_err(|e| VersionError::IO { what: format!("version entry metadata {}", ent.path().display()), error: e} )?.is_dir() { continue; } - - // when the code is fugly - let path = match ent.file_name().to_str() { - Some(s) => { - if skip(s) { - debug!("Skipping local version {s} because (I assume) it is remotely tracked."); - continue - } - - /* FIXME: once https://github.com/rust-lang/rust/issues/127292 is closed, - * use add_extension to avoid extra heap allocations (they hurt my feelings) */ - let mut path = ent.path(); - - // can't use set_extension since s might contain a . (like 1.8.9) - path.push(format!("{s}.json")); - path - }, - - /* We just ignore directories with names that contain invalid unicode. Unfortunately, the launcher - * will not be supporting such custom versions. Name your version something sensible please.
*/ - None => { - warn!("Ignoring a local version {} because its id contains invalid unicode.", ent.file_name().to_string_lossy()); - continue - } - }; - - match Self::load_version(&path, None).await { - Ok(v) => { - versions.insert(v.id.clone(), v); - }, - Err(e) => { - // FIXME: just display the filename without to_string_lossy when https://github.com/rust-lang/rust/issues/120048 is closed - warn!("Ignoring local version {}: {e}", ent.file_name().to_string_lossy()); - } - } - } - - info!("Loaded {} local version(s).", versions.len()); - Ok(LocalVersionList { versions }) - } -} - -pub struct VersionList { - remote: Option<RemoteVersionList>, - local: LocalVersionList, - home: PathBuf -} - -pub enum VersionResult<'a> { - Complete(&'a CompleteVersion), - Remote(&'a VersionManifestVersion), - None -} - -impl<'a> From<&'a CompleteVersion> for VersionResult<'a> { - fn from(value: &'a CompleteVersion) -> Self { - Self::Complete(value) - } -} - -impl<'a> From<&'a VersionManifestVersion> for VersionResult<'a> { - fn from(value: &'a VersionManifestVersion) -> Self { - Self::Remote(value) - } -} - -impl<'a, T: Into<VersionResult<'a>>> From<Option<T>> for VersionResult<'a> { - fn from(value: Option<T>) -> Self { - value.map_or(VersionResult::None, |v| v.into()) - } -} - -#[derive(Debug)] -pub enum VersionResolveError { - InheritanceLoop(String), - MissingVersion(String), - VersionLoad(VersionError) -} - -impl Display for VersionResolveError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - VersionResolveError::InheritanceLoop(s) => write!(f, "inheritance loop (saw {s} twice)"), - VersionResolveError::MissingVersion(s) => write!(f, "unknown version {s}"), - VersionResolveError::VersionLoad(err) => write!(f, "version load error: {err}") - } - } -} - -impl Error for VersionResolveError {} - -impl VersionList { - async fn create_dir_for(home: &Path) -> Result<(), io::Error> { - debug!("Creating versions directory."); - match fs::create_dir(home).await { - Ok(_) => Ok(()), - Err(e) if e.kind() == ErrorKind::AlreadyExists => Ok(()), - Err(e) => { - debug!("failed to create version home: {}", e); - Err(e) - } - } - } - - pub async fn online(home: &Path) -> Result<Self, VersionError> { - Self::create_dir_for(home).await.map_err(|e| VersionError::IO { what: format!("create version directory {}", home.display()), error: e })?; - - let remote = RemoteVersionList::new().await?; - let local = LocalVersionList::load_versions(home, |s| remote.versions.contains_key(s)).await?; - - Ok(VersionList { - remote: Some(remote), - local, - home: home.to_path_buf() - }) - } - - pub async fn offline(home: &Path) -> Result<Self, VersionError> { - Self::create_dir_for(home).await.map_err(|e| VersionError::IO { what: format!("create version directory {}", home.display()), error: e })?; - - let local = LocalVersionList::load_versions(home, |_| false).await?; - - Ok(VersionList { - remote: None, - local, - home: home.to_path_buf() - }) - } - - pub fn is_online(&self) -> bool { - self.remote.is_some() - } - - pub fn get_version_lazy(&self, id: &str) -> VersionResult { - self.remote.as_ref().and_then(|r| r.versions.get(id).map(VersionResult::from)) - .or_else(|| self.local.versions.get(id).map(VersionResult::from)) - .unwrap_or(VersionResult::None) - } - - pub fn get_profile_version_id<'v>(&self, ver: &'v ProfileVersion) -> Option<Cow<'v, str>> { - match ver { - ProfileVersion::LatestRelease => self.remote.as_ref().map(|r| Cow::Owned(r.latest.release.clone())), - ProfileVersion::LatestSnapshot => self.remote.as_ref().map(|r| Cow::Owned(r.latest.snapshot.clone())), -
ProfileVersion::Specific(ver) => Some(Cow::Borrowed(ver)) - } - } - - pub fn get_remote_version(&self, id: &str) -> Option<&VersionManifestVersion> { - let remote = self.remote.as_ref().expect("get_remote_version called in offline mode!"); - - remote.versions.get(id) - } - - pub async fn load_remote_version(&self, ver: &VersionManifestVersion) -> Result<CompleteVersion, VersionError> { - let remote = self.remote.as_ref().expect("load_remote_version called in offline mode!"); - - let id = ver.id.as_str(); - let mut ver_path = self.home.join(id); - ver_path.push(format!("{id}.json")); - - debug!("Loading local copy of remote version {}", ver.id); - - match LocalVersionList::load_version(ver_path.as_path(), Some(ver.sha1)).await { - Ok(v) => return Ok(v), - Err(e) => { - info!("Redownloading {id}, since the local copy could not be loaded: {e}"); - } - } - - remote.download_version(ver, ver_path.as_path()).await - } - - pub async fn resolve_version<'v>(&self, ver: &'v CompleteVersion) -> Result<Cow<'v, CompleteVersion>, VersionResolveError> { - let mut seen: HashSet<String> = HashSet::new(); - seen.insert(ver.id.clone()); - - let Some(inherit) = ver.inherits_from.as_ref() else { - return Ok(Cow::Borrowed(ver)); - }; - - if *inherit == ver.id { - warn!("Version {} directly inherits from itself!", ver.id); - return Err(VersionResolveError::InheritanceLoop(ver.id.clone())); - } - - debug!("Resolving version inheritance: {} (inherits from {})", ver.id, inherit); - - let mut ver = ver.clone(); - let mut inherit = inherit.clone(); - - loop { - if !seen.insert(inherit.clone()) { - warn!("Version inheritance loop detected in {}: {} transitively inherits from itself.", ver.id, inherit); - return Err(VersionResolveError::InheritanceLoop(inherit)); - } - - let inherited_ver = match self.get_version_lazy(inherit.as_str()) { - VersionResult::Complete(v) => Cow::Borrowed(v), - VersionResult::Remote(v) => - Cow::Owned(self.load_remote_version(v).await.map_err(VersionResolveError::VersionLoad)?), - VersionResult::None => { - warn!("Cannot resolve version {}, it inherits an unknown version {inherit}", ver.id); - return Err(VersionResolveError::MissingVersion(inherit)); - } - }; - - ver.apply_child(inherited_ver.as_ref()); - - let Some(new_inherit) = inherited_ver.inherits_from.as_ref() else { - break - }; - - inherit.replace_range(.., new_inherit.as_str()); - } - - Ok(Cow::Owned(ver)) - } -} diff --git a/src/lib.rs b/src/lib.rs deleted file mode 100644 index 0d2233b..0000000 --- a/src/lib.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod util; -pub mod version; -pub mod assets; -pub mod launcher; -pub mod auth; // temporarily public diff --git a/src/util.rs b/src/util.rs deleted file mode 100644 index 7510a33..0000000 --- a/src/util.rs +++ /dev/null @@ -1,334 +0,0 @@ -mod progress; - -use std::error::Error; -use std::fmt::{Display, Formatter}; -use std::io::ErrorKind; -use std::path::{Component, Path, PathBuf}; -use const_format::formatcp; -use log::{debug, info, warn}; -use sha1_smol::{Digest, Sha1}; -use tokio::fs::File; -use tokio::{fs, io}; -use tokio::io::{AsyncReadExt, AsyncWriteExt}; - -const PKG_NAME: &str = env!("CARGO_PKG_NAME"); -const PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); -const CRATE_NAME: &str = env!("CARGO_CRATE_NAME"); - -pub const USER_AGENT: &str = formatcp!("{PKG_NAME}/{PKG_VERSION} (in {CRATE_NAME})"); - -#[derive(Debug)] -pub enum IntegrityError { - SizeMismatch{ expect: usize, actual: usize }, - Sha1Mismatch{ expect: Digest, actual: Digest } -} - -impl Display for IntegrityError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self
{ - IntegrityError::SizeMismatch{ expect, actual } => - write!(f, "size mismatch (expect {expect} bytes, got {actual} bytes)"), - IntegrityError::Sha1Mismatch {expect, actual} => - write!(f, "sha1 mismatch (expect {expect}, got {actual})") - } - } -} - -impl Error for IntegrityError {} - -pub fn verify_sha1(expect: Digest, s: &str) -> Result<(), Digest> { - let dig = Sha1::from(s).digest(); - - if dig == expect { - return Ok(()); - } - - Err(dig) -} - -#[derive(Debug)] -pub enum FileVerifyError { - Integrity(PathBuf, IntegrityError), - Open(PathBuf, tokio::io::Error), - Read(PathBuf, tokio::io::Error), -} - -impl Display for FileVerifyError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - FileVerifyError::Integrity(path, e) => write!(f, "file integrity error {}: {}", path.display(), e), - FileVerifyError::Open(path, e) => write!(f, "error opening file {}: {}", path.display(), e), - FileVerifyError::Read(path, e) => write!(f, "error reading file {}: {}", path.display(), e) - } - } -} - -impl Error for FileVerifyError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - FileVerifyError::Integrity(_, e) => Some(e), - FileVerifyError::Open(_, e) => Some(e), - FileVerifyError::Read(_, e) => Some(e) - } - } -} - -pub async fn verify_file(path: impl AsRef<Path>, expect_size: Option<usize>, expect_sha1: Option<Digest>) -> Result<(), FileVerifyError> { - let path = path.as_ref(); - - if expect_size.is_none() && expect_sha1.is_none() { - return match fs::metadata(path).await { - Ok(_) => { - debug!("No size or sha1 for {}, have to assume it's good.", path.display()); - Ok(()) - }, - Err(e) => { - Err(FileVerifyError::Open(path.to_path_buf(), e)) - } - } - } - - let mut file = File::open(path).await.map_err(|e| FileVerifyError::Open(path.to_owned(), e))?; - - let mut tally = 0usize; - let mut st = Sha1::new(); - let mut buf = [0u8; 4096]; - - loop { - let n = match file.read(&mut buf).await { - Ok(n) => n, - Err(e) => match e.kind() { - ErrorKind::Interrupted => continue, - _ => return Err(FileVerifyError::Read(path.to_owned(), e)) - } - }; - - if n == 0 { - break; - } - - st.update(&buf[..n]); - tally += n; - } - - let dig = st.digest(); - - if expect_size.is_some_and(|sz| sz != tally) { - return Err(FileVerifyError::Integrity(path.to_owned(), IntegrityError::SizeMismatch { - expect: expect_size.unwrap(), - actual: tally - })); - } else if expect_sha1.is_some_and(|exp_dig| exp_dig != dig) { - return Err(FileVerifyError::Integrity(path.to_owned(), IntegrityError::Sha1Mismatch { - expect: expect_sha1.unwrap(), - actual: dig - })); - } - - Ok(()) -} - -#[derive(Debug)] -pub enum EnsureFileError { - IO { what: &'static str, error: io::Error }, - Download { url: String, error: reqwest::Error }, - Integrity(IntegrityError), - Offline, - MissingURL -} - -impl Display for EnsureFileError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - EnsureFileError::IO { what, error } => write!(f, "i/o error ensuring file ({what}): {error}"), - EnsureFileError::Download { url, error } => write!(f, "error downloading file ({url}): {error}"), - EnsureFileError::Integrity(e) => write!(f, "integrity error for downloaded file: {e}"), - EnsureFileError::Offline => f.write_str("unable to download file while offline"), - EnsureFileError::MissingURL => f.write_str("missing url"), - } - } -} - -impl Error for EnsureFileError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self { - EnsureFileError::IO { error, ..
} => Some(error), - EnsureFileError::Download { error, .. } => Some(error), - EnsureFileError::Integrity(error) => Some(error), - _ => None - } - } -} - -pub async fn should_download(path: impl AsRef<Path>, expect_size: Option<usize>, expect_sha1: Option<Digest>) -> Result<bool, io::Error> { - let path = path.as_ref(); - - match verify_file(path, expect_size, expect_sha1).await { - Ok(()) => { - debug!("Skipping download for file {}, integrity matches.", path.display()); - Ok(false) - }, - Err(FileVerifyError::Open(_, e)) if e.kind() == ErrorKind::NotFound => { - debug!("File {} is missing, downloading it.", path.display()); - Ok(true) - }, - Err(FileVerifyError::Integrity(p, e)) => { - warn!("Integrity error on file {}: {}", p.display(), e); - - // try to delete the file since it's bad - let _ = fs::remove_file(path).await - .map_err(|e| warn!("Error deleting corrupted/modified file {} (ignoring): {}", path.display(), e)); - Ok(true) - } - Err(FileVerifyError::Open(_, e) | FileVerifyError::Read(_, e)) => { - warn!("Error verifying file {} on disk: {}", path.display(), e); - Err(e) - } - } -} - -pub async fn ensure_file(path: impl AsRef<Path>, url: Option<&str>, expect_size: Option<usize>, expect_sha1: Option<Digest>, online: bool, force_download: bool) -> Result<bool, EnsureFileError> { - let path = path.as_ref(); - - if !force_download && !should_download(path, expect_size, expect_sha1).await - .map_err(|e| EnsureFileError::IO { what: "verifying file on disk", error: e })? { - - return Ok(false); - } - - if !online { - warn!("Cannot download {} to {} while offline!", url.unwrap_or("(no url)"), path.display()); - return Err(EnsureFileError::Offline); - } - - // download the file - let Some(url) = url else { - return Err(EnsureFileError::MissingURL); - }; - - let mut file = File::create(path).await.map_err(|e| EnsureFileError::IO { - what: "save downloaded file (open)", - error: e - })?; - - debug!("File {} must be downloaded ({}).", path.display(), url); - - let mut response = reqwest::get(url).await.map_err(|e| EnsureFileError::Download { url: url.to_owned(), error: e })?; - let mut tally = 0usize; - let mut sha1 = Sha1::new(); - - while let Some(chunk) = response.chunk().await.map_err(|e| EnsureFileError::Download { url: url.to_owned(), error: e })?
{ - let slice = chunk.as_ref(); - - file.write_all(slice).await.map_err(|e| EnsureFileError::IO { - what: "save downloaded file (write)", - error: e - })?; - - tally += slice.len(); - sha1.update(slice); - } - - drop(file); // manually close file - - let del_file_silent = || async { - debug!("Deleting downloaded file {} since its integrity doesn't match :(", path.display()); - let _ = fs::remove_file(path).await.map_err(|e| warn!("failed to delete invalid downloaded file: {}", e)); - () - }; - - if expect_size.is_some_and(|s| s != tally) { - del_file_silent().await; - - return Err(EnsureFileError::Integrity(IntegrityError::SizeMismatch { - expect: expect_size.unwrap(), - actual: tally - })); - } - - let digest = sha1.digest(); - - if expect_sha1.is_some_and(|exp_dig| exp_dig != digest) { - del_file_silent().await; - - return Err(EnsureFileError::Integrity(IntegrityError::Sha1Mismatch { - expect: expect_sha1.unwrap(), - actual: digest - })); - } - - info!("File {} downloaded successfully.", path.display()); - Ok(true) -} - -pub fn check_path(name: &str) -> Result<&Path, &'static str> { - let entry_path: &Path = Path::new(name); - - let mut depth = 0usize; - for component in entry_path.components() { - depth = match component { - Component::Prefix(_) | Component::RootDir => - return Err("root path component in entry"), - Component::ParentDir => depth.checked_sub(1) - .map_or_else(|| Err("entry path escapes"), |s| Ok(s))?, - Component::Normal(_) => depth + 1, - _ => depth - } - } - - Ok(entry_path) -} - -#[cfg(windows)] -pub fn strip_verbatim(path: &Path) -> &Path { - let Some(Component::Prefix(p)) = path.components().next() else { - return path; - }; - - use std::path::Prefix; - use std::ffi::OsStr; - - match p.kind() { - Prefix::VerbatimDisk(_) => - Path::new(unsafe { OsStr::from_encoded_bytes_unchecked(&path.as_os_str().as_encoded_bytes()[4..]) }), - _ => path - } -} - -#[cfg(not(windows))] -pub fn strip_verbatim(path: &Path) -> &Path { - path -} - -pub trait AsJavaPath { - fn as_java_path(&self) -> &Path; -} - -impl AsJavaPath for Path { - fn as_java_path(&self) -> &Path { - strip_verbatim(self) - } -} - -#[cfg(test)] -mod tests { - #[allow(unused_imports)] - use super::*; - use std::path::Prefix; - - #[test] - #[cfg(windows)] - fn test_strip_verbatim() { - let path = Path::new(r"\\?\C:\Some\Verbatim\Path"); - match path.components().next().unwrap() { - Component::Prefix(p) => assert!(matches!(p.kind(), Prefix::VerbatimDisk(_)), "(TEST BUG) path does not start with verbatim disk"), - _ => panic!("(TEST BUG) path does not start with prefix") - } - - let path2 = path.as_java_path(); - match path2.components().next().unwrap() { - Component::Prefix(p) => assert!(matches!(p.kind(), Prefix::Disk(_))), - _ => panic!("path does not begin with prefix") - } - } -} diff --git a/src/util/progress.rs b/src/util/progress.rs deleted file mode 100644 index e8bdde1..0000000 --- a/src/util/progress.rs +++ /dev/null @@ -1,3 +0,0 @@ -struct Progress { - -} \ No newline at end of file diff --git a/src/version.rs b/src/version.rs deleted file mode 100644 index 6e9ad3f..0000000 --- a/src/version.rs +++ /dev/null @@ -1,489 +0,0 @@ -use core::fmt; -use std::{collections::BTreeMap, convert::Infallible, marker::PhantomData, ops::Deref, str::FromStr}; -use chrono::{DateTime, NaiveDateTime, Utc}; -use chrono::format::ParseErrorKind; -use regex::Regex; -use serde::{de::{self, Visitor}, Deserialize, Deserializer}; -use serde::de::{Error, SeqAccess}; -use sha1_smol::Digest; - -pub mod manifest; -use manifest::*; - 
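The verify-then-download logic deleted above (verify_file / should_download / ensure_file) boils down to streaming a file through SHA-1 while counting bytes, then trusting only the checks that were actually requested. A minimal synchronous sketch of that idea, using std::io and the same sha1_smol crate; the helper names here are illustrative, not part of the removed module:

    use std::io::{ErrorKind, Read};
    use sha1_smol::{Digest, Sha1};

    fn digest_and_len(mut src: impl Read) -> std::io::Result<(Digest, usize)> {
        let mut sha1 = Sha1::new();
        let mut total = 0usize;
        let mut buf = [0u8; 4096];

        loop {
            let n = match src.read(&mut buf) {
                Ok(0) => break,
                Ok(n) => n,
                // retry on EINTR, like the original read loop does
                Err(e) if e.kind() == ErrorKind::Interrupted => continue,
                Err(e) => return Err(e),
            };
            sha1.update(&buf[..n]);
            total += n;
        }

        Ok((sha1.digest(), total))
    }

    // A file passes when every expectation we actually have holds; with no
    // expectations at all, mere existence has to be good enough.
    fn integrity_ok(expect_size: Option<usize>, expect_sha1: Option<Digest>, got_sha1: Digest, got_len: usize) -> bool {
        expect_size.map_or(true, |s| s == got_len) && expect_sha1.map_or(true, |d| d == got_sha1)
    }
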
-#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Eq)] -#[serde(rename_all = "lowercase")] -pub enum RuleAction { - Allow, - Disallow -} - -// must derive an order on this because it's used as a key for a btreemap -#[derive(Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)] -#[serde(rename_all = "lowercase")] -pub enum OperatingSystem { - Linux, // "linux" - Windows, // "windows" - - #[serde(alias = "osx")] // not technically correct but it works - MacOS, // "osx" - - #[serde(other)] - Unknown // (not used in official jsons) -} - -#[derive(Debug, Clone)] -pub struct WrappedRegex(Regex); - -impl Deref for WrappedRegex { - type Target = Regex; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -struct RegexVisitor; -impl Visitor<'_> for RegexVisitor { - type Value = WrappedRegex; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a valid regular expression") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, { - Regex::new(v).map_err(Error::custom).map(WrappedRegex) - } -} - -impl<'de> Deserialize<'de> for WrappedRegex { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de> { - deserializer.deserialize_any(RegexVisitor) - } -} - -#[derive(Deserialize, Debug, Clone)] -pub struct OSRestriction { - #[serde(rename = "name")] - pub os: Option, - - pub version: Option, - pub arch: Option -} - -#[derive(Deserialize, Debug, Clone)] -pub struct CompatibilityRule { - pub action: RuleAction, - pub features: Option>, - pub os: Option -} - -pub trait FeatureMatcher { - fn matches(&self, feature: &str) -> bool; -} - -impl CompatibilityRule { - pub fn features_match(&self, checker: &impl FeatureMatcher) -> bool { - if let Some(m) = self.features.as_ref() { - for (feat, expect) in m { - if checker.matches(feat) != *expect { - return false; - } - } - } - - true - } -} - -#[derive(Deserialize, Debug, Clone)] -pub struct Argument { - #[serde(default)] - pub rules: Option>, - - #[serde(default)] - #[serde(deserialize_with = "string_or_array")] - pub value: Vec -} - -#[derive(Debug, Clone)] -pub struct WrappedArgument(Argument); - -impl FromStr for Argument { - type Err = Infallible; - - fn from_str(s: &str) -> Result { - Ok(Argument { - value: vec![s.to_owned()], - rules: None - }) - } -} - -impl Deref for WrappedArgument { - type Target = Argument; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl<'de> Deserialize<'de> for WrappedArgument { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de> { - Ok(WrappedArgument(string_or_struct(deserializer)?)) - } -} - -#[derive(Deserialize, Debug, Clone)] -pub struct Arguments { - pub game: Option>, - pub jvm: Option> -} - -impl Arguments { - fn apply_child(&mut self, other: &Arguments) { - if let Some(game) = other.game.as_ref() { - self.game.get_or_insert_default().splice(0..0, game.iter().cloned()); - } - - if let Some(jvm) = other.jvm.as_ref() { - self.jvm.get_or_insert_default().splice(0..0, jvm.iter().cloned()); - } - } -} - -#[derive(Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)] -#[serde(rename_all = "snake_case")] -pub enum DownloadType { - Client, - ClientMappings, - Server, - ServerMappings, - WindowsServer -} - -#[derive(Deserialize, Debug, Clone)] -pub struct DownloadInfo { - pub sha1: Option, - pub size: Option, - pub total_size: Option, // available for asset index - pub url: Option, // may not be present for libraries - pub id: Option, - pub path: Option -} - -#[derive(Deserialize, Debug, Clone)] 
-#[serde(rename_all = "camelCase")] -pub struct JavaVersionInfo { - pub component: String, - pub major_version: u32 -} - -#[derive(Deserialize, Debug, Clone)] -pub struct LibraryDownloads { - pub artifact: Option, - pub classifiers: Option> -} - -#[derive(Deserialize, Debug, Clone)] -pub struct LibraryExtractRule { - #[serde(default)] - pub exclude: Vec -} - -#[derive(Deserialize, Debug, Clone)] -pub struct Library { - pub downloads: Option, - pub name: String, - pub extract: Option, - pub natives: Option>, - pub rules: Option>, - - // old format - pub url: Option, - pub size: Option, - pub sha1: Option -} - -impl Library { - pub fn get_canonical_name(&self) -> String { - canonicalize_library_name(self.name.as_str(), self.natives.as_ref().map(|_| "__ozone_natives")) - } -} - -impl LibraryDownloads { - pub fn get_download_info(&self, classifier: Option<&str>) -> Option<&DownloadInfo> { - if let Some(classifier) = classifier { - self.classifiers.as_ref()?.get(classifier) - } else { - self.artifact.as_ref() - } - } -} - -#[derive(Deserialize, Debug, Clone)] -pub struct ClientLogging { - pub argument: String, - - #[serde(rename = "type")] - pub log_type: String, - pub file: DownloadInfo -} - -#[derive(Deserialize, Debug, Clone)] -pub struct Logging { - pub client: Option // other fields unknown -} - -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct CompleteVersion { - pub arguments: Option, - pub minecraft_arguments: Option, - - pub asset_index: Option, - pub assets: Option, - - pub compliance_level: Option, - - pub java_version: Option, - - #[serde(default)] - pub downloads: BTreeMap, - - #[serde(default)] - pub libraries: Vec, - - pub id: String, - pub jar: Option, // used as the jar filename if specified? (no longer used officially) - - pub logging: Option, - - pub main_class: Option, - pub minimum_launcher_version: Option, - - #[serde(deserialize_with = "deserialize_datetime_lenient")] - pub release_time: Option>, - #[serde(deserialize_with = "deserialize_datetime_lenient")] - pub time: Option>, - - #[serde(rename = "type")] - pub version_type: Option, - - pub compatibility_rules: Option>, // - pub incompatibility_reason: Option, // message shown when compatibility rules fail for this version - - pub inherits_from: Option - - /* omitting field `savableVersion' because it seems like a vestigial part from old launcher versions - * (also it isn't even a string that is present in modern liblauncher.so, so I assume it will never be used.) - */ -} - -impl CompleteVersion { - pub fn get_jar(&self) -> &String { - self.jar.as_ref().unwrap_or(&self.id) - } - - pub fn apply_child(&mut self, other: &CompleteVersion) { - macro_rules! 
replace_missing { - ($name:ident) => { - if self.$name.is_none() { - if let Some($name) = other.$name.as_ref() { - self.$name.replace($name.to_owned()); - } - } - }; - } - - if let Some(arguments) = other.arguments.as_ref() { - if let Some(my_args) = self.arguments.as_mut() { - my_args.apply_child(arguments); - } else { - self.arguments.replace(arguments.to_owned()); - } - } - - replace_missing!(minecraft_arguments); - replace_missing!(asset_index); - replace_missing!(assets); - replace_missing!(compliance_level); - replace_missing!(java_version); - - for (dltype, dl) in other.downloads.iter().by_ref() { - self.downloads.entry(*dltype).or_insert_with(|| dl.clone()); - } - - // we use extend here instead of splice for library resolution priority reasons - // (libraries earlier in the list will override libraries later in the list) - self.libraries.extend(other.libraries.iter().cloned()); - - replace_missing!(logging); - replace_missing!(main_class); - replace_missing!(minimum_launcher_version); - replace_missing!(release_time); - replace_missing!(time); - replace_missing!(version_type); - - if let Some(rules) = other.compatibility_rules.as_ref() { - if let Some(my_rules) = self.compatibility_rules.as_mut() { - my_rules.splice(0..0, rules.iter().cloned()); - } else { - self.compatibility_rules.replace(rules.to_owned()); - } - } - - replace_missing!(incompatibility_reason); - } -} - -fn canonicalize_library_name(name: &str, suffix: Option<&str>) -> String { - name.split(':') - .enumerate() - .filter(|(i, _)| *i != 2) - .map(|(_, s)| s.to_ascii_lowercase()) - .chain(suffix.into_iter().map(|s| s.to_owned())) - .collect::>() - .join(":") -} - -fn deserialize_datetime_lenient<'de, D>(deserializer: D) -> Result>, D::Error> -where - D: Deserializer<'de> -{ - struct DateTimeVisitor; - - impl Visitor<'_> for DateTimeVisitor { - type Value = Option>; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a valid datetime") - } - - fn visit_str(self, value: &str) -> Result - where - E: Error - { - match value.parse::>() { - Ok(dt) => Ok(Some(dt)), - Err(e) if e.kind() == ParseErrorKind::TooShort => { - // this probably just doesn't have an offset for some reason - match value.parse::() { - Ok(ndt) => Ok(Some(ndt.and_utc())), - Err(e) => Err(Error::custom(e)) - } - }, - Err(e) => Err(Error::custom(e)) - } - } - } - - deserializer.deserialize_str(DateTimeVisitor) -} - -// https://serde.rs/string-or-struct.html -fn string_or_struct<'de, T, D>(deserializer: D) -> Result -where - T: Deserialize<'de> + FromStr, - D: Deserializer<'de>, -{ - struct StringOrStruct(PhantomData T>); - - impl<'de, T> Visitor<'de> for StringOrStruct - where - T: Deserialize<'de> + FromStr, - { - type Value = T; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("string or map") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, { - Ok(FromStr::from_str(v).unwrap()) - } - - fn visit_map(self, map: A) -> Result - where - A: de::MapAccess<'de>, { - // wizardry (check comment in link) - Deserialize::deserialize(de::value::MapAccessDeserializer::new(map)) - } - } - - deserializer.deserialize_any(StringOrStruct(PhantomData)) -} - -// adapted from above -fn string_or_array<'de, T, D>(deserializer: D) -> Result, D::Error> -where - T: Deserialize<'de> + FromStr, - D: Deserializer<'de>, -{ - struct StringOrVec(PhantomData T>); - - impl<'de, T> Visitor<'de> for StringOrVec - where - T: Deserialize<'de> + FromStr, - { - type Value = Vec; - - fn 
expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("string or array") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, { - Ok(vec![FromStr::from_str(v).unwrap()]) - } - - fn visit_seq(self, seq: A) -> Result - where - A: SeqAccess<'de>, { - Deserialize::deserialize(de::value::SeqAccessDeserializer::new(seq)) - } - } - - deserializer.deserialize_any(StringOrVec(PhantomData)) -} - -#[cfg(test)] -mod tests { - use std::fs; - - use super::*; - - #[test] - fn test_it() { - let s = fs::read_to_string("./test_stuff/versions/1.7.10.json"); - - let arg: CompleteVersion = serde_json::from_str(s.unwrap().as_str()).unwrap(); - dbg!(arg); - } - - #[test] - fn test_it2() { - let s = fs::read_to_string("./test_stuff/version_manifest_v2.json"); - - let arg: VersionManifest = serde_json::from_str(s.unwrap().as_str()).unwrap(); - dbg!(arg); - } - - #[test] - fn test_it3() { - assert_eq!(canonicalize_library_name("group:artifact:version", None), String::from("group:artifact")); - assert_eq!(canonicalize_library_name("group:artifact:version:specifier", None), String::from("group:artifact:specifier")); - assert_eq!(canonicalize_library_name("not_enough:fields", None), String::from("not_enough:fields")); - assert_eq!(canonicalize_library_name("word", None), String::from("word")); - assert_eq!(canonicalize_library_name("", None), String::from("")); - assert_eq!(canonicalize_library_name("group:artifact:version", Some("suffix")), String::from("group:artifact:suffix")); - } -} diff --git a/src/version/manifest.rs b/src/version/manifest.rs deleted file mode 100644 index b2b8524..0000000 --- a/src/version/manifest.rs +++ /dev/null @@ -1,91 +0,0 @@ -use core::fmt; -use std::convert::Infallible; -use std::str::FromStr; -use chrono::{DateTime, Utc}; -use serde::{de::Visitor, Deserialize}; -use sha1_smol::Digest; - -#[derive(Deserialize, Debug)] -pub struct LatestVersions { - pub release: String, - pub snapshot: String -} - -#[derive(Debug, Clone)] -pub enum VersionType { - Snapshot, - Release, - OldBeta, - OldAlpha, - Other(String) -} - -impl FromStr for VersionType { - type Err = Infallible; - - fn from_str(s: &str) -> Result { - match s { - "snapshot" => Ok(Self::Snapshot), - "release" => Ok(Self::Release), - "old_beta" => Ok(Self::OldBeta), - "old_alpha" => Ok(Self::OldAlpha), - _ => Ok(Self::Other(s.to_owned())) - } - } -} - -impl VersionType { - pub fn to_str(&self) -> &str { - match self { - Self::Snapshot => "snapshot", - Self::Release => "release", - Self::OldBeta => "old_beta", - Self::OldAlpha => "old_alpha", - Self::Other(s) => s - } - } -} - -struct VersionTypeVisitor; - -impl Visitor<'_> for VersionTypeVisitor { - type Value = VersionType; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a Minecraft release type") - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, { - Ok(VersionType::from_str(v).unwrap(/* infallible */)) - } -} - -impl<'de> Deserialize<'de> for VersionType { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de> { - deserializer.deserialize_string(VersionTypeVisitor) - } -} - -// https://piston-meta.mojang.com/mc/game/version_manifest_v2.json -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct VersionManifestVersion { - pub id: String, - #[serde(rename = "type")] - pub version_type: VersionType, - pub url: String, - pub time: DateTime, - pub release_time: DateTime, - pub sha1: Digest, - pub compliance_level: u32 
-} - -#[derive(Deserialize, Debug)] -pub struct VersionManifest { - pub latest: LatestVersions, - pub versions: Vec<VersionManifestVersion> -} -- cgit v1.2.3-70-g09d2
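For reference, the ${name} / ${name:-default} / $${...} expansion exercised by the strsub.rs tests above can be sketched as a free-standing function over a HashMap. This is an illustrative re-implementation of the same semantics, not the crate's SubFunc-based API:

    use std::collections::HashMap;

    // Expand `${name}` and `${name:-default}`; `$${...}` passes through literally,
    // and a placeholder with no value and no default is kept verbatim.
    fn expand(input: &str, vars: &HashMap<&str, &str>) -> String {
        let mut out = String::new();
        let mut rest = input;

        while let Some(idx) = rest.find("${") {
            // "$${..." is an escape: emit "${" literally and keep scanning after it.
            if rest[..idx].ends_with('$') {
                out.push_str(&rest[..idx - 1]); // drop the escaping '$'
                out.push_str("${");
                rest = &rest[idx + 2..];
                continue;
            }

            out.push_str(&rest[..idx]);
            let Some(end) = rest[idx + 2..].find('}') else {
                rest = &rest[idx..];
                break; // unterminated: leave the tail untouched
            };

            let spec = &rest[idx + 2..idx + 2 + end];
            let (name, default) = match spec.find(":-") {
                Some(d) => (&spec[..d], Some(&spec[d + 2..])),
                None => (spec, None),
            };

            match vars.get(name).copied().or(default) {
                Some(v) => out.push_str(v),
                None => out.push_str(&rest[idx..idx + 2 + end + 1]),
            }

            rest = &rest[idx + 2 + end + 1..];
        }

        out.push_str(rest);
        out
    }
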
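The inheritance resolution removed in src/launcher/version.rs walks the inheritsFrom chain, remembering every id it has seen so a looping chain fails instead of spinning forever, and filling in only the fields the child left unset. A simplified sketch of that walk with stand-in types (not the real CompleteVersion):

    use std::collections::{HashMap, HashSet};

    // Stand-in for CompleteVersion: an id, an optional parent id, and one
    // inheritable field, which is enough to show the walk and the loop guard.
    #[derive(Clone)]
    struct Version {
        id: String,
        inherits_from: Option<String>,
        main_class: Option<String>,
    }

    fn resolve(start: &Version, all: &HashMap<String, Version>) -> Result<Version, String> {
        let mut resolved = start.clone();
        let mut seen: HashSet<String> = HashSet::new();
        seen.insert(resolved.id.clone());

        let mut next = resolved.inherits_from.clone();
        while let Some(parent_id) = next.take() {
            // Seeing the same id twice means the chain loops back on itself.
            if !seen.insert(parent_id.clone()) {
                return Err(format!("inheritance loop (saw {parent_id} twice)"));
            }
            let parent = all
                .get(&parent_id)
                .ok_or_else(|| format!("unknown version {parent_id}"))?;

            // Child values win; only fill in what the child left unset.
            if resolved.main_class.is_none() {
                resolved.main_class = parent.main_class.clone();
            }

            next = parent.inherits_from.clone();
        }

        Ok(resolved)
    }
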
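The lenient timestamp handling in deserialize_datetime_lenient amounts to: try a full DateTime<Utc> parse first, and fall back to a NaiveDateTime interpreted as UTC when the offset is missing. Stripped of the serde visitor plumbing, the fallback looks roughly like this sketch (error handling simplified to Option):

    use chrono::{DateTime, NaiveDateTime, Utc};

    // Accept both "2013-06-25T15:08:56+00:00"-style values and ones with no
    // offset at all, treating the latter as UTC.
    fn parse_lenient(value: &str) -> Option<DateTime<Utc>> {
        value
            .parse::<DateTime<Utc>>()
            .ok()
            .or_else(|| value.parse::<NaiveDateTime>().ok().map(|ndt| ndt.and_utc()))
    }
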