summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--Cargo.lock5
-rw-r--r--ozone/Cargo.toml1
-rw-r--r--ozone/src/launcher.rs36
-rw-r--r--ozone/src/launcher/assets.rs12
-rw-r--r--ozone/src/launcher/download.rs290
-rw-r--r--ozone/src/launcher/jre.rs2
-rw-r--r--ozone/src/launcher/jre/download.rs8
-rw-r--r--ozone/src/util.rs13
-rw-r--r--ozone/src/util/progress.rs46
9 files changed, 274 insertions, 139 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 1ebf372..8bc5e5e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -435,9 +435,9 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
[[package]]
name = "async-trait"
-version = "0.1.85"
+version = "0.1.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056"
+checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d"
dependencies = [
"proc-macro2",
"quote",
@@ -3506,6 +3506,7 @@ dependencies = [
name = "ozone"
version = "0.1.0"
dependencies = [
+ "async-trait",
"cfg-if",
"chrono",
"dirs",
diff --git a/ozone/Cargo.toml b/ozone/Cargo.toml
index b05f066..60d4492 100644
--- a/ozone/Cargo.toml
+++ b/ozone/Cargo.toml
@@ -26,6 +26,7 @@ uuid = { version = "1.12.1", features = ["v4", "serde"] }
walkdir = "2.5.0"
zip = { version = "2.2.2", default-features = false, features = ["bzip2", "deflate", "deflate64", "lzma", "xz"] }
dirs = "6.0.0"
+async-trait = "0.1.86"
[dev-dependencies]
simple_logger = "5.0.0"
diff --git a/ozone/src/launcher.rs b/ozone/src/launcher.rs
index c658b79..336ff31 100644
--- a/ozone/src/launcher.rs
+++ b/ozone/src/launcher.rs
@@ -44,7 +44,7 @@ pub use crate::util::{EnsureFileError, FileVerifyError, IntegrityError};
use crate::assets::AssetIndex;
use runner::ArgumentType;
use crate::auth::Account;
-use crate::launcher::download::FileDownload;
+use crate::launcher::download::{FileDownload, RemoteChecksumDownload};
use crate::launcher::jre::{JavaRuntimeError, JavaRuntimeRepository};
use crate::launcher::version::VersionError;
use crate::version::manifest::VersionType;
@@ -126,7 +126,7 @@ pub enum LaunchError {
// library errors
LibraryDirError(PathBuf, io::Error),
- LibraryVerifyError(FileVerifyError),
+ LibraryVerifyError(Box<dyn Error>),
LibraryDownloadError,
LibraryExtractError(extract::ZipExtractError),
LibraryClasspathError(JoinPathsError),
@@ -177,14 +177,14 @@ impl Display for LaunchError {
}
impl Error for LaunchError {
- fn cause(&self) -> Option<&dyn Error> {
+ fn source(&self) -> Option<&(dyn Error + 'static)> {
match &self {
LaunchError::VersionInit(e) => Some(e),
LaunchError::LoadVersion(e) => Some(e),
LaunchError::ResolveVersion(e) => Some(e),
LaunchError::IncompatibleVersion(e) => Some(e),
LaunchError::LibraryDirError(_, e) => Some(e),
- LaunchError::LibraryVerifyError(e) => Some(e),
+ LaunchError::LibraryVerifyError(e) => Some(e.as_ref()),
LaunchError::LibraryExtractError(e) => Some(e),
LaunchError::LibraryClasspathError(e) => Some(e),
LaunchError::IO { error: e, .. } => Some(e),
@@ -377,7 +377,8 @@ impl Launcher {
}
trace!("Using library {} ({})", lib.name, classifier.unwrap_or("None"));
- dl.make_dirs().await.map_err(|e| LaunchError::LibraryDirError(dl.get_path().to_path_buf(), e))?;
+ fs::create_dir_all(dl.get_path().parent().unwrap())
+ .await.map_err(|e| LaunchError::LibraryDirError(dl.get_path().to_path_buf(), e))?;
if lib.natives.is_some() {
extract_jobs.push(LibraryExtractJob {
@@ -395,14 +396,12 @@ impl Launcher {
if self.online {
info!("Downloading {} libraries...", downloads.len());
let client = util::create_client();
- MultiDownloader::new(downloads.values_mut()).perform(&client).await
+ MultiDownloader::<dyn FileDownload, _, _>::new(downloads.values_mut()).perform(&client).await
.inspect_err(|e| warn!("library download failed: {e}"))
- .try_fold((), |_, _| async {Ok(())})
- .await
.map_err(|_| LaunchError::LibraryDownloadError)?;
} else {
info!("Verifying {} libraries...", downloads.len());
- download::verify_files(downloads.values_mut()).await.map_err(|e| {
+ download::verify_files::<dyn FileDownload, _>(downloads.values()).await.map_err(|e| {
warn!("A library could not be verified: {}", e);
warn!("Since the launcher is in offline mode, libraries cannot be downloaded. Please try again in online mode.");
LaunchError::LibraryVerifyError(e)
@@ -597,21 +596,24 @@ impl LibraryRepository {
Some(p)
}
- fn create_download(&self, lib: &Library, classifier: Option<&str>) -> Option<VerifiedDownload> {
+ fn create_download(&self, lib: &Library, classifier: Option<&str>) -> Option<Box<dyn FileDownload>> {
+ let remote_checksum = |base| {
+ let path = LibraryRepository::get_artifact_path(lib.name.as_str(), classifier)?;
+ let url: String = [base, path.to_string_lossy().as_ref()].into_iter().collect();
+ let check_url: String = [url.as_str(), ".sha1"].into_iter().collect();
+ Some(Box::new(RemoteChecksumDownload::new(url, check_url, self.home.join(path))) as Box<dyn FileDownload>)
+ };
+
if let Some(ref url) = lib.url {
- let path = Self::get_artifact_path(lib.name.as_str(), classifier)?;
- let url = [url.as_str(), path.to_string_lossy().as_ref()].into_iter().collect::<String>();
- Some(VerifiedDownload::new(url.as_ref(), self.home.join(path).as_path(), lib.size, lib.sha1)) // TODO: could download sha1
+ remote_checksum(url)
} else if let Some(ref downloads) = lib.downloads {
let dlinfo = downloads.get_download_info(classifier)?;
// drinking game: take a shot once per heap allocation
let path = self.home.join(dlinfo.path.as_ref().map(PathBuf::from).or_else(|| Self::get_artifact_path(lib.name.as_str(), classifier))?);
- Some(VerifiedDownload::new(dlinfo.url.as_ref()?, path.as_path(), dlinfo.size, dlinfo.sha1))
+ Some(Box::new(VerifiedDownload::new(dlinfo.url.as_ref()?, path.as_path(), dlinfo.size, dlinfo.sha1)))
} else {
- let path = Self::get_artifact_path(lib.name.as_str(), classifier)?;
- let url = ["https://libraries.minecraft.net/", path.to_string_lossy().as_ref()].into_iter().collect::<String>();
- Some(VerifiedDownload::new(url.as_ref(), self.home.join(path).as_path(), lib.size, lib.sha1)) // TODO: could download sha1
+ remote_checksum("https://libraries.minecraft.net/")
}
}
diff --git a/ozone/src/launcher/assets.rs b/ozone/src/launcher/assets.rs
index c13b1a2..b1f429a 100644
--- a/ozone/src/launcher/assets.rs
+++ b/ozone/src/launcher/assets.rs
@@ -33,7 +33,7 @@ pub enum AssetError {
DownloadIndex(reqwest::Error),
Integrity(IntegrityError),
AssetObjectDownload,
- AssetVerifyError(FileVerifyError),
+ AssetVerifyError(Box<dyn Error>),
AssetNameError(&'static str)
}
@@ -62,7 +62,7 @@ impl Error for AssetError {
AssetError::IndexParse(error) => Some(error),
AssetError::DownloadIndex(error) => Some(error),
AssetError::Integrity(error) => Some(error),
- AssetError::AssetVerifyError(error) => Some(error),
+ AssetError::AssetVerifyError(error) => Some(error.as_ref()),
_ => None
}
}
@@ -221,7 +221,7 @@ impl AssetRepository {
Self::ensure_dir(path.parent().unwrap()).await.map_err(|error| AssetError::IO { error, what: "creating directory for object" })?;
- downloads.push(VerifiedDownload::new(&Self::get_object_url(object), &path, Some(object.size), Some(object.hash)));
+ downloads.push(VerifiedDownload::new(Self::get_object_url(object), &path, Some(object.size), Some(object.hash)));
}
if self.online {
@@ -229,12 +229,10 @@ impl AssetRepository {
let client = util::create_client();
MultiDownloader::with_concurrent(downloads.iter_mut(), 32).perform(&client).await
.inspect_err(|e| warn!("asset download failed: {e}"))
- .try_fold((), |_, _| async {Ok(())})
- .await
.map_err(|_| AssetError::AssetObjectDownload)?;
} else {
info!("Verifying {} asset objects...", downloads.len());
- super::download::verify_files(downloads.iter_mut()).await.map_err(AssetError::AssetVerifyError)?;
+ super::download::verify_files(downloads.iter()).await.map_err(AssetError::AssetVerifyError)?;
}
Ok(())
@@ -282,7 +280,7 @@ impl AssetRepository {
},
Err(e) => {
debug!("Error while reconstructing assets: {e}");
- Err(AssetError::AssetVerifyError(e))
+ Err(AssetError::AssetVerifyError(e.into_inner()))
}
}
})
diff --git a/ozone/src/launcher/download.rs b/ozone/src/launcher/download.rs
index 6f0e317..036537d 100644
--- a/ozone/src/launcher/download.rs
+++ b/ozone/src/launcher/download.rs
@@ -1,30 +1,43 @@
+use std::borrow::{Borrow, BorrowMut};
use std::error::Error;
+use std::ffi::{OsStr, OsString};
use std::fmt::{Debug, Display, Formatter};
+use std::io::{ErrorKind, Read};
+use std::marker::PhantomData;
use std::path::{Path, PathBuf};
-use futures::{stream, StreamExt, TryStream, TryStreamExt};
-use log::debug;
-use reqwest::{Client, Method, RequestBuilder};
+use async_trait::async_trait;
+use futures::{stream, StreamExt, TryFutureExt, TryStreamExt};
+use log::{debug, warn};
+use reqwest::{Client, RequestBuilder, Response, StatusCode};
use sha1_smol::{Digest, Sha1};
-use tokio::fs;
use tokio::fs::File;
-use tokio::io::{self, AsyncWriteExt};
+use tokio::io::AsyncWriteExt;
use crate::util;
-use crate::util::{FileVerifyError, IntegrityError};
+use crate::util::IntegrityError;
+#[async_trait]
pub trait Download: Debug + Display {
// return Ok(None) to skip downloading this file
async fn prepare(&mut self, client: &Client) -> Result<Option<RequestBuilder>, Box<dyn Error>>;
async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box<dyn Error>>;
async fn finish(&mut self) -> Result<(), Box<dyn Error>>;
+
+ async fn verify_offline(&self) -> Result<(), Box<dyn Error>>;
}
pub trait FileDownload: Download {
fn get_path(&self) -> &Path;
}
-pub struct MultiDownloader<'j, T: Download + 'j, I: Iterator<Item = &'j mut T>> {
+pub struct MultiDownloader<'d, D, DR, I>
+where
+ D: Download + ?Sized + 'd,
+ DR: BorrowMut<D> + 'd,
+ I: Iterator<Item = &'d mut DR>
+{
jobs: I,
- nconcurrent: usize
+ nconcurrent: usize,
+ _phantom: PhantomData<&'d D>
}
#[derive(Debug, Clone, Copy)]
@@ -49,13 +62,13 @@ impl Display for Phase {
}
}
-pub struct PhaseDownloadError<'j, T: Download> {
+pub struct PhaseDownloadError {
phase: Phase,
inner: Box<dyn Error>,
- job: &'j T
+ job: String
}
-impl<T: Download> Debug for PhaseDownloadError<'_, T> {
+impl Debug for PhaseDownloadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("PhaseDownloadError")
.field("phase", &self.phase)
@@ -65,67 +78,79 @@ impl<T: Download> Debug for PhaseDownloadError<'_, T> {
}
}
-impl<T: Download> Display for PhaseDownloadError<'_, T> {
+impl Display for PhaseDownloadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "error while {} ({}): {}", self.phase, self.job, self.inner)
}
}
-impl<T: Download> Error for PhaseDownloadError<'_, T> {
+impl Error for PhaseDownloadError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
Some(&*self.inner)
}
}
-impl<'j, T: Download> PhaseDownloadError<'j, T> {
- fn new(phase: Phase, inner: Box<dyn Error>, job: &'j T) -> Self {
+impl PhaseDownloadError {
+ fn new(phase: Phase, inner: Box<dyn Error>, job: &(impl Download + ?Sized)) -> Self {
PhaseDownloadError {
- phase, inner, job
+ phase, inner, job: job.to_string()
}
}
}
-impl<'j, T: Download + 'j, I: Iterator<Item = &'j mut T>> MultiDownloader<'j, T, I> {
- pub fn new(jobs: I) -> MultiDownloader<'j, T, I> {
+impl<'j, D, DR, I> MultiDownloader<'j, D, DR, I>
+where
+ D: Download + ?Sized + 'j,
+ DR: BorrowMut<D> + 'j,
+ I: Iterator<Item = &'j mut DR>
+{
+ pub fn new(jobs: I) -> MultiDownloader<'j, D, DR, I> {
Self::with_concurrent(jobs, 24)
}
- pub fn with_concurrent(jobs: I, n: usize) -> MultiDownloader<'j, T, I> {
+ pub fn with_concurrent(jobs: I, n: usize) -> MultiDownloader<'j, D, DR, I> {
assert!(n > 0);
MultiDownloader {
jobs,
- nconcurrent: n
+ nconcurrent: n,
+ _phantom: PhantomData
}
}
- pub async fn perform(self, client: &'j Client) -> impl TryStream<Ok = (), Error = PhaseDownloadError<'j, T>> {
- stream::iter(self.jobs).map(move |job| Ok(async move {
- macro_rules! map_err {
- ($result:expr, $phase:expr, $job:expr) => {
- match $result {
- Ok(v) => v,
- Err(e) => return Err(PhaseDownloadError::new($phase, e.into(), $job))
- }
+ pub async fn perform(self, client: &'j Client) -> Result<(), PhaseDownloadError> {
+ macro_rules! map_err {
+ ($result:expr, $phase:expr, $job:expr) => {
+ match $result {
+ Ok(v) => v,
+ Err(e) => return Err(PhaseDownloadError::new($phase, e.into(), $job))
}
}
+ }
- let Some(rq) = map_err!(job.prepare(client).await, Phase::Prepare, job) else {
- return Ok(())
- };
+ stream::iter(self.jobs.map(Ok))
+ .try_filter_map(move |job_| async move {
+ let job = job_.borrow_mut();
+ let Some(rq) = map_err!(job.prepare(client).await, Phase::Prepare, job) else {
+ return Ok(None);
+ };
- let mut data = map_err!(map_err!(rq.send().await, Phase::Send, job).error_for_status(), Phase::Send, job).bytes_stream();
+ Ok(Some((job_, rq)))
+ })
+ .try_for_each_concurrent(self.nconcurrent, move |(job_, rq)| async move {
+ let job = job_.borrow_mut();
+ let mut data = map_err!(map_err!(rq.send().await, Phase::Send, job).error_for_status(), Phase::Send, job).bytes_stream();
- while let Some(bytes) = data.next().await {
- let bytes = map_err!(bytes, Phase::Receive, job);
+ while let Some(bytes) = data.next().await {
+ let bytes = map_err!(bytes, Phase::Receive, job);
- map_err!(job.handle_chunk(bytes.as_ref()).await, Phase::HandleChunk, job);
- }
+ map_err!(job.handle_chunk(bytes.as_ref()).await, Phase::HandleChunk, job);
+ }
- job.finish().await.map_err(|e| PhaseDownloadError::new(Phase::Finish, e, job))?;
+ job.finish().await.map_err(|e| PhaseDownloadError::new(Phase::Finish, e, job))?;
- Ok(())
- })).try_buffer_unordered(self.nconcurrent)
+ Ok(())
+ }).await
}
}
@@ -157,10 +182,10 @@ impl Display for VerifiedDownload {
}
impl VerifiedDownload {
- pub fn new(url: &str, path: &Path, expect_size: Option<usize>, expect_sha1: Option<Digest>) -> VerifiedDownload {
+ pub fn new(url: impl Into<String>, path: impl Into<PathBuf>, expect_size: Option<usize>, expect_sha1: Option<Digest>) -> VerifiedDownload {
VerifiedDownload {
- url: url.to_owned(),
- path: path.to_owned(),
+ url: url.into(),
+ path: path.into(),
expect_size,
expect_sha1,
@@ -187,25 +212,19 @@ impl VerifiedDownload {
pub fn get_url(&self) -> &str {
&self.url
}
-
+
+ #[allow(dead_code)]
pub fn get_expect_size(&self) -> Option<usize> {
self.expect_size
}
-
+
+ #[allow(dead_code)]
pub fn get_expect_sha1(&self) -> Option<Digest> {
self.expect_sha1
}
-
- pub async fn make_dirs(&self) -> Result<(), io::Error> {
- fs::create_dir_all(self.path.parent().expect("download created with no containing directory (?)")).await
- }
-
- async fn open_output(&mut self) -> Result<(), io::Error> {
- self.file.replace(File::create(&self.path).await?);
- Ok(())
- }
}
+#[async_trait]
impl Download for VerifiedDownload {
async fn prepare(&mut self, client: &Client) -> Result<Option<RequestBuilder>, Box<dyn Error>> {
if !util::should_download(&self.path, self.expect_size, self.expect_sha1).await? {
@@ -213,9 +232,9 @@ impl Download for VerifiedDownload {
}
// potentially racy to close the file and reopen it... :/
- self.open_output().await?;
+ self.file = Some(File::create(&self.path).await?);
- Ok(Some(client.request(Method::GET, &self.url)))
+ Ok(Some(client.get(&self.url)))
}
async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box<dyn Error>> {
@@ -248,6 +267,11 @@ impl Download for VerifiedDownload {
Ok(())
}
+
+ async fn verify_offline(&self) -> Result<(), Box<dyn Error>> {
+ debug!("Verifying download {}", self.path.display());
+ util::verify_file(&self.path, self.expect_size, self.expect_sha1).err_into().await
+ }
}
impl FileDownload for VerifiedDownload {
@@ -256,13 +280,153 @@ impl FileDownload for VerifiedDownload {
}
}
-pub async fn verify_files(files: impl Iterator<Item = &mut VerifiedDownload>) -> Result<(), FileVerifyError> {
- stream::iter(files)
- .map(|dl| Ok(async move {
- debug!("Verifying library {}", dl.get_path().display());
- util::verify_file(dl.get_path(), dl.get_expect_size(), dl.get_expect_sha1()).await
- }))
- .try_buffer_unordered(32)
- .try_fold((), |_, _| async {Ok(())})
+pub struct RemoteChecksumDownload {
+ url: String,
+ check_url: String,
+
+ expect_sha1: Option<Digest>,
+
+ path: PathBuf,
+ file: Option<File>,
+ sha1: Sha1,
+ tally: usize
+}
+
+impl RemoteChecksumDownload {
+ pub fn new(url: impl Into<String>, check_url: impl Into<String>, path: impl Into<PathBuf>) -> RemoteChecksumDownload {
+ RemoteChecksumDownload {
+ url: url.into(),
+ check_url: check_url.into(),
+ path: path.into(),
+
+ expect_sha1: None,
+
+ file: None,
+ sha1: Sha1::new(),
+ tally: 0
+ }
+ }
+
+ fn get_sha1_path(&self) -> PathBuf {
+ [self.path.as_os_str(), OsStr::new(".sha1")].into_iter().collect::<OsString>().into()
+ }
+}
+
+impl Display for RemoteChecksumDownload {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ write!(f, "downloading {} to {} (checksum at {})", self.url, self.path.display(), self.check_url)
+ }
+}
+
+impl Debug for RemoteChecksumDownload {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("RemoteChecksumDownload")
+ .field("url", &self.url)
+ .field("check_url", &self.check_url)
+ .field("path", &self.path).finish()
+ }
+}
+
+const MAX_DIGEST_RESPONSE_LEN: usize = 1024; // really we only need 40 bytes lol
+async fn read_response_digest(res: Response) -> Result<Digest, Box<dyn Error>> {
+ res.bytes_stream().err_into::<Box<dyn Error>>().try_fold(String::new(), |mut dig_string, bytes| async move {
+ if bytes.len() > MAX_DIGEST_RESPONSE_LEN - dig_string.len() {
+ Err(format!("oversize digest response ({} > {MAX_DIGEST_RESPONSE_LEN})", dig_string.len() + bytes.len()).into())
+ } else {
+ (&*bytes).read_to_string(&mut dig_string)?;
+ Ok(dig_string)
+ }
+ }).await?.parse().map_err(Into::into)
+}
+
+#[async_trait]
+impl Download for RemoteChecksumDownload {
+ async fn prepare(&mut self, client: &Client) -> Result<Option<RequestBuilder>, Box<dyn Error>> {
+ debug!("Downloading SHA1 fingerprint for {} from {}", self.path.display(), self.check_url);
+ let res = client.get(self.check_url.as_str()).send().await
+ .inspect_err(|e| warn!("Failed to download SHA1 fingerprint for {} (from {}): {}", self.path.display(), self.check_url, e))?;
+
+ self.expect_sha1 = if res.status() == StatusCode::OK {
+ read_response_digest(res).await
+ .inspect_err(|e| warn!("Error downloading SHA1 fingerprint for {} (from {}): {}", self.path.display(), self.check_url, e))
+ .ok()
+ } else {
+ warn!("Cannot download SHA1 fingerprint for {} (from {}): received status code {}", self.path.display(), self.check_url, res.status());
+ None
+ };
+
+ if let Some(dig) = self.expect_sha1 {
+ let _ = tokio::fs::write(self.get_sha1_path(), dig.to_string()).await
+ .inspect_err(|e| warn!("Failed to save SHA1 fingerprint for {} - integrity will not be checked while offline: {e}", self.path.display()));
+ }
+
+ if !util::should_download(&self.path, None, self.expect_sha1).await? {
+ return Ok(None);
+ }
+
+ self.file = Some(File::create(&self.path).await?);
+
+ Ok(Some(client.get(self.url.as_str())))
+ }
+
+ async fn handle_chunk(&mut self, chunk: &[u8]) -> Result<(), Box<dyn Error>> {
+ self.file.as_mut().unwrap().write_all(chunk).await?;
+ self.sha1.update(chunk);
+ self.tally += chunk.len();
+
+ Ok(())
+ }
+
+ async fn finish(&mut self) -> Result<(), Box<dyn Error>> {
+ let digest = self.sha1.digest();
+
+ if let Some(d) = self.expect_sha1 {
+ if d != digest {
+                debug!("Failed to download {}: sha1 mismatch (exp {}, got {}).", self.path.display(), d, digest);
+ return Err(IntegrityError::Sha1Mismatch { expect: d, actual: digest }.into());
+ }
+ }
+
+ debug!("Successfully downloaded {} ({} bytes).", self.path.display(), self.tally);
+
+ drop(self.file.take().unwrap());
+
+ Ok(())
+ }
+
+ async fn verify_offline(&self) -> Result<(), Box<dyn Error>> {
+ // try to load fingerprint from file
+ let digest_str = match tokio::fs::read_to_string(self.get_sha1_path()).await {
+ Ok(s) => s,
+ Err(e) if e.kind() == ErrorKind::NotFound => {
+ warn!("Unable to read sha1 fingerprint to verify {} - have to assume it's good: {e}", self.path.display());
+ return Ok(());
+ },
+ Err(e) => return Err(e.into())
+ };
+
+ let digest: Digest = digest_str.parse()
+ .inspect_err(|e| warn!("Malformed sha1 fingerprint for file {}: {e}", self.path.display()))
+ .map_err::<Box<dyn Error>, _>(Into::into)?;
+
+ util::verify_file(&self.path, None, Some(digest)).err_into().await
+ }
+}
+
+impl FileDownload for RemoteChecksumDownload {
+ fn get_path(&self) -> &Path {
+ &self.path
+ }
+}
+
+pub async fn verify_files<'d, D, DR>(files: impl Iterator<Item = &'d DR>) -> Result<(), Box<dyn Error>>
+where
+ D: Download + ?Sized,
+ DR: Borrow<D> + 'd
+{
+ stream::iter(files.map(Ok))
+ .try_for_each_concurrent(32, |d| async move {
+ d.borrow().verify_offline().await
+ })
.await
}
diff --git a/ozone/src/launcher/jre.rs b/ozone/src/launcher/jre.rs
index b710fe6..901a6a0 100644
--- a/ozone/src/launcher/jre.rs
+++ b/ozone/src/launcher/jre.rs
@@ -198,8 +198,6 @@ impl JavaRuntimeRepository {
dl.perform(&client).await
.inspect_err(|e| warn!("jre file download failed: {e}"))
- .try_fold((), |_, _| async { Ok(()) })
- .await
.map_err(|_| JavaRuntimeError::MultiDownloadError)
}
diff --git a/ozone/src/launcher/jre/download.rs b/ozone/src/launcher/jre/download.rs
index ddf1ff6..fc457ef 100644
--- a/ozone/src/launcher/jre/download.rs
+++ b/ozone/src/launcher/jre/download.rs
@@ -2,6 +2,8 @@ use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::io::Write;
use std::path::{PathBuf};
+use async_trait::async_trait;
+use futures::TryFutureExt;
use log::debug;
use lzma_rs::decompress;
use reqwest::{Client, RequestBuilder};
@@ -112,6 +114,7 @@ impl Display for LzmaDownloadJob {
}
}
+#[async_trait]
impl Download for LzmaDownloadJob {
async fn prepare(&mut self, client: &Client) -> Result<Option<RequestBuilder>, Box<dyn Error>> {
if !util::should_download(&self.path, self.raw_size, self.raw_sha1).await? {
@@ -192,4 +195,9 @@ impl Download for LzmaDownloadJob {
Ok(())
}
+
+ async fn verify_offline(&self) -> Result<(), Box<dyn Error>> {
+ debug!("Verifying JRE download {}", self.path.display());
+ util::verify_file(&self.path, self.raw_size, self.raw_sha1).err_into().await
+ }
}
diff --git a/ozone/src/util.rs b/ozone/src/util.rs
index 48e0abc..b765a17 100644
--- a/ozone/src/util.rs
+++ b/ozone/src/util.rs
@@ -55,8 +55,8 @@ pub fn verify_sha1(expect: Digest, s: &str) -> Result<(), Digest> {
#[derive(Debug)]
pub enum FileVerifyError {
Integrity(PathBuf, IntegrityError),
- Open(PathBuf, tokio::io::Error),
- Read(PathBuf, tokio::io::Error),
+ Open(PathBuf, io::Error),
+ Read(PathBuf, io::Error),
}
impl Display for FileVerifyError {
@@ -79,6 +79,15 @@ impl Error for FileVerifyError {
}
}
+impl FileVerifyError {
+ pub fn into_inner(self) -> Box<dyn Error> {
+ match self {
+ FileVerifyError::Read(_, e) | FileVerifyError::Open(_, e) => e.into(),
+ FileVerifyError::Integrity(_, e) => e.into()
+ }
+ }
+}
+
pub async fn verify_file(path: impl AsRef<Path>, expect_size: Option<usize>, expect_sha1: Option<Digest>) -> Result<(), FileVerifyError> {
let path = path.as_ref();
diff --git a/ozone/src/util/progress.rs b/ozone/src/util/progress.rs
index 33373b7..e69de29 100644
--- a/ozone/src/util/progress.rs
+++ b/ozone/src/util/progress.rs
@@ -1,46 +0,0 @@
-use std::borrow::Cow;
-use std::cmp::min;
-use std::sync::Mutex;
-
-pub trait Progress {
- fn set_description(&self, step: Cow<'static, str>);
- fn set_progress_full(&self, value: usize, max: usize);
- fn set_progress(&self, value: usize);
- fn inc_progress(&self, by: usize);
-}
-
-struct StepProgressInner<'p> {
- steps_total: usize,
- step_current: usize,
- progress: &'p dyn Progress
-}
-
-pub struct StepProgress<'p> {
- inner: Mutex<StepProgressInner<'p>>
-}
-
-impl<'p> StepProgress<'p> {
- pub fn new(steps: usize, progress: &'p dyn Progress) -> Self {
- Self {
- inner: Mutex::new(StepProgressInner {
- steps_total: steps,
- step_current: 0usize,
- progress
- })
- }
- }
-
- pub fn set_step(&self, step: usize, max: usize) {
- let mut inner = self.inner.lock().unwrap();
- assert!(step < inner.steps_total);
-
- inner.step_current = step;
- inner.progress.set_progress_full(step * max, inner.steps_total * max);
- }
-
- pub fn inc_step(&self, by: usize, max: usize) {
- let mut inner = self.inner.lock().unwrap();
- inner.step_current = min(inner.steps_total, inner.step_current + by);
- inner.progress.set_progress_full(inner.step_current * max, inner.steps_total * max);
- }
-}