use std::collections::{HashMap, HashSet};
use std::path::Path;

use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter};
use tokio::fs;
use tokio::task::JoinSet;

use crate::model::rainy;
use crate::pkg::{Package, PkgKey, Remote};
use crate::util;
use crate::download_handler::DownloadHandler;

/// In-memory index of known packages, keyed by `PkgKey`. Locally installed
/// packages are loaded from the package directory on disk, remote metadata is
/// merged in from fetched listings, and install/delete operations emit Tauri
/// events so the frontend can follow progress.
pub struct PackageStore {
    store: HashMap<PkgKey, Package>,
    app: AppHandle,
    dlh: DownloadHandler,
    offline: bool,
}

#[derive(Clone, Serialize, Deserialize)]
pub struct Payload {
    pub pkg: PkgKey,
}

#[derive(Clone, Copy, Serialize, Deserialize, Debug)]
pub enum InstallResult {
    Ready,
    Deferred,
}

impl PackageStore {
    pub fn new(app: AppHandle) -> PackageStore {
        PackageStore {
            store: HashMap::new(),
            app: app.clone(),
            dlh: DownloadHandler::new(app),
            offline: true,
        }
    }

    pub fn get(&self, key: &PkgKey) -> Result<&Package> {
        self.store
            .get(key)
            .ok_or_else(|| anyhow!("Invalid package key"))
    }

    pub fn get_all(&self) -> HashMap<PkgKey, Package> {
        self.store.clone()
    }

    pub async fn reload_package(&mut self, key: PkgKey) {
        let dir = util::pkg_dir().join(&key.0);
        if let Ok(pkg) = Package::from_dir(dir).await {
            self.update_nonremote(key, pkg);
        } else {
            log::error!("couldn't reload {}", key);
        }
    }

    pub async fn reload_all(&mut self) -> Result<()> {
        let dirents = std::fs::read_dir(util::pkg_dir())?;
        let mut futures = JoinSet::new();
        for dir in dirents.flatten() {
            futures.spawn(Package::from_dir(dir.path()));
        }
        while let Some(res) = futures.join_next().await {
            if let Ok(Ok(pkg)) = res {
                self.update_nonremote(pkg.key(), pkg);
            }
        }
        Ok(())
    }

    // `rainy::Listing` is a placeholder name for the listing type defined in
    // `crate::model::rainy` (the element type consumed by `Package::from_rainy`).
    pub async fn fetch_listings() -> Result<Vec<rainy::Listing>> {
        use async_compression::futures::bufread::GzipDecoder;
        use futures::{
            io::{self, BufReader, ErrorKind},
            prelude::*,
        };

        let response = reqwest::get("https://rainy.patafour.zip/c/ongeki/api/v1/package/").await?;
        let reader = response
            .bytes_stream()
            .map_err(|e| io::Error::new(ErrorKind::Other, e))
            .into_async_read();
        let mut decoder = GzipDecoder::new(BufReader::new(reader));
        let mut data = String::new();
        decoder.read_to_string(&mut data).await?;
        Ok(serde_json::from_str(&data)?)
    }

    pub fn is_offline(&self) -> bool {
        self.offline
    }

    pub fn process_fetched_listings(&mut self, listings: Vec<rainy::Listing>) {
        for listing in listings {
            // This is None if the package has no versions for whatever reason
            if let Some(r) = Package::from_rainy(listing) {
                //log::warn!("D {}", &r.rmt.as_ref().unwrap().dependencies.first().unwrap_or(&"Nothing".to_owned()));
                match self.store.get_mut(&r.key()) {
                    Some(l) => {
                        l.rmt = r.rmt;
                    }
                    None => {
                        self.store.insert(r.key(), r);
                    }
                }
            }
        }
        self.offline = false;
    }
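    /// Installs a package from the local zip cache, optionally resolving and
    /// installing its dependencies first. If the zip has not been downloaded
    /// yet, the download is delegated to the `DownloadHandler` and
    /// `InstallResult::Deferred` is returned without installing; otherwise the
    /// archive is extracted over a freshly cleaned package directory and
    /// `InstallResult::Ready` is returned.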
    pub async fn install_package(
        &mut self,
        key: &PkgKey,
        force: bool,
        install_deps: bool,
    ) -> Result<InstallResult> {
        log::info!("installation request: {}/{}/{}", key, force, install_deps);
        let pkg = self
            .store
            .get(key)
            .ok_or_else(|| anyhow!("Attempted to install a nonexistent pkg"))?
            .clone();

        if pkg.loc.is_some() && !force {
            log::debug!("installation skipped");
            return Ok(InstallResult::Ready);
        }

        self.app.emit("install-start", Payload { pkg: key.to_owned() })?;

        let rmt = pkg
            .rmt
            .as_ref()
            .ok_or_else(|| anyhow!("Attempted to install a pkg without remote data"))?;

        if install_deps {
            let mut set = HashSet::new();
            self.resolve_deps(rmt.clone(), &mut set)?;
            for dep in set {
                // Recursive async call must be boxed; transitive deps are
                // already collected by resolve_deps, so install_deps is false.
                Box::pin(self.install_package(&dep, false, false)).await?;
            }
        }

        let zip_path = util::cache_dir().join(format!(
            "{}-{}-{}.zip",
            pkg.namespace, pkg.name, rmt.version
        ));

        if !zip_path.exists() {
            self.dlh.download_zip(&zip_path, &pkg)?;
            log::debug!("deferring {}", key);
            return Ok(InstallResult::Deferred);
        }

        let cache_file_r = std::fs::File::open(&zip_path)?;
        let mut archive = zip::ZipArchive::new(cache_file_r)?;

        self.delete_package(key, false).await?;

        let path = pkg.path();
        fs::create_dir(&path).await?;
        archive.extract(path)?;

        self.reload_package(key.to_owned()).await;
        self.app.emit("install-end-prelude", Payload { pkg: key.to_owned() })?;
        log::info!("installed {}", key);
        Ok(InstallResult::Ready)
    }

    pub async fn delete_package(&mut self, key: &PkgKey, force: bool) -> Result<()> {
        log::debug!("will delete {} {}", key, force);
        let pkg = self
            .store
            .get_mut(key)
            .ok_or_else(|| anyhow!("Attempted to delete a nonexistent pkg"))?;
        let path = pkg.path();
        if path.exists() && path.join("manifest.json").exists() {
            pkg.loc = None;
            let rv = Self::clean_up_package(&path).await;
            if rv.is_ok() {
                self.app.emit("install-end-prelude", Payload { pkg: key.to_owned() })?;
                log::info!("deleted {}", key);
            }
            rv
        } else if force {
            Err(anyhow!("Nothing to delete"))
        } else {
            Ok(())
        }
    }

    /// Replaces the locally loaded package data while preserving any remote
    /// metadata already attached to the old entry.
    fn update_nonremote(&mut self, key: PkgKey, mut new: Package) {
        if let Some(old) = self.store.remove(&key) {
            new.rmt = old.rmt;
        }
        self.store.insert(key, new);
    }

    async fn clean_up_dir(path: impl AsRef<Path>, name: &str) -> Result<()> {
        let path = path.as_ref().join(name);
        if path.exists() {
            tokio::fs::remove_dir_all(path)
                .await
                .map_err(|e| anyhow!("could not delete {}: {}", name, e))?;
        }
        Ok(())
    }

    // With `force` set, the file is removed unconditionally, so a missing file
    // surfaces as an error.
    async fn clean_up_file(path: impl AsRef<Path>, name: &str, force: bool) -> Result<()> {
        let path = path.as_ref().join(name);
        if force || path.exists() {
            tokio::fs::remove_file(path)
                .await
                .map_err(|e| anyhow!("Could not delete /{}: {}", name, e))?;
        }
        Ok(())
    }

    async fn clean_up_package(path: impl AsRef<Path>) -> Result<()> {
        // todo case sensitivity for linux
        Self::clean_up_dir(&path, "app").await?;
        Self::clean_up_dir(&path, "option").await?;
        Self::clean_up_dir(&path, "segatools").await?;
        Self::clean_up_file(&path, "icon.png", true).await?;
        Self::clean_up_file(&path, "manifest.json", true).await?;
        Self::clean_up_file(&path, "README.md", true).await?;
        tokio::fs::remove_dir(path.as_ref())
            .await
            .map_err(|e| anyhow!("Could not delete {}: {}", path.as_ref().to_string_lossy(), e))?;
        Ok(())
    }

    /// Recursively collects the transitive dependencies of `rmt` into `set`.
    fn resolve_deps(&self, rmt: Remote, set: &mut HashSet<PkgKey>) -> Result<()> {
        for d in rmt.dependencies {
            set.insert(d.clone());
            let subrmt = self
                .store
                .get(&d)
                .ok_or_else(|| anyhow!("Attempted to resolve deps of a nonexistent pkg"))?
                .rmt
                .clone()
                .ok_or_else(|| anyhow!("Attempted to resolve deps without fetching"))?;
            self.resolve_deps(subrmt, set)?;
        }
        Ok(())
    }
}
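
// A minimal test sketch exercising the filesystem cleanup helpers above. It
// assumes tokio's `rt` and `macros` features are enabled (for `#[tokio::test]`);
// the directory name under the OS temp dir is arbitrary. Note that
// clean_up_package force-removes icon.png, manifest.json and README.md, so all
// three must exist for the call to succeed.
#[cfg(test)]
mod tests {
    use super::PackageStore;

    #[tokio::test]
    async fn clean_up_package_removes_known_entries() {
        // Build a throwaway package directory layout under the OS temp dir.
        let root = std::env::temp_dir().join("pkg_store_cleanup_test");
        let _ = tokio::fs::remove_dir_all(&root).await; // start from a clean slate
        tokio::fs::create_dir_all(root.join("app")).await.unwrap();
        tokio::fs::write(root.join("manifest.json"), b"{}").await.unwrap();
        tokio::fs::write(root.join("icon.png"), b"").await.unwrap();
        tokio::fs::write(root.join("README.md"), b"").await.unwrap();

        // The whole package directory should be gone afterwards.
        PackageStore::clean_up_package(&root).await.unwrap();
        assert!(!root.exists());
    }
}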