diff --git a/Cargo.lock b/Cargo.lock index 536a47f5..0cf407e5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + [[package]] name = "aes" version = "0.8.4" @@ -116,6 +122,18 @@ dependencies = [ "derive_arbitrary", ] +[[package]] +name = "arrayref" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + [[package]] name = "ascii" version = "0.9.3" @@ -224,7 +242,7 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.7.4", "object", "rustc-demangle", ] @@ -259,6 +277,19 @@ version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +[[package]] +name = "blake3" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -315,12 +346,13 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.6" +version = "1.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aba8f4e9906c7ce3c73463f62a7f0c65183ada1a2d47e397cc8810827f9694f" +checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6" dependencies = [ "jobserver", "libc", + "shlex", ] [[package]] @@ -818,12 +850,12 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.30" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.8.0", ] [[package]] @@ -1122,6 +1154,7 @@ dependencies = [ "anyhow", "async-stream", "base64 0.22.1", + "blake3", "chrono", "clap", "console", @@ -1134,6 +1167,7 @@ dependencies = [ "duct", "env_logger", "finl_unicode", + "flate2", "fs_extra", "futures", "git2", @@ -1169,6 +1203,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", + "sha256", "smart-default", "spdx-rs", "tabled", @@ -1190,6 +1225,8 @@ dependencies = [ "xml-rs", "xz2", "zip", + "zip-extensions", + "zstd", ] [[package]] @@ -1700,6 +1737,15 @@ dependencies = [ "adler", ] +[[package]] +name = "miniz_oxide" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +dependencies = [ + "adler2", +] + [[package]] name = "mio" version = "1.0.2" @@ -2565,6 +2611,18 @@ dependencies = [ "digest", ] +[[package]] +name = "sha256" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"18278f6a914fa3070aa316493f7d2ddfb9ac86ebc06fa3b83bffda487e9065b0" +dependencies = [ + "async-trait", + "bytes", + "hex", + "sha2", +] + [[package]] name = "sharded-slab" version = "0.1.7" @@ -2590,6 +2648,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "simd-adler32" version = "0.3.7" @@ -3669,9 +3733,9 @@ dependencies = [ [[package]] name = "zip" -version = "2.1.6" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40dd8c92efc296286ce1fbd16657c5dbefff44f1b4ca01cc5f517d8b7b3d3e2e" +checksum = "dc5e4288ea4057ae23afc69a4472434a87a2495cafce6632fd1c4ec9f5cf3494" dependencies = [ "aes", "arbitrary", @@ -3696,6 +3760,15 @@ dependencies = [ "zstd", ] +[[package]] +name = "zip-extensions" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "386508a00aae1d8218b9252a41f59bba739ccee3f8e420bb90bcb1c30d960d4a" +dependencies = [ + "zip", +] + [[package]] name = "zopfli" version = "0.8.1" diff --git a/hipcheck/Cargo.toml b/hipcheck/Cargo.toml index a506643f..5dd90f9e 100644 --- a/hipcheck/Cargo.toml +++ b/hipcheck/Cargo.toml @@ -33,6 +33,7 @@ benchmarking = [] async-stream = "0.3.5" base64 = "0.22.1" +blake3 = "1.5.4" content_inspector = "0.2.4" cyclonedx-bom = "0.7.0" dotenv = "0.15.0" @@ -47,6 +48,7 @@ env_logger = { version = "0.11.5" } finl_unicode = { version = "1.2.0", default-features = false, features = [ "grapheme_clusters", ] } +flate2 = "1.0.33" fs_extra = "1.3.0" futures = "0.3.30" # Vendor libgit2 and openssl so that they will be statically included @@ -106,6 +108,7 @@ semver = "1.0.9" serde = { version = "1.0.206", features = ["derive", "rc"] } serde_derive = "1.0.137" serde_json = "1.0.122" +sha256 = { version = "1.5.0", default-features = false } smart-default = "0.7.1" spdx-rs = "0.5.0" tabled = "0.16.0" @@ -117,6 +120,7 @@ tokio = { version = "1.39.3", features = [ "sync", "time", ] } +tokio-stream = "0.1.15" toml = "0.8.19" tonic = "0.12.1" thiserror = "1.0.63" @@ -130,8 +134,9 @@ walkdir = "2.5.0" which = { version = "6.0.3", default-features = false } xml-rs = "0.8.20" xz2 = "0.1.7" -zip = "2.1.6" -tokio-stream = "0.1.15" +zip = "2.2.0" +zip-extensions = "0.8.1" +zstd = "0.13.2" [build-dependencies] diff --git a/hipcheck/src/cache/plugin_cache.rs b/hipcheck/src/cache/plugin_cache.rs index 038ddef8..4e3cfd6c 100644 --- a/hipcheck/src/cache/plugin_cache.rs +++ b/hipcheck/src/cache/plugin_cache.rs @@ -4,6 +4,9 @@ use std::path::{Path, PathBuf}; use pathbuf::pathbuf; +use crate::plugin::{PluginArch, PluginName, PluginPublisher, PluginVersion}; + +/// Plugins are stored with the following format `////` pub struct HcPluginCache { path: PathBuf, } @@ -13,4 +16,19 @@ impl HcPluginCache { let plugins_path = pathbuf![path, "plugins"]; Self { path: plugins_path } } + + /// `////` + pub fn plugin_download_dir( + &self, + publisher: &PluginPublisher, + name: &PluginName, + version: &PluginVersion, + arch: &PluginArch, + ) -> PathBuf { + self.path + .join(&publisher.0) + .join(&name.0) + .join(&version.0) + .join(&arch.0) + } } diff --git a/hipcheck/src/plugin/parser/download_manifest.rs b/hipcheck/src/plugin/download_manifest.rs similarity index 84% rename from 
hipcheck/src/plugin/parser/download_manifest.rs rename to hipcheck/src/plugin/download_manifest.rs index b676708d..89d57147 100644 --- a/hipcheck/src/plugin/parser/download_manifest.rs +++ b/hipcheck/src/plugin/download_manifest.rs @@ -1,8 +1,18 @@ -use super::extract_data; -use crate::plugin::parser::ParseKdlNode; +use super::plugin_manifest::PluginArch; +use super::{extract_data, PluginName, PluginPublisher, PluginVersion}; +use crate::cache::plugin_cache::HcPluginCache; +use crate::context::Context; +use crate::plugin::retrieval::{download_plugin, extract_plugin}; +use crate::plugin::ParseKdlNode; use crate::string_newtype_parse_kdl_node; +use crate::util::http::agent::agent; use crate::{error::Error, hc_error}; +use fs_extra::dir::remove; use kdl::{KdlDocument, KdlNode, KdlValue}; +use std::fs::File; +use std::hash::Hash; +use std::io::{self, Read, Write}; +use std::path::{Path, PathBuf}; use std::{fmt::Display, str::FromStr}; use url::Url; @@ -56,7 +66,7 @@ pub struct HashWithDigest { } impl HashWithDigest { - fn new(hash_algorithm: HashAlgorithm, digest: String) -> Self { + pub fn new(hash_algorithm: HashAlgorithm, digest: String) -> Self { Self { hash_algorithm, digest, @@ -82,7 +92,7 @@ impl ParseKdlNode for HashWithDigest { } } -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum ArchiveFormat { /// archived with tar and compressed with the XZ algorithm TarXz, @@ -206,7 +216,7 @@ pub struct DownloadManifestEntry { // TODO: make this a SemVer type? /// A `SemVer` version of the plugin. Not a version requirement as in the plugin manifest file, /// but only a specific concrete version - pub version: String, + pub version: PluginVersion, // TODO: make this a target-triple enum? /// The target architecture for a plugin pub arch: String, @@ -233,7 +243,7 @@ impl ParseKdlNode for DownloadManifestEntry { return None; } // Per RFD #0004, version is of type String - let version = node.get("version")?.value().as_string()?.to_string(); + let version = PluginVersion(node.get("version")?.value().as_string()?.to_string()); // Per RFD #0004, arch is of type String let arch = node.get("arch")?.value().as_string()?.to_string(); @@ -258,9 +268,59 @@ impl ParseKdlNode for DownloadManifestEntry { } } +impl DownloadManifestEntry { + /// Download the specified plugin, verifies its size and hash and extracts it into the appropriate folder + pub fn download_and_unpack_plugin( + &self, + plugin_cache: &HcPluginCache, + publisher: &PluginPublisher, + name: &PluginName, + version: &PluginVersion, + arch: &PluginArch, + ) -> Result<(), Error> { + // currently plugins are put in HC_CACHE/plugins//// + let download_dir = plugin_cache.plugin_download_dir(publisher, name, version, arch); + + // currently, if the directory exists, then we assume that we downloaded the plugin successfully + if download_dir.exists() { + return Ok(()); + } + + let output_path = download_plugin( + &self.url, + download_dir.as_path(), + self.size.bytes, + &self.hash, + ) + .map_err(|e| { + // delete any leftover remnants + let _ = remove(download_dir.as_path()); + hc_error!("Error [{}] downloading '{}'", e, &self.url) + })?; + + extract_plugin( + output_path.as_path(), + download_dir.as_path(), + self.compress.format, + ) + .map_err(|e| { + // delete any leftover remnants + let _ = remove(download_dir.as_path()); + hc_error!( + "Error [{}] extracting plugin '{}/{}' version {} for {}", + e, + publisher.0, + name.0, + version.0, + arch.0 + ) + }) + } +} + #[derive(Debug, Clone, PartialEq, Eq)] 
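The new `plugin_download_dir` helper above fixes the cache layout as `plugins/<publisher>/<name>/<version>/<arch>` under the Hipcheck cache root. A minimal usage sketch (illustrative only, not part of the patch; it assumes `HcPluginCache::new` takes the cache root as a `&Path`, and `example_download_dir` is a hypothetical helper):

```rust
use std::path::{Path, PathBuf};

use crate::cache::plugin_cache::HcPluginCache;
use crate::plugin::{PluginArch, PluginName, PluginPublisher, PluginVersion};

/// Hypothetical helper: where `mitre/affiliation` v0.1.0 for aarch64-apple-darwin
/// would land, i.e. `<cache_root>/plugins/mitre/affiliation/0.1.0/aarch64-apple-darwin`.
fn example_download_dir(cache_root: &Path) -> PathBuf {
	// Assumption: `new` takes the cache root and appends the `plugins` subdirectory.
	let cache = HcPluginCache::new(cache_root);
	cache.plugin_download_dir(
		&PluginPublisher("mitre".to_string()),
		&PluginName("affiliation".to_string()),
		&PluginVersion("0.1.0".to_string()),
		&PluginArch("aarch64-apple-darwin".to_string()),
	)
}
```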
diff --git a/hipcheck/src/plugin/parser/download_manifest.rs b/hipcheck/src/plugin/download_manifest.rs
similarity index 84%
rename from hipcheck/src/plugin/parser/download_manifest.rs
rename to hipcheck/src/plugin/download_manifest.rs
index b676708d..89d57147 100644
--- a/hipcheck/src/plugin/parser/download_manifest.rs
+++ b/hipcheck/src/plugin/download_manifest.rs
@@ -1,8 +1,18 @@
-use super::extract_data;
-use crate::plugin::parser::ParseKdlNode;
+use super::plugin_manifest::PluginArch;
+use super::{extract_data, PluginName, PluginPublisher, PluginVersion};
+use crate::cache::plugin_cache::HcPluginCache;
+use crate::context::Context;
+use crate::plugin::retrieval::{download_plugin, extract_plugin};
+use crate::plugin::ParseKdlNode;
 use crate::string_newtype_parse_kdl_node;
+use crate::util::http::agent::agent;
 use crate::{error::Error, hc_error};
+use fs_extra::dir::remove;
 use kdl::{KdlDocument, KdlNode, KdlValue};
+use std::fs::File;
+use std::hash::Hash;
+use std::io::{self, Read, Write};
+use std::path::{Path, PathBuf};
 use std::{fmt::Display, str::FromStr};
 use url::Url;
@@ -56,7 +66,7 @@ pub struct HashWithDigest {
 }

 impl HashWithDigest {
-	fn new(hash_algorithm: HashAlgorithm, digest: String) -> Self {
+	pub fn new(hash_algorithm: HashAlgorithm, digest: String) -> Self {
 		Self {
 			hash_algorithm,
 			digest,
@@ -82,7 +92,7 @@ impl ParseKdlNode for HashWithDigest {
 	}
 }

-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
 pub enum ArchiveFormat {
 	/// archived with tar and compressed with the XZ algorithm
 	TarXz,
@@ -206,7 +216,7 @@ pub struct DownloadManifestEntry {
 	// TODO: make this a SemVer type?
 	/// A `SemVer` version of the plugin. Not a version requirement as in the plugin manifest file,
 	/// but only a specific concrete version
-	pub version: String,
+	pub version: PluginVersion,
 	// TODO: make this a target-triple enum?
 	/// The target architecture for a plugin
 	pub arch: String,
@@ -233,7 +243,7 @@ impl ParseKdlNode for DownloadManifestEntry {
 			return None;
 		}
 		// Per RFD #0004, version is of type String
-		let version = node.get("version")?.value().as_string()?.to_string();
+		let version = PluginVersion(node.get("version")?.value().as_string()?.to_string());
 		// Per RFD #0004, arch is of type String
 		let arch = node.get("arch")?.value().as_string()?.to_string();
@@ -258,9 +268,59 @@
 	}
 }

+impl DownloadManifestEntry {
+	/// Download the specified plugin, verifies its size and hash and extracts it into the appropriate folder
+	pub fn download_and_unpack_plugin(
+		&self,
+		plugin_cache: &HcPluginCache,
+		publisher: &PluginPublisher,
+		name: &PluginName,
+		version: &PluginVersion,
+		arch: &PluginArch,
+	) -> Result<(), Error> {
+		// currently plugins are put in HC_CACHE/plugins/<publisher>/<name>/<version>/<arch>
+		let download_dir = plugin_cache.plugin_download_dir(publisher, name, version, arch);
+
+		// currently, if the directory exists, then we assume that we downloaded the plugin successfully
+		if download_dir.exists() {
+			return Ok(());
+		}
+
+		let output_path = download_plugin(
+			&self.url,
+			download_dir.as_path(),
+			self.size.bytes,
+			&self.hash,
+		)
+		.map_err(|e| {
+			// delete any leftover remnants
+			let _ = remove(download_dir.as_path());
+			hc_error!("Error [{}] downloading '{}'", e, &self.url)
+		})?;
+
+		extract_plugin(
+			output_path.as_path(),
+			download_dir.as_path(),
+			self.compress.format,
+		)
+		.map_err(|e| {
+			// delete any leftover remnants
+			let _ = remove(download_dir.as_path());
+			hc_error!(
+				"Error [{}] extracting plugin '{}/{}' version {} for {}",
+				e,
+				publisher.0,
+				name.0,
+				version.0,
+				arch.0
+			)
+		})
+	}
+}
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct DownloadManifest {
-	entries: Vec<DownloadManifestEntry>,
+	pub entries: Vec<DownloadManifestEntry>,
 }

 impl DownloadManifest {
@@ -271,6 +331,19 @@ impl DownloadManifest {
 	pub fn len(&self) -> usize {
 		self.entries.len()
 	}
+
+	pub fn download_and_unpack_all_plugins(
+		&self,
+		plugin_cache: &HcPluginCache,
+		publisher: &PluginPublisher,
+		name: &PluginName,
+		version: &PluginVersion,
+		arch: &PluginArch,
+	) -> Result<(), Error> {
+		self.entries.iter().try_for_each(|entry| {
+			entry.download_and_unpack_plugin(plugin_cache, publisher, name, version, arch)
+		})
+	}
 }

 impl FromStr for DownloadManifest {
@@ -404,7 +477,7 @@ mod test {
 		.unwrap();

 		let expected_entry = DownloadManifestEntry {
-			version: version.to_string(),
+			version: PluginVersion(version.to_string()),
 			arch: arch.to_string(),
 			url: Url::parse(url).unwrap(),
 			hash: HashWithDigest::new(
@@ -445,7 +518,7 @@ plugin version="0.1.0" arch="x86_64-apple-darwin" {
 		let mut entries_iter = entries.iter();
 		assert_eq!(
 			&DownloadManifestEntry {
-				version: "0.1.0".to_owned(),
+				version: PluginVersion("0.1.0".to_owned()),
 				arch: "aarch64-apple-darwin".to_owned(),
 				url: Url::parse("https://github.com/mitre/hipcheck/releases/download/hipcheck-v3.4.0/hipcheck-aarch64-apple-darwin.tar.xz").unwrap(),
 				hash: HashWithDigest::new(HashAlgorithm::Sha256, "b8e111e7817c4a1eb40ed50712d04e15b369546c4748be1aa8893b553f4e756b".to_owned()),
@@ -458,7 +531,7 @@ plugin version="0.1.0" arch="x86_64-apple-darwin" {
 		);
 		assert_eq!(
 			&DownloadManifestEntry {
-				version: "0.1.0".to_owned(),
+				version: PluginVersion("0.1.0".to_owned()),
 				arch: "x86_64-apple-darwin".to_owned(),
 				url: Url::parse("https://github.com/mitre/hipcheck/releases/download/hipcheck-v3.4.0/hipcheck-x86_64-apple-darwin.tar.xz").unwrap(),
 				hash: HashWithDigest::new(HashAlgorithm::Sha256, "ddb8c6d26dd9a91e11c99b3bd7ee2b9585aedac6e6df614190f1ba2bfe86dc19".to_owned()),
diff --git a/hipcheck/src/plugin/parser/mod.rs b/hipcheck/src/plugin/kdl_parsing.rs
similarity index 93%
rename from hipcheck/src/plugin/parser/mod.rs
rename to hipcheck/src/plugin/kdl_parsing.rs
index dc2e85ad..7c7efd72 100644
--- a/hipcheck/src/plugin/parser/mod.rs
+++ b/hipcheck/src/plugin/kdl_parsing.rs
@@ -1,10 +1,7 @@
-mod download_manifest;
-mod plugin_manifest;
-
 use kdl::KdlNode;

 // Helper trait to make it easier to parse KdlNodes into our own types
-trait ParseKdlNode
+pub trait ParseKdlNode
 where
 	Self: Sized,
 {
@@ -16,7 +13,7 @@ where
 }

 /// Returns the first successful node that can be parsed into T, if there is one
-fn extract_data<T>(nodes: &[KdlNode]) -> Option<T>
+pub fn extract_data<T>(nodes: &[KdlNode]) -> Option<T>
 where
 	T: ParseKdlNode,
 {
diff --git a/hipcheck/src/plugin/mod.rs b/hipcheck/src/plugin/mod.rs
index ca7490e0..642f5231 100644
--- a/hipcheck/src/plugin/mod.rs
+++ b/hipcheck/src/plugin/mod.rs
@@ -1,9 +1,18 @@
+mod download_manifest;
+mod kdl_parsing;
 mod manager;
-mod parser;
+mod plugin_manifest;
+mod retrieval;
 mod types;

 pub use crate::plugin::manager::*;
 pub use crate::plugin::types::*;
+pub use download_manifest::{
+	ArchiveFormat, DownloadManifest, DownloadManifestEntry, HashAlgorithm, HashWithDigest,
+};
+pub use kdl_parsing::{extract_data, ParseKdlNode};
+pub use plugin_manifest::{PluginArch, PluginManifest, PluginName, PluginPublisher, PluginVersion};
+
 use crate::Result;
 use futures::future::join_all;
 use serde_json::Value;
diff --git a/hipcheck/src/plugin/parser/plugin_manifest.rs b/hipcheck/src/plugin/plugin_manifest.rs
similarity index 75%
rename from hipcheck/src/plugin/parser/plugin_manifest.rs
rename to hipcheck/src/plugin/plugin_manifest.rs
index 
5fcc523c..9fd9e734 100644 --- a/hipcheck/src/plugin/parser/plugin_manifest.rs +++ b/hipcheck/src/plugin/plugin_manifest.rs @@ -1,4 +1,4 @@ -use crate::plugin::parser::ParseKdlNode; +use crate::plugin::ParseKdlNode; use crate::string_newtype_parse_kdl_node; use crate::{error::Error, hc_error}; use core::panic; @@ -12,38 +12,41 @@ use super::extract_data; // NOTE: the implementation in this crate was largely derived from RFD #0004 #[derive(Clone, Debug, PartialEq, Eq)] -pub struct Publisher(pub String); -string_newtype_parse_kdl_node!(Publisher, "publisher"); +pub struct PluginPublisher(pub String); +string_newtype_parse_kdl_node!(PluginPublisher, "publisher"); #[derive(Clone, Debug, PartialEq, Eq)] -pub struct Name(pub String); -string_newtype_parse_kdl_node!(Name, "name"); +pub struct PluginName(pub String); +string_newtype_parse_kdl_node!(PluginName, "name"); #[derive(Clone, Debug, PartialEq, Eq)] -pub struct Version(pub String); -string_newtype_parse_kdl_node!(Version, "version"); +pub struct PluginVersion(pub String); +string_newtype_parse_kdl_node!(PluginVersion, "version"); #[derive(Clone, Debug, PartialEq, Eq)] pub struct License(pub String); string_newtype_parse_kdl_node!(License, "license"); -#[derive(Clone, Debug, PartialEq, Eq)] // TODO: target-triple enum -pub struct Entrypoints(pub HashMap); +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct PluginArch(pub String); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Entrypoints(pub HashMap); impl Entrypoints { pub fn new() -> Self { Self(HashMap::new()) } - pub fn insert(&mut self, arch: String, entrypoint: String) -> Result<(), Error> { + pub fn insert(&mut self, arch: PluginArch, entrypoint: String) -> Result<(), Error> { match self.0.insert(arch.clone(), entrypoint) { - Some(_duplicate_key) => Err(hc_error!("Multiple entrypoints specified for {}", arch)), + Some(_duplicate_key) => Err(hc_error!("Multiple entrypoints specified for {}", arch.0)), None => Ok(()), } } - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator { self.0.iter() } } @@ -60,11 +63,13 @@ impl ParseKdlNode for Entrypoints { let mut entrypoints = Entrypoints::new(); for entrypoint_spec in node.children()?.nodes() { // per RFD #0004, the value for "arch" is of type String - let arch = entrypoint_spec - .get("arch")? - .value() - .as_string()? - .to_string(); + let arch = PluginArch( + entrypoint_spec + .get("arch")? + .value() + .as_string()? + .to_string(), + ); // per RFD #0004, the actual entrypoint is the first positional arg after "arch" and is // of type String let entrypoint = entrypoint_spec @@ -74,7 +79,7 @@ impl ParseKdlNode for Entrypoints { .as_string()? 
.to_string(); if let Err(e) = entrypoints.insert(arch.clone(), entrypoint) { - log::error!("Duplicate entrypoint detected for [{arch}]"); + log::error!("Duplicate entrypoint detected for [{}]", arch.0); return None; } } @@ -84,15 +89,22 @@ impl ParseKdlNode for Entrypoints { #[derive(Clone, Debug, PartialEq, Eq)] pub struct PluginDependency { - pub name: String, - pub version: String, + pub publisher: PluginPublisher, + pub name: PluginName, + pub version: PluginVersion, // NOTE: until Hipcheck supports a registry, this is effectively required pub manifest: Option, } impl PluginDependency { - pub fn new(name: String, version: String, manifest: Option) -> Self { + pub fn new( + publisher: PluginPublisher, + name: PluginName, + version: PluginVersion, + manifest: Option, + ) -> Self { Self { + publisher, name, version, manifest, @@ -110,9 +122,17 @@ impl ParseKdlNode for PluginDependency { return None; } - // per RFD #0004, the name is the first positional entry and has type String - let name = node.entries().first()?.value().as_string()?.to_string(); - let version = node.get("version")?.value().as_string()?.to_string(); + // per RFD #0004, the name is the first positional entry and has type String and is of the format `/` + let publisher_and_name = node.entries().first()?.value().as_string()?; + let (publisher, name) = match publisher_and_name.split_once('/') { + Some((publisher, name)) => ( + PluginPublisher(publisher.to_string()), + PluginName(name.to_string()), + ), + None => return None, + }; + + let version = PluginVersion(node.get("version")?.value().as_string()?.to_string()); let manifest = match node.get("manifest") { Some(manifest) => { let raw_url = manifest.value().as_string()?; @@ -123,6 +143,7 @@ impl ParseKdlNode for PluginDependency { Some(Self { name, + publisher, version, manifest, }) @@ -174,9 +195,9 @@ impl ParseKdlNode for PluginDependencyList { #[derive(Clone, Debug, PartialEq, Eq)] pub struct PluginManifest { - pub publisher: Publisher, - pub name: Name, - pub version: Version, + pub publisher: PluginPublisher, + pub name: PluginName, + pub version: PluginVersion, pub license: License, pub entrypoints: Entrypoints, pub dependencies: PluginDependencyList, @@ -190,10 +211,11 @@ impl FromStr for PluginManifest { .map_err(|e| hc_error!("Error parsing plugin manifest file: {e}"))?; let nodes = document.nodes(); - let publisher: Publisher = + let publisher: PluginPublisher = extract_data(nodes).ok_or_else(|| hc_error!("Could not parse 'publisher'"))?; - let name: Name = extract_data(nodes).ok_or_else(|| hc_error!("Could not parse 'name'"))?; - let version: Version = + let name: PluginName = + extract_data(nodes).ok_or_else(|| hc_error!("Could not parse 'name'"))?; + let version: PluginVersion = extract_data(nodes).ok_or_else(|| hc_error!("Could not parse 'version'"))?; let license: License = extract_data(nodes).ok_or_else(|| hc_error!("Could not parse 'license'"))?; @@ -225,8 +247,8 @@ mod test { let data = r#"publisher "mitre""#; let node = KdlNode::from_str(data).unwrap(); assert_eq!( - Publisher::new("mitre".to_owned()), - Publisher::parse_node(&node).unwrap() + PluginPublisher::new("mitre".to_owned()), + PluginPublisher::parse_node(&node).unwrap() ) } @@ -235,8 +257,8 @@ mod test { let data = r#"version "0.1.0""#; let node = KdlNode::from_str(data).unwrap(); assert_eq!( - Version::new("0.1.0".to_owned()), - Version::parse_node(&node).unwrap() + PluginVersion::new("0.1.0".to_owned()), + PluginVersion::parse_node(&node).unwrap() ) } @@ -245,8 +267,8 @@ mod test { let data 
= r#"name "affiliation""#; let node = KdlNode::from_str(data).unwrap(); assert_eq!( - Name::new("affiliation".to_owned()), - Name::parse_node(&node).unwrap() + PluginName::new("affiliation".to_owned()), + PluginName::parse_node(&node).unwrap() ); } @@ -269,7 +291,7 @@ mod test { let mut expected = Entrypoints::new(); expected .insert( - "aarch64-apple-darwin".to_owned(), + PluginArch("aarch64-apple-darwin".to_owned()), "./hc-mitre-affiliation".to_owned(), ) .unwrap(); @@ -298,19 +320,19 @@ mod test { let node = KdlNode::from_str(multiple_entrypoint).unwrap(); let mut expected = Entrypoints::new(); expected.insert( - "aarch64-apple-darwin".to_owned(), + PluginArch("aarch64-apple-darwin".to_owned()), "./hc-mitre-affiliation".to_owned(), ); expected.insert( - "x86_64-apple-darwin".to_owned(), + PluginArch("x86_64-apple-darwin".to_owned()), "./hc-mitre-affiliation".to_owned(), ); expected.insert( - "x86_64-unknown-linux-gnu".to_owned(), + PluginArch("x86_64-unknown-linux-gnu".to_owned()), "./hc-mitre-affiliation".to_owned(), ); expected.insert( - "x86_64-pc-windows-msvc".to_owned(), + PluginArch("x86_64-pc-windows-msvc".to_owned()), "./hc-mitre-affiliation".to_owned(), ); assert_eq!(Entrypoints::parse_node(&node).unwrap(), expected) @@ -323,8 +345,9 @@ mod test { assert_eq!( PluginDependency::parse_node(&node).unwrap(), PluginDependency::new( - "mitre/git".to_owned(), - "0.1.0".to_owned(), + PluginPublisher("mitre".to_string()), + PluginName("git".to_string()), + PluginVersion("0.1.0".to_string()), Some( Url::parse( "https://github.com/mitre/hipcheck/blob/main/plugin/dist/mitre-git.kdl" @@ -344,8 +367,9 @@ mod test { let node = KdlNode::from_str(dependencies).unwrap(); let mut expected = PluginDependencyList::new(); expected.push(PluginDependency::new( - "mitre/git".to_owned(), - "0.1.0".to_owned(), + PluginPublisher("mitre".to_string()), + PluginName("git".to_string()), + PluginVersion("0.1.0".to_string()), Some( url::Url::parse( "https://github.com/mitre/hipcheck/blob/main/plugin/dist/mitre-git.kdl", @@ -355,8 +379,9 @@ mod test { .to_owned(), )); expected.push(PluginDependency::new( - "mitre/plugin2".to_owned(), - "0.4.0".to_owned(), + PluginPublisher("mitre".to_string()), + PluginName("plugin2".to_string()), + PluginVersion("0.4.0".to_string()), Some( url::Url::parse( "https://github.com/mitre/hipcheck/blob/main/plugin/dist/mitre-plugin2.kdl", @@ -387,26 +412,27 @@ dependencies { let mut entrypoints = Entrypoints::new(); entrypoints.insert( - "aarch64-apple-darwin".to_owned(), + PluginArch("aarch64-apple-darwin".to_owned()), "./hc-mitre-affiliation".to_owned(), ); entrypoints.insert( - "x86_64-apple-darwin".to_owned(), + PluginArch("x86_64-apple-darwin".to_owned()), "./hc-mitre-affiliation".to_owned(), ); entrypoints.insert( - "x86_64-unknown-linux-gnu".to_owned(), + PluginArch("x86_64-unknown-linux-gnu".to_owned()), "./hc-mitre-affiliation".to_owned(), ); entrypoints.insert( - "x86_64-pc-windows-msvc".to_owned(), + PluginArch("x86_64-pc-windows-msvc".to_owned()), "./hc-mitre-affiliation".to_owned(), ); let mut dependencies = PluginDependencyList::new(); dependencies.push(PluginDependency::new( - "mitre/git".to_owned(), - "0.1.0".to_owned(), + PluginPublisher("mitre".to_string()), + PluginName("git".to_string()), + PluginVersion("0.1.0".to_string()), Some( url::Url::parse( "https://github.com/mitre/hipcheck/blob/main/plugin/dist/mitre-git.kdl", @@ -416,9 +442,9 @@ dependencies { )); let expected_manifest = PluginManifest { - publisher: Publisher::new("mitre".to_owned()), - name: 
Name::new("affiliation".to_owned()), - version: Version::new("0.1.0".to_owned()), + publisher: PluginPublisher::new("mitre".to_owned()), + name: PluginName::new("affiliation".to_owned()), + version: PluginVersion::new("0.1.0".to_owned()), license: License::new("Apache-2.0".to_owned()), entrypoints, dependencies, diff --git a/hipcheck/src/plugin/retrieval.rs b/hipcheck/src/plugin/retrieval.rs new file mode 100644 index 00000000..5d6c99cb --- /dev/null +++ b/hipcheck/src/plugin/retrieval.rs @@ -0,0 +1,143 @@ +use std::{ + fs::File, + io::{Read, Seek, Write}, + path::{Path, PathBuf}, +}; + +use flate2::read::GzDecoder; +use tar::Archive; +use url::Url; +use xz2::read::XzDecoder; +use zip_extensions::{zip_extract, ZipArchiveExtensions}; + +use crate::error::Error; +use crate::hc_error; +use crate::plugin::{ArchiveFormat, HashAlgorithm, HashWithDigest}; +use crate::util::http::agent::agent; + +/// download a plugin, verify its size and hash +pub fn download_plugin( + url: &Url, + download_dir: &Path, + expected_size: u64, + expected_hash_with_digest: &HashWithDigest, +) -> Result { + // retrieve archive + let agent = agent(); + let response = agent + .get(url.as_str()) + .call() + .map_err(|e| hc_error!("Error [{}] retrieving download manifest {}", e, url))?; + let error_code = response.status(); + if error_code != 200 { + return Err(hc_error!( + "HTTP error code {} when retrieving {}", + error_code, + url + )); + } + + // extract bytes from response + // preallocate 10 MB to cut down on number of allocations needed + let mut contents = Vec::with_capacity(10 * 1024 * 1024); + let amount_read = response + .into_reader() + .read_to_end(&mut contents) + .map_err(|e| hc_error!("Error [{}] reading download into buffer", e))?; + contents.truncate(amount_read); + + // verify size of download + if expected_size != amount_read as u64 { + return Err(hc_error!( + "File size mismatch, Expected {} B, Found {} B", + expected_size, + amount_read + )); + } + + // verify hash + let actual_hash = match expected_hash_with_digest.hash_algorithm { + HashAlgorithm::Sha256 => sha256::digest(&contents), + HashAlgorithm::Blake3 => blake3::hash(&contents).to_string(), + }; + if actual_hash != expected_hash_with_digest.digest { + return Err(hc_error!( + "Plugin hash mismatch. 
Expected [{}], Received [{}]", + actual_hash, + expected_hash_with_digest.digest + )); + } + + let filename = url.path_segments().unwrap().last().unwrap(); + let output_path = Path::new(download_dir).join(filename); + let mut file = File::create(&output_path).map_err(|e| { + hc_error!( + "Error [{}] creating file: {}", + e, + output_path.to_string_lossy() + ) + })?; + file.write_all(&contents).map_err(|e| { + hc_error!( + "Error [{}] writing to file: {}", + e, + output_path.to_string_lossy() + ) + })?; + + Ok(output_path) +} + +/// Extract a bundle located at `bundle_path` into `extract_dir` by applying the specified `ArchiveFormat` extractions +pub fn extract_plugin( + bundle_path: &Path, + extract_dir: &Path, + archive_format: ArchiveFormat, +) -> Result<(), Error> { + let file = File::open(bundle_path).map_err(|e| { + hc_error!( + "Error [{}] opening file {}", + e, + bundle_path.to_string_lossy() + ) + })?; + + // perform decompression, if necessary, then unarchive + match archive_format { + ArchiveFormat::TarXz => { + let decoder = XzDecoder::new(file); + let mut archive = Archive::new(decoder); + archive.unpack(extract_dir).map_err(|e| { + hc_error!("Error [{}] extracting {}", e, bundle_path.to_string_lossy()) + })?; + } + ArchiveFormat::TarGz => { + let decoder = GzDecoder::new(file); + let mut archive = Archive::new(decoder); + archive.unpack(extract_dir).map_err(|e| { + hc_error!("Error [{}] extracting {}", e, bundle_path.to_string_lossy()) + })?; + } + ArchiveFormat::TarZst => { + let decoder = zstd::Decoder::new(file).unwrap(); + let mut archive = Archive::new(decoder); + archive.unpack(extract_dir).map_err(|e| { + hc_error!("Error [{}] extracting {}", e, bundle_path.to_string_lossy()) + })?; + } + ArchiveFormat::Tar => { + let mut archive = Archive::new(file); + archive.unpack(extract_dir).map_err(|e| { + hc_error!("Error [{}] extracting {}", e, bundle_path.to_string_lossy()) + })?; + } + ArchiveFormat::Zip => { + let mut archive = zip::ZipArchive::new(file).unwrap(); + archive.extract(extract_dir).map_err(|e| { + hc_error!("Error [{}] extracting {}", e, bundle_path.to_string_lossy()) + })?; + } + }; + + Ok(()) +}