From 7633fb7d6381adb2f47ac8552b861dfeb1cd2c64 Mon Sep 17 00:00:00 2001
From: Jonathan Lebon
Date: Tue, 3 Dec 2024 12:02:16 -0500
Subject: [PATCH 1/2] Stop compressing applehv and hyperv by default

Apple Hypervisor doesn't inherently require images to be compressed
with gzip. It's just that when we _do_ compress them, gzip is the most
convenient format because it's guaranteed to be available on macOS.
Similarly for Windows Hyper-V and ZIP. Notably, this is different from
e.g. GCP, where the platform itself dictates a `tar.gz` file.

So for consistency, the build step for `applehv` and `hyperv` should
output the disk image in the format it's intended to be used in, and
`cosa compress` then compresses it using e.g. `gzip` or `zip`.

This requires adding a new `platform-compressor` key in the
`image.yaml` file to allow overriding the default `compressor` setting
for certain platforms. This lets folks use the same code to build the
disk images for those platforms and compress them with the compressor
of their choice.
---
 src/cmd-compress            | 28 ++++++++++++++++++----------
 src/cosalib/qemuvariants.py |  8 ++------
 2 files changed, 20 insertions(+), 16 deletions(-)

diff --git a/src/cmd-compress b/src/cmd-compress
index 105309af40..ea11867ff9 100755
--- a/src/cmd-compress
+++ b/src/cmd-compress
@@ -53,7 +53,7 @@ else:
 print(f"Targeting build: {build}")
 
 # common extensions for known compressors
-ext_dict = {'xz': '.xz', 'gzip': '.gz', 'zstd': '.zst'}
+ext_dict = {'xz': '.xz', 'gzip': '.gz', 'zstd': '.zst', 'zip': '.zip'}
 
 
 def get_cpu_param(param):
@@ -65,7 +65,7 @@ def strip_ext(path):
     return path.rsplit(".", 1)[0]
 
 
-def compress_one_builddir(builddir):
+def compress_one_builddir(builddir, platform_compressors):
     print(f"Compressing: {builddir}")
     buildmeta_path = os.path.join(builddir, 'meta.json')
     # In the case where we are doing builds for different architectures
@@ -77,9 +77,6 @@ def compress_one_builddir(builddir):
     with open(buildmeta_path) as f:
         buildmeta = json.load(f)
 
-    # Find what extension to use based on the selected compressor
-    ext = ext_dict[args.compressor]
-
     tmpdir = 'tmp/compress'
     if os.path.isdir(tmpdir):
         shutil.rmtree(tmpdir)
@@ -106,6 +103,10 @@ def compress_one_builddir(builddir):
         if only_artifacts is not None and img_format not in only_artifacts:
             continue
 
+        compressor = platform_compressors.get(img_format) or args.compressor
+        # Find what extension to use based on the selected compressor
+        ext = ext_dict[compressor]
+
         file = img['path']
         filepath = os.path.join(builddir, file)
         if img.get('uncompressed-sha256') is None:
@@ -116,12 +117,16 @@ def compress_one_builddir(builddir):
             img['uncompressed-size'] = img['size']
             with open(tmpfile, 'wb') as f:
                 t = ncpu()
-                if args.compressor == 'xz':
+                if compressor == 'xz':
                     runcmd(['xz', '-c9', f'-T{t}', filepath], stdout=f)
-                elif args.compressor == 'zstd':
+                elif compressor == 'zstd':
                     runcmd(['zstd', '-10', '-c', f'-T{t}', filepath], stdout=f)
-                else:
+                elif compressor == 'gzip':
                     runcmd(['gzip', f'-{gzip_level}', '-c', filepath], stdout=f)  # pylint: disable=E0606
+                elif compressor == 'zip':
+                    runcmd(['zip', '-9j', '-', filepath], stdout=f)  # pylint: disable=E0606
+                else:
+                    raise Exception(f"Unknown compressor: {compressor}")
             file_with_ext = file + ext
             filepath_with_ext = filepath + ext
             compressed_size = os.path.getsize(tmpfile)
@@ -202,6 +207,8 @@ def uncompress_one_builddir(builddir):
                     runcmd(['zstd', '-dc', filepath], stdout=f)
                 elif file.endswith('gz'):
                     runcmd(['gzip', '-dc', filepath], stdout=f)
+                elif file.endswith('zip'):
+                    runcmd(['unzip', '-p', filepath], stdout=f)
                 else:
                     print(f"Unknown sufix of file {file}")
             file_without_ext = strip_ext(file)
@@ -266,16 +273,17 @@ changed = []
 if args.mode == "compress":
     # Find what compressor we should use, either picking it up from
     # CLI args or from image.json
+    image_json = get_image_json()
     gzip_level = 6
     if args.fast:
         args.compressor = 'gzip'
         gzip_level = 1
     elif not args.compressor:
-        image_json = get_image_json()
         args.compressor = image_json.get('compressor', DEFAULT_COMPRESSOR)
     for arch in builds.get_build_arches(build):
         builddir = builds.get_build_dir(build, arch)
-        changed.append(compress_one_builddir(builddir))
+        changed.append(compress_one_builddir(builddir,
+                                             image_json.get('platform-compressor', {})))
     if not any(changed):
         print("All builds already compressed")
 elif args.mode == "uncompress":
diff --git a/src/cosalib/qemuvariants.py b/src/cosalib/qemuvariants.py
index 99ae09737c..45fa3c82b6 100644
--- a/src/cosalib/qemuvariants.py
+++ b/src/cosalib/qemuvariants.py
@@ -54,9 +54,7 @@
     },
     "applehv": {
         "image_format": "raw",
-        "image_suffix": "raw.gz",
-        "platform": "applehv",
-        "compression": "gzip"
+        "platform": "applehv"
     },
     "azure": {
         "image_format": "vpc",
@@ -99,9 +97,7 @@
     },
     "hyperv": {
        "image_format": "vhdx",
-        "image_suffix": "vhdx.zip",
-        "platform": "hyperv",
-        "compression": "zip"
+        "platform": "hyperv"
     },
     "kubevirt": {
         "image_format": "qcow2",

From 158d8d7fcdf34665411a53fe0747a749446e78ce Mon Sep 17 00:00:00 2001
From: Dusty Mabe
Date: Tue, 3 Dec 2024 15:11:46 -0500
Subject: [PATCH 2/2] osbuild: output applehv and hyperv in uncompressed formats

They will now be compressed in the `cosa compress` CoreOS pipeline
stage.
---
 src/cmd-osbuild                                 |  6 +++---
 src/osbuild-manifests/platform.applehv.ipp.yaml | 16 ++++++++--------
 src/osbuild-manifests/platform.hyperv.ipp.yaml  | 17 +----------------
 3 files changed, 12 insertions(+), 27 deletions(-)

diff --git a/src/cmd-osbuild b/src/cmd-osbuild
index bc7cf88a98..12156fc040 100755
--- a/src/cmd-osbuild
+++ b/src/cmd-osbuild
@@ -8,9 +8,9 @@ dn=$(dirname "$0")
 # A list of supported platforms and the filename suffix of the main
 # artifact that platform produces.
 declare -A SUPPORTED_PLATFORMS=(
-    ['applehv']='raw.gz'
+    ['applehv']='raw'
     ['gcp']='tar.gz'
-    ['hyperv']='vhdx.zip'
+    ['hyperv']='vhdx'
     ['metal4k']='raw'
     ['metal']='raw'
     ['qemu']='qcow2'
@@ -384,7 +384,7 @@ main() {
 
     # Perform postprocessing
     case "$platform" in
-        applehv|gcp|hyperv)
+        gcp)
            # Update the meta.json and builddir with the generated artifact.
            # Skip Compression on these platforms as they are already compressed.
           postprocess_artifact "${platform}" "${imgpath}" "${imgname}" 'True'
diff --git a/src/osbuild-manifests/platform.applehv.ipp.yaml b/src/osbuild-manifests/platform.applehv.ipp.yaml
index a4a9ec2d65..ca926cd8db 100644
--- a/src/osbuild-manifests/platform.applehv.ipp.yaml
+++ b/src/osbuild-manifests/platform.applehv.ipp.yaml
@@ -55,15 +55,15 @@ pipelines:
     build:
       mpp-format-string: '{buildroot}'
     stages:
-      - type: org.osbuild.gzip
+      - type: org.osbuild.copy
         inputs:
-          file:
-            type: org.osbuild.files
+          tree:
+            type: org.osbuild.tree
             origin: org.osbuild.pipeline
             references:
-              name:raw-applehv-image:
-                file: disk.img
+              - name:raw-applehv-image
         options:
-          level: 9
-          filename:
-            mpp-format-string: '{artifact_name_prefix}-applehv.{arch}.raw.gz'
+          paths:
+            - from: input://tree/disk.img
+              to:
+                mpp-format-string: 'tree:///{artifact_name_prefix}-applehv.{arch}.raw'
diff --git a/src/osbuild-manifests/platform.hyperv.ipp.yaml b/src/osbuild-manifests/platform.hyperv.ipp.yaml
index 799386178f..3dc746ece6 100644
--- a/src/osbuild-manifests/platform.hyperv.ipp.yaml
+++ b/src/osbuild-manifests/platform.hyperv.ipp.yaml
@@ -51,7 +51,7 @@ pipelines:
             partition:
               mpp-format-int: '{image.layout[''boot''].partnum}'
             target: /boot
-  - name: raw-hyperv-image-vhdx
+  - name: hyperv
     build:
       mpp-format-string: '{host_as_buildroot}'
     stages:
@@ -68,18 +68,3 @@ pipelines:
           mpp-format-string: '{artifact_name_prefix}-hyperv.{arch}.vhdx'
         format:
           type: vhdx
-  - name: hyperv
-    build:
-      mpp-format-string: '{host_as_buildroot}'
-    stages:
-      - type: org.osbuild.zip
-        inputs:
-          tree:
-            type: org.osbuild.tree
-            origin: org.osbuild.pipeline
-            references:
-              - name:raw-hyperv-image-vhdx
-        options:
-          level: 9
-          filename:
-            mpp-format-string: '{artifact_name_prefix}-hyperv.{arch}.vhdx.zip'
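
With these two patches, the `applehv` and `hyperv` artifacts leave the build step
uncompressed and `cosa compress` picks the compressor per platform. A minimal
`image.yaml` sketch of the new `platform-compressor` key, assuming its sub-keys
match the artifact names looked up via `image_json.get('platform-compressor', {})`
in `cmd-compress` (the values below are illustrative, not mandated by the patches):

    # Sketch: default compressor for most artifacts, with per-platform
    # overrides. Valid names are the compressors cmd-compress knows about:
    # xz, gzip, zstd, zip.
    compressor: gzip
    platform-compressor:
      applehv: gzip
      hyperv: zip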