Merge branch 'main' into rust-dep-info
Signed-off-by: C0D3 M4513R <[email protected]>
C0D3-M4513R authored Sep 12, 2024
2 parents b889150 + 1b86326 commit 8e0f693
Showing 256 changed files with 14,079 additions and 2,997 deletions.
28 changes: 22 additions & 6 deletions .binny.yaml
@@ -18,31 +18,31 @@ tools:
# used to sign mac binaries at release
- name: quill
version:
want: v0.4.1
want: v0.4.2
method: github-release
with:
repo: anchore/quill

# used for linting
- name: golangci-lint
version:
want: v1.59.1
want: v1.60.3
method: github-release
with:
repo: golangci/golangci-lint

# used for showing the changelog at release
- name: glow
version:
want: v1.5.1
want: v2.0.0
method: github-release
with:
repo: charmbracelet/glow

# used for signing the checksums file at release
- name: cosign
version:
want: v2.2.4
want: v2.4.0
method: github-release
with:
repo: sigstore/cosign
@@ -58,7 +58,7 @@ tools:
# used to release all artifacts
- name: goreleaser
version:
want: v2.0.1
want: v2.2.0
method: github-release
with:
repo: goreleaser/goreleaser
@@ -111,7 +111,23 @@ tools:
# used for triggering a release
- name: gh
version:
want: v2.52.0
want: v2.55.0
method: github-release
with:
repo: cli/cli

# used to upload test fixture cache
- name: oras
version:
want: v1.2.0
method: github-release
with:
repo: oras-project/oras

# used to upload test fixture cache
- name: yq
version:
want: v4.44.3
method: github-release
with:
repo: mikefarah/yq
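
The tool pins above are plain YAML managed by binny, so the wanted versions are easy to audit with a few lines of scripting. A minimal sketch, not part of this commit, assuming PyYAML is installed and the snippet is run from the repository root:

#!/usr/bin/env python3
# Illustrative only: list each binny-managed tool and its pinned version
# from the .binny.yaml shown in the diff above.
import yaml  # PyYAML

with open(".binny.yaml") as f:
    config = yaml.safe_load(f)

for tool in config.get("tools", []):
    name = tool.get("name")
    want = tool.get("version", {}).get("want")
    method = tool.get("method")
    print(f"{name:<16} {want or '(unpinned)':<10} via {method}")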
1 change: 1 addition & 0 deletions .bouncer.yaml
@@ -6,6 +6,7 @@ permit:
- MPL.*
- ISC
- WTFPL
- Unlicense

ignore-packages:
# packageurl-go is released under the MIT license located in the root of the repo at /mit.LICENSE
6 changes: 3 additions & 3 deletions .github/ISSUE_TEMPLATE/config.yml
@@ -1,6 +1,6 @@
contact_links:

- name: Join the Slack community 💬
# link to our community Slack registration page
url: https://anchore.com/slack
- name: Join our Discourse community 💬
# link to our community Discourse site
url: https://anchore.com/discourse
about: 'Come chat with us! Ask for help, join our software development efforts, or just give us feedback!'
23 changes: 14 additions & 9 deletions .github/actions/bootstrap/action.yaml
@@ -13,16 +13,15 @@ inputs:
cache-key-prefix:
description: "Prefix all cache keys with this value"
required: true
default: "1ac8281053"
compute-fingerprints:
description: "Compute test fixture fingerprints"
default: "181053ac82"
download-test-fixture-cache:
description: "Download test fixture cache from OCI and github actions"
required: true
default: "true"
default: "false"
bootstrap-apt-packages:
description: "Space delimited list of tools to install via apt"
default: "libxml2-utils"


runs:
using: "composite"
steps:
@@ -54,8 +53,14 @@ runs:
run: |
DEBIAN_FRONTEND=noninteractive sudo apt update && sudo -E apt install -y ${{ inputs.bootstrap-apt-packages }}
- name: Create all cache fingerprints
if: inputs.compute-fingerprints == 'true'
shell: bash
run: make fingerprints
- name: Restore ORAS cache from github actions
if: inputs.download-test-fixture-cache == 'true'
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
with:
path: ${{ github.workspace }}/.tmp/oras-cache
key: ${{ inputs.cache-key-prefix }}-oras-cache

- name: Download test fixture cache
if: inputs.download-test-fixture-cache == 'true'
shell: bash
run: make download-test-fixture-cache
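
The new bootstrap steps restore .tmp/oras-cache from the GitHub Actions cache (keyed by <cache-key-prefix>-oras-cache) and then run make download-test-fixture-cache. The Makefile target itself is not part of this diff; as a rough, hypothetical sketch of the idea, a download step driven by the pinned oras binary might look like the following. The registry reference is invented, and the .tool/oras path is an assumption based on the .tool/gh path used in trigger-release.sh below:

#!/usr/bin/env python3
# Hypothetical sketch of a fixture-cache download step using the pinned oras
# binary; the real `make download-test-fixture-cache` logic is not shown here.
import os
import subprocess

ORAS = ".tool/oras"                                # assumed binny tool path
CACHE_DIR = ".tmp/oras-cache"                      # directory restored by actions/cache above
REF = "ghcr.io/example/syft-fixture-cache:latest"  # hypothetical OCI reference

os.makedirs(CACHE_DIR, exist_ok=True)
subprocess.run([ORAS, "pull", REF, "--output", CACHE_DIR], check=True)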
135 changes: 135 additions & 0 deletions .github/scripts/find_cache_paths.py
@@ -0,0 +1,135 @@
#!/usr/bin/env python3
from __future__ import annotations

import os
import glob
import sys
import json
import hashlib


IGNORED_PREFIXES = []


def find_fingerprints_and_check_dirs(base_dir):
all_fingerprints = set(glob.glob(os.path.join(base_dir, '**', 'test*', '**', '*.fingerprint'), recursive=True))

all_fingerprints = {os.path.relpath(fp) for fp in all_fingerprints
if not any(fp.startswith(prefix) for prefix in IGNORED_PREFIXES)}

if not all_fingerprints:
show("No .fingerprint files or cache directories found.")
exit(1)

missing_content = []
valid_paths = set()
fingerprint_contents = []

for fingerprint in all_fingerprints:
path = fingerprint.replace('.fingerprint', '')

if not os.path.exists(path):
missing_content.append(path)
continue

if not os.path.isdir(path):
valid_paths.add(path)
continue

if os.listdir(path):
valid_paths.add(path)
else:
missing_content.append(path)

with open(fingerprint, 'r') as f:
content = f.read().strip()
fingerprint_contents.append((fingerprint, content))

return sorted(valid_paths), missing_content, fingerprint_contents


def parse_fingerprint_contents(fingerprint_content):
input_map = {}
for line in fingerprint_content.splitlines():
digest, path = line.split()
input_map[path] = digest
return input_map


def calculate_sha256(fingerprint_contents):
sorted_fingerprint_contents = sorted(fingerprint_contents, key=lambda x: x[0])

concatenated_contents = ''.join(content for _, content in sorted_fingerprint_contents)

sha256_hash = hashlib.sha256(concatenated_contents.encode()).hexdigest()

return sha256_hash


def calculate_file_sha256(file_path):
sha256_hash = hashlib.sha256()
with open(file_path, 'rb') as f:
for byte_block in iter(lambda: f.read(4096), b""):
sha256_hash.update(byte_block)
return sha256_hash.hexdigest()


def show(*s: str):
print(*s, file=sys.stderr)


def main(file_path: str | None):
base_dir = '.'
valid_paths, missing_content, fingerprint_contents = find_fingerprints_and_check_dirs(base_dir)

if missing_content:
show("The following paths are missing or have no content, but have corresponding .fingerprint files:")
for path in sorted(missing_content):
show(f"- {path}")
show("Please ensure these paths exist and have content if they are directories.")
exit(1)

sha256_hash = calculate_sha256(fingerprint_contents)

paths_with_digests = []
for path in sorted(valid_paths):
fingerprint_file = f"{path}.fingerprint"
try:
if os.path.exists(fingerprint_file):
file_digest = calculate_file_sha256(fingerprint_file)

# Parse the fingerprint file to get the digest/path tuples
with open(fingerprint_file, 'r') as f:
fingerprint_content = f.read().strip()
input_map = parse_fingerprint_contents(fingerprint_content)

paths_with_digests.append({
"path": path,
"digest": file_digest,
"input": input_map
})

except Exception as e:
show(f"Error processing {fingerprint_file}: {e}")
raise e


output = {
"digest": sha256_hash,
"paths": paths_with_digests
}

content = json.dumps(output, indent=2, sort_keys=True)

if file_path:
with open(file_path, 'w') as f:
f.write(content)

print(content)


if __name__ == "__main__":
file_path = None
if len(sys.argv) > 1:
file_path = sys.argv[1]
main(file_path)
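
A hypothetical way to exercise the script above end to end: build a tiny fixture tree with a matching .fingerprint file, run the script, and print the JSON it emits (a combined digest plus one entry per cached path). The directory name and digest below are invented for illustration, and the snippet assumes it is run from the repository root:

#!/usr/bin/env python3
# Hypothetical demo of find_cache_paths.py against a throwaway fixture tree.
import json
import os
import subprocess
import tempfile

SCRIPT = os.path.abspath(".github/scripts/find_cache_paths.py")
FAKE_DIGEST = "deadbeef" * 8  # stand-in sha256 value

with tempfile.TemporaryDirectory() as tmp:
    fixture = os.path.join(tmp, "syft", "test-fixtures", "image-example")
    os.makedirs(fixture)
    with open(os.path.join(fixture, "Dockerfile"), "w") as f:
        f.write("FROM busybox\n")
    # the fingerprint file sits next to the directory it describes: <path>.fingerprint
    with open(fixture + ".fingerprint", "w") as f:
        f.write(f"{FAKE_DIGEST} image-example/Dockerfile\n")

    result = subprocess.run(["python3", SCRIPT], cwd=tmp,
                            capture_output=True, text=True, check=True)
    print(json.dumps(json.loads(result.stdout), indent=2))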
70 changes: 70 additions & 0 deletions .github/scripts/fingerprint_docker_fixtures.py
@@ -0,0 +1,70 @@
#!/usr/bin/env python3

import os
import subprocess
import hashlib

BOLD = '\033[1m'
YELLOW = '\033[0;33m'
RESET = '\033[0m'


def print_message(message):
print(f"{YELLOW}{message}{RESET}")


def sha256sum(filepath):
h = hashlib.sha256()
with open(filepath, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b""):
h.update(chunk)
return h.hexdigest()


def is_git_tracked_or_untracked(directory):
"""Returns a sorted list of files in the directory that are tracked or not ignored by Git."""
result = subprocess.run(
["git", "ls-files", "--cached", "--others", "--exclude-standard"],
cwd=directory,
stdout=subprocess.PIPE,
text=True
)
return sorted(result.stdout.strip().splitlines())


def find_test_fixture_dirs_with_images(base_dir):
"""Find directories that contain 'test-fixtures' and at least one 'image-*' directory."""
for root, dirs, files in os.walk(base_dir):
if 'test-fixtures' in root:
image_dirs = [d for d in dirs if d.startswith('image-')]
if image_dirs:
yield os.path.realpath(root)


def generate_fingerprints():
print_message("creating fingerprint files for docker fixtures...")

for test_fixture_dir in find_test_fixture_dirs_with_images('.'):
cache_fingerprint_path = os.path.join(test_fixture_dir, 'cache.fingerprint')

with open(cache_fingerprint_path, 'w') as fingerprint_file:
for image_dir in find_image_dirs(test_fixture_dir):
for file in is_git_tracked_or_untracked(image_dir):
file_path = os.path.join(image_dir, file)
checksum = sha256sum(file_path)
path_from_fixture_dir = os.path.relpath(file_path, test_fixture_dir)
fingerprint_file.write(f"{checksum} {path_from_fixture_dir}\n")


def find_image_dirs(test_fixture_dir):
"""Find all 'image-*' directories inside a given test-fixture directory."""
result = []
for root, dirs, files in os.walk(test_fixture_dir):
for dir_name in dirs:
if dir_name.startswith('image-'):
result.append(os.path.join(root, dir_name))
return sorted(result)


if __name__ == "__main__":
generate_fingerprints()
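
Each cache.fingerprint written above contains one "<sha256> <path relative to the fixture dir>" line per tracked file. A small companion sketch, hypothetical and not part of this commit, that re-verifies such a file against what is currently on disk:

#!/usr/bin/env python3
# Hypothetical helper: re-verify a cache.fingerprint written by
# fingerprint_docker_fixtures.py by recomputing each listed file's sha256.
import hashlib
import os
import sys


def sha256sum(filepath):
    h = hashlib.sha256()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b""):
            h.update(chunk)
    return h.hexdigest()


def stale_entries(fingerprint_path):
    fixture_dir = os.path.dirname(fingerprint_path)
    stale = []
    with open(fingerprint_path) as f:
        for line in f:
            expected, rel_path = line.split()
            full_path = os.path.join(fixture_dir, rel_path)
            if not os.path.isfile(full_path) or sha256sum(full_path) != expected:
                stale.append(rel_path)
    return stale


if __name__ == "__main__":
    stale = stale_entries(sys.argv[1])
    if stale:
        print("stale or missing fixture files:", *stale, sep="\n  ")
        sys.exit(1)
    print("cache.fingerprint is up to date")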
2 changes: 2 additions & 0 deletions .github/scripts/labeler.py
100644 → 100755
@@ -1,3 +1,5 @@
#!/usr/bin/env python3

from __future__ import annotations

import sys
2 changes: 2 additions & 0 deletions .github/scripts/labeler_test.py
100644 → 100755
@@ -1,3 +1,5 @@
#!/usr/bin/env python3

import unittest
from unittest.mock import patch
import subprocess
21 changes: 14 additions & 7 deletions .github/scripts/trigger-release.sh
@@ -4,12 +4,19 @@ set -eu
bold=$(tput bold)
normal=$(tput sgr0)

if ! [ -x "$(command -v gh)" ]; then
echo "The GitHub CLI could not be found. To continue follow the instructions at https://github.com/cli/cli#installation"
GH_CLI=.tool/gh

if ! [ -x "$(command -v $GH_CLI)" ]; then
echo "The GitHub CLI could not be found. run: make bootstrap"
exit 1
fi

gh auth status
$GH_CLI auth status

# set the default repo in cases where multiple remotes are defined
$GH_CLI repo set-default anchore/syft

export GITHUB_TOKEN="${GITHUB_TOKEN-"$($GH_CLI auth token)"}"

# we need all of the git state to determine the next version. Since tagging is done by
# the release pipeline it is possible to not have all of the tags from previous releases.
@@ -37,14 +44,14 @@ done

echo "${bold}Kicking off release for ${NEXT_VERSION}${normal}..."
echo
gh workflow run release.yaml -f version=${NEXT_VERSION}
$GH_CLI workflow run release.yaml -f version=${NEXT_VERSION}

echo
echo "${bold}Waiting for release to start...${normal}"
sleep 10

set +e

echo "${bold}Head to the release workflow to monitor the release:${normal} $(gh run list --workflow=release.yaml --limit=1 --json url --jq '.[].url')"
id=$(gh run list --workflow=release.yaml --limit=1 --json databaseId --jq '.[].databaseId')
gh run watch $id --exit-status || (echo ; echo "${bold}Logs of failed step:${normal}" && GH_PAGER="" gh run view $id --log-failed)
echo "${bold}Head to the release workflow to monitor the release:${normal} $($GH_CLI run list --workflow=release.yaml --limit=1 --json url --jq '.[].url')"
id=$($GH_CLI run list --workflow=release.yaml --limit=1 --json databaseId --jq '.[].databaseId')
$GH_CLI run watch $id --exit-status || (echo ; echo "${bold}Logs of failed step:${normal}" && GH_PAGER="" $GH_CLI run view $id --log-failed)
2 changes: 1 addition & 1 deletion .github/workflows/benchmark-testing.yaml
@@ -39,7 +39,7 @@ jobs:
OUTPUT="${OUTPUT//$'\r'/'%0D'}" # URL encode all '\r' characters
echo "result=$OUTPUT" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: benchmark-test-results
path: test/results/**/*