chore(debug): setup sentry for error tracking #139
name: CI/CD
on:
  push:
  pull_request:
  workflow_dispatch:
env:
  PYTHON_VERSION: '3.11'
jobs:
  dependencies:
    name: Install Dependencies
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        name: Checkout
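      # ~/.local holds the Poetry installation (snok/install-poetry) and ~/.cache
      # holds Poetry's package cache and virtualenvs; caching both, keyed on
      # poetry.lock, lets the downstream jobs restore the environment instead of
      # reinstalling dependencies (the exact layout depends on Poetry's defaults).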
      - name: Save Cached Poetry
        id: cached-poetry
        uses: actions/cache@v4
        with:
          path: |
            ~/.cache
            ~/.local
          key: poetry-${{ hashFiles('poetry.lock') }}
      - uses: actions/setup-python@v5
        name: Setup Python
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          virtualenvs-create: true
      - name: Install dependencies
        run: poetry install --extras=dev --with dev
  type-check:
    name: Type Check
    needs:
      - dependencies
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        name: Checkout
      - uses: actions/setup-python@v5
        name: Setup Python
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: Load Cached Poetry
        id: cached-poetry
        uses: actions/cache/restore@v4
        with:
          path: |
            ~/.cache
            ~/.local
          key: poetry-${{ hashFiles('poetry.lock') }}
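      # Kivy does not ship complete inline type information, so local stubs are
      # generated with pyright's --createstub before the type checker runs.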
      - name: Create stub files
        run: poetry run pyright --createstub kivy
      - name: Type Check
        run: poetry run poe typecheck
  lint:
    name: Lint
    needs:
      - dependencies
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        name: Checkout
      - uses: actions/setup-python@v5
        name: Setup Python
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: Load Cached Poetry
        id: cached-poetry
        uses: actions/cache/restore@v4
        with:
          path: |
            ~/.cache
            ~/.local
          key: poetry-${{ hashFiles('poetry.lock') }}
      - name: Lint
        run: poetry run poe lint
  test:
    name: Test
    needs:
      - dependencies
    runs-on: ubuntu-latest
    steps:
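      # System libraries for the GUI test run: EGL/GL for rendering (libegl1,
      # libgl1), mtdev for multitouch input (libmtdev1) and zbar for barcode
      # decoding (libzbar0).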
      - name: System Dependencies
        run: sudo apt-get install -y libegl1 libgl1 libmtdev1 libzbar0
      - uses: actions/checkout@v4
        name: Checkout
      - uses: actions/setup-python@v5
        name: Setup Python
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - uses: actions/cache/restore@v4
        name: Load Cached Poetry
        id: cached-poetry
        with:
          path: |
            ~/.cache
            ~/.local
          key: poetry-${{ hashFiles('poetry.lock') }}
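      # The test task below passes -n auto (pytest-xdist convention) to run tests
      # in parallel; the --cov-report flags produce the XML sent to Codecov and
      # the HTML report collected as an artifact, and --make-screenshots is a
      # project-specific option that writes the screenshots/snapshots gathered in
      # the following steps.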
      - name: Run Tests
        run: |
          poetry run poe test --make-screenshots --cov-report=xml --cov-report=html -n auto
      - name: Collect Window Screenshots
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: screenshots
          path: tests/**/results/**/*.png
      - name: Collect Store Snapshots
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: snapshots
          path: tests/**/results/**/*.jsonc
      - name: Collect HTML Coverage Report
        uses: actions/upload-artifact@v4
        with:
          name: coverage-report
          path: htmlcov
      - name: Upload Coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          file: ./coverage.xml
          flags: integration
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
  build:
    name: Build
    needs:
      - dependencies
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.extract_version.outputs.VERSION }}
      name: ${{ steps.extract_version.outputs.NAME }}
    steps:
      - uses: actions/checkout@v4
        name: Checkout
        with:
          lfs: true
      - uses: actions/setup-python@v5
        name: Setup Python
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: Load Cached Poetry
        id: cached-poetry
        uses: actions/cache/restore@v4
        with:
          path: |
            ~/.cache
            ~/.local
          key: poetry-${{ hashFiles('poetry.lock') }}
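      # Core of this change: the Sentry DSN is injected from the repository
      # secrets into .env at build time so it ships inside the built package and
      # the application can initialize Sentry error tracking at runtime (assuming
      # the app reads .env on startup).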
      - name: Add SENTRY_DSN to .env
        run: |
          echo "SENTRY_DSN=${{ secrets.SENTRY_DSN }}" >> .env
      - name: Build
        run: poetry build
      - name: Extract Version
        id: extract_version
        run: |
          echo "VERSION=$(poetry version --short)" >> "$GITHUB_OUTPUT"
          echo "NAME=$(poetry version | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
          echo "VERSION=$(poetry version --short)"
          echo "NAME=$(poetry version | cut -d' ' -f1)"
      - name: Extract Version from CHANGELOG.md
        id: extract_changelog_version
        run: |
          VERSION_CHANGELOG=$(sed -n '3 s/## Version //p' CHANGELOG.md)
          echo "VERSION_CHANGELOG=$VERSION_CHANGELOG"
          if [ "${{ steps.extract_version.outputs.VERSION }}" != "$VERSION_CHANGELOG" ]; then
            echo "Error: Version extracted from CHANGELOG.md does not match the version in pyproject.toml"
            exit 1
          else
            echo "Versions are consistent."
          fi
      - name: Extract Version from Tag
        if: startsWith(github.ref, 'refs/tags/v')
        id: extract_tag_version
        run: |
          VERSION_TAG=$(sed 's/^v//' <<< ${{ github.ref_name }})
          echo "VERSION_TAG=$VERSION_TAG"
          if [ "${{ steps.extract_version.outputs.VERSION }}" != "$VERSION_TAG" ]; then
            echo "Error: Version extracted from tag does not match the version in pyproject.toml"
            exit 1
          else
            echo "Versions are consistent."
          fi
      - name: Upload wheel
        uses: actions/upload-artifact@v4
        with:
          name: wheel
          path: dist/*.whl
          if-no-files-found: error
      - name: Upload binary
        uses: actions/upload-artifact@v4
        with:
          name: binary
          path: dist/*.tar.gz
          if-no-files-found: error
  pypi-publish:
    name: Publish to PyPI
    if: >-
      github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    needs:
      - type-check
      - lint
      - test
      - build
    runs-on: ubuntu-latest
    environment:
      name: release
      url: https://pypi.org/p/${{ needs.build.outputs.name }}
    permissions:
      id-token: write
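    # id-token: write enables OIDC trusted publishing: pypa/gh-action-pypi-publish
    # exchanges the workflow's OIDC token for a short-lived PyPI token, so no
    # long-lived PyPI API key needs to be stored in the repository (this relies on
    # a trusted-publisher configuration on the PyPI project).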
    steps:
      - uses: actions/download-artifact@v4
        with:
          name: wheel
          path: dist
      - uses: actions/download-artifact@v4
        with:
          name: binary
          path: dist
      - name: Publish package distributions to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: dist
          verbose: true
          skip-existing: true
  images:
    name: Create Images
    needs:
      - type-check
      - lint
      - test
      - build
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/solo-io/packer-plugin-arm-image
      env:
        PACKER_CACHE_DIR: /build/packer_cache
      volumes:
        - /dev:/dev
      options: --rm --privileged
    strategy:
      fail-fast: false
      matrix:
        suffix: ['lite', '', 'full']
    steps:
      - run: echo Building arm64-${{ matrix.suffix }} image
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: wheel
          path: /build/dist
      - uses: actions/download-artifact@v4
        with:
          name: binary
          path: /build/dist
      - run: |
          ls -l /build/dist
      - name: Generate Image URL and Checksum
        id: generate_image_url
        run: |
          if [ -n "${{ matrix.suffix }}" ]; then
            SUFFIX="_${{ matrix.suffix }}"
          else
            SUFFIX="${{ matrix.suffix }}"
          fi
          DASHED_SUFFIX=$(echo $SUFFIX | sed 's/_/-/g')
          IMAGE_URL="https://downloads.raspberrypi.com/raspios${SUFFIX}_arm64/images/raspios${SUFFIX}_arm64-2023-12-06/2023-12-05-raspios-bookworm-arm64${DASHED_SUFFIX}.img.xz"
          IMAGE_CHECKSUM=$(curl -s "${IMAGE_URL}.sha256" | awk '{print $1}')
          echo "suffix=$SUFFIX" >> "$GITHUB_OUTPUT"
          echo "dashed_suffix=$DASHED_SUFFIX" >> "$GITHUB_OUTPUT"
          echo "image_url=$IMAGE_URL" >> "$GITHUB_OUTPUT"
          echo "image_checksum=$IMAGE_CHECKSUM" >> "$GITHUB_OUTPUT"
      - name: Build Artifact
        env:
          PKR_VAR_ubo_app_version: ${{ needs.build.outputs.version }}
          PKR_VAR_image_url: ${{ steps.generate_image_url.outputs.image_url }}
          PKR_VAR_image_checksum:
            ${{ steps.generate_image_url.outputs.image_checksum }}
          PKR_VAR_target_image_size:
            ${{ matrix.suffix == 'lite' && '3758096384' || '0' }}
        run: |
          /entrypoint.sh validate scripts/packer/image.pkr.hcl
          /entrypoint.sh build scripts/packer/image.pkr.hcl
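      # The lite image is grown to 3758096384 bytes (3.5 GiB), presumably to make
      # room for the app; '0' leaves the other variants at their default size.
      # zerofree below zeroes unused blocks of the root partition (loop3p2) so the
      # gzip step afterwards compresses the image far better.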
      - name: Write Zeroes to Free Space
        run: |
          losetup -P /dev/loop3 /build/image.img
          apt-get -y update
          apt-get install -y zerofree
          zerofree /dev/loop3p2
          losetup -d /dev/loop3
      - name: Compress File with Gzip
        run: |
          scripts/consume.sh /build/image.img | gzip -9 > /ubo_app-${{ needs.build.outputs.version }}-bookworm${{ steps.generate_image_url.outputs.dashed_suffix }}.img.gz
      - name: Upload Image
        uses: actions/upload-artifact@v4
        with:
          name:
            ubo_app-${{ needs.build.outputs.version }}-bookworm${{
            steps.generate_image_url.outputs.dashed_suffix }}-arm64.img.gz
          path:
            /ubo_app-${{ needs.build.outputs.version }}-bookworm${{
            steps.generate_image_url.outputs.dashed_suffix }}.img.gz
          if-no-files-found: error
  release:
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    name: Release
    needs:
      - type-check
      - lint
      - test
      - build
      - pypi-publish
      - images
    runs-on: ubuntu-latest
    environment:
      name: release
      url: https://pypi.org/p/${{ needs.build.outputs.name }}
    permissions:
      contents: write
    steps:
      - name: Procure Lite Image
        uses: actions/download-artifact@v4
        with:
          name:
            ubo_app-${{ needs.build.outputs.version
            }}-bookworm-lite-arm64.img.gz
          path: artifacts
      - name: Procure Default Image
        uses: actions/download-artifact@v4
        with:
          name: ubo_app-${{ needs.build.outputs.version }}-bookworm-arm64.img.gz
          path: artifacts
      - name: Procure Full Image
        uses: actions/download-artifact@v4
        with:
          name:
            ubo_app-${{ needs.build.outputs.version
            }}-bookworm-full-arm64.img.gz
          path: artifacts
      - name: Procure Wheel
        uses: actions/download-artifact@v4
        with:
          name: wheel
          path: artifacts
      - name: Procure Binary
        uses: actions/download-artifact@v4
        with:
          name: binary
          path: artifacts
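      # GitHub release assets are capped at 2 GiB (2,147,483,648 bytes); files are
      # split at 2,147,000,000 bytes to stay safely under that limit, and the
      # release notes below explain how to rejoin the chunks with `cat`.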
      - name: Split Files Larger than 2GB into 2GB Chunks
        run: |
          for file in artifacts/*; do
            if [ $(stat -c%s "$file") -gt 2147000000 ]; then
              split -b 2147000000 "$file" "$file"_
              rm "$file"
            fi
          done
      - name: Release
        uses: softprops/action-gh-release@v2
        with:
          files: artifacts/*
          tag_name: ${{ needs.build.outputs.version }}
          body: |
            Release of version ${{ needs.build.outputs.version }}
            PyPI package: https://pypi.org/project/${{ needs.build.outputs.name }}/${{ needs.build.outputs.version }}
            Note that GitHub doesn't allow assets bigger than 2GB in a release. Due to this, files bigger than 2GB have been split into 2GB chunks. You can join them using the following command:
            ```bash
            cat [[filename]]_* > [[filename]]
            ```
          prerelease: false
          draft: false