From 0706c25e9fc7b42939cd420a0dea23df4c1e94c5 Mon Sep 17 00:00:00 2001
From: Mike
Date: Tue, 9 Jul 2024 16:40:09 +0100
Subject: [PATCH] CI ccache rework (#2857)

This PR makes a few changes to the way ccache is used for CI.

I did consider adding ccache detection to `export.sh` so it gets enabled by
default. Whilst ccache is great and seems quite robust, if things do go wrong
it just adds more complication for users to sort out, so requiring manual
activation seems best.

**Simplify CI ccache use**

Hendrik Muhs' `ccache-action` got things working easily, but it does
installation and some other things we don't need, and creating versioned
caches on every run is unnecessary. Instead, just use the standard `cache`
action as used for the IDF tools, so pull requests use the `develop` branch
cache if available, or create one. This keeps things clean and simple.
However, once created, caches aren't updated automatically, so I've added a
workflow to rebuild the tools, which is required if the IDF toolchains get
updated. I've also added a second `cache clean` workflow which removes any
cache entries for pull requests (specifically, not develop).

**Tidy action scripts**

- The Actions `if` clause doesn't require `${{ }}`.
- Add workflow_dispatch for both CI builds so we can trigger rebuilds manually
  if necessary.

**Esp32 tools installation**

The installer still re-installs tools (gdb, etc.) which we've previously
pruned out, so just skip esp32 tools installation for CI if the directory is
already present. The IDF also insists on pulling unwanted submodules back in
again, even though they're not used. Fortunately there is
`IDF_SKIP_CHECK_SUBMODULES=1` to disable this behaviour.

There are also some improvements to the `clean-tools.py` script, which now
works much better. I've added some notes to `Tools/ci/README.rst`.

This is also a useful step in identifying and eliminating stuff we don't need
for Sming. One thing I have in mind is getting rid of the IDF build step
completely and just building the required code separately. Unfortunately the
whole SDK is excessively complex and I have found the Rp2040 much easier to
work with in practice. If I ever find a serious application for the extra
hardware in an esp32 I might revisit this...
---
 .github/workflows/cache-clean.yml   |  25 ++++
 .github/workflows/cache-rebuild.yml |  97 ++++++++++++++++
 .github/workflows/ci-esp32.yml      |  31 +++--
 .github/workflows/ci.yml            |  30 +++--
 .github/workflows/coverity-scan.yml |   8 +-
 .github/workflows/library.yml       |  18 +--
 .github/workflows/release.yml       |  12 +-
 Sming/Arch/Esp32/Tools/install.cmd  |   7 ++
 Sming/Arch/Esp32/Tools/install.sh   |   6 +
 Tools/ci/README.rst                 |  75 ++++++++++--
 Tools/ci/clean-tools.py             | 169 ++++++++++++++++------------
 Tools/ci/install.cmd                |   6 +
 Tools/ci/install.sh                 |   6 +
 13 files changed, 367 insertions(+), 123 deletions(-)
 create mode 100644 .github/workflows/cache-clean.yml
 create mode 100644 .github/workflows/cache-rebuild.yml

diff --git a/.github/workflows/cache-clean.yml b/.github/workflows/cache-clean.yml
new file mode 100644
index 0000000000..fcb54cf00b
--- /dev/null
+++ b/.github/workflows/cache-clean.yml
@@ -0,0 +1,25 @@
+name: Cache clean
+
+on:
+  workflow_dispatch:
+
+jobs:
+  cleanup:
+    runs-on: ubuntu-latest
+    steps:
+    - name: Cleanup
+      run: |
+        gh extension install actions/gh-actions-cache
+
+        echo "Fetching list of cache keys"
+        cacheKeys=$(gh actions-cache list -R $REPO -L 100 | grep -v develop | cut -f 1 )
+
+        echo "Deleting caches..."
+ set +e + for cacheKey in $cacheKeys; do + gh actions-cache delete "$cacheKey" -R "$REPO" --confirm + done + echo "Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} diff --git a/.github/workflows/cache-rebuild.yml b/.github/workflows/cache-rebuild.yml new file mode 100644 index 0000000000..8bac555e3a --- /dev/null +++ b/.github/workflows/cache-rebuild.yml @@ -0,0 +1,97 @@ +name: Cache rebuild + +on: + workflow_dispatch: + inputs: + clean_all: + description: 'Clean all caches, not just esp32 ones' + default: false + +jobs: + cleanup: + runs-on: ubuntu-latest + steps: + - name: Cleanup + id: cleanup + run: | + gh extension install actions/gh-actions-cache + + echo "Fetching list of cache keys" + if [ -z "$CLEAN_ALL" ]; then + cacheKeys=$(gh actions-cache list -R "$REPO" -L 100 | cut -f 1 ) + else + cacheKeys=$(gh actions-cache list -R "$REPO" -L 100 | grep -w "idf\|esp32" | cut -f 1 ) + fi + + echo "Deleting caches..." + set +e + for cacheKey in $cacheKeys; do + gh actions-cache delete "$cacheKey" -R "$REPO" --confirm + done + echo "Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + CLEAN_ALL: ${{ inputs.clean_all }} + + build: + needs: cleanup + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + idf_version: ["4.4", "5.2"] + include: + - os: ubuntu-latest + idf_version: "4.3" + - os: ubuntu-latest + idf_version: "5.0" + exclude: + - os: macos-latest + idf_version: "4.4" + + runs-on: ${{ matrix.os }} + + env: + SMING_ARCH: Esp32 + SMING_SOC: esp32 + INSTALL_IDF_VER: ${{ matrix.idf_version }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.idf_version == '4.3' && '3.8' || '3.12' }} + + - name: Configure environment + shell: pwsh + run: | + "CI_BUILD_DIR=" + (Resolve-Path ".").path >> $env:GITHUB_ENV + "SMING_HOME=" + (Resolve-Path "Sming").path >> $env:GITHUB_ENV + + - name: Fix permissions + if: matrix.os != 'windows-latest' + run: | + sudo chown $USER /opt + + - name: Install build tools for Ubuntu / MacOS + if: matrix.os != 'windows-latest' + run: | + Tools/ci/install.sh + + - name: Install build tools for Windows + if: matrix.os == 'windows-latest' + run: | + . 
Tools/ci/setenv.ps1 + Tools/ci/install.cmd + + - name: Cache ESP-IDF and build tools + uses: actions/cache/save@v4 + with: + path: | + /opt/esp-idf-${{ matrix.idf_version }} + /opt/esp32 + key: ${{ matrix.os }}-idf-${{ matrix.idf_version }} diff --git a/.github/workflows/ci-esp32.yml b/.github/workflows/ci-esp32.yml index 1644494eff..38f4c0d012 100644 --- a/.github/workflows/ci-esp32.yml +++ b/.github/workflows/ci-esp32.yml @@ -2,7 +2,9 @@ name: Continuous Integration (CI) for Esp32 on: push: - + + workflow_dispatch: + pull_request: branches: [ develop ] @@ -38,6 +40,7 @@ jobs: SMING_ARCH: Esp32 SMING_SOC: ${{ matrix.variant }} INSTALL_IDF_VER: ${{ matrix.idf_version }} + IDF_SKIP_CHECK_SUBMODULES: 1 ENABLE_CCACHE: 1 steps: @@ -60,7 +63,7 @@ jobs: "SMING_HOME=" + (Resolve-Path "Sming").path >> $env:GITHUB_ENV - name: Fix permissions - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | sudo chown $USER /opt @@ -72,30 +75,34 @@ jobs: /opt/esp32 key: ${{ matrix.os }}-idf-${{ matrix.idf_version }} + - name: Compiler Cache + uses: actions/cache@v4 + with: + path: .ccache + key: ${{ matrix.os }}-ccache-${{ matrix.variant }}-${{ matrix.idf_version }} + - name: Install build tools for Ubuntu / MacOS - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | Tools/ci/install.sh - - name: Install build tools for Windows - if: ${{ matrix.os == 'windows-latest' }} + - name: Install build tools for Windows + if: matrix.os == 'windows-latest' run: | . Tools/ci/setenv.ps1 Tools/ci/install.cmd - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ matrix.os }}-${{ matrix.variant }}-${{ matrix.idf_version }} - - name: Build and test for ${{matrix.variant}} with IDF v${{matrix.idf_version}} on Ubuntu / MacOS - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | source $SMING_HOME/../Tools/export.sh Tools/ci/build.sh - name: Build and test for ${{matrix.variant}} with IDF v${{matrix.idf_version}} on Windows - if: ${{ matrix.os == 'windows-latest' }} + if: matrix.os == 'windows-latest' run: | . Tools/ci/setenv.ps1 Tools/ci/build.cmd + + - name: Compiler Cache stats + run: ccache -sv diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 608d6286e7..787923a590 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,9 @@ name: Continuous Integration (CI) on: push: - + + workflow_dispatch: + pull_request: branches: [ develop ] @@ -30,7 +32,7 @@ jobs: - variant: rp2040 arch: Rp2040 - concurrency: + concurrency: group: ${{ github.workflow }}-${{ github.ref }}-${{ toJson(matrix) }} cancel-in-progress: true @@ -62,32 +64,36 @@ jobs: "CI_BUILD_DIR=" + (Resolve-Path ".").path >> $env:GITHUB_ENV "SMING_HOME=" + (Resolve-Path "Sming").path >> $env:GITHUB_ENV + - name: Compiler Cache + uses: actions/cache@v4 + with: + path: .ccache + key: ${{ matrix.os }}-ccache-${{ matrix.toolchain }}-${{ matrix.variant }} + - name: Install build tools for Ubuntu / MacOS - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | Tools/ci/install.sh - - name: Install build tools for Windows - if: ${{ matrix.os == 'windows-latest' }} + - name: Install build tools for Windows + if: matrix.os == 'windows-latest' run: | . 
Tools/ci/setenv.ps1 Tools/ci/install.cmd - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ matrix.os }}-${{ matrix.toolchain }}-${{ matrix.variant }} - - name: Build and test for ${{matrix.variant}} on Ubuntu / MacOS env: CLANG_FORMAT: clang-format-8 - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | source $SMING_HOME/../Tools/export.sh Tools/ci/build.sh - name: Build and test for ${{matrix.variant}} on Windows - if: ${{ matrix.os == 'windows-latest' }} + if: matrix.os == 'windows-latest' run: | . Tools/ci/setenv.ps1 Tools/ci/build.cmd + + - name: Compiler Cache stats + run: ccache -sv diff --git a/.github/workflows/coverity-scan.yml b/.github/workflows/coverity-scan.yml index 21b710e246..daa11e047b 100644 --- a/.github/workflows/coverity-scan.yml +++ b/.github/workflows/coverity-scan.yml @@ -44,19 +44,19 @@ jobs: echo "CHECK_SCA=$CHECK_SCA" >> $GITHUB_ENV - name: Setup SMING_HOME for Ubuntu - if: ${{ env.CHECK_SCA == 1 }} + if: env.CHECK_SCA == 1 run: | echo "CI_BUILD_DIR=$GITHUB_WORKSPACE" >> $GITHUB_ENV echo "SMING_HOME=$GITHUB_WORKSPACE/Sming" >> $GITHUB_ENV echo "SMING_ARCH=Host" >> $GITHUB_ENV - name: Install Sming Framework on Ubuntu - if: ${{ env.CHECK_SCA == 1 }} + if: env.CHECK_SCA == 1 run: | Tools/ci/install.sh - name: Run Coverity Scan - if: ${{ env.CHECK_SCA == 1 }} + if: env.CHECK_SCA == 1 env: COVERITY_SCAN_TOKEN: ${{secrets.COVERITY_SCAN_TOKEN}} run: | @@ -67,7 +67,7 @@ jobs: $SMING_HOME/Arch/Host/Tools/ci/coverity-scan.sh - name: Archive scan log - if: ${{ env.CHECK_SCA == 1 }} + if: env.CHECK_SCA == 1 uses: actions/upload-artifact@v3 with: name: coverity-scan-report diff --git a/.github/workflows/library.yml b/.github/workflows/library.yml index 295da95f56..d4bb144771 100644 --- a/.github/workflows/library.yml +++ b/.github/workflows/library.yml @@ -62,7 +62,7 @@ jobs: - os: macos-latest idf_version: "4.4" - concurrency: + concurrency: group: ${{ github.workflow }}-${{ github.ref }}-${{ toJson(matrix) }} cancel-in-progress: true @@ -89,7 +89,7 @@ jobs: python-version: "3.12" - name: Create library alias - if: ${{ inputs.alias }} + if: inputs.alias shell: pwsh run: | New-Item -ItemType SymbolicLink -Path "../${{ inputs.alias }}" -Target (Resolve-Path ".").path @@ -107,12 +107,12 @@ jobs: "CI_MAKEFILE=" + (Resolve-Path "../../sming/Tools/ci/library/Makefile") >> $env:GITHUB_ENV - name: Fix permissions - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | sudo chown $USER /opt - name: Cache ESP-IDF and build tools - if: ${{ matrix.arch == 'Esp32' }} + if: matrix.arch == 'Esp32' uses: actions/cache@v4 with: path: | @@ -121,12 +121,12 @@ jobs: key: ${{ matrix.os }}-idf-${{ env.INSTALL_IDF_VER }} - name: Install build tools for Ubuntu / MacOS - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | $SMING_HOME/../Tools/ci/install.sh - - name: Install build tools for Windows - if: ${{ matrix.os == 'windows-latest' }} + - name: Install build tools for Windows + if: matrix.os == 'windows-latest' run: | cd $env:SMING_HOME/.. . Tools/ci/setenv.ps1 @@ -135,13 +135,13 @@ jobs: - name: Build and Test for ${{matrix.arch}} on Ubuntu / MacOS env: CLANG_FORMAT: clang-format-8 - if: ${{ matrix.os != 'windows-latest' }} + if: matrix.os != 'windows-latest' run: | source $SMING_HOME/../Tools/export.sh make -j$(nproc) -f $CI_MAKEFILE - name: Build and Test for ${{matrix.arch}} on Windows - if: ${{ matrix.os == 'windows-latest' }} + if: matrix.os == 'windows-latest' run: | . 
"$env:SMING_HOME/../Tools/ci/setenv.ps1" make -j $env:NUMBER_OF_PROCESSORS -f $env:CI_MAKEFILE diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ef9127ad64..8f2d4dee10 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -5,7 +5,7 @@ on: push: tags: - '[0-9]+.[0-9]+.[0-9]+' - + # TODO: check if the tag is pointing to the tip of the master branch jobs: @@ -14,22 +14,22 @@ jobs: steps: - uses: actions/checkout@v4 - uses: trstringer/manual-approval@v1 - if: ${{ github.ref_type == 'tag' }} + if: github.ref_type == 'tag' with: secret: ${{ github.TOKEN }} approvers: slaff - name: Install xmlstarlet - if: ${{ github.ref_type == 'tag' }} + if: github.ref_type == 'tag' run: sudo apt-get install -y jq xmlstarlet - name: Build docs - if: ${{ github.ref_type == 'tag' }} + if: github.ref_type == 'tag' run: | Tools/install.sh doc make -C docs html zip -r sming-docs.zip docs/build/html - name: Release New Version - if: ${{ github.ref_type == 'tag' }} - env: + if: github.ref_type == 'tag' + env: SMING_ARCH: Host RELEASE_TOKEN: ${{secrets.RELEASE_TOKEN}} CI_REPO_NAME: ${{github.repository}} diff --git a/Sming/Arch/Esp32/Tools/install.cmd b/Sming/Arch/Esp32/Tools/install.cmd index 9952a151c6..27fafbb755 100644 --- a/Sming/Arch/Esp32/Tools/install.cmd +++ b/Sming/Arch/Esp32/Tools/install.cmd @@ -33,6 +33,13 @@ goto :setup :setup +REM Skip installation for CI if already present +if "%CI_BUILD_DIR%" NEQ "" ( + if exist "%IDF_TOOLS_PATH%\tools" ( + goto :EOF + ) +) + REM Install IDF tools and packages python "%IDF_PATH%\tools\idf_tools.py" --non-interactive install python -m pip install %SMINGTOOLS%/gevent-1.5.0-cp39-cp39-win_amd64.whl diff --git a/Sming/Arch/Esp32/Tools/install.sh b/Sming/Arch/Esp32/Tools/install.sh index 82bab550fd..7932363311 100755 --- a/Sming/Arch/Esp32/Tools/install.sh +++ b/Sming/Arch/Esp32/Tools/install.sh @@ -60,6 +60,10 @@ fi rm -f "$IDF_PATH" ln -s "$IDF_CLONE_PATH" "$IDF_PATH" + +# Skip installation for CI if already present +if [ -z "$CI_BUILD_DIR" ] || [ ! -d "$IDF_TOOLS_PATH/tools" ]; then + # Install IDF tools and packages python3 "$IDF_PATH/tools/idf_tools.py" --non-interactive install if [ -n "$VIRTUAL_ENV" ]; then @@ -73,4 +77,6 @@ if [ -z "$KEEP_DOWNLOADS" ]; then rm -rf "$IDF_TOOLS_PATH/dist" fi +fi # CI install + fi diff --git a/Tools/ci/README.rst b/Tools/ci/README.rst index 00e9e67ace..f8e39c3281 100644 --- a/Tools/ci/README.rst +++ b/Tools/ci/README.rst @@ -1,8 +1,69 @@ -CI logs -======= +Continuous Integration testing +============================== + +Github actions +-------------- + +See ``.github/workflows``. + +Cache clean + Dispatch workflow to remove caches for pull requests, but leave those for the default (develop) branch intact. + +Cache rebuild + Dispatch workflow to rebuild the esp32 idf/tool caches. + By default, cleans only idf/esp32 caches but has option to perform full clean. + +CodeQL + Performs code quality analysis when develop branch is updated. + +Continuous Integration (CI) + Tests for all architectures except esp32. Run for every pull request and merge to develop. + +Continuous Integration (CI) for Esp32 + Tests for esp32 architecture. Requires a separate workflow as matrix becomes too complex otherwise. + +Continuous Integration (CI) for Library + Used indirectly by library workflows for testing. See workflows/library. + +Coverity Scan + Code quality analyser. Does instrumented build then uploads database to coverity servers for analysis. 
+ +Release + Run manually during release phase only. + +Spelling Check + Run for all pull requests and merge to develop. + -Overview --------- +Esp32 IDF and tools cleaning +---------------------------- + +Because the IDF and associated tools are large and relatively time-consuming to install, these are cached. +There's so much bloat that it doesn't take much to fill the 10GB github cache allocation. + +So after installing the tools - before it gets cached - the `clean-tools.py` script gets run. +This tool contains a list of filters (regular expressions) which match various paths. +Candidates for removal were identified by inspection using the Gnome disk usage analyzer. +Some other stuff (like test code and examples) are fairly safe candidates to remove as well. + +To evaluate how much would be removed run this command (it's safe):: + + python clean-tools.py scan + +To perform 'dry-run' of a clean operation, without actually deleteing anything:: + + python clean-tools.py clean + +To actually delete stuff requires a confirmation flag:: + + python clean-tools.py clean --delete + +Note that some unused submodules are cleaned, but by default the IDF pulls them back in again! +To prevent this behaviour, set `IDF_SKIP_CHECK_SUBMODULES=1`. + + +CI logs +------- Analysing CI logs is important for several reasons: @@ -23,7 +84,7 @@ The log files related to a run and therefore only two are required (the main bui Setup ------ +~~~~~ The github CLI client must be installed and authenticated with the Sming repo (or fork). @@ -31,7 +92,7 @@ See https://docs.github.com/en/github-cli/github-cli/quickstart. Usage ------ +~~~~~ Fetch and scan the most recent build:: @@ -67,7 +128,7 @@ The named exclusion file contains a list of regular expressions to match against vscode ------- +~~~~~ The warnings output using the scanlog tool can be used as hyperlinks in vscode: diff --git a/Tools/ci/clean-tools.py b/Tools/ci/clean-tools.py index 1654b37b50..75762ca914 100644 --- a/Tools/ci/clean-tools.py +++ b/Tools/ci/clean-tools.py @@ -5,53 +5,75 @@ import shutil import argparse +# Mandatory environment variables +IDF_PATH = os.environ['IDF_PATH'] +IDF_TOOLS_PATH = os.environ['IDF_TOOLS_PATH'] + # The commented-out ARC paths below are for regular libraries with RTTI. # These are used by default without the -fno-rtti switch so without them the cmake # compiler check fails. Otherwise they can go. 
TOOLS = r'esp32/tools/' IDF = r'esp-idf([^/]+)/' + ARC = r'([^/]+)\.a' -FILTERS = [ - # Leave versioned directory to avoid re-installation - rf'{TOOLS}.*esp-elf-gdb/.*/.*esp-elf-gdb/', - rf'{TOOLS}esp32ulp-elf/.*/esp32ulp-elf/', - rf'{TOOLS}openocd-esp32/.*/openocd-esp32/', - # Libraries not required by Sming - # rf'{TOOLS}.*/riscv32-esp-elf/lib/{ARC}', - rf'{TOOLS}.*/riscv32-esp-elf/lib/rv32i_.*', - rf'{TOOLS}.*/riscv32-esp-elf/lib/rv32i/', - rf'{TOOLS}.*/riscv32-esp-elf/lib/rv32imac_.*', - rf'{TOOLS}.*/riscv32-esp-elf/lib/rv32imafc_.*', - rf'{TOOLS}.*/riscv32-esp-elf/lib/rv32imafc/', - # rf'{TOOLS}.*/riscv32-esp-elf/lib/rv32imc_zicsr_zifencei/ilp32/{ARC}', - # rf'{TOOLS}.*/riscv32-esp-elf/lib/rv32imc/ilp32/{ARC}', - # rf'{TOOLS}.*/lib/esp32/{ARC}', - rf'{TOOLS}.*/lib/esp32-psram', - rf'{TOOLS}.*/lib/esp32/psram', - # rf'{TOOLS}.*/lib/esp32s2/{ARC}', - # rf'{TOOLS}.*/lib/esp32s3/{ARC}', - # rf'{TOOLS}.*/xtensa-esp32-elf/lib/{ARC}', - # rf'{TOOLS}.*/xtensa-esp32s2-elf/lib/{ARC}', - # rf'{TOOLS}.*/xtensa-esp32s3-elf/lib/{ARC}', - # rf'{TOOLS}.*/xtensa-esp-elf/lib/{ARC}', - # Components, examples and archives - rf'{IDF}docs/', - rf'{IDF}examples/', - rf'{IDF}components/cmock/', - rf'{IDF}components/unity/', - rf'{IDF}.*esp32c6.*', - rf'{IDF}.*esp32h2.*', - rf'{IDF}.*esp32p4.*', - rf'{IDF}.*/tests', -] - -# Python 3.8 doesn't have str.removeprefix -def removeprefix(path: str, prefix: str) -> str: - return path[len(prefix):] if path.startswith(prefix) else path + +# These filters are matched from the **start** of the path so there's an implicit .* at the end +FILTERS = { + IDF_TOOLS_PATH: [ + # Leave versioned directory to avoid re-installation + r'.*esp-elf-gdb/.*/.*esp-elf-gdb/', + r'esp32ulp-elf/.*/esp32ulp-elf/', + r'openocd-esp32/.*/openocd-esp32/', + # Libraries not required by Sming + # r'.*/riscv32-esp-elf/lib/{ARC}', + r'.*/riscv32-esp-elf/lib/rv32i_.*', + r'.*/riscv32-esp-elf/lib/rv32i/', + r'.*/riscv32-esp-elf/lib/rv32imac_.*', + r'.*/riscv32-esp-elf/lib/rv32imafc_.*', + r'.*/riscv32-esp-elf/lib/rv32imafc/', + # r'.*/riscv32-esp-elf/lib/rv32imc_zicsr_zifencei/ilp32/{ARC}', + # r'.*/riscv32-esp-elf/lib/rv32imc/ilp32/{ARC}', + # r'.*/lib/esp32/{ARC}', + r'.*/lib/esp32(-|/)psram', + # r'.*/lib/esp32s2/{ARC}', + # r'.*/lib/esp32s3/{ARC}', + # r'.*/xtensa-esp32-elf/lib/{ARC}', + # r'.*/xtensa-esp32s2-elf/lib/{ARC}', + # r'.*/xtensa-esp32s3-elf/lib/{ARC}', + # r'.*/xtensa-esp-elf/lib/{ARC}', + ], + IDF_PATH: [ + # Components, examples and archives + r'docs/', + r'.*/doc', + r'examples/', + r'components/asio/', + r'components/cmock/', + r'components/openthread/openthread/third_party', + r'components/unity/', + r'components/.*esp32c6.*', + r'components/.*esp32h2.*', + r'components/.*esp32p4.*', + r'components/.*/test', + r'components/.*/fuzz', + r'components/expat/expat/testdata', + r'components/libsodium', + r'components/nghttp/nghttp2/third-party/mruby', + r'components/nghttp/nghttp2/third-party', + r'components/tinyusb/', + r'components/.*/win32', + r'tools/esp_app_trace', + r'tools/test', + r'tools/ci', + ] +} + +def fix_path(path: str) -> str: + return path[2:].replace('\\', '/') if path[1] == ':' else path def scan_log(logfile: str, file_list: dict): - with open(logfile, 'r') as f: + with open(logfile, 'r', encoding='utf8') as f: for path in f: path = path.strip() try: @@ -59,10 +81,8 @@ def scan_log(logfile: str, file_list: dict): path, _, size = path.rpartition(' ') blocks = int(blocks) size = int(size) - if path[1] == ':': - path = path[2:].replace('\\', '/') - path = 
removeprefix(path, '/opt/') - if not path.startswith('esp'): + path = fix_path(path) + if '/esp' not in path: continue existing = file_list.get(path) if not existing or size > existing: @@ -80,20 +100,15 @@ def scan_logs(log_dir) -> dict: return file_list -def scan_tools_dir(tools_dir) -> dict: - file_list = {} +def scan_tree(start_path: str, file_list: dict): def scan(root_path): for entry in os.scandir(root_path): if entry.is_dir(): scan(entry.path) continue - path = entry.path - path = removeprefix(path, tools_dir + '/') - if not path.startswith('esp'): - continue - path = path.replace('\\', '/') + path = fix_path(entry.path) file_list[path] = entry.stat().st_size - scan(tools_dir) + scan(start_path) return file_list @@ -106,25 +121,33 @@ def mbstr(size) -> str: print(f'{len(file_list)} files, total size {mbstr(total_size)}') - total_size = 0 - for flt in FILTERS: - expr = re.compile(flt) - size = sum(size for path, size in file_list.items() if expr.match(path)) - total_size += size - print(f'{flt}: {mbstr(size)}') + def match(start_path: str, filters: list): + start_path = fix_path(start_path) + matched_size = 0 + for flt in filters: + flt = rf'{start_path}/{flt}' + expr = re.compile(flt, flags=re.IGNORECASE) + size = sum(size for path, size in file_list.items() if expr.match(path)) + matched_size += size + print(f'{flt}: {mbstr(size)}') + return matched_size + + total_size = sum(match(*item) for item in FILTERS.items()) print(f'Total size {mbstr(total_size)}') -def clean_tools_dir(tools_dir: str, do_clean: bool): - re_filter = re.compile('|'.join(FILTERS)) +def clean_tree(start_path: str, filters: list, do_clean: bool): + if not os.path.exists(start_path): + print(f'"{start_path}" not found, skipping.') + return + + re_filter = re.compile('|'.join(rf'{fix_path(start_path)}/{f}' for f in filters), flags=re.IGNORECASE) def clean_path(root_path): for entry in os.scandir(root_path): try: - path = entry.path - path = path.replace('\\', '/') - path = removeprefix(path, tools_dir.replace('\\', '/') + '/') + path = fix_path(entry.path) if entry.is_dir(): if re_filter.match(path + '/'): print(f"rmtree {entry.path}") @@ -139,7 +162,7 @@ def clean_path(root_path): except Exception as e: print(f'{repr(e)}') - clean_path(tools_dir) + clean_path(start_path) def main(): @@ -150,25 +173,25 @@ def main(): args = parser.parse_args() - tools_dir = os.path.dirname(os.environ['IDF_PATH']) - - print(f'Action: {args.action} "{tools_dir}"') + print(f'Action: {args.action}, IDF_PATH="{IDF_PATH}", IDF_TOOLS_PATH="{IDF_TOOLS_PATH}"') if args.action == 'scan': if args.logdir: file_list = scan_logs(args.logdir) else: - file_list = scan_tools_dir(tools_dir) + file_list = {} + scan_tree(IDF_PATH, file_list) + scan_tree(IDF_TOOLS_PATH, file_list) scan_list(file_list) - elif args.action == 'clean': - if not os.path.exists(tools_dir): - print(f'"{tools_dir}" not found, skipping.') - return - clean_tools_dir(tools_dir, args.delete) + return + + if args.action == 'clean': + for path, filters in FILTERS.items(): + clean_tree(path, filters, args.delete) if args.delete: - print("OK, items cleaned.") + print("** Cleaning finished.") else: - print("Dry run, nothing deleted.") + print("** Dry run, nothing deleted.") if __name__ == '__main__': main() diff --git a/Tools/ci/install.cmd b/Tools/ci/install.cmd index 7fe9f022a2..b1504673a6 100644 --- a/Tools/ci/install.cmd +++ b/Tools/ci/install.cmd @@ -10,4 +10,10 @@ if "%BUILD_DOCS%" == "true" ( call %CI_BUILD_DIR%\Tools\install.cmd %SMING_ARCH% %INSTALL_OPTS% +REM Configure 
ccache
+ccache --set-config cache_dir="%CI_BUILD_DIR%\.ccache"
+ccache --set-config max_size=500M
+ccache -z
+
+REM Clean up tools installation
 python "%CI_BUILD_DIR%\Tools\ci\clean-tools.py" clean --delete
diff --git a/Tools/ci/install.sh b/Tools/ci/install.sh
index 0cb02170cb..f94da42799 100755
--- a/Tools/ci/install.sh
+++ b/Tools/ci/install.sh
@@ -23,5 +23,11 @@
 sudo chown "$USER" /opt
 fi
 
+# Configure ccache
+ccache --set-config cache_dir="$CI_BUILD_DIR/.ccache"
+ccache --set-config max_size=500M
+ccache -z
+
+# Clean up tools installation
 source "$CI_BUILD_DIR/Tools/export.sh"
 python "$CI_BUILD_DIR/Tools/ci/clean-tools.py" clean --delete
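
Since ccache stays opt-in rather than being detected automatically by `export.sh`, enabling
it for a local (non-CI) build is a manual step. As a minimal sketch only -- assuming that
exporting `ENABLE_CCACHE=1` (the same switch the CI workflows set) is all the build needs,
and using an arbitrary example cache directory::

    # Rough local equivalent of what Tools/ci/install.sh configures for CI runs.
    export ENABLE_CCACHE=1
    ccache --set-config cache_dir="$HOME/.sming-ccache"   # example location only
    ccache --set-config max_size=500M
    ccache -z          # zero statistics so this build's hit rate is visible
    make -j"$(nproc)"  # any Sming project build
    ccache -sv         # show statistics, as the CI 'Compiler Cache stats' step does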
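
The reworked `clean-tools.py` can also be exercised outside CI. It now reads both `IDF_PATH`
and `IDF_TOOLS_PATH` straight from the environment, so both must be set before running it.
A minimal sketch, using example paths that mirror the CI cache layout (a local installation
may live elsewhere)::

    # Both variables are mandatory: the script calls os.environ['...'] and fails if either is unset.
    export IDF_PATH=/opt/esp-idf-5.2       # example path
    export IDF_TOOLS_PATH=/opt/esp32       # example path
    python Tools/ci/clean-tools.py scan             # report what the filters would remove (safe)
    python Tools/ci/clean-tools.py clean            # dry run, nothing deleted
    python Tools/ci/clean-tools.py clean --delete   # actually delete the matched files
    # Stop the IDF from pulling the pruned submodules back in on the next build:
    export IDF_SKIP_CHECK_SUBMODULES=1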