From f1d1a65a6866b73daecc31608201d7141455e066 Mon Sep 17 00:00:00 2001
From: Mike
Date: Wed, 3 Jul 2024 08:56:57 +0100
Subject: [PATCH] Add IDF caching for libraries (#2850)

Cache esp-idf and tools for library builds. It would be nice if we could
use the cache from `SmingHub/Sming` directly, but that sort of cross-repo
sharing isn't supported.

Also discovered that cross-OS caching breaks because the cache action uses
paths relative to the build directory, even though it's been given absolute
paths. So, just simplify things and use a separate, combined cache for each
OS/IDF combination. The total is about 5GB.

In all the .yml files we don't need to repeat steps for Ubuntu/MacOS as
they're the same, so we can use a combined step for those, i.e. `not windows`.

Scanlog tool:

- Fix Windows path normalisation: it didn't account for the change from
  c:/tools/ to d:/opt/, nor for the version contained in the Windows IDF path.
- Print warnings for each job by default; add the 'm' flag to merge them.

Also fix a few more warnings.
---
 .github/workflows/ci-esp32.yml                  |  34 +--
 .github/workflows/ci.yml                        |  20 +-
 .github/workflows/library.yml                   |  34 ++-
 .../src/HttpMultipartResource.cpp               |   6 +-
 .../src/HttpMultipartResource.h                 |   4 +-
 Sming/Libraries/SwitchJoycon/src/SwitchJoycon.h |   2 +-
 Sming/Libraries/nanopb/src/Callback.cpp         |   2 +-
 Sming/Libraries/nanopb/src/Stream.cpp           |   2 +-
 .../nanopb/src/include/Protobuf/Stream.h        |   2 +-
 Tools/ci/README.rst                             |  22 +-
 Tools/ci/scanlog.py                             | 273 +++++++++---------
 Tools/ci/warn-exclude.lst                       |   2 +-
 samples/Basic_Audio/app/application.cpp         |   2 +-
 13 files changed, 196 insertions(+), 209 deletions(-)

diff --git a/.github/workflows/ci-esp32.yml b/.github/workflows/ci-esp32.yml
index 90761f6979..35f177364d 100644
--- a/.github/workflows/ci-esp32.yml
+++ b/.github/workflows/ci-esp32.yml
@@ -67,26 +67,16 @@ jobs:
         run: |
           sudo chown $USER /opt
 
-      - name: Cache ESP-IDF
+      - name: Cache ESP-IDF and build tools
         uses: actions/cache@v4
         with:
-          path: /opt/esp-idf-${{ matrix.idf_version }}
-          key: idf-${{ matrix.idf_version }}
-          enableCrossOsArchive: true
+          path: |
+            /opt/esp-idf-${{ matrix.idf_version }}
+            /opt/esp32
+          key: ${{ matrix.os }}-idf-${{ matrix.idf_version }}
 
-      - name: Cache build tools
-        uses: actions/cache@v4
-        with:
-          path: /opt/esp32
-          key: ${{ matrix.os }}-esp32-${{ matrix.idf_version }}
-
-      - name: Install build tools for Ubuntu
-        if: ${{ matrix.os == 'ubuntu-latest' }}
-        run: |
-          Tools/ci/install.sh
-
-      - name: Install build tools for MacOS
-        if: ${{ matrix.os == 'macos-latest' }}
+      - name: Install build tools for Ubuntu / MacOS
+        if: ${{ matrix.os != 'windows-latest' }}
         run: |
           Tools/ci/install.sh
 
@@ -96,14 +86,8 @@ jobs:
         run: |
           . Tools/ci/setenv.ps1
           Tools/ci/install.cmd
 
-      - name: Build and test for ${{matrix.variant}} with IDF v${{matrix.idf_version}} on Ubuntu
-        if: ${{ matrix.os == 'ubuntu-latest' }}
-        run: |
-          source $SMING_HOME/../Tools/export.sh
-          Tools/ci/build.sh
-
-      - name: Build and test for ${{matrix.variant}} on MacOS
-        if: ${{ matrix.os == 'macos-latest' }}
+      - name: Build and test for ${{matrix.variant}} with IDF v${{matrix.idf_version}} on Ubuntu / MacOS
+        if: ${{ matrix.os != 'windows-latest' }}
         run: |
           source $SMING_HOME/../Tools/export.sh
           Tools/ci/build.sh
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fb5f09032e..7ba517edd1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -61,13 +61,8 @@ jobs:
           "CI_BUILD_DIR=" + (Resolve-Path ".").path >> $env:GITHUB_ENV
           "SMING_HOME=" + (Resolve-Path "Sming").path >> $env:GITHUB_ENV
 
-      - name: Install build tools for Ubuntu
-        if: ${{ matrix.os == 'ubuntu-latest' }}
-        run: |
-          Tools/ci/install.sh
-
-      - name: Install build tools for MacOS
-        if: ${{ matrix.os == 'macos-latest' }}
+      - name: Install build tools for Ubuntu / MacOS
+        if: ${{ matrix.os != 'windows-latest' }}
         run: |
           Tools/ci/install.sh
 
@@ -77,17 +72,10 @@ jobs:
           . Tools/ci/setenv.ps1
           Tools/ci/install.cmd
 
-      - name: Build and test for ${{matrix.variant}} on Ubuntu
+      - name: Build and test for ${{matrix.variant}} on Ubuntu / MacOS
         env:
           CLANG_FORMAT: clang-format-8
-        if: ${{ matrix.os == 'ubuntu-latest' }}
-        run: |
-          source $SMING_HOME/../Tools/export.sh
-          $CLANG_FORMAT --version
-          Tools/ci/build.sh
-
-      - name: Build and test for ${{matrix.variant}} on MacOS
-        if: ${{ matrix.os == 'macos-latest' }}
+        if: ${{ matrix.os != 'windows-latest' }}
         run: |
           source $SMING_HOME/../Tools/export.sh
           Tools/ci/build.sh
diff --git a/.github/workflows/library.yml b/.github/workflows/library.yml
index 01c1a7b57c..295da95f56 100644
--- a/.github/workflows/library.yml
+++ b/.github/workflows/library.yml
@@ -71,7 +71,7 @@ jobs:
     env:
       SMING_ARCH: ${{ matrix.arch }}
       SMING_SOC: ${{ matrix.variant }}
-      INSTALL_IDF_VER: ${{ matrix.idf_version }}
+      INSTALL_IDF_VER: ${{ matrix.idf_version || '5.2' }}
       CLANG_BUILD: ${{ matrix.toolchain == 'clang' && '15' || '0' }}
       BUILD64: ${{ matrix.toolchain == 'gcc64' && 1 || 0 }}
 
@@ -101,37 +101,41 @@ jobs:
       - name: Configure environment
         shell: pwsh
         run: |
+          "CI_BUILD_DIR=" + (Resolve-Path ".").path >> $env:GITHUB_ENV
           "SMING_HOME=" + (Resolve-Path "../../sming/Sming").path >> $env:GITHUB_ENV
           "COMPONENT_SEARCH_DIRS=" + (Resolve-Path "..").path >> $env:GITHUB_ENV
           "CI_MAKEFILE=" + (Resolve-Path "../../sming/Tools/ci/library/Makefile") >> $env:GITHUB_ENV
 
-      - name: Install build tools for Ubuntu
-        if: ${{ matrix.os == 'ubuntu-latest' }}
+      - name: Fix permissions
+        if: ${{ matrix.os != 'windows-latest' }}
         run: |
-          $SMING_HOME/../Tools/ci/install.sh
+          sudo chown $USER /opt
+
+      - name: Cache ESP-IDF and build tools
+        if: ${{ matrix.arch == 'Esp32' }}
+        uses: actions/cache@v4
+        with:
+          path: |
+            /opt/esp-idf-${{ env.INSTALL_IDF_VER }}
+            /opt/esp32
+          key: ${{ matrix.os }}-idf-${{ env.INSTALL_IDF_VER }}
 
-      - name: Install build tools for MacOS
-        if: ${{ matrix.os == 'macos-latest' }}
+      - name: Install build tools for Ubuntu / MacOS
+        if: ${{ matrix.os != 'windows-latest' }}
         run: |
           $SMING_HOME/../Tools/ci/install.sh
 
       - name: Install build tools for Windows
-        if: ${{ matrix.os == 'windows-latest' }}
+        if: ${{ matrix.os == 'windows-latest' }}
         run: |
          cd $env:SMING_HOME/..
          . Tools/ci/setenv.ps1
          Tools/ci/install.cmd
 
-      - name: Build and Test for ${{matrix.arch}} on Ubuntu
+      - name: Build and Test for ${{matrix.arch}} on Ubuntu / MacOS
         env:
           CLANG_FORMAT: clang-format-8
-        if: ${{ matrix.os == 'ubuntu-latest' }}
-        run: |
-          source $SMING_HOME/../Tools/export.sh
-          make -j$(nproc) -f $CI_MAKEFILE
-
-      - name: Build and test for ${{matrix.variant}} on MacOS
-        if: ${{ matrix.os == 'macos-latest' }}
+        if: ${{ matrix.os != 'windows-latest' }}
         run: |
           source $SMING_HOME/../Tools/export.sh
           make -j$(nproc) -f $CI_MAKEFILE
diff --git a/Sming/Libraries/MultipartParser/src/HttpMultipartResource.cpp b/Sming/Libraries/MultipartParser/src/HttpMultipartResource.cpp
index ecfbe2e462..15b131a66d 100644
--- a/Sming/Libraries/MultipartParser/src/HttpMultipartResource.cpp
+++ b/Sming/Libraries/MultipartParser/src/HttpMultipartResource.cpp
@@ -12,7 +12,7 @@
 
 #include "HttpMultipartResource.h"
 
-int HttpMultipartResource::setFileMap(HttpServerConnection& connection, HttpRequest& request, HttpResponse& response)
+int HttpMultipartResource::setFileMap(HttpServerConnection&, HttpRequest& request, HttpResponse& response)
 {
 	String contentType = request.headers[HTTP_HEADER_CONTENT_TYPE];
 	String mimeType = toString(MIME_FORM_MULTIPART);
@@ -24,7 +24,3 @@ int HttpMultipartResource::setFileMap(HttpServerConnection& connection, HttpRequ
 
 	return 0;
 }
-
-void HttpMultipartResource::shutdown(HttpServerConnection& connection)
-{
-}
diff --git a/Sming/Libraries/MultipartParser/src/HttpMultipartResource.h b/Sming/Libraries/MultipartParser/src/HttpMultipartResource.h
index 39e4fd3464..703d068a53 100644
--- a/Sming/Libraries/MultipartParser/src/HttpMultipartResource.h
+++ b/Sming/Libraries/MultipartParser/src/HttpMultipartResource.h
@@ -50,7 +50,9 @@ class HttpMultipartResource : public HttpResource
 	 */
 	virtual int setFileMap(HttpServerConnection& connection, HttpRequest& request, HttpResponse& response);
 
-	void shutdown(HttpServerConnection& connection) override;
+	void shutdown(HttpServerConnection&) override
+	{
+	}
 
 private:
 	HttpFilesMapper mapper;
diff --git a/Sming/Libraries/SwitchJoycon/src/SwitchJoycon.h b/Sming/Libraries/SwitchJoycon/src/SwitchJoycon.h
index 86a2e63011..149381bac6 100644
--- a/Sming/Libraries/SwitchJoycon/src/SwitchJoycon.h
+++ b/Sming/Libraries/SwitchJoycon/src/SwitchJoycon.h
@@ -121,7 +121,7 @@ class SwitchJoycon
 	void resetButtons();
 
 protected:
-	virtual void onStarted(NimBLEServer* pServer){};
+	virtual void onStarted(NimBLEServer*){};
 
 private:
 	bool started{false};
diff --git a/Sming/Libraries/nanopb/src/Callback.cpp b/Sming/Libraries/nanopb/src/Callback.cpp
index 76794000d0..46b6e72b2b 100644
--- a/Sming/Libraries/nanopb/src/Callback.cpp
+++ b/Sming/Libraries/nanopb/src/Callback.cpp
@@ -25,7 +25,7 @@ bool OutputCallback::encode(pb_ostream_t* stream, const pb_field_t* field)
 	return pb_encode_string(stream, data, length);
 }
 
-bool InputCallback::decode(pb_istream_t* stream, const pb_field_t* field)
+bool InputCallback::decode(pb_istream_t* stream, const pb_field_t*)
 {
 	size_t available = stream->bytes_left;
 	auto new_buf = realloc(data, length + available);
diff --git a/Sming/Libraries/nanopb/src/Stream.cpp b/Sming/Libraries/nanopb/src/Stream.cpp
index 2b44e3b4c1..8884b0f045 100644
--- a/Sming/Libraries/nanopb/src/Stream.cpp
+++ b/Sming/Libraries/nanopb/src/Stream.cpp
@@ -24,7 +24,7 @@ bool InputStream::decode(const pb_msgdesc_t* fields, void* dest_struct)
 	is.callback = [](pb_istream_t* stream, pb_byte_t* buf, size_t count) -> bool {
 		auto self = static_cast<InputStream*>(stream->state);
 		assert(self != nullptr);
-		size_t read = self->stream.readBytes(reinterpret_cast<char*>(buf), count);
+		self->stream.readBytes(reinterpret_cast<char*>(buf), count);
 		return true;
 	};
 	is.state = this;
diff --git a/Sming/Libraries/nanopb/src/include/Protobuf/Stream.h b/Sming/Libraries/nanopb/src/include/Protobuf/Stream.h
index 6e7cfbbcab..7a9bd91a14 100644
--- a/Sming/Libraries/nanopb/src/include/Protobuf/Stream.h
+++ b/Sming/Libraries/nanopb/src/include/Protobuf/Stream.h
@@ -63,7 +63,7 @@ class OutputStream : public Stream
 class DummyOutputStream : public OutputStream
 {
 protected:
-	bool write(const pb_byte_t* buf, size_t count) override
+	bool write(const pb_byte_t*, size_t) override
 	{
 		return true;
 	}
diff --git a/Tools/ci/README.rst b/Tools/ci/README.rst
index fbf692c5cb..00e9e67ace 100644
--- a/Tools/ci/README.rst
+++ b/Tools/ci/README.rst
@@ -55,10 +55,28 @@ To explicitly specify the repository to fetch from::
 
 To list all source locations with warnings::
 
-    python3 scanlog.py last-build.txt -w
+    python3 scanlog.py last-build.txt -w -m
+
+Note: The 'm' flag merges warnings from all jobs. Omitting it shows warnings for each job separately.
 
 To filter out warnings::
 
-    python3 scanlog.py last-build.txt -w --exclude warn-exclude.lst
+    python3 scanlog.py last-build.txt -w -m --exclude warn-exclude.lst
 
 The named exclusion file contains a list of regular expressions to match against.
+
+
+vscode
+------
+
+Warnings output by the scanlog tool can be used as hyperlinks in vscode:
+
+- Select a project, e.g. ``tests/HostTests``, and run ``make ide-vscode``
+- Open the resulting workspace in vscode
+- Add the ``sming`` folder to the project
+- Open an integrated terminal and dump the warnings as shown above.
+  Or, redirect them into a file and ``cat`` it.
+
+The file locations act as links to the source.
+Note that this isn't perfect. For example, esp-idf paths are not resolved to the specific version in use.
+Listing warnings for each job can be helpful as it shows which IDF version was used.
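As a side note on how the exclusion file is consumed: the patterns are OR-ed into a single case-insensitive regular expression, as ``print_warnings`` in the updated ``scanlog.py`` below does. A minimal standalone sketch — the ``load_exclude`` helper and the sample entry are illustrative only, and exactly which string each pattern is tested against is an assumption::

    import re

    def load_exclude(filename: str):
        # One regex per line, OR-ed together and applied case-insensitively,
        # mirroring print_warnings() in scanlog.py
        with open(filename, 'r', encoding='utf-8') as f:
            return re.compile('|'.join(line.strip() for line in f), re.IGNORECASE)

    exclude = load_exclude('warn-exclude.lst')
    entry = "samples/Basic_Audio/app/application.cpp:152: unused parameter 'param' [-Wunused-parameter]"
    print('excluded' if exclude.match(entry) else 'kept')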
diff --git a/Tools/ci/scanlog.py b/Tools/ci/scanlog.py
index 19feaf44b4..73821602b8 100644
--- a/Tools/ci/scanlog.py
+++ b/Tools/ci/scanlog.py
@@ -26,7 +26,6 @@
 import re
 import subprocess
 import json
-from enum import Enum
 
 class Table:
     COL_SEP = '  '
@@ -91,16 +90,25 @@ def __next__(self):
 
 
 class Job:
-    def __init__(self, log: "Log", name: str):
-        self.log = log
+    # Second figure in warning lines not reliable, remove it
+    WARNING_SPLIT = re.compile(r':(\d+): ?(\d+: )?(warning: )')
+
+    def __init__(self, name: str):
         self.name = name
         self.table: Table = Table()
         self.warnings: dict[str, set[str]] = {}  # De-duplicated warnings
         self.warning_count: int = 0  # Includes duplicates
 
-    @property
-    def caption(self):
-        return f'{self.log.name}: {self.name}'
+    def parse_warning(self, line: str):
+        self.warning_count += 1
+        s = line.removeprefix('from ')
+        x = Job.WARNING_SPLIT.split(line)
+        if len(x) == 5:
+            location, warning = Path.normalise(f'{x[0]}:{x[1]}'), x[4]
+        else:
+            location, warning = '?', s
+        lines = self.warnings.setdefault(location, set())
+        lines.add(warning)
 
 
 class Log:
@@ -109,7 +117,7 @@ def __init__(self, name: str):
         self.jobs: list[Job] = []
 
 
-class PathPrefix:
+class Path:
     # Paths vary by platform, so normalise them
     IGNORE_PREFIX = [
         # Linux
@@ -124,126 +132,106 @@ class PathPrefix:
         '/d/a/Sming/Sming/Sming/',
         'd:/a/Sming/Sming/projects/',
         'd:/a/Sming/Sming/Sming/',
-        'c:/tools/',
     ]
-    re_prefix = re.compile('|'.join(f'^{s}' for s in IGNORE_PREFIX), re.IGNORECASE)
+    REMOVE = re.compile('|'.join(f'^{s}' for s in IGNORE_PREFIX), re.IGNORECASE)
+    SUBST = re.compile(r'^d:/opt/esp-idf-\d.\d', re.IGNORECASE)
 
     @staticmethod
-    def remove(line: str) -> str:
-        return PathPrefix.re_prefix.sub('', line)
+    def normalise(line: str) -> str:
+        s = line.replace('\\', '/')
+        s = Path.REMOVE.sub('', s)
+        s = Path.SUBST.sub('esp-idf', s)
+        return os.path.normpath(s)
 
 
-def normalise_path(line: str) -> str:
-    line = line.replace('\\', '/')
-    line = PathPrefix.remove(line)
-    return os.path.normpath(line)
-
-
-def scan_log(filename: str) -> Log:
-    class State(Enum):
-        searching = 1
-        building = 2
-        linking = 3
-
-    BUILD_PREFIX = 'Building '
-
-    log = Log(filename)
-    job = None
-    warnings = {}
-    state = State.searching
-    target = None
-    row = None
+class Parser:
+    JOB_LINE = re.compile(r'build \((.+?)\)\t(Build and test .+)\t(.+)')
+    BUILD_LINE = re.compile(r'Building (.*)/out/.*/clib-App.*')
 
-    def finish_job():
-        if job is None:
-            return
-
-        nonlocal row
-        if row is not None:
-            job.table.append(row)
-            row = None
-
-        nonlocal warnings
-        job.warnings = warnings
-        warnings = {}
-
-    # Second figure in warning lines not reliable, remove it
-    warning_split = re.compile(r':(\d+): ?(\d+: )?(warning: )')
-
-    sys.stderr.write(f'Scanning {filename}\n')
-
-    logfile = open(filename, 'rb')
-    for line in logfile:
+    def __init__(self):
+        self.log = None
+        self.job = None
+        self.state = None
+        self.target = None
+        self._link_line = None
+        self.row = None
+
+    def scan(self, filename: str):
+        sys.stderr.write(f'Scanning {filename}\n')
+
+        self.log = Log(filename)
+        with open(filename, 'rb') as logfile:
+            for line_index, line in enumerate(logfile):
+                try:
+                    self.parse_line(line)
+                except Exception as e:
+                    e.add_note(f'Parsing line {line_index+1}')
+                    raise e
+
+        sys.stderr.write('\r\033[K')
+        self.log.jobs.sort(key=lambda job: job.name)
+        return self.log
+
+    def parse_line(self, line: str):
         line = line.decode('utf-8-sig').strip()
         # Replace typographical quotes with normal ones to ensure equivalence
         line = re.sub(r"‘|’", "'", line)
         job_name = None
-        if not line[:4].isdigit():  # Not a date
-            if not line.startswith('build'):
-                continue
-            job_name, _, line = line.partition('\t')
-            job_name = re.search(r'\((.+?)\)', job_name)[1]
-            step, _, line = line.partition('\t')
-            if not step.startswith('Build and test'):
-                continue
-            if job and job.name != job_name:
-                finish_job()
-                sys.stderr.write(f'\r{job.name} ...\033[K')
-                job = None
-            if job is None:
-                job = Job(log, job_name)
-                log.jobs.append(job)
-                state = State.searching
+        if not line[:4].isdigit():
+            # Not a date: assume line is from GH CLI logfile, looking for valid build line
+            m = Parser.JOB_LINE.match(line)
+            if not m:
+                return
+            job_name, _, line = m[1], m[2], m[3]
+            if self.job and self.job.name != job_name:
+                if self.row:
+                    self.job.table.append(self.row)
+                    self.row = None
+                self.job = None
+            if self.job is None:
+                sys.stderr.write(f'\r{job_name} ...\033[K')
+                self.job = Job(job_name)
+                self.log.jobs.append(self.job)
+                self.state = self._searching
         dtstr, _, line = line.partition(' ')
         if not dtstr:
-            continue
+            return
         if ': warning:' in line:
-            job.warning_count += 1
-            s = line.removeprefix('from ')
-            x = warning_split.split(line)
-            if len(x) == 5:
-                s = normalise_path(f'{x[0]}:{x[1]}')
-                lines = warnings.setdefault(s, set())
-                lines.add(x[4])
-            else:
-                warnings.setdefault(s, set())
-            continue
-        if state == State.searching:
-            if not line.startswith(BUILD_PREFIX):
-                continue
-            if 'clib-App' not in line:
-                continue
-            c = normalise_path(line[len(BUILD_PREFIX):])
-            target, _, _ = c.partition('/out/')
-            state = State.building
-            continue
-        if state == State.building:
-            if line.startswith(f'{os.path.basename(target)}: Linking'):
-                state = State.linking
-                row = None
-            continue
-        if state == State.linking:
-            if row is None:
-                if line.startswith('----'):
-                    row = {'target': target}
-                continue
-            if '|' in line:
-                cols = line.split('|')
-                k, v = cols[0], cols[4]
-            elif ' : ' in line:
-                k, v = line.split(':')
-            else:
-                job.table.append(row)
-                row = None
-                state = State.searching
-                continue
-            k, v = k.strip(), v.strip()
-            row[k] = v
-
-    finish_job()
-
-    sys.stderr.write('\r\033[K')
-    log.jobs.sort(key=lambda job: job.name)
+            self.job.parse_warning(line)
+            return
+        self.state(line)
+
+    def _searching(self, line: str):
+        '''Searching for `Building ... clib-App ...` after which comes the memory usage summary'''
+        match = Parser.BUILD_LINE.match(line)
+        if match:
+            self.target = Path.normalise(match[1])
+            self._link_line = f'{os.path.basename(self.target)}: Linking'
+            self.state = self._building
+
+    def _building(self, line: str):
+        if line.startswith(self._link_line):
+            self.state = self._linking
+            self.row = None
+
+    def _linking(self, line: str):
+        if self.row is None:
+            if line.startswith('----'):
+                self.row = {'target': self.target}
+            return
+        if '|' in line:
+            cols = line.split('|')
+            k, v = cols[0], cols[4]
+        elif ' : ' in line:
+            k, v = line.split(':')
+        else:
+            self.job.table.append(self.row)
+            self.row = self.target = None
+            self.state = self._searching
+            return
+        k, v = k.strip(), v.strip()
+        self.row[k] = v
 
-    return log
 
 
 def print_table(table: Table):
@@ -252,24 +240,23 @@ def print_table(table: Table):
     print()
 
 
-def print_warnings(log: Log, exclude_file: str):
-    exclude = None
-    if exclude_file is not None:
-        with open(exclude_file, 'r') as f:
-            s = '|'.join(line.strip() for line in f)
-        exclude = re.compile(s, re.IGNORECASE)
-
+def merge_warnings(log: Log) -> dict[str, set]:
     warnings = {}
     total_warning_count = 0
     for job in log.jobs:
         total_warning_count += job.warning_count
         for location, details in job.warnings.items():
-            location_warnings = warnings.setdefault(location, {})
-            for det in details:
-                x = location_warnings.setdefault(det, [])
-                x.append(job.name)
+            location_warnings = warnings.setdefault(location, set())
+            location_warnings |= details
+    return warnings
+
 
-    print(f'{total_warning_count} warnings found in {len(warnings)} unique locations.')
+def print_warnings(warnings: dict[str, set], exclude_file: str):
+    exclude = None
+    if exclude_file is not None:
+        with open(exclude_file, 'r', encoding='utf-8') as f:
+            s = '|'.join(line.strip() for line in f)
+        exclude = re.compile(s, re.IGNORECASE)
 
     exclude_count = 0
     if exclude:
@@ -284,7 +271,8 @@ def print_warnings(log: Log, exclude_file: str):
                 warnings[location] = filtered_details
             else:
                 exclude_count += 1
-    print(f'{exclude_count} locations excluded.')
+
+    print(f'Listing {len(warnings)} locations, {exclude_count} excluded.')
 
     loc_width = min(2 + max(len(loc) for loc in warnings), 80)
     loc_pad = ''.ljust(loc_width)
@@ -297,6 +285,7 @@ def print_warnings(log: Log, exclude_file: str):
         for det in sorted(warnings[location]):
             print(f'\t{locstr}{det}')
             locstr = loc_pad
+    print()
 
 
 def fetch_logs(filename: str, repo: str = None, branch: str = None):
@@ -312,8 +301,7 @@ def get_args(cmd: str):
         if branch:
             args.append(f'-b={branch}')
         args.append('--json=displayTitle,headBranch,number,name,databaseId,headSha,conclusion')
-        r = subprocess.run(args, capture_output=True, encoding='utf-8')
-        r.check_returncode()
+        r = subprocess.run(args, capture_output=True, encoding='utf-8', check=True)
         data = json.loads(r.stdout)
 
     joblist = []
@@ -324,15 +312,14 @@ def get_args(cmd: str):
             break
         joblist.append(job)
 
-    with open(filename, 'w') as f:
+    with open(filename, 'w', encoding='utf-8') as f:
         sys.stderr.write(f'Creating {filename}...\n')
         for job in joblist:
             job_id = job['databaseId']
             sys.stderr.write(f'Fetching {job_id}: "{job["displayTitle"]}" - {job["headBranch"]} - {job["name"]} - {job["conclusion"]}\n')
             try:
                 args = get_args('view') + ['--log', str(job_id)]
-                r = subprocess.run(args, stdout=f, encoding='utf-8')
-                r.check_returncode()
+                r = subprocess.run(args, stdout=f, encoding='utf-8', check=True)
             except:
                 os.unlink(filename)
                 raise
@@ -352,7 +339,7 @@ def print_diff(log1: Log, log2: Log):
             target = row1[0]
             i = table2.find_row(target)
             if i < 0:
-                print(f'** {target} NOT found in {table2.caption}')
+                print(f'** {target} NOT found in {log2.name} - {job2.name}')
                 continue
             row2 = table2.rows.pop(i)
             if row2 == row1:
@@ -360,11 +347,11 @@
 
             diff_table = Table()
 
-            data = {'log': job1.log.name}
+            data = {'log': log1.name}
             for k, v in zip(table1.headings[1:], row1[1:]):
                 data[k] = v
             diff_table.append(data)
-            data = {'log': job2.log.name}
+            data = {'log': log2.name}
             for k, v in zip(table2.headings[1:], row2[1:]):
                 data[k] = v
             diff_table.append(data)
@@ -383,7 +370,7 @@ def print_diff(log1: Log, log2: Log):
             print_table(diff_table)
 
     if table2.rows:
-        print(f'** Targets not in {job1.name}')
+        print(f'** Targets not in {log1.name}')
         print_table(table2)
 
 
@@ -396,23 +383,31 @@ def main():
     parser.add_argument('-c', '--compare', help='Second log to compare')
     parser.add_argument('-w', '--warnings', action='store_true', help='Summarise warnings')
    parser.add_argument('-x', '--exclude', help='File containing source locations to exclude')
+    parser.add_argument('-m', '--merge', action='store_true', help='Merge warnings from all jobs')
 
     args = parser.parse_args()
 
     if args.fetch:
         fetch_logs(args.filename, repo=args.repo, branch=args.branch)
 
-    log1 = scan_log(args.filename)
+    log1 = Parser().scan(args.filename)
     if args.compare is None:
         if args.warnings:
-            print_warnings(log1, args.exclude)
+            if args.merge:
+                print(f'Total warnings: {sum(job.warning_count for job in log1.jobs)} from {len(log1.jobs)} jobs.')
+                warnings = merge_warnings(log1)
+                print_warnings(warnings, args.exclude)
+            else:
+                for i, job in enumerate(log1.jobs):
+                    print(f'Job #{i+1}: {job.name} - {job.warning_count} warnings')
+                    print_warnings(job.warnings, args.exclude)
         else:
             for job in log1.jobs:
-                print(job.caption)
+                print(job.name)
                 print_table(job.table)
         return
 
-    log2 = scan_log(args.compare)
+    log2 = Parser().scan(args.compare)
     print_diff(log1, log2)
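To illustrate what the new ``Path.normalise`` above does — a standalone sketch using only a subset of ``IGNORE_PREFIX``; the input paths are made up::

    import os
    import re

    IGNORE_PREFIX = ['/d/a/Sming/Sming/Sming/', 'd:/a/Sming/Sming/Sming/']
    REMOVE = re.compile('|'.join(f'^{s}' for s in IGNORE_PREFIX), re.IGNORECASE)
    SUBST = re.compile(r'^d:/opt/esp-idf-\d.\d', re.IGNORECASE)

    def normalise(line: str) -> str:
        # Same three steps as Path.normalise: unify separators, strip
        # runner-specific prefixes, collapse the versioned Windows IDF path
        s = line.replace('\\', '/')
        s = REMOVE.sub('', s)
        s = SUBST.sub('esp-idf', s)
        return os.path.normpath(s)

    print(normalise('d:\\a\\Sming\\Sming\\Sming\\Core\\SmingCore.h'))
    # Core/SmingCore.h (on a POSIX host)
    print(normalise('d:/opt/esp-idf-5.2/components/driver/i2s.c'))
    # esp-idf/components/driver/i2s.c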
diff --git a/Tools/ci/warn-exclude.lst b/Tools/ci/warn-exclude.lst
index e2b3707490..3c64c00f91 100644
--- a/Tools/ci/warn-exclude.lst
+++ b/Tools/ci/warn-exclude.lst
@@ -3,5 +3,5 @@ Libraries/Adafruit_VL53L0X/.*
 Libraries/Arduino_TensorFlowLite/.*
 Libraries/jerryscript/jerryscript/.*
 Libraries/NimBLE/esp-nimble-cpp/.*
-samples/.*\[-Wunused-parameter\]
+(^|.*/)samples/.*\[-Wunused-parameter\]
 .*\.c.*\[-Wimplicit-fallthrough\]
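The anchor change above matters because the combined pattern is start-anchored when applied with ``re.match`` (an assumption consistent with the fix): a bare ``samples/.*`` only matched top-level sample paths, while library samples live deeper in the tree. A quick check, with a made-up library path::

    import re

    old = re.compile(r'samples/.*\[-Wunused-parameter\]')
    new = re.compile(r'(^|.*/)samples/.*\[-Wunused-parameter\]')

    entry = "Sming/Libraries/Foo/samples/Basic/app.cpp:10: unused parameter 'x' [-Wunused-parameter]"
    print(bool(old.match(entry)))  # False - pattern anchored at start of string
    print(bool(new.match(entry)))  # True - (^|.*/) matches at any directory depth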
diff --git a/samples/Basic_Audio/app/application.cpp b/samples/Basic_Audio/app/application.cpp
index 2d623b36a7..95becf40d8 100644
--- a/samples/Basic_Audio/app/application.cpp
+++ b/samples/Basic_Audio/app/application.cpp
@@ -149,7 +149,7 @@ void checkReceive()
 	debug_i("RX: %u bytes", total);
 }
 
-void IRAM_ATTR i2sCallback(void* param, i2s_event_type_t event)
+void IRAM_ATTR i2sCallback(void*, i2s_event_type_t event)
 {
 	// For this sample, process the data in task context
 	switch(event) {
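Taken together, the refactored pieces behave as in ``main()``: the merged report produced by ``scanlog.py last-build.txt -w -m`` is roughly equivalent to the following sketch, assuming ``scanlog.py`` is importable and the log was fetched beforehand::

    from scanlog import Parser, merge_warnings, print_warnings

    log = Parser().scan('last-build.txt')   # one Job per CI job, sorted by name
    total = sum(job.warning_count for job in log.jobs)
    print(f'Total warnings: {total} from {len(log.jobs)} jobs.')
    print_warnings(merge_warnings(log), 'warn-exclude.lst')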