Skip to content

Commit

Permalink
trace: Add functionality to parse DMA trace
Browse files Browse the repository at this point in the history
  • Loading branch information
colluca committed Jan 10, 2024
1 parent 7f1fdf3 commit f509e23
Show file tree
Hide file tree
Showing 26 changed files with 755 additions and 459 deletions.
15 changes: 15 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,21 @@ jobs:
- name: Build docs
run: make docs

#####################
# Python unit tests #
#####################

pytest:
name: Python unit tests
runs-on: ubuntu-22.04
container:
image: ghcr.io/pulp-platform/snitch_cluster:extend-sim-utils
steps:
- uses: actions/checkout@v2
- name: Run pytest
run: pytest


##############################################
# Simulate SW on Snitch Cluster w/ Verilator #
##############################################
Expand Down
8 changes: 8 additions & 0 deletions .gitlab-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,14 @@ docs:
script:
- make docs

#####################
# Python unit tests #
#####################

pytest:
script:
- pytest

#################################
# Build Snitch cluster software #
#################################
Expand Down
1 change: 1 addition & 0 deletions docs/rm/bench/join.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
::: join
1 change: 1 addition & 0 deletions docs/rm/bench/roi.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
::: roi
1 change: 1 addition & 0 deletions docs/rm/bench/visualize.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
::: visualize
1 change: 1 addition & 0 deletions docs/rm/trace/annotate.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
::: annotate
1 change: 1 addition & 0 deletions docs/rm/trace/events.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
::: events
1 change: 1 addition & 0 deletions docs/rm/trace/gen_trace.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
::: gen_trace
10 changes: 9 additions & 1 deletion mkdocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ plugins:
- mkdocstrings:
handlers:
python:
paths: [util/sim]
paths: [util/sim, util/trace, util/bench]
- macros:
on_error_fail: true
use_directory_urls: false
Expand Down Expand Up @@ -57,6 +57,14 @@ nav:
- sim_utils: rm/sim/sim_utils.md
- rm/sim/Simulation.md
- rm/sim/Simulator.md
- Trace Utilities:
- gen_trace.py: rm/trace/gen_trace.md
- annotate.py: rm/trace/annotate.md
- events.py: rm/trace/events.md
- Benchmarking Utilities:
- join.py: rm/bench/join.md
- roi.py: rm/bench/roi.md
- visualize.py: rm/bench/visualize.md
- Snitch Runtime:
- Pages: runtime/Pages/index.md
- Files: runtime/Files/index.md
Expand Down
17 changes: 10 additions & 7 deletions python-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,20 +8,23 @@ editorconfig-checker==2.3.51
flake8
gitpython
hjson
json5
jsonref
jsonschema
mako
matplotlib
mkdocstrings
mkdocstrings-python
mako
pandas
progressbar2
tabulate
yamllint
pyyaml
psutil
pyelftools
pytablewriter
pytest
pyyaml
tabulate
termcolor
pandas
pyelftools
psutil
yamllint

-r docs/requirements.txt
-r sw/dnn/requirements.txt
44 changes: 21 additions & 23 deletions target/common/common.mk
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@ VLIB ?= $(QUESTA_SEPP) vlib
GENTRACE_PY ?= $(UTIL_DIR)/trace/gen_trace.py
ANNOTATE_PY ?= $(UTIL_DIR)/trace/annotate.py
EVENTS_PY ?= $(UTIL_DIR)/trace/events.py
PERF_CSV_PY ?= $(UTIL_DIR)/trace/perf_csv.py
LAYOUT_EVENTS_PY ?= $(UTIL_DIR)/trace/layout_events.py
EVENTVIS_PY ?= $(UTIL_DIR)/trace/eventvis.py
JOIN_PY ?= $(UTIL_DIR)/bench/join.py
ROI_PY ?= $(UTIL_DIR)/bench/roi.py
VISUALIZE_PY ?= $(UTIL_DIR)/bench/visualize.py

VERILATOR_ROOT ?= $(dir $(shell $(VERILATOR_SEPP) which verilator))..
VLT_ROOT ?= ${VERILATOR_ROOT}
Expand Down Expand Up @@ -232,26 +232,27 @@ endef

DASM_TRACES = $(shell (ls $(LOGS_DIR)/trace_hart_*.dasm 2>/dev/null))
TXT_TRACES = $(shell (echo $(DASM_TRACES) | sed 's/\.dasm/\.txt/g'))
PERF_TRACES = $(shell (echo $(DASM_TRACES) | sed 's/trace_hart/hart/g' | sed 's/.dasm/_perf.json/g'))
PERF_DUMPS = $(shell (echo $(DASM_TRACES) | sed 's/trace_hart/hart/g' | sed 's/.dasm/_perf.json/g'))
ANNOTATED_TRACES = $(shell (echo $(DASM_TRACES) | sed 's/\.dasm/\.s/g'))
DIFF_TRACES = $(shell (echo $(DASM_TRACES) | sed 's/\.dasm/\.diff/g'))

GENTRACE_OUTPUTS = $(TXT_TRACES) $(PERF_TRACES)
GENTRACE_OUTPUTS = $(TXT_TRACES) $(PERF_DUMPS)
ANNOTATE_OUTPUTS = $(ANNOTATED_TRACES)
PERF_CSV = $(LOGS_DIR)/perf.csv
EVENT_CSV = $(LOGS_DIR)/event.csv
TRACE_CSV = $(LOGS_DIR)/trace.csv
PERF_DUMP = $(LOGS_DIR)/perf.json
ROI_DUMP = $(LOGS_DIR)/roi.json
TRACE_JSON = $(LOGS_DIR)/trace.json

.PHONY: traces annotate perf-csv event-csv layout
.PHONY: traces annotate trace-view clean-traces clean-annotate
traces: $(GENTRACE_OUTPUTS)
annotate: $(ANNOTATE_OUTPUTS)
perf-csv: $(PERF_CSV)
event-csv: $(EVENT_CSV)
layout: $(TRACE_CSV) $(TRACE_JSON)
trace-view: $(TRACE_JSON)
clean-traces:
rm -f $(GENTRACE_OUTPUTS)
clean-annotate:
rm -f $(ANNOTATE_OUTPUTS)

$(LOGS_DIR)/trace_hart_%.txt $(LOGS_DIR)/hart_%_perf.json: $(LOGS_DIR)/trace_hart_%.dasm $(GENTRACE_PY)
$(DASM) < $< | $(PYTHON) $(GENTRACE_PY) --permissive -d $(LOGS_DIR)/hart_$*_perf.json > $(LOGS_DIR)/trace_hart_$*.txt
$(addprefix $(LOGS_DIR)/,trace_hart_%.txt hart_%_perf.json): $(LOGS_DIR)/trace_hart_%.dasm $(GENTRACE_PY)
$(DASM) < $< | $(PYTHON) $(GENTRACE_PY) --permissive --dma-trace $(LOGS_DIR)/dma_trace_$*.log --dump-hart-perf $(LOGS_DIR)/hart_$*_perf.json --dump-dma-perf $(LOGS_DIR)/dma_$*_perf.json -o $(LOGS_DIR)/trace_hart_$*.txt

# Generate source-code interleaved traces for all harts. Reads the binary from
# the logs/.rtlbinary file that is written at start of simulation in the vsim script
Expand All @@ -261,14 +262,11 @@ $(LOGS_DIR)/trace_hart_%.s: $(LOGS_DIR)/trace_hart_%.txt ${ANNOTATE_PY}
$(LOGS_DIR)/trace_hart_%.diff: $(LOGS_DIR)/trace_hart_%.txt ${ANNOTATE_PY}
$(PYTHON) ${ANNOTATE_PY} ${ANNOTATE_FLAGS} -o $@ $(BINARY) $< -d

$(PERF_CSV): $(PERF_TRACES) $(PERF_CSV_PY)
$(PYTHON) $(PERF_CSV_PY) -o $@ -i $(PERF_TRACES)
$(PERF_DUMP): $(PERF_DUMPS) $(JOIN_PY)
$(PYTHON) $(JOIN_PY) -i $(shell ls $(LOGS_DIR)/*_perf.json) -o $@

$(EVENT_CSV): $(PERF_TRACES) $(PERF_CSV_PY)
$(PYTHON) $(PERF_CSV_PY) -o $@ -i $(PERF_TRACES) --filter tstart tend
$(ROI_DUMP): $(PERF_DUMP) $(ROI_SPEC) $(ROI_PY)
$(PYTHON) $(ROI_PY) $(PERF_DUMP) $(ROI_SPEC) --cfg $(CFG) -o $@

$(TRACE_CSV): $(EVENT_CSV) $(LAYOUT_FILE) $(LAYOUT_EVENTS_PY)
$(PYTHON) $(LAYOUT_EVENTS_PY) $(LAYOUT_EVENTS_FLAGS) $(EVENT_CSV) $(LAYOUT_FILE) -o $@

$(TRACE_JSON): $(TRACE_CSV) $(EVENTVIS_PY)
$(PYTHON) $(EVENTVIS_PY) -o $@ $(TRACE_CSV)
$(TRACE_JSON): $(ROI_DUMP) $(VISUALIZE_PY)
$(PYTHON) $(VISUALIZE_PY) $(ROI_DUMP) --traces $(TXT_TRACES) --elf $(BINARY) -o $@
Empty file added util/bench/__init__.py
Empty file.
62 changes: 62 additions & 0 deletions util/bench/join.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
# Copyright 2024 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
#
# Author: Luca Colagrande <[email protected]>
"""Combines performance metrics from all threads into one JSON file.
This script takes the performance metrics from multiple cores or DMA
engines, in JSON format as dumped by the [`events.py`][events] or
[`gen_trace.py`][gen_trace] scripts, and merges them into a single
JSON file for global inspection and further processing.
"""

import sys
import argparse
import re
import json


# Regex extracting the thread type (e.g. "hart" or "dma") and the hexadecimal
# thread index from a dump filename such as "hart_00000000_perf.json".
FILENAME_REGEX = r'([a-z]+)_([0-9a-f]+)_perf.json'


def main():
    """Merge per-thread performance metric dumps into a single JSON file.

    Every input filename must match ``FILENAME_REGEX``; the thread type and
    (hexadecimal) index parsed from the name form the key
    ``"<type>_<decimal index>"`` under which that file's metrics are stored
    in the output JSON.

    Raises:
        ValueError: If an input filename does not follow the expected
            naming scheme.
    """
    # Argument parsing
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-i',
        '--inputs',
        metavar='<inputs>',
        nargs='+',
        # Default to an empty list so that invoking the tool without -i
        # produces an empty output instead of a TypeError in sorted(None).
        default=[],
        help='Input performance metric dumps')
    parser.add_argument(
        '-o',
        '--output',
        metavar='<output>',
        nargs='?',
        default='perf.json',
        help='Output JSON file')
    args = parser.parse_args()

    # Populate a dictionary (one entry per thread) enumerating all the
    # performance metrics for each thread
    data = {}
    for filename in sorted(args.inputs):

        # Get thread ID and type (DMA or hart) from filename. Fail with an
        # informative error rather than an AttributeError on a malformed name.
        match = re.search(FILENAME_REGEX, filename)
        if match is None:
            raise ValueError(
                f'Input file {filename} does not match pattern {FILENAME_REGEX}')
        typ = match.group(1)
        idx = int(match.group(2), base=16)

        # Populate dictionary of metrics for the current thread
        with open(filename, 'r') as f:
            data[f'{typ}_{idx}'] = json.load(f)

    # Export data
    with open(args.output, 'w') as f:
        json.dump(data, f, indent=4)


if __name__ == '__main__':
    sys.exit(main())
116 changes: 116 additions & 0 deletions util/bench/roi.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
#!/usr/bin/env python3
# Copyright 2024 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
#
# Author: Luca Colagrande <[email protected]>
"""Filters and labels execution regions for visualization.
This script takes a JSON file of performance metrics, as output by
[`join.py`][join], and generates another JSON, where the execution
regions are filtered and labeled for visualization, according to an
auxiliary region-of-interest (ROI) specification file (JSON format).
The specification file can be a Mako template to parameterize
certain parameters, such as the number of clusters in the system.
The output JSON can be passed to the [`visualize.py`][visualize]
script for visualization.
Check out `test_data/data.json` and `test_data/spec.json` for an
example input and specification file which can be fed as input to the
tool respectively. The corresponding output is contained in
`test_data/roi.json`.
"""

import argparse
import json
import json5
from mako.template import Template
import sys


def format_roi(roi, label):
    """Return a visualization-ready ROI dictionary.

    Copies the start/end timestamps from ``roi``, attaches ``label``, and
    collects every remaining key of ``roi`` under the ``"attrs"``
    sub-dictionary.
    """
    attrs = {}
    for key, value in roi.items():
        if key not in ('tstart', 'tend'):
            attrs[key] = value
    return {
        'label': label,
        'tstart': roi['tstart'],
        'tend': roi['tend'],
        'attrs': attrs,
    }


def get_roi(data, thread, idx):
    """Look up execution region ``idx`` of ``thread`` in the joined data.

    ``thread`` is a string of the form ``"<type>_<index>"`` (e.g. ``"hart_0"``
    or ``"dma_1"``). Hart data is a plain list of regions, while DMA data
    nests its regions under the ``"transfers"`` key.

    Raises:
        ValueError: If the thread type is neither ``hart`` nor ``dma``.
    """
    kind, index = thread.split('_')
    # The numeric index is not used for the lookup (the full thread name
    # already keys into ``data``), but the conversion validates it.
    index = int(index)
    regions = data[thread]
    if kind == 'hart':
        return regions[idx]
    if kind == 'dma':
        return regions['transfers'][idx]
    raise ValueError(f"Unsupported thread type {kind}")


def filter_and_label_rois(data, spec):
    """Apply an ROI specification to joined performance data.

    For every thread entry in ``spec``, selects the requested regions from
    ``data`` (by index) and attaches the labels given in the specification.
    Threads whose specification yields no regions are omitted from the
    result.
    """
    output = {}
    for thread_spec in spec:
        thread = thread_spec['thread']
        labeled = [
            format_roi(get_roi(data, thread, entry['idx']), entry['label'])
            for entry in thread_spec['roi']
        ]
        # Only threads with at least one selected region appear in the output
        if labeled:
            output[thread] = labeled
    return output


def load_json_inputs(input_path, spec_path, **kwargs):
    """Load the performance data and render the ROI specification.

    The specification file is treated as a Mako template and rendered with
    ``kwargs`` before parsing, allowing it to be parameterized (e.g. by the
    hardware configuration). Both files are parsed as JSON5.

    Returns:
        A ``(data, spec)`` tuple with the parsed performance data and the
        rendered, parsed specification.
    """
    # Parse the input performance data
    with open(input_path, 'r') as f:
        data = json5.load(f)
    # Render the specification template, then parse the result
    with open(spec_path, 'r') as f:
        template = Template(f.read())
    spec = json5.loads(template.render(**kwargs))
    return data, spec


def main():
    """Command-line entry point.

    Reads a joined performance-metrics JSON and an ROI specification
    (optionally rendered as a Mako template with a hardware configuration),
    and writes the filtered and labeled ROIs to the output JSON file.
    """
    # Argument parsing
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'input',
        help='Input JSON file')
    parser.add_argument(
        'spec',
        help='ROI specification file (JSON format)')
    parser.add_argument(
        '--cfg',
        help='Hardware configuration file used to render the specification file')
    parser.add_argument(
        '-o',
        '--output',
        nargs='?',
        default='roi.json',
        help='Output JSON file')
    args = parser.parse_args()

    # Load hardware configuration, if provided. --cfg is optional, so guard
    # the open(): unconditionally opening args.cfg raised a TypeError when
    # the flag was omitted.
    cfg = None
    if args.cfg is not None:
        with open(args.cfg, 'r') as f:
            cfg = json5.load(f)

    # Read and render input files
    data, spec = load_json_inputs(args.input, args.spec, cfg=cfg)

    # Process inputs and generate output JSON
    output = filter_and_label_rois(data, spec)

    # Write output to file
    with open(args.output, 'w') as f:
        json.dump(output, f, indent=4)
Empty file added util/bench/tests/__init__.py
Empty file.
Loading

0 comments on commit f509e23

Please sign in to comment.