Showing 10 changed files with 247 additions and 76 deletions.
@@ -0,0 +1,113 @@
#!/usr/bin/env python3
# Copyright 2023 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
#
# Tim Fischer <[email protected]>
# Viviane Potocnik <[email protected]>
# Luca Colagrande <[email protected]>

import argparse
import pathlib
import hjson
import sys
import os
import torch

sys.path.append(os.path.join(os.path.dirname(__file__), "../../../../util/sim/"))
import data_utils  # noqa: E402
from data_utils import emit_license, \
    format_struct_definition, format_array_definition, \
    format_array_declaration, format_ifdef_wrapper  # noqa: E402

torch.manual_seed(42)

# AXI splits bursts crossing 4KB address boundaries. To minimize the
# occurrence of these splits, the data should be aligned to 4KB.
BURST_ALIGNMENT = 4096

PRECISION_T = {
    '64': 'FP64',
    '32': 'FP32',
    '16': 'FP16',
    '8': 'FP8'
}


def golden_model(ifmap, axis):
    softmax = torch.nn.Softmax(dim=axis)
    return softmax(ifmap)


def emit_header(**kwargs):
    batch_size = kwargs['input_dim']['batch_size']
    seq_len = kwargs['input_dim']['seq_len']
    input_samples = kwargs['input_dim']['input_samples']
    reduce_dim = kwargs['reduce_dim']
    prec = str(kwargs['prec'])

    torch_type = data_utils.floating_point_torch_type(prec)
    ifmap = torch.randn(batch_size, seq_len, input_samples, requires_grad=False, dtype=torch_type)

    ofmap = golden_model(ifmap, reduce_dim)
    ofmap = ofmap.detach().numpy()

    ctype = data_utils.floating_point_ctype(prec)

    ifmap_uid = 'ifmap'
    ofmap_uid = 'ofmap'

    layer_cfg = {
        **kwargs['input_dim'],
        'reduce_dim': reduce_dim,
        'ifmap': ifmap_uid,
        'ofmap': ofmap_uid,
        'dtype': PRECISION_T[prec]
    }

    data_str = [emit_license()]
    data_str += [format_array_declaration(ctype, ifmap_uid, ifmap.shape,
                                          alignment=BURST_ALIGNMENT)]
    data_str += [format_array_declaration(ctype, ofmap_uid, ofmap.shape,
                                          alignment=BURST_ALIGNMENT)]
    data_str += [format_struct_definition('softmax_layer_t', 'layer', layer_cfg)]
    data_str += [format_array_definition(ctype, ifmap_uid, ifmap,
                                         alignment=BURST_ALIGNMENT)]
    result_def = format_array_definition(ctype, 'golden', ofmap, alignment=BURST_ALIGNMENT)
    data_str += [format_ifdef_wrapper('BIST', result_def)]
    data_str = '\n\n'.join(data_str)

    return data_str


def main():

    parser = argparse.ArgumentParser(description='Generate data for softmax kernel')
    parser.add_argument(
        "-c", "--cfg",
        type=pathlib.Path,
        required=True,
        help='Select param config file for the kernel'
    )
    parser.add_argument(
        '--section',
        type=str,
        help='Section to store matrices in')
    parser.add_argument(
        'output',
        type=pathlib.Path,
        help='Path of the output header file')
    args = parser.parse_args()

    # Load param config file
    with args.cfg.open() as f:
        param = hjson.loads(f.read())
    param['section'] = args.section

    # Emit header file
    with open(args.output, 'w') as f:
        f.write(emit_header(**param))


if __name__ == '__main__':
    main()
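For context, emit_header() above is driven by the hjson file passed via --cfg. The sketch below shows a hypothetical parameter set with made-up placeholder values (not taken from this commit) and the same call that main() performs after loading the config:

# Hypothetical config mirroring the keys emit_header() reads above;
# the numbers are illustrative placeholders only.
param = {
    'input_dim': {
        'batch_size': 2,
        'seq_len': 8,
        'input_samples': 16
    },
    'reduce_dim': -1,  # softmax over the innermost (input_samples) axis
    'prec': 32         # selects FP32 via PRECISION_T
}

header = emit_header(**param)  # returns the full C header as a single string
print(header.splitlines()[0])  # e.g. inspect the emitted license banner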
@@ -0,0 +1,14 @@
// Copyright 2023 ETH Zurich and University of Bologna.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//
// Luca Colagrande <[email protected]>

#include "dnn.h"

#include "data.h"

int main() {
    softmax_layer(layer);
    return 0;
}
@@ -0,0 +1,85 @@
#!/usr/bin/env python3
# Copyright 2023 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
#
# Luca Colagrande <[email protected]>

import sys
from pathlib import Path
import numpy as np
import torch
from data.datagen import golden_model

sys.path.append(str(Path(__file__).parent / '../../../util/sim/'))
import verification  # noqa: E402
from elf import Elf  # noqa: E402
from data_utils import bytes_to_float, bytes_to_struct  # noqa: E402


ERR_THRESHOLD = 0.003

PRECISION_T = {
    8: '64',
    4: '32',
    2: '16',
    1: '8'
}

NUMPY_T = {
    '64': np.float64,
    '32': np.float32,
    '16': np.float16
}


def main():
    # Run simulation and get outputs
    args = verification.parse_args()
    raw_results = verification.simulate(sim_bin=args.sim_bin,
                                        snitch_bin=args.snitch_bin,
                                        symbols_bin=args.symbols_bin,
                                        log=args.log,
                                        output_uids=['ofmap'])

    # Extract input operands from ELF file
    if args.symbols_bin:
        elf = Elf(args.symbols_bin)
    else:
        elf = Elf(args.snitch_bin)

    layer_struct = {
        'batch_size': 'I',
        'seq_len': 'I',
        'input_samples': 'I',
        'reduce_dim': 'i',
        'ifmap_ptr': 'I',
        'ofmap_ptr': 'I',
        'dtype': 'I'
    }
    layer = bytes_to_struct(elf.get_symbol_contents('layer'), layer_struct)
    batch_size = layer['batch_size']
    seq_len = layer['seq_len']
    input_samples = layer['input_samples']
    reduce_dim = layer['reduce_dim']
    prec = PRECISION_T[layer['dtype']]

    ifmap = np.array(bytes_to_float(elf.get_symbol_contents('ifmap'), prec), dtype=NUMPY_T[prec])
    ifmap = ifmap.reshape(batch_size, seq_len, input_samples)
    ifmap = torch.from_numpy(ifmap)

    # Verify results
    ofmap_actual = np.array(bytes_to_float(raw_results['ofmap'], prec), dtype=NUMPY_T[prec])
    ofmap_golden = golden_model(ifmap, reduce_dim).detach().numpy().flatten()

    absolute_err = np.absolute(ofmap_golden - ofmap_actual)
    fail = np.any(absolute_err > ERR_THRESHOLD)
    if fail:
        verification.dump_results_to_csv([ofmap_golden, ofmap_actual, absolute_err],
                                         Path.cwd() / 'softmax_results.csv')

    return int(fail)


if __name__ == "__main__":
    sys.exit(main())
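The pass/fail criterion above reduces to an elementwise absolute-error check against a plain softmax along reduce_dim. A self-contained NumPy sketch of the same comparison, with random placeholder data standing in for the ELF inputs and the simulator output, could look like this:

import numpy as np

def softmax_ref(x, axis):
    # Numerically stable softmax, equivalent to torch.nn.Softmax(dim=axis)
    shifted = x - np.max(x, axis=axis, keepdims=True)
    e = np.exp(shifted)
    return e / np.sum(e, axis=axis, keepdims=True)

ERR_THRESHOLD = 0.003

# Placeholder stand-ins for the golden model and the simulated hardware output
ifmap = np.random.randn(2, 8, 16).astype(np.float32)
golden = softmax_ref(ifmap, axis=-1).flatten()
actual = golden + np.float32(1e-4) * np.random.randn(golden.size).astype(np.float32)

absolute_err = np.abs(golden - actual)
print("fail:", bool(np.any(absolute_err > ERR_THRESHOLD)))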