diff --git a/.gitignore b/.gitignore index 35b546b..b5d14b2 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,31 @@ *.upa *.upb main.pdf -*.bin \ No newline at end of file +*.bin +*.o + +# hls projects +**/*.proj/ +**/*.comp/ + +# ignore any python virtual environments +**/.venv/ + +# ignore examples/tcl files since they are auto-generated +examples/*.tcl + +*.pyc + +.vscode/ + +examples/hls/ + +.gradle/ + +examples/*.out.dat +examples/out.*.dat +examples/out.dat +examples/video*.bmp + +scripts/tests/___summary_all_header.adoc + diff --git a/Makefile b/Makefile index 97226cc..429c3d4 100644 --- a/Makefile +++ b/Makefile @@ -1,13 +1,24 @@ base=main all: main.pdf +# pdflatex is part of the Texlive distribution. +# +# See https://tug.org/texlive/ for TexLive installation instructions. +# Docker image available at https://hub.docker.com/r/texlive/texlive/ + +# NOTE: You *must* put the path to your texlive installation here. +TEXLIVE_BINDIR=/wrk/xsjhdnobkup2/mpettigr/projects_nobkup/texlive/texlive/install-tl-20240625/./texlive/2024/bin/x86_64-linux +PDFLATEX=PATH=$(TEXLIVE_BINDIR):$$PATH $(TEXLIVE_BINDIR)/pdflatex +BIBTEX=PATH=$(TEXLIVE_BINDIR):$$PATH $(TEXLIVE_BINDIR)/bibtex + main.aux: all.bib - pdflatex \\nonstopmode\\input main.tex - bibtex main + $(PDFLATEX) \\nonstopmode\\input main.tex + $(BIBTEX) main main.pdf: main.aux *.tex - pdflatex \\nonstopmode\\input main.tex - pdflatex \\nonstopmode\\input main.tex + $(PDFLATEX) \\nonstopmode\\input main.tex + $(PDFLATEX) \\nonstopmode\\input main.tex clean: rm -rf main.pdf *.log *~ *.aux *.bbl *.blg *.out + diff --git a/commitlint.config.js b/commitlint.config.js new file mode 100644 index 0000000..dddcb17 --- /dev/null +++ b/commitlint.config.js @@ -0,0 +1,4 @@ +module.exports = { + extends: ['@commitlint/config-conventional'], + ignores: [(commit) => commit.includes('chore(release)')], +}; \ No newline at end of file diff --git a/examples/Makefile b/examples/Makefile index f6816d6..467b809 100644 --- a/examples/Makefile +++ b/examples/Makefile @@ -1,4 +1,21 @@ -VHLS = $(shell vivado_hls -r) +# This makefile was tested with Vivado HLS 2019.2 +# and Vitis v2024.1 +# +# + +# select the HLS compiler to use (vivado_hls or vitis) +USE_VITIS=1 + +ifeq ($(USE_VITIS),1) +VITIS_COMPILER := $(shell dirname $(shell which vitis_hls)) + +VHLS = $(VITIS_COMPILER)/../../../Vitis/2024.2 +HLS_COMPILER=vitis-run --mode hls --tcl +else +VHLS := $(shell vivado_hls -r) +HLS_COMPILER=vivado_hls -f +endif + EXAMPLES = $(shell ls *-top.c) EXAMPLES_CPP = $(shell ls *-top.cpp) EXAMPLES_BIN = $(patsubst %.c,%,$(EXAMPLES)) @@ -6,54 +23,145 @@ EXAMPLES_BIN += $(patsubst %.cpp,%,$(EXAMPLES_CPP)) EXAMPLES_LOG = $(patsubst %.c,%.log,$(EXAMPLES)) EXAMPLES_LOG += $(patsubst %.cpp,%.log,$(EXAMPLES_CPP)) +#CC = $(VHLS)/lnx64/tools/gcc/bin/gcc +#CXX = $(VHLS)/lnx64/tools/gcc/bin/g++ +#CC = $(VHLS)/lnx64/tools/clang/bin/clang +#CXX = $(VHLS)/lnx64/tools/clang/bin/clang++ +CC = gcc +CXX = g++ + +# insert new LD path to the LD_LIBRARY_PATH if it is not already there +# +LD_PATH_NEW = /usr/lib/x86_64-linux-gnu +LD_LIBRARY_PATH := $(shell echo $(value LD_LIBRARY_PATH) | grep -q $(LD_PATH_NEW) || echo $(LD_PATH_NEW):)$(value LD_LIBRARY_PATH) + +#GEN_TCL = $(CURDIR)/../scripts/gen_hls_csynth_script.py +GEN_TCL = $(CURDIR)/../scripts/gen_hls_runner_script.py + +#HLS_CONFIG_FILE = $(CURDIR)/__hls_config__.ini +HLS_CONFIG_FILE = $(CURDIR)/__hls_config__versal500__.ini + $(warning $(EXAMPLES_LOG)) -INPUTFILE=BBC_HD_test_1920x1080.bmp INPUTFILE=test_20x20.bmp 
+INPUTFILE_GOLDEN_REF=test_20x20_filtered_opencv.bmp video_simple-top.log: video_simple-top.bin ./$< $(INPUTFILE) video_simple.bmp > $@ video_2dfilter-top.log: video_2dfilter-top.bin - ./$< $(INPUTFILE) video_2dfilter.bmp > $@ + ./$< $(INPUTFILE) $(INPUTFILE_GOLDEN_REF) video_2dfilter.bmp > $@ video_2dfilter_boundary_condition-top.log: video_2dfilter_boundary_condition-top.bin - ./$< $(INPUTFILE) video_2dfilter_boundary_condition.bmp > $@ + ./$< $(INPUTFILE) $(INPUTFILE_GOLDEN_REF) video_2dfilter_boundary_condition.bmp > $@ video_2dfilter_linebuffer-top.log: video_2dfilter_linebuffer-top.bin - ./$< $(INPUTFILE) video_2dfilter_linebuffer.bmp > $@ + ./$< $(INPUTFILE) $(INPUTFILE_GOLDEN_REF) video_2dfilter_linebuffer.bmp > $@ video_2dfilter_linebuffer_extended-top.log: video_2dfilter_linebuffer_extended-top.bin - ./$< $(INPUTFILE) video_2dfilter_linebuffer_extended.bmp > $@ + ./$< $(INPUTFILE) $(INPUTFILE_GOLDEN_REF) video_2dfilter_linebuffer_extended.bmp > $@ video_2dfilter_linebuffer_extended_constant-top.log: video_2dfilter_linebuffer_extended_constant-top.bin - ./$< $(INPUTFILE) video_2dfilter_linebuffer_extended_constant.bmp > $@ + ./$< $(INPUTFILE) $(INPUTFILE_GOLDEN_REF) video_2dfilter_linebuffer_extended_constant.bmp > $@ + +%.o: %.c + $(CC) -I$(VHLS)/include -c $< -o $@ +%.o: %.cpp + $(CXX) -I$(VHLS)/include -c $< -o $@ + +complex_fir-top.bin: complex_fir-top.o complex_fir.o fir.o + LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) $(CXX) -I$(VHLS)/include -g -o $@ $^ %-top.bin: %-top.c %.c bitmap.c - gcc -I$(VHLS)/include -std=c99 $? -g -o $@ + LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) $(CC) -I$(VHLS)/include -std=c99 $^ -g -o $@ %-top.bin: %-top.cpp %.cpp - g++ -I$(VHLS)/include $? -g -o $@ + LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) $(CXX) -I$(VHLS)/include $^ -g -o $@ insertion_cell_sort-top.bin: insertion_cell_sort-top.cpp insertion_cell_sort.cpp insertion_sort.cpp - g++ -I$(VHLS)/include $? -g -o $@ - - #: %-top.bin -%.tcl: - rm -f $@ - echo "open_project $*.proj -reset" > $@ - echo "add_file $*.c" >> $@ - echo "add_file -tb $*-top.c" >> $@ - echo "set_top $*" >> $@ - echo "open_solution solution -reset" >> $@ - echo "set_part virtex7" >> $@ - echo "create_clock -period 5" >> $@ - echo "csynth_design" >> $@ - -%.proj: %.tcl - vivado_hls $? + echo $(LD_LIBRARY_PATH) + LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) $(CXX) -I$(VHLS)/include $^ -g -o $@ + +# missing cos_table and sin_table definition.
No testbench files available +# dft.c \ +# dft_precompute.c \ + +CC_HLS_TARGET_FILES = \ + block_fir.c \ + fir.c \ + fir11_initial.c \ + fir_cslowed.c \ + firrolled.c \ + matrix_vector_base.c \ + matrix_vector_base_unroll_inner.c \ + matrix_vector_optimized.c \ + matrix_vector_unroll_inner2.c \ + video_2dfilter.c \ + video_2dfilter_boundary_condition.c \ + video_2dfilter_linebuffer.c \ + video_2dfilter_linebuffer_extended.c \ + video_2dfilter_linebuffer_extended_constant.c \ + video_simple.c + +CXX_HLS_TARGET_FILES = \ + block_mm.cpp \ + complex_fir.cpp \ + cordic.cpp \ + cordic_fixed.cpp \ + fft_stages.cpp \ + fft_stages_loop.cpp \ + fft_sw.cpp \ + histogramSW.cpp \ + histogram_dependence.cpp \ + histogram_opt1.cpp \ + huffman_encoding.cpp \ + insertion_cell_sort.cpp \ + insertion_sort.cpp \ + insertion_sort_parallel.cpp \ + insertion_sort_relaxed.cpp \ + matrixmultiplication.cpp \ + merge_sort.cpp \ + merge_sort_loop_merged.cpp \ + merge_sort_parallel.cpp \ + merge_sort_restructured.cpp \ + prefixsumBO.cpp \ + prefixsumHW.cpp \ + prefixsumSW.cpp \ + prefixsum_optimized.cpp \ + prefixsum_unrolled.cpp \ + restructured_merge_sort.cpp \ + spmv.cpp \ + spmv_restructured.cpp \ + spmv2.cpp \ + spmv2_interleaved.cpp \ + spmv2_restructured.cpp \ + spmv_unrolled.cpp + +HLS_TARGETS = $(patsubst %.c,%.comp,$(CC_HLS_TARGET_FILES)) +HLS_TARGETS += $(patsubst %.cpp,%.comp,$(CXX_HLS_TARGET_FILES)) + +%.comp: %.tcl +# vivado_hls -f $? + $(HLS_COMPILER) $? || (echo "$(HLS_COMPILER) $? failed $$?"; exit 0) + +%.tcl: %.c + $(GEN_TCL) -c $(HLS_CONFIG_FILE) -i $< -o $@ + +%.tcl: %.cpp + $(GEN_TCL) -c $(HLS_CONFIG_FILE) -i $< -o $@ %.log: %.bin - ./$< > $@ + -./$< > $@ || (echo "./$< > $@ failed $$?"; exit 1) + + +hls: $(HLS_TARGETS) test: $(EXAMPLES_LOG) + +clean: + rm -rf *.o *.log *.bin *.tcl *.comp logs/* hls/* + +.PHONY: clean hls test + +# Prevent make from deleting the .bin intermediate files +.PRECIOUS: %.bin %.tcl block_fir-top.bin fir-top.bin diff --git a/examples/__hls_config__versal500__.ini b/examples/__hls_config__versal500__.ini new file mode 100644 index 0000000..e427a7d --- /dev/null +++ b/examples/__hls_config__versal500__.ini @@ -0,0 +1,151 @@ +[DEFAULTS] +part = xcvh1522-vsva3697-3HP-e-S +period = 2 + +# Each section below is named after an HLS source file; the key = value +# pairs inside a section supply its settings (top function, extra source +# files, and testbench files). +[block_mm.cpp] +top = blockmatmul +tb_files = none + + +[complex_fir.cpp] +top = complexFIR +files = fir.c \ + complex_fir.cpp +tb_files = complex_fir-top.cpp + +[cordic_fixed.cpp] +top = cordic + +[dft_precompute.c] +top = dft + +[fft_stages_loop.cpp] +top = fft_streaming + +[fft_stages.cpp] +top = fft_streaming + +[fft_sw.cpp] +top = fft + +[fir11_initial.c] +top = fir +tb_files = fir11_initial-top.c + +[fir_cslowed.c] +top = block_fir +tb_files = fir-top.c + +[firrolled.c] +top = block_fir +tb_files = fir-top.c + +[histogram_dependence.cpp] +top = histogram + +[histogram_opt1.cpp] +top = histogram + +[histogram_parallel.cpp] +top = histogram + +[histogramSW.cpp] +top = histogram +tb_files = histogram_opt1-top.cpp + +[huffman_encoding.cpp] +top = huffman_encoding +files = huffman_encoding.cpp \ + huffman_canonize_tree.cpp \ + huffman_compute_bit_length.cpp \ + huffman_create_codeword.cpp \ + huffman_create_tree.cpp \ + huffman_filter.cpp \ + huffman_sort.cpp \ + huffman_truncate_tree.cpp +tb_files = huffman_encoding_test.cpp + +[huffman_encoding.cpp:test2] +top = huffman_encoding.cpp:top +files = huffman_encoding.cpp:files +tb_files = huffman_encoding_test2.cpp + 
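The sections of this config file (which continues below) each map an HLS source file to its top function, extra sources, and testbench files, with [DEFAULTS] supplying the part and clock period. As a rough sketch of how a generator such as gen_hls_runner_script.py might consume one section to produce the Tcl that the %.tcl Makefile rules expect — the script itself is not shown in this diff, so the parsing logic and the <name>-top.c fallback are assumptions:

    import configparser

    # [DEFAULTS] here is an ordinary section name, distinct from configparser's
    # built-in DEFAULT section, so it is read like any other section.
    config = configparser.ConfigParser()
    config.read('__hls_config__versal500__.ini')

    src = 'firrolled.c'                   # section name == HLS source file
    section = config[src]
    top = section['top']                  # top-level function for csynth
    tb_files = section.get('tb_files', src.replace('.c', '-top.c'))
    part = config['DEFAULTS']['part']
    period = config['DEFAULTS']['period']

    # Emit Tcl along the lines of the per-example .tcl files deleted in this PR.
    print(f'open_project {src}.proj -reset')
    print(f'add_file {src}')
    print(f'add_file -tb {tb_files}')
    print(f'set_top {top}')
    print('open_solution solution -reset')
    print(f'set_part {part}')
    print(f'create_clock -period {period}')
    print('csynth_design')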
+[insertion_sort_relaxed.cpp] +top = insertion_sort +tb_files = insertion_sort-top.cpp + +[matrix_vector_base_unroll_inner.c] +top = matrix_vector +tb_files = matrix_vector_base-top.c + +[matrix_vector_base.c] +top = matrix_vector + +[matrix_vector_optimized.c] +top = matrix_vector + +[matrix_vector_unroll_inner2.c] +top = matrix_vector + +[matrixmultiplication.cpp] +top = matrixmul + +[merge_sort_loop_merged.cpp] +top = merge_sort + +[merge_sort_restructured.cpp] +top = merge_sort + +[merge_sort.cpp] +top = merge_sort + +[partial_insertion_cell_sort.cpp] +top = merge_sort +tb_files = none + +[prefixsum_optimized.cpp] +top = prefixsum + +[prefixsum_unrolled.cpp] +top = prefixsum +tb_files = prefixsum_unrolled-top.cpp + +[prefixsumBO.cpp] +top = prefixsum + +[prefixsumHW.cpp] +top = prefixsum + +[prefixsumSW.cpp] +top = prefixsum + +[restructured_merge_sort.cpp] +top = merge_sort +tb_files = merge_sort-top.cpp + +[spmv_restructured.cpp] +top = spmv + +[spmv_unrolled.cpp] +top = spmv + +[spmv.cpp] +top = spmv + +[spmv2_interleaved.cpp] +top = spmv +tb_files = spmv2-top.cpp + +[spmv2_restructured.cpp] +top = spmv + +[spmv2.cpp] +top = spmv + +[video_2dfilter_boundary_condition.c] +top = video_2dfilter + +[video_simple.c] +top = video_filter_rescale diff --git a/examples/bitmap.c b/examples/bitmap.c index cc91fea..5daefc4 100644 --- a/examples/bitmap.c +++ b/examples/bitmap.c @@ -407,6 +407,7 @@ int ReadBMP(const char *filename, int rows, int cols, unsigned char *r, free(header); return 1; } + PrintBMPInfoHeader(info); if (ReadBMPFileHeader(fp, header) != 0) { if (header) free(header) @@ -581,3 +582,126 @@ int WriteBMP(const char *filename, int rows, int cols, unsigned char *r, CleanupBMP(bmp); return 0; } + +// Create a version of ReadBMP that skips over checks so it can simply +// be used to read a bitmap into an array of r,g,b values. +// This is useful for testing. BMP files written by OpenCV don't have +// most of its header info fields set, so the checks fail.
+int ReadBMPUnsafe(const char *filename, int rows, int cols, unsigned char *r, + unsigned char *g, unsigned char *b, unsigned int biSizeImage) { + BMPFileHeader *header = NULL; + BMPInfoHeader *info = NULL; + unsigned char *data = NULL; + FILE *fp = NULL; + int i, j; + + if (!(fp = OpenBMPInputFile(filename))) + return 1; + + header = (BMPFileHeader *)malloc(sizeof(BMPFileHeader)); + if (!header) { + printf("Failed to malloc BMPFileHeader in ReadBMPUnsafe\n"); + return 1; + } + + info = (BMPInfoHeader *)malloc(sizeof(BMPInfoHeader)); + if (!info) { + printf("Failed to malloc BMPInfoHeader in ReadBMPUnsafe\n"); + if (header) + free(header); + return 1; + } + + if (ReadBMPFileHeader(fp, header) != 0) { + if (header) + free(header); + if (info) + free(info); + return 1; + } + + if (ReadBMPInfoHeader(fp, info) != 0) { + if (header) + free(header); + if (info) + free(info); + return 1; + } + PrintBMPInfoHeader(info); + + // 24 bit bitmap has no RGBQUAD array, so don't try to read it + + data = (unsigned char *)malloc(biSizeImage); + if (!data) { + printf("Failed to malloc data storage in ReadBMPUnsafe\n"); + if (header) + free(header); + if (info) + free(info); + if (data) + free(data); + return 1; + } + if (ReadBMPData(fp, data, biSizeImage) != 0) { + if (header) + free(header); + if (info) + free(info); + if (data) + free(data); + return 1; + } + + if (rows != (int)info->biHeight) { + printf( + "Error: bitmap file Height (%i) differs from requested height (%i)\n", + info->biHeight, rows); + if (header) + free(header); + if (info) + free(info); + if (data) + free(data); + return 1; + } + + if (cols != (int)info->biWidth) { + printf( + "Error: bitmap file Width (%i) differs from requested width (%i)\n", + info->biWidth, cols); + if (header) + free(header); + if (info) + free(info); + if (data) + free(data); + return 1; + } + + unsigned char *datatmp = data; + + // copy the data into the r,g,b arrays (BMP is stored upside-down) + for (i = 0; i < rows; i++) { + for (j = 0; j < cols; j++) { + unsigned char rtmp, gtmp, btmp; + + btmp = *datatmp++; + gtmp = *datatmp++; + rtmp = *datatmp++; + + r[(rows - 1 - i) * cols + j] = rtmp; + g[(rows - 1 - i) * cols + j] = gtmp; + b[(rows - 1 - i) * cols + j] = btmp; + } + } + + CloseBMPFile(fp); + if (header) + free(header); + if (info) + free(info); + if (data) + free(data); + + return 0; +} \ No newline at end of file diff --git a/examples/bitmap.h b/examples/bitmap.h index 648435f..f32251d 100644 --- a/examples/bitmap.h +++ b/examples/bitmap.h @@ -49,3 +49,6 @@ int ReadBMP(const char *filename, int rows, int cols, unsigned char *r, unsigned char *g, unsigned char *b); int WriteBMP(const char *filename, int rows, int cols, unsigned char *r, unsigned char *g, unsigned char *b); + +int ReadBMPUnsafe(const char *filename, int rows, int cols, unsigned char *r, + unsigned char *g, unsigned char *b, unsigned int biSizeImage); diff --git a/examples/block_fir-top.c b/examples/block_fir-top.c index b7a1d31..d420bfa 100644 --- a/examples/block_fir-top.c +++ b/examples/block_fir-top.c @@ -6,19 +6,18 @@ const int SIZE = 256; int main() { int taps[] = {1,2,0,-3,0,4,-5,0,1,-2,0,-3,0,4,-5,0}; - int delay_line[NUM_TAPS]; - int input[256], output[256]; + int delay_line[NUM_TAPS] = {0}; + int input[256] = {0}, output[256] = {0}; for(int i = 0; i < SIZE; i++) { input[i] = i; + printf("input[%d] = %d output %d \n", i, input[i], output[i]); } block_fir(input, output, taps, delay_line); - for(int i = 0; i < SIZE; i++) { - printf("result = %d\n", output[i]); - if(output[i]
== -1452) { - return 0; - } else { - return 1; - } + printf("result = %d\n", output[SIZE-1]); + if(output[SIZE-1] == -1452) { + return 0; + } else { + return 1; + } } } diff --git a/examples/block_mm-top.cpp b/examples/block_mm-top.cpp new file mode 100644 index 0000000..9546cdc --- /dev/null +++ b/examples/block_mm-top.cpp @@ -0,0 +1,63 @@ +// Merged blockmatmul_test_init.cpp and blockmatmul_test.cpp files. + +#include "block_mm.h" +#include <iostream> +using namespace std; + +void matmatmul_sw(DTYPE A[SIZE][SIZE], DTYPE B[SIZE][SIZE], + DTYPE out[SIZE][SIZE]){ + DTYPE sum = 0; + for(int i = 0; i < SIZE; i++){ + for(int j = 0; j < SIZE; j++){ + sum = 0; + for(int k = 0; k < SIZE; k++){ + sum += A[i][k] * B[k][j]; + } + out[i][j] = sum; + } + } +} + +int main() { + int fail = 0; + hls::stream<blockvec> strm_matrix1("strm_matrix1"); + hls::stream<blockvec> strm_matrix2("strm_matrix2"); + blockvec strm_matrix1_element, strm_matrix2_element; + blockmat block_out; + DTYPE A[SIZE][SIZE], B[SIZE][SIZE]; + DTYPE matrix_swout[SIZE][SIZE], matrix_hwout[SIZE][SIZE]; + int row, col, it = 0; + + for(int it1 = 0; it1 < SIZE; it1 = it1 + BLOCK_SIZE) { + for(int it2 = 0; it2 < SIZE; it2 = it2 + BLOCK_SIZE) { + row = it1; //row + BLOCK_SIZE * factor_row; + col = it2; //col + BLOCK_SIZE * factor_col; + + for(int k = 0; k < SIZE; k++) { + for(int i = 0; i < BLOCK_SIZE; i++) { + if(it % (SIZE/BLOCK_SIZE) == 0) strm_matrix1_element.a[i] = A[row+i][k]; + strm_matrix2_element.a[i] = B[k][col+i]; + } + if(it % (SIZE/BLOCK_SIZE) == 0) strm_matrix1.write(strm_matrix1_element); + strm_matrix2.write(strm_matrix2_element); + } + blockmatmul(strm_matrix1, strm_matrix2, block_out, it); + + for(int i = 0; i < BLOCK_SIZE; i++) + for(int j = 0; j < BLOCK_SIZE; j++) + matrix_hwout[row+i][col+j] = block_out.out[i][j]; + it = it + 1; + } + } + + matmatmul_sw(A, B, matrix_swout); + + for(int i = 0; i < SIZE; i++) + for(int j = 0; j < SIZE; j++) + if(matrix_swout[i][j] != matrix_hwout[i][j]) fail = 1; + + if(fail == 1) cout << "failed" << endl; + else cout << "passed" << endl; + + return fail; +} diff --git a/examples/complex_fir-top.cpp b/examples/complex_fir-top.cpp new file mode 100644 --- /dev/null +++ b/examples/complex_fir-top.cpp @@ -0,0 +1,26 @@ +#include <stdio.h> +#include <iostream> + +#include "complex_fir.h" + +const int SIZE = 256; + +int main() { + // Define taps for the complex FIR filter. Separate real and imaginary parts.
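The taps defined next feed the four-real-FIR structure that the updated complex_fir.cpp builds out of fir(). Reading the signs off that code, the outputs are Iout = xI*hI + xQ*hQ and Qout = xQ*hI - xI*hQ, i.e. the real and imaginary parts of filtering against the conjugated tap set — an inference from the code, not a claim made anywhere in this diff:

\[
(x_I + j x_Q) \ast (h_I - j h_Q) = (x_I \ast h_I + x_Q \ast h_Q) + j\,(x_Q \ast h_I - x_I \ast h_Q)
\]

where \(\ast\) denotes convolution, \(x_I, x_Q\) are the Iin/Qin sample streams, and \(h_I, h_Q\) are the Itaps and Qtaps vectors.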
+ int Itaps[NUM_TAPS] = {1, 2, 0, -3, 0, 4, -5, 0, 1, -2, 0, -3, 0, 4, -5, 0}; + int Qtaps[NUM_TAPS] = {0, -1, 2, 0, -3, 0, 4, -5, 0, 1, -2, 0, 3, 0, -4, 5}; + + int Iout = 0, Qout = 0; + int expected_Iout = 6592, expected_Qout = -6558; + + for (int i = 0; i < SIZE; i++) { + complexFIR(i, -i, &Iout, &Qout, Itaps, Qtaps); + std::cout << "Result - Iout: " << Iout << ", Qout: " << Qout << std::endl; + } + + if (Iout == expected_Iout && Qout == expected_Qout) { + return 0; // Success + } else { + return 1; // Failure + } +} \ No newline at end of file diff --git a/examples/complex_fir.cpp b/examples/complex_fir.cpp index 7dfb4ea..f0481e7 100644 --- a/examples/complex_fir.cpp +++ b/examples/complex_fir.cpp @@ -1,17 +1,13 @@ -typedef int data_t; -void firI1(data_t *y, data_t x); -void firQ1(data_t *y, data_t x); -void firI2(data_t *y, data_t x); -void firQ2(data_t *y, data_t x); +#include "complex_fir.h" -void complexFIR(data_t Iin, data_t Qin, data_t *Iout, data_t *Qout) { +void complexFIR(data_t Iin, data_t Qin, data_t *Iout, data_t *Qout, coef_t Itaps[NUM_TAPS], coef_t Qtaps[NUM_TAPS]) { data_t IinIfir, QinQfir, QinIfir, IinQfir; - firI1(&IinIfir, Iin); - firQ1(&QinQfir, Qin); - firI2(&QinIfir, Qin); - firQ2(&IinQfir, Iin); + fir(Iin, &IinIfir, Itaps); // firI1 + fir(Qin, &QinQfir, Qtaps); // firQ1 + fir(Qin, &QinIfir, Itaps); // firI2 + fir(Iin, &IinQfir, Qtaps); // firQ2 *Iout = IinIfir + QinQfir; *Qout = QinIfir - IinQfir; diff --git a/examples/complex_fir.h b/examples/complex_fir.h new file mode 100644 index 0000000..66b1217 --- /dev/null +++ b/examples/complex_fir.h @@ -0,0 +1,16 @@ +#ifndef COMPLEX_FIR_H +#define COMPLEX_FIR_H + +typedef int data_t; +typedef int coef_t; + +#define NUM_TAPS 16 + +extern "C" { + #include "fir.h" +} + +// Declaration of the complexFIR function +void complexFIR(data_t Iin, data_t Qin, data_t *Iout, data_t *Qout, coef_t Itaps[NUM_TAPS], coef_t Qtaps[NUM_TAPS]); + +#endif // COMPLEX_FIR_H \ No newline at end of file diff --git a/examples/cordic-top.cpp b/examples/cordic-top.cpp index eeae9c5..7259a36 100644 --- a/examples/cordic-top.cpp +++ b/examples/cordic-top.cpp @@ -33,7 +33,7 @@ double abs_double(double var){ } int main(int argc, char **argv) { - + int fail=0; FILE *fp; @@ -57,6 +57,11 @@ int main(int argc, char **argv) zc = cos((double)radian); error_sin=(abs_double((double)s-zs)/zs)*100.0; error_cos=(abs_double((double)c-zc)/zc)*100.0; + + // fail test if error greater than 3.5% error + if (error_cos > 3.5 || error_sin > 3.5) { + fail=1; + } Total_Error_Sin=Total_Error_Sin+error_sin; Total_error_Cos=Total_error_Cos+error_cos; @@ -66,5 +71,14 @@ int main(int argc, char **argv) fclose(fp); printf ("Total_Error_Sin=%f, Total_error_Cos=%f, \n", Total_Error_Sin, Total_error_Cos); - return 0; + + if (fail==1) { + printf("Test failed\n"); + return 1; + } + else { + printf("Test passed\n"); + return 0; + } + } diff --git a/examples/cordic.cpp b/examples/cordic.cpp index 7e26f9e..385c982 100644 --- a/examples/cordic.cpp +++ b/examples/cordic.cpp @@ -10,6 +10,7 @@ void cordic(THETA_TYPE theta, COS_SIN_TYPE &s, COS_SIN_TYPE &c) // Set the initial vector that we will rotate // current_cos = I; current_sin = Q COS_SIN_TYPE current_cos = 0.60735; + //COS_SIN_TYPE current_cos = 0.607252935; COS_SIN_TYPE current_sin = 0.0; COS_SIN_TYPE factor = 1.0; diff --git a/examples/cordic.h b/examples/cordic.h index ab4d6d9..cdde421 100644 --- a/examples/cordic.h +++ b/examples/cordic.h @@ -4,8 +4,9 @@ #include "ap_fixed.h" typedef unsigned int UINTYPE_12; -typedef 
ap_fixed<12,2> THETA_TYPE; -typedef ap_fixed<12,2> COS_SIN_TYPE; +// increase from 12 to 16 bits to improve accuracy +typedef ap_fixed<16,2> THETA_TYPE; // 16 bits, 2 integer bits +typedef ap_fixed<16,2> COS_SIN_TYPE; const int NUM_ITERATIONS=32; const int NUM_DEGREE=90; diff --git a/examples/cordic.tcl b/examples/cordic.tcl deleted file mode 100644 index 3cfed53..0000000 --- a/examples/cordic.tcl +++ /dev/null @@ -1,17 +0,0 @@ -############################################################ -## This file is generated automatically by Vivado HLS. -## Please DO NOT edit it. -## Copyright (C) 2012 Xilinx Inc. All rights reserved. -############################################################ -open_project hls_cordic -set_top cordic_circ -add_files cordic.h -add_files cordic.cpp -add_files -tb out.gold.dat -add_files -tb cordic_test.cpp -open_solution "solution1" -set_part {xc7z020clg484-1} -create_clock -period 10 - -source "./directives.tcl" -csynth_design diff --git a/examples/cordic_fixed-top.cpp b/examples/cordic_fixed-top.cpp index eeae9c5..20b1943 100644 --- a/examples/cordic_fixed-top.cpp +++ b/examples/cordic_fixed-top.cpp @@ -33,7 +33,7 @@ double abs_double(double var){ } int main(int argc, char **argv) { - + int fail=0; FILE *fp; @@ -57,14 +57,29 @@ int main(int argc, char **argv) zc = cos((double)radian); error_sin=(abs_double((double)s-zs)/zs)*100.0; error_cos=(abs_double((double)c-zc)/zc)*100.0; + + // fail test if error greater than 3.5% error + if (error_cos > 3.5 || error_sin > 3.5) { + fail=1; + } + Total_Error_Sin=Total_Error_Sin+error_sin; Total_error_Cos=Total_error_Cos+error_cos; - + fprintf(fp, "degree=%d, radian=%f, cos=%f, sin=%f\n", i, (double)radian, (double)c, (double)s); } fclose(fp); printf ("Total_Error_Sin=%f, Total_error_Cos=%f, \n", Total_Error_Sin, Total_error_Cos); - return 0; + + if (fail==1) { + printf("Test failed\n"); + return 1; + } + else { + printf("Test passed\n"); + return 0; + } + } diff --git a/examples/fft_stages.cpp b/examples/fft_stages.cpp index 6e6eb57..62005aa 100644 --- a/examples/fft_stages.cpp +++ b/examples/fft_stages.cpp @@ -60,12 +60,17 @@ void fft_stage(int stage, DTYPE X_R[SIZE], DTYPE X_I[SIZE], k += step; } } - + void fft_streaming(DTYPE X_R[SIZE], DTYPE X_I[SIZE], DTYPE OUT_R[SIZE], DTYPE OUT_I[SIZE]) { - #pragma HLS dataflow - DTYPE Stage1_R[SIZE], Stage1_I[SIZE], +// #pragma HLS dataflow + DTYPE Stage_R[M][SIZE], Stage_I[M][SIZE]; + + bit_reverse(X_R, X_I, Stage_R[0], Stage_I[0]); - bit_reverse(X_R, X_I, Stage1_R, Stage1_I); - fft_stage(1, Stage1_R, Stage1_I, Stage2_R, Stage2_I); + stage_loop: + for (int stage = 1; stage < M; stage++) { // Do M-1 stages of butterflies + fft_stage(stage, Stage_R[stage-1], Stage_I[stage-1], Stage_R[stage], Stage_I[stage]); + } + fft_stage(M, Stage_R[M-1], Stage_I[M-1], OUT_R, OUT_I); } diff --git a/examples/fft_stages.tcl b/examples/fft_stages.tcl deleted file mode 100644 index e0cb6de..0000000 --- a/examples/fft_stages.tcl +++ /dev/null @@ -1,8 +0,0 @@ -open_project fft_stages.proj -reset -add_file fft_stages.cpp -add_file -tb fft_stages-top.cpp -set_top fft_streaming -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/fft_stages_loop.tcl b/examples/fft_stages_loop.tcl deleted file mode 100644 index 48c9a9c..0000000 --- a/examples/fft_stages_loop.tcl +++ /dev/null @@ -1,8 +0,0 @@ -open_project fft_stages_loop.proj -reset -add_file fft_stages_loop.cpp -add_file -tb fft_stages_loop-top.cpp -set_top fft_streaming -open_solution solution -reset 
-set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/fft_sw.tcl b/examples/fft_sw.tcl deleted file mode 100644 index 0c22e36..0000000 --- a/examples/fft_sw.tcl +++ /dev/null @@ -1,15 +0,0 @@ - -open_project fft_sw.proj -reset -add_file fft_sw.c -add_file -tb fft_sw-top.c - -set_top fft_sw - -open_solution solution -reset - -set_part xc7k70tfbg676-1 - -create_clock -period 5 - -csynth_design -cosim_design diff --git a/examples/fir-top.c b/examples/fir-top.c index 544512c..6080e57 100644 --- a/examples/fir-top.c +++ b/examples/fir-top.c @@ -1,8 +1,7 @@ #include "stdio.h" -#define NUM_TAPS 4 -void fir(int input, int *output, int taps[NUM_TAPS]); +#include "fir.h" const int SIZE = 256; diff --git a/examples/fir.c b/examples/fir.c index afa1b50..ffebe7c 100644 --- a/examples/fir.c +++ b/examples/fir.c @@ -1,4 +1,4 @@ -#define NUM_TAPS 4 +#include "fir.h" void fir(int input, int *output, int taps[NUM_TAPS]) { @@ -9,10 +9,10 @@ void fir(int input, int *output, int taps[NUM_TAPS]) delay_line[i] = delay_line[i - 1]; } delay_line[0] = input; - - for (int i = 0; i < NUM_TAPS; i++) { - result += delay_line[i] * taps[i]; + + for (int j = 0; j < NUM_TAPS; j++) { + result += delay_line[j] * taps[j]; } - + *output = result; } diff --git a/examples/fir.h b/examples/fir.h new file mode 100644 index 0000000..d57af15 --- /dev/null +++ b/examples/fir.h @@ -0,0 +1,8 @@ +#ifndef FIR_H +#define FIR_H + +#define NUM_TAPS 16 + +void fir(int input, int *output, int taps[NUM_TAPS]); + +#endif // FIR_H \ No newline at end of file diff --git a/examples/fir.tcl b/examples/fir.tcl deleted file mode 100644 index ac7b8c3..0000000 --- a/examples/fir.tcl +++ /dev/null @@ -1,15 +0,0 @@ - -open_project fir.proj -reset -add_file fir.c -add_file -tb fir-top.c - -set_top fir - -open_solution solution -reset - -set_part xc7k70tfbg676-1 - -create_clock -period 5 - -csynth_design - diff --git a/examples/fir11_initial-top.c b/examples/fir11_initial-top.c new file mode 100644 index 0000000..8587b60 --- /dev/null +++ b/examples/fir11_initial-top.c @@ -0,0 +1,23 @@ + +#include "stdio.h" + +typedef int coef_t; +typedef int data_t; +typedef int acc_t; + +const int SIZE = 256; + +void fir(data_t *y, data_t x); + +int main() { + int out = 0; + for (int i = 0; i < SIZE; i++) { + fir(&out, i); + } + printf("result = %d\n", out); + if (out == 262500) { + return 0; + } else { + return 1; + } +} diff --git a/examples/fir11_initial.c b/examples/fir11_initial.c index c7ea6bc..fc1e675 100644 --- a/examples/fir11_initial.c +++ b/examples/fir11_initial.c @@ -1,5 +1,5 @@ #define N 11 -#include "ap_int.h" + typedef int coef_t; typedef int data_t; diff --git a/examples/fir_cslowed.c b/examples/fir_cslowed.c index e223f25..3b40aaa 100644 --- a/examples/fir_cslowed.c +++ b/examples/fir_cslowed.c @@ -1,22 +1,24 @@ #define NUM_TAPS 4 +#define K 4 void block_fir(int input[256][K], int output[256][K], int taps[NUM_TAPS], int delay_line[NUM_TAPS][K]) { - int i, j, k for (j = 0; j < 256; j++) { + int i, j, k; + for (j = 0; j < 256; j++) { for (k = 0; k < K; k++) { int result[K] = {}; for (i = NUM_TAPS - 1; i > 0; i--) { #pragma HLS unroll delay_line[i][k] = delay_line[i - 1][k]; } - delay_line[0][k] = input; + delay_line[0][k] = input[j][k]; for (i = 0; i < NUM_TAPS; i++) { #pragma HLS pipeline result[k] += delay_line[i][k] * taps[i]; } - output[j][k] = result; + output[j][k] = result[k]; } } } diff --git a/examples/firrolled.c b/examples/firrolled.c index 2be54ec..c2d526d 100644 --- a/examples/firrolled.c +++ b/examples/firrolled.c @@ 
-10,7 +10,7 @@ void block_fir(int input[256], int output[256], int taps[NUM_TAPS], #pragma HLS unroll delay_line[i] = delay_line[i - 1]; } - delay_line[0] = input; + delay_line[0] = input[j]; for (i = 0; i < NUM_TAPS; i++) { #pragma HLS pipeline diff --git a/examples/histogramSW.cpp b/examples/histogramSW.cpp index f559fff..ba331ae 100644 --- a/examples/histogramSW.cpp +++ b/examples/histogramSW.cpp @@ -1,3 +1,5 @@ +#include "histogram.h" + void histogram(int in[INPUT_SIZE], int hist[VALUE_SIZE]) { int val; for(int i = 0; i < INPUT_SIZE; i++) { diff --git a/examples/histogram_dependence.tcl b/examples/histogram_dependence.tcl deleted file mode 100644 index 9aebf1b..0000000 --- a/examples/histogram_dependence.tcl +++ /dev/null @@ -1,10 +0,0 @@ -open_project histogram_dependence.proj -reset -add_file histogram_dependence.cpp -add_file -tb histogram_dependence-top.cpp -set_top histogram -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -config_schedule -verbose -csynth_design -cosim_design diff --git a/examples/histogram_opt1.tcl b/examples/histogram_opt1.tcl deleted file mode 100644 index 4dc7f1d..0000000 --- a/examples/histogram_opt1.tcl +++ /dev/null @@ -1,9 +0,0 @@ -open_project histogram_opt1.proj -reset -add_file histogram_opt1.cpp -add_file -tb histogram_opt1-top.cpp -set_top histogram -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design -cosim_design diff --git a/examples/histogram_parallel.tcl b/examples/histogram_parallel.tcl deleted file mode 100644 index 12bf265..0000000 --- a/examples/histogram_parallel.tcl +++ /dev/null @@ -1,9 +0,0 @@ -open_project histogram_parallel.proj -reset -add_file histogram_parallel.cpp -add_file histogram_parallel-top.cpp -set_top histogram -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design -cosim_design diff --git a/examples/huffman_encoding.tcl b/examples/huffman_encoding.tcl deleted file mode 100644 index d532a14..0000000 --- a/examples/huffman_encoding.tcl +++ /dev/null @@ -1,12 +0,0 @@ -open_project huffman_encoding.proj -reset -add_files {huffman_canonize_tree.cpp huffman_create_tree.cpp huffman_filter.cpp huffman_compute_bit_length.cpp huffman_encoding.cpp huffman_sort.cpp huffman_create_codeword.cpp huffman_truncate_tree.cpp} - -add_files -tb {huffman_encoding_test.cpp} -add_files -tb {huffman.random256.txt huffman.random256.golden} -set_top huffman_encoding -#set_top create_tree -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csim_design -compiler clang -csynth_design diff --git a/examples/insertion_cell_sort.tcl b/examples/insertion_cell_sort.tcl deleted file mode 100644 index 90ed17b..0000000 --- a/examples/insertion_cell_sort.tcl +++ /dev/null @@ -1,9 +0,0 @@ -open_project insertion_cell_sort.proj -reset -add_file insertion_cell_sort.cpp -add_files -tb "insertion_cell_sort-top.cpp insertion_sort.cpp" -set_top insertion_cell_sort -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csim_design -csynth_design diff --git a/examples/insertion_sort.tcl b/examples/insertion_sort.tcl deleted file mode 100644 index fa89c39..0000000 --- a/examples/insertion_sort.tcl +++ /dev/null @@ -1,8 +0,0 @@ -open_project insertion_sort.proj -reset -add_file insertion_sort.cpp -add_file -tb insertion_sort-top.cpp -set_top insertion_sort -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/insertion_sort_parallel.tcl b/examples/insertion_sort_parallel.tcl deleted file 
mode 100644 index 727e45a..0000000 --- a/examples/insertion_sort_parallel.tcl +++ /dev/null @@ -1,9 +0,0 @@ -open_project insertion_sort_parallel.proj -reset -add_file insertion_sort_parallel.cpp -add_file -tb insertion_sort_parallel-top.cpp -set_top insertion_sort_parallel -open_solution solution -reset -set_part virtex7 -config_schedule -verbose -create_clock -period 5 -csynth_design diff --git a/examples/insertion_sort_relaxed.cpp b/examples/insertion_sort_relaxed.cpp index a64c4b5..beeb440 100644 --- a/examples/insertion_sort_relaxed.cpp +++ b/examples/insertion_sort_relaxed.cpp @@ -1,6 +1,7 @@ #include "insertion_sort.h" void insertion_sort(DTYPE A[SIZE]) { L1: + int i, j; for(i = 1; i < SIZE; i++) { DTYPE item = A[i]; j = i; diff --git a/examples/matrix_vector_base.tcl b/examples/matrix_vector_base.tcl deleted file mode 100644 index 68bf942..0000000 --- a/examples/matrix_vector_base.tcl +++ /dev/null @@ -1,7 +0,0 @@ -open_project matrix_vector_base.proj -reset -add_file matrix_vector_base.c -set_top matrix_vector -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/matrix_vector_optimized.tcl b/examples/matrix_vector_optimized.tcl deleted file mode 100644 index fa2b95b..0000000 --- a/examples/matrix_vector_optimized.tcl +++ /dev/null @@ -1,7 +0,0 @@ -open_project matrix_vector_optimized.proj -reset -add_file matrix_vector_optimized.c -set_top matrix_vector -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/matrix_vector_unroll_inner2.tcl b/examples/matrix_vector_unroll_inner2.tcl deleted file mode 100644 index ce887fa..0000000 --- a/examples/matrix_vector_unroll_inner2.tcl +++ /dev/null @@ -1,7 +0,0 @@ -open_project matrix_vector_unroll_inner2.proj -reset -add_file matrix_vector_unroll_inner2.c -set_top matrix_vector -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/matrixmultiplication.tcl b/examples/matrixmultiplication.tcl deleted file mode 100644 index f67dccf..0000000 --- a/examples/matrixmultiplication.tcl +++ /dev/null @@ -1,7 +0,0 @@ -open_project matrixmultiplication.proj -reset -add_file matrixmultiplication.cpp -set_top matrixmul -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/merge_sort.h b/examples/merge_sort.h index 0d13f16..80344fd 100644 --- a/examples/merge_sort.h +++ b/examples/merge_sort.h @@ -1,3 +1,7 @@ +#include "hls_stream.h" + const static int SIZE = 16; typedef float DTYPE; extern void merge_sort(DTYPE A[SIZE]); +typedef int INDEX_TYPE; +const static int HALF_SIZE = SIZE/2; diff --git a/examples/merge_sort.tcl b/examples/merge_sort.tcl deleted file mode 100644 index 2b13df6..0000000 --- a/examples/merge_sort.tcl +++ /dev/null @@ -1,9 +0,0 @@ -open_project merge_sort.proj -reset -add_file merge_sort.cpp -add_file -tb merge_sort-top.cpp -set_top merge_sort -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -config_schedule -verbose -csynth_design diff --git a/examples/merge_sort_loop_merged.tcl b/examples/merge_sort_loop_merged.tcl deleted file mode 100644 index 091b8c9..0000000 --- a/examples/merge_sort_loop_merged.tcl +++ /dev/null @@ -1,8 +0,0 @@ -open_project merge_sort_loop_merged.proj -reset -add_file merge_sort_loop_merged.cpp -add_file -tb merge_sort_loop_merged-top.cpp -set_top merge_sort -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git 
a/examples/merge_sort_parallel.tcl b/examples/merge_sort_parallel.tcl deleted file mode 100644 index 4f3f142..0000000 --- a/examples/merge_sort_parallel.tcl +++ /dev/null @@ -1,9 +0,0 @@ -open_project merge_sort_parallel.proj -reset -add_file merge_sort_parallel.cpp -add_file -tb merge_sort_parallel-top.cpp -set_top merge_sort_parallel -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design -cosim_design -trace_level all diff --git a/examples/merge_sort_restructured.tcl b/examples/merge_sort_restructured.tcl deleted file mode 100644 index a84d647..0000000 --- a/examples/merge_sort_restructured.tcl +++ /dev/null @@ -1,8 +0,0 @@ -open_project merge_sort_restructured.proj -reset -add_file merge_sort_restructured.cpp -add_file -tb merge_sort_restructured-top.cpp -set_top merge_sort -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/prefixsumBO-top.cpp b/examples/prefixsumBO-top.cpp index e009294..3a5970b 100644 --- a/examples/prefixsumBO-top.cpp +++ b/examples/prefixsumBO-top.cpp @@ -5,6 +5,9 @@ extern void prefixsum(int in[SIZE], int out[SIZE]); int main() { int in[SIZE]; int out[SIZE]; + int fail = 0; + int expected_out = 8128; + for(int i = 0; i < SIZE; i++) { in[i] = i; } @@ -13,4 +16,18 @@ int main() { printf("%d ", out[i]); } printf("\n"); + + if (out[SIZE-1] != expected_out) { + fail = 1; + } else { + fail = 0; + } + + if(fail == 1) + printf("FAILED\n"); + else + printf("PASS\n"); + + return fail; + } diff --git a/examples/prefixsumBO.tcl b/examples/prefixsumBO.tcl deleted file mode 100644 index 3e9a601..0000000 --- a/examples/prefixsumBO.tcl +++ /dev/null @@ -1,7 +0,0 @@ -open_project prefixsumBO.proj -reset -add_file prefixsumBO.cpp -set_top prefixsum -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/prefixsumHW-top.cpp b/examples/prefixsumHW-top.cpp index e009294..49fdb4e 100644 --- a/examples/prefixsumHW-top.cpp +++ b/examples/prefixsumHW-top.cpp @@ -5,6 +5,9 @@ extern void prefixsum(int in[SIZE], int out[SIZE]); int main() { int in[SIZE]; int out[SIZE]; + int expected_out = 8128; + int fail = 0; + for(int i = 0; i < SIZE; i++) { in[i] = i; } @@ -13,4 +16,17 @@ int main() { printf("%d ", out[i]); } printf("\n"); + + if (out[SIZE-1] != expected_out) { + fail = 1; + } else { + fail = 0; + } + + if(fail == 1) + printf("FAILED\n"); + else + printf("PASS\n"); + + return fail; } diff --git a/examples/prefixsumHW.tcl b/examples/prefixsumHW.tcl deleted file mode 100644 index c5b3223..0000000 --- a/examples/prefixsumHW.tcl +++ /dev/null @@ -1,7 +0,0 @@ -open_project prefixsumHW.proj -reset -add_file prefixsumHW.cpp -set_top prefixsum -open_solution solution -reset -set_part virtex7 -create_clock -period 5 -csynth_design diff --git a/examples/prefixsumSW-top.cpp b/examples/prefixsumSW-top.cpp index e009294..a57a947 100644 --- a/examples/prefixsumSW-top.cpp +++ b/examples/prefixsumSW-top.cpp @@ -5,6 +5,9 @@ extern void prefixsum(int in[SIZE], int out[SIZE]); int main() { int in[SIZE]; int out[SIZE]; + int fail = 0; + int expected_out = 8128; + for(int i = 0; i < SIZE; i++) { in[i] = i; } @@ -13,4 +16,17 @@ int main() { printf("%d ", out[i]); } printf("\n"); + + if (out[SIZE-1] != expected_out) { + fail = 1; + } else { + fail = 0; + } + + if(fail == 1) + printf("FAILED\n"); + else + printf("PASS\n"); + + return fail; } diff --git a/examples/prefixsum_optimized-top.cpp b/examples/prefixsum_optimized-top.cpp index e009294..a57a947 100644 --- 
a/examples/prefixsum_optimized-top.cpp +++ b/examples/prefixsum_optimized-top.cpp @@ -5,6 +5,9 @@ extern void prefixsum(int in[SIZE], int out[SIZE]); int main() { int in[SIZE]; int out[SIZE]; + int fail = 0; + int expected_out = 8128; + for(int i = 0; i < SIZE; i++) { in[i] = i; } @@ -13,4 +16,17 @@ int main() { printf("%d ", out[i]); } printf("\n"); + + if (out[SIZE-1] != expected_out) { + fail = 1; + } else { + fail = 0; + } + + if(fail == 1) + printf("FAILED\n"); + else + printf("PASS\n"); + + return fail; } diff --git a/examples/prefixsum_unrolled-top.cpp b/examples/prefixsum_unrolled-top.cpp new file mode 100644 index 0000000..d4db872 --- /dev/null +++ b/examples/prefixsum_unrolled-top.cpp @@ -0,0 +1,32 @@ +#include "stdio.h" +#define SIZE 128 +extern void prefixsum(int in[SIZE], int out[SIZE]); + +int main() { + int in[SIZE]; + int out[SIZE]; + int fail = 0; + int expected_out = 8128; + + for(int i = 0; i < SIZE; i++) { + in[i] = i; + } + prefixsum(in, out); + for(int i = 0; i < SIZE; i++) { + printf("%d ", out[i]); + } + printf("\n"); + + if (out[SIZE-1] != expected_out) { + fail = 1; + } else { + fail = 0; + } + + if(fail == 1) + printf("FAILED\n"); + else + printf("PASS\n"); + + return fail; +} diff --git a/examples/prefixsum_unrolled.cpp b/examples/prefixsum_unrolled.cpp index 191c4a7..1fa7845 100644 --- a/examples/prefixsum_unrolled.cpp +++ b/examples/prefixsum_unrolled.cpp @@ -2,8 +2,8 @@ void prefixsum(int in[SIZE], int out[SIZE]) { #pragma HLS ARRAY_PARTITION variable=out cyclic factor=4 dim=1 #pragma HLS ARRAY_PARTITION variable=in cyclic factor=4 dim=1 - int A = in[0]; - for(int i=0; i < SIZE; i++) { + out[0] = in[0]; + for(int i=1; i < SIZE; i++) { #pragma HLS UNROLL factor=4 #pragma HLS PIPELINE out[i] = out[i-1] + in[i]; diff --git a/examples/restructured_merge_sort.cpp b/examples/restructured_merge_sort.cpp index 3631e72..c90e60f 100644 --- a/examples/restructured_merge_sort.cpp +++ b/examples/restructured_merge_sort.cpp @@ -1,4 +1,4 @@ -#include "MergeSort.h" +#include "merge_sort.h" //divide i into two streams. Every two elements in each of the streams is sorted. 
void split(DTYPE i[SIZE], hls::stream<DTYPE> &out1, hls::stream<DTYPE> &out2) diff --git a/examples/spmv.tcl b/examples/spmv.tcl deleted file mode 100644 index 70e388e..0000000 --- a/examples/spmv.tcl +++ /dev/null @@ -1,8 +0,0 @@ -open_project spmv.proj -reset -add_file spmv.cpp -set_top spmv -open_solution solution -reset -set_part virtex7 -config_compile -unsafe_math_optimizations -create_clock -period 5 -csynth_design diff --git a/examples/spmv2.cpp b/examples/spmv2.cpp index 954d5b8..b134170 100644 --- a/examples/spmv2.cpp +++ b/examples/spmv2.cpp @@ -5,7 +5,7 @@ const static int S = 8; void spmv(int rowPtr[NUM_ROWS+1], int columnIndex[NNZ], DTYPE values[NNZ], DTYPE y[SIZE], DTYPE x[SIZE]) { -#pragma HLS ARRAY_PARTITION variable=row_ptr cyclic factor=16 dim=1 +#pragma HLS ARRAY_PARTITION variable=rowPtr cyclic factor=NUM_ROWS+1 dim=1 int i; int cnt; @@ -27,8 +27,8 @@ int flag; y[row] = 0; ACC: for(i=0; i<S; i++) { diff --git a/examples/spmv2_interleaved.cpp b/examples/spmv2_interleaved.cpp --- a/examples/spmv2_interleaved.cpp +++ b/examples/spmv2_interleaved.cpp #include "spmv.h" #include <iostream> -const static int S = 8; +const static int S = SIZE; void spmv(int rowPtr[NUM_ROWS+1], int columnIndex[NNZ], DTYPE values[NNZ], DTYPE y[SIZE], DTYPE x[SIZE]) { //#pragma HLS ARRAY_PARTITION variable=rowPtr cyclic factor=2 dim=1 int currentrow[S]; - int LB; - int UB; + int LB[S]; + int UB[S]; int flag[S]; int row; diff --git a/examples/spmv2_restructured.cpp b/examples/spmv2_restructured.cpp index fad41ef..9442154 100644 --- a/examples/spmv2_restructured.cpp +++ b/examples/spmv2_restructured.cpp @@ -1,7 +1,7 @@ #include "spmv.h" #include <iostream> -const static int S = 8; +const static int S = SIZE; void spmv(int rowPtr[NUM_ROWS+1], int columnIndex[NNZ], DTYPE values[NNZ], DTYPE y[SIZE], DTYPE x[SIZE]) { diff --git a/examples/spmv_restructured.cpp b/examples/spmv_restructured.cpp index 3e41ff7..2097a4e 100644 --- a/examples/spmv_restructured.cpp +++ b/examples/spmv_restructured.cpp @@ -6,7 +6,7 @@ const static int S = 4; void spmv(int row_ptr[NUM_ROWS+1], int columnIndex[NNZ], DTYPE values[NNZ], DTYPE y[SIZE], DTYPE x[SIZE]) { -#pragma HLS ARRAY_PARTITION variable=row_ptr cyclic factor=16 dim=1 +#pragma HLS ARRAY_PARTITION variable=row_ptr cyclic factor=NUM_ROWS+1 dim=1 int i; int cnt[4]; @@ -31,6 +31,7 @@ int flag[4]; element_left[i] = UB[i] - LB[i]; element_done[i] = 0; flag[i] = 0; + y[i] = 0; // std::cout << "starting row " << i << " from " << LB[i] << " to " << UB[i] << "\n"; } diff --git a/examples/spmv_unrolled.tcl b/examples/spmv_unrolled.tcl deleted file mode 100644 index ed33bfa..0000000 --- a/examples/spmv_unrolled.tcl +++ /dev/null @@ -1,8 +0,0 @@ -open_project spmv_unrolled.proj -reset -add_file spmv_unrolled.cpp -set_top spmv -open_solution solution -reset -set_part virtex7 -config_compile -unsafe_math_optimizations -create_clock -period 5 -csynth_design diff --git a/examples/test_20x20_filtered_opencv.bmp b/examples/test_20x20_filtered_opencv.bmp new file mode 100644 index 0000000..cf6bb06 Binary files /dev/null and b/examples/test_20x20_filtered_opencv.bmp differ diff --git a/examples/test_20x20_filtered_opencv_border_replicate.bmp b/examples/test_20x20_filtered_opencv_border_replicate.bmp new file mode 100644 index 0000000..d1c8fb9 Binary files /dev/null and
b/examples/test_20x20_filtered_opencv_border_replicate.bmp differ diff --git a/examples/video_2dfilter-top.c b/examples/video_2dfilter-top.c index a41b63a..56e5038 100644 --- a/examples/video_2dfilter-top.c +++ b/examples/video_2dfilter-top.c @@ -13,7 +13,13 @@ unsigned char G_in[MAX_HEIGHT*MAX_WIDTH]; unsigned char B_in[MAX_HEIGHT*MAX_WIDTH]; unsigned char R_out[MAX_HEIGHT*MAX_WIDTH]; unsigned char G_out[MAX_HEIGHT*MAX_WIDTH]; -unsigned char B_out[MAX_HEIGHT*MAX_WIDTH]; +unsigned char B_out[MAX_HEIGHT*MAX_WIDTH]; + +// Golden Reference filtered image +rgb_pixel ref_pix[MAX_HEIGHT][MAX_WIDTH]; +unsigned char R_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char G_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char B_ref[MAX_HEIGHT*MAX_WIDTH]; void video_2dfilter(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH], rgb_pixel pixel_out[MAX_HEIGHT][MAX_WIDTH]); @@ -21,14 +27,21 @@ void video_2dfilter(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH], int main(int argc, char *argv[]) { int x, y; int width, height; + int width_ref, height_ref; char tempbuf[2000]; char tempbuf1[2000]; - if (argc != 3) { - printf("usage : %s <input.bmp> <output.bmp>\n", argv[0]); + if (argc != 4) { + printf("usage : %s <input.bmp> <golden_ref.bmp> <output.bmp>\n", argv[0]); exit(1); } + // + // Input File + // + + printf("Input File: %s\n", argv[1]); + ReadBMPSize(argv[1], &height, &width); printf("height = %d, width = %d\n", height, width); assert(height <= MAX_HEIGHT); @@ -52,6 +65,36 @@ int main(int argc, char *argv[]) { } } + // + // Input Golden Filtered Reference File + // + + printf("Input Golden Filtered Reference File: %s\n", argv[2]); + + ReadBMPSize(argv[2], &height_ref, &width_ref); + printf("height_ref = %d, width_ref = %d\n", height_ref, width_ref); + assert(height_ref <= MAX_HEIGHT); + assert(width_ref <= MAX_WIDTH); + + // Fill a frame with data + read_tmp = ReadBMPUnsafe(argv[2], height_ref, width_ref, + R_ref, G_ref, B_ref, 1200); + if (read_tmp != 0) { + printf("%s Loading image failed\n", tempbuf); + exit(1); + } + + // Copy Image to pixel data structure + for (x = 0; x < height_ref; x++) { + for (y = 0; y < width_ref; y++) { + ref_pix[x][y].R = R_ref[x*width+y]; + ref_pix[x][y].G = G_ref[x*width+y]; + ref_pix[x][y].B = B_ref[x*width+y]; + } + } + + //////////////////////////////////////////////////////////////// + // Hardware Function video_2dfilter(in_pix, out_pix); @@ -65,10 +108,34 @@ int main(int argc, char *argv[]) { } // Write the image back to disk - int write_tmp = WriteBMP(argv[2], height, width, + int write_tmp = WriteBMP(argv[3], height, width, R_out, G_out, B_out); if (write_tmp != 0) { - printf("WriteBMP %s failed\n", argv[2]); + printf("WriteBMP %s failed\n", argv[3]); exit(1); } + + // + // Compare filtered image with golden reference + // + int sum_absdiff = 0; + int sum_absdiff_expected = 450; + + for (x = 0; x < height; x++) { + for (y = 0; y < width; y++) { + sum_absdiff += abs(out_pix[x][y].R - ref_pix[x][y].R); + sum_absdiff += abs(out_pix[x][y].G - ref_pix[x][y].G); + sum_absdiff += abs(out_pix[x][y].B - ref_pix[x][y].B); + } + } + + printf("sum_absdiff = %d\n", sum_absdiff); + + if (sum_absdiff < sum_absdiff_expected) { + printf("PASS\n"); + return 0; // Success + } else { + printf("FAIL\n"); + return 1; // Failure + } } diff --git a/examples/video_2dfilter_boundary_condition-top.c b/examples/video_2dfilter_boundary_condition-top.c index a41b63a..41ed4cf 100644 --- a/examples/video_2dfilter_boundary_condition-top.c +++ b/examples/video_2dfilter_boundary_condition-top.c @@ -13,7 +13,13 @@ unsigned char G_in[MAX_HEIGHT*MAX_WIDTH]; unsigned char B_in[MAX_HEIGHT*MAX_WIDTH];
unsigned char R_out[MAX_HEIGHT*MAX_WIDTH]; unsigned char G_out[MAX_HEIGHT*MAX_WIDTH]; -unsigned char B_out[MAX_HEIGHT*MAX_WIDTH]; +unsigned char B_out[MAX_HEIGHT*MAX_WIDTH]; + +// Golden Reference filtered image +rgb_pixel ref_pix[MAX_HEIGHT][MAX_WIDTH]; +unsigned char R_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char G_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char B_ref[MAX_HEIGHT*MAX_WIDTH]; void video_2dfilter(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH], rgb_pixel pixel_out[MAX_HEIGHT][MAX_WIDTH]); @@ -21,14 +27,21 @@ void video_2dfilter(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH], int main(int argc, char *argv[]) { int x, y; int width, height; + int width_ref, height_ref; char tempbuf[2000]; char tempbuf1[2000]; - if (argc != 3) { - printf("usage : %s <input.bmp> <output.bmp>\n", argv[0]); + if (argc != 4) { + printf("usage : %s <input.bmp> <golden_ref.bmp> <output.bmp>\n", argv[0]); exit(1); } + // + // Input File + // + + printf("Input File: %s\n", argv[1]); + ReadBMPSize(argv[1], &height, &width); printf("height = %d, width = %d\n", height, width); assert(height <= MAX_HEIGHT); @@ -52,6 +65,36 @@ int main(int argc, char *argv[]) { } } + // + // Input Golden Filtered Reference File + // + + printf("Input Golden Filtered Reference File: %s\n", argv[2]); + + ReadBMPSize(argv[2], &height_ref, &width_ref); + printf("height_ref = %d, width_ref = %d\n", height_ref, width_ref); + assert(height_ref <= MAX_HEIGHT); + assert(width_ref <= MAX_WIDTH); + + // Fill a frame with data + read_tmp = ReadBMPUnsafe(argv[2], height_ref, width_ref, + R_ref, G_ref, B_ref, 1200); + if (read_tmp != 0) { + printf("%s Loading image failed\n", tempbuf); + exit(1); + } + + // Copy Image to pixel data structure + for (x = 0; x < height_ref; x++) { + for (y = 0; y < width_ref; y++) { + ref_pix[x][y].R = R_ref[x*width+y]; + ref_pix[x][y].G = G_ref[x*width+y]; + ref_pix[x][y].B = B_ref[x*width+y]; + } + } + + //////////////////////////////////////////////////////////////// + // Hardware Function video_2dfilter(in_pix, out_pix); @@ -65,10 +108,38 @@ int main(int argc, char *argv[]) { } // Write the image back to disk - int write_tmp = WriteBMP(argv[2], height, width, + int write_tmp = WriteBMP(argv[3], height, width, R_out, G_out, B_out); if (write_tmp != 0) { - printf("WriteBMP %s failed\n", argv[2]); + printf("WriteBMP %s failed\n", argv[3]); exit(1); } + + // + // Compare filtered image with golden reference + // + int sum_absdiff = 0; + int sum_absdiff_expected = 450; + + for (x = 0; x < height; x++) { + for (y = 0; y < width; y++) { + // skip the first and last row and column + if (x == 0 || x == height-1 || y == 0 || y == width-1) { + continue; + } + sum_absdiff += abs(out_pix[x][y].R - ref_pix[x][y].R); + sum_absdiff += abs(out_pix[x][y].G - ref_pix[x][y].G); + sum_absdiff += abs(out_pix[x][y].B - ref_pix[x][y].B); + } + } + + printf("sum_absdiff = %d\n", sum_absdiff); + + if (sum_absdiff < sum_absdiff_expected) { + printf("PASS\n"); + return 0; // Success + } else { + printf("FAIL\n"); + return 1; // Failure + } } diff --git a/examples/video_2dfilter_boundary_condition.c b/examples/video_2dfilter_boundary_condition.c index d972e40..076e463 100644 --- a/examples/video_2dfilter_boundary_condition.c +++ b/examples/video_2dfilter_boundary_condition.c @@ -62,9 +62,22 @@ void video_2dfilter(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH], } if(row >= MAX_HEIGHT-2 && col >= MAX_WIDTH-2) { printf("%d %d\n", row, col); - printf("%x %x %x\n", window[0][0], window[0][1], window[0][2]); - printf("%x %x %x\n", window[1][0], window[1][1], window[1][2]); - printf("%x %x %x\n", window[2][0], window[2][1],
window[2][2]); + // print the R, G, B components of the 3x3 window, one row per line + printf("r%x g%x b%x -- r%x g%x b%x -- r%x g%x b%x\n", + window[0][0].R, window[0][0].G, window[0][0].B, + window[0][1].R, window[0][1].G, window[0][1].B, + window[0][2].R, window[0][2].G, window[0][2].B); + printf("r%x g%x b%x -- r%x g%x b%x -- r%x g%x b%x\n", + window[1][0].R, window[1][0].G, window[1][0].B, + window[1][1].R, window[1][1].G, window[1][1].B, + window[1][2].R, window[1][2].G, window[1][2].B); + printf("r%x g%x b%x -- r%x g%x b%x -- r%x g%x b%x\n", + window[2][0].R, window[2][0].G, window[2][0].B, + window[2][1].R, window[2][1].G, window[2][1].B, + window[2][2].R, window[2][2].G, window[2][2].B); + } pixel_out[row][col] = filter(window); diff --git a/examples/video_2dfilter_linebuffer-top.c b/examples/video_2dfilter_linebuffer-top.c index 1b34a0e..3795a8f 100644 --- a/examples/video_2dfilter_linebuffer-top.c +++ b/examples/video_2dfilter_linebuffer-top.c @@ -15,20 +15,33 @@ unsigned char R_out[MAX_HEIGHT*MAX_WIDTH]; unsigned char G_out[MAX_HEIGHT*MAX_WIDTH]; unsigned char B_out[MAX_HEIGHT*MAX_WIDTH]; +// Golden Reference filtered image +rgb_pixel ref_pix[MAX_HEIGHT][MAX_WIDTH]; +unsigned char R_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char G_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char B_ref[MAX_HEIGHT*MAX_WIDTH]; + void video_2dfilter_linebuffer(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH], rgb_pixel pixel_out[MAX_HEIGHT][MAX_WIDTH]); int main(int argc, char *argv[]) { int x, y; int width, height; + int width_ref, height_ref; char tempbuf[2000]; char tempbuf1[2000]; - if (argc != 3) { - printf("usage : %s <input.bmp> <output.bmp>\n", argv[0]); + if (argc != 4) { + printf("usage : %s <input.bmp> <golden_ref.bmp> <output.bmp>\n", argv[0]); exit(1); } + // + // Input File + // + + printf("Input File: %s\n", argv[1]); + ReadBMPSize(argv[1], &height, &width); printf("height = %d, width = %d\n", height, width); assert(height <= MAX_HEIGHT); @@ -52,6 +65,36 @@ int main(int argc, char *argv[]) { } } + // + // Input Golden Filtered Reference File + // + + printf("Input Golden Filtered Reference File: %s\n", argv[2]); + + ReadBMPSize(argv[2], &height_ref, &width_ref); + printf("height_ref = %d, width_ref = %d\n", height_ref, width_ref); + assert(height_ref <= MAX_HEIGHT); + assert(width_ref <= MAX_WIDTH); + + // Fill a frame with data + read_tmp = ReadBMPUnsafe(argv[2], height_ref, width_ref, + R_ref, G_ref, B_ref, 1200); + if (read_tmp != 0) { + printf("%s Loading image failed\n", tempbuf); + exit(1); + } + + // Copy Image to pixel data structure + for (x = 0; x < height_ref; x++) { + for (y = 0; y < width_ref; y++) { + ref_pix[x][y].R = R_ref[x*width+y]; + ref_pix[x][y].G = G_ref[x*width+y]; + ref_pix[x][y].B = B_ref[x*width+y]; + } + } + + //////////////////////////////////////////////////////////////// + // Hardware Function video_2dfilter_linebuffer(in_pix, out_pix); @@ -65,10 +108,36 @@ int main(int argc, char *argv[]) { } // Write the image back to disk - int write_tmp = WriteBMP(argv[2], height, width, + int write_tmp = WriteBMP(argv[3], height, width, R_out, G_out, B_out); if (write_tmp != 0) { - printf("WriteBMP %s failed\n", argv[2]); + printf("WriteBMP %s failed\n", argv[3]); exit(1); } + + // + // Compare filtered image with golden reference + // + int sum_absdiff = 0; + int sum_absdiff_expected = 14000; + + for (x = 0; x < height; x++) { + for (y = 0; y < width; y++) { + sum_absdiff +=
abs(out_pix[x][y].R - ref_pix[x][y].R); + sum_absdiff += abs(out_pix[x][y].G - ref_pix[x][y].G); + sum_absdiff += abs(out_pix[x][y].B - ref_pix[x][y].B); + printf("R = %d, G = %d, B = %d\n", out_pix[x][y].R, out_pix[x][y].G, out_pix[x][y].B); + printf("R_ref = %d, G_ref = %d, B_ref = %d\n", ref_pix[x][y].R, ref_pix[x][y].G, ref_pix[x][y].B); + } + } + + printf("sum_absdiff = %d\n", sum_absdiff); + + if (sum_absdiff < sum_absdiff_expected) { + printf("PASS\n"); + return 0; // Success + } else { + printf("FAIL\n"); + return 1; // Failure + } } diff --git a/examples/video_2dfilter_linebuffer_extended-top.c b/examples/video_2dfilter_linebuffer_extended-top.c index c7afdae..687d56e 100644 --- a/examples/video_2dfilter_linebuffer_extended-top.c +++ b/examples/video_2dfilter_linebuffer_extended-top.c @@ -15,20 +15,33 @@ unsigned char R_out[MAX_HEIGHT*MAX_WIDTH]; unsigned char G_out[MAX_HEIGHT*MAX_WIDTH]; unsigned char B_out[MAX_HEIGHT*MAX_WIDTH]; +// Golden Reference filtered image +rgb_pixel ref_pix[MAX_HEIGHT][MAX_WIDTH]; +unsigned char R_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char G_ref[MAX_HEIGHT*MAX_WIDTH]; +unsigned char B_ref[MAX_HEIGHT*MAX_WIDTH]; + void video_2dfilter_linebuffer_extended(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH], rgb_pixel pixel_out[MAX_HEIGHT][MAX_WIDTH]); int main(int argc, char *argv[]) { int x, y; int width, height; + int width_ref, height_ref; char tempbuf[2000]; char tempbuf1[2000]; - if (argc != 3) { - printf("usage : %s <input.bmp> <output.bmp>\n", argv[0]); + if (argc != 4) { + printf("usage : %s <input.bmp> <golden_ref.bmp> <output.bmp>\n", argv[0]); exit(1); } + // + // Input File + // + + printf("Input File: %s\n", argv[1]); + ReadBMPSize(argv[1], &height, &width); printf("height = %d, width = %d\n", height, width); assert(height <= MAX_HEIGHT); @@ -52,6 +65,35 @@ int main(int argc, char *argv[]) { } } + // + // Input Golden Filtered Reference File + // + + printf("Input Golden Filtered Reference File: %s\n", argv[2]); + + ReadBMPSize(argv[2], &height_ref, &width_ref); + printf("height_ref = %d, width_ref = %d\n", height_ref, width_ref); + assert(height_ref <= MAX_HEIGHT); + assert(width_ref <= MAX_WIDTH); + + // Fill a frame with data + read_tmp = ReadBMPUnsafe(argv[2], height_ref, width_ref, + R_ref, G_ref, B_ref, 1200); + if (read_tmp != 0) { + printf("%s Loading image failed\n", tempbuf); + exit(1); + } + + // Copy Image to pixel data structure + for (x = 0; x < height_ref; x++) { + for (y = 0; y < width_ref; y++) { + ref_pix[x][y].R = R_ref[x*width+y]; + ref_pix[x][y].G = G_ref[x*width+y]; + ref_pix[x][y].B = B_ref[x*width+y]; + } + } + + //////////////////////////////////////////////////////////////// // Hardware Function video_2dfilter_linebuffer_extended(in_pix, out_pix); @@ -65,10 +107,36 @@ int main(int argc, char *argv[]) { } // Write the image back to disk - int write_tmp = WriteBMP(argv[2], height, width, + int write_tmp = WriteBMP(argv[3], height, width, R_out, G_out, B_out); if (write_tmp != 0) { - printf("WriteBMP %s failed\n", argv[2]); + printf("WriteBMP %s failed\n", argv[3]); exit(1); } + + // + // Compare filtered image with golden reference + // + int sum_absdiff = 0; + int sum_absdiff_expected = 500; + + for (x = 0; x < height; x++) { + for (y = 0; y < width; y++) { + sum_absdiff += abs(out_pix[x][y].R - ref_pix[x][y].R); + sum_absdiff += abs(out_pix[x][y].G - ref_pix[x][y].G); + sum_absdiff += abs(out_pix[x][y].B - ref_pix[x][y].B); + printf("R = %d, G = %d, B = %d\n", out_pix[x][y].R, out_pix[x][y].G, out_pix[x][y].B); + printf("R_ref = %d, G_ref = %d, B_ref = %d\n", ref_pix[x][y].R, ref_pix[x][y].G,
+             ref_pix[x][y].B);
+    }
+  }
+
+  printf("sum_absdiff = %d\n", sum_absdiff);
+
+  if (sum_absdiff < sum_absdiff_expected) {
+    printf("PASS\n");
+    return 0; // Success
+  } else {
+    printf("FAIL\n");
+    return 1; // Failure
+  }
 }
diff --git a/examples/video_2dfilter_linebuffer_extended_constant-top.c b/examples/video_2dfilter_linebuffer_extended_constant-top.c
index 3788793..6d25834 100644
--- a/examples/video_2dfilter_linebuffer_extended_constant-top.c
+++ b/examples/video_2dfilter_linebuffer_extended_constant-top.c
@@ -15,20 +15,33 @@
 unsigned char R_out[MAX_HEIGHT*MAX_WIDTH];
 unsigned char G_out[MAX_HEIGHT*MAX_WIDTH];
 unsigned char B_out[MAX_HEIGHT*MAX_WIDTH];

+// Golden reference filtered image
+rgb_pixel ref_pix[MAX_HEIGHT][MAX_WIDTH];
+unsigned char R_ref[MAX_HEIGHT*MAX_WIDTH];
+unsigned char G_ref[MAX_HEIGHT*MAX_WIDTH];
+unsigned char B_ref[MAX_HEIGHT*MAX_WIDTH];
+
 void video_2dfilter_linebuffer_extended_constant(rgb_pixel pixel_in[MAX_HEIGHT][MAX_WIDTH],
                                                  rgb_pixel pixel_out[MAX_HEIGHT][MAX_WIDTH]);

 int main(int argc, char *argv[]) {
   int x, y;
   int width, height;
+  int width_ref, height_ref;
   char tempbuf[2000];
   char tempbuf1[2000];
-  if (argc != 3) {
-    printf("usage : %s <input_file.bmp> <output_file.bmp>\n", argv[0]);
+  if (argc != 4) {
+    printf("usage : %s <input_file.bmp> <golden_ref.bmp> <output_file.bmp>\n", argv[0]);
     exit(1);
   }
+
+  //
+  // Input File
+  //
+
+  printf("Input File: %s\n", argv[1]);
+
   ReadBMPSize(argv[1], &height, &width);
   printf("height = %d, width = %d\n", height, width);
   assert(height <= MAX_HEIGHT);
@@ -52,6 +65,35 @@
     }
   }

+  //
+  // Input Golden Filtered Reference File
+  //
+
+  printf("Input Golden Filtered Reference File: %s\n", argv[2]);
+
+  ReadBMPSize(argv[2], &height_ref, &width_ref);
+  printf("height_ref = %d, width_ref = %d\n", height_ref, width_ref);
+  assert(height_ref <= MAX_HEIGHT);
+  assert(width_ref <= MAX_WIDTH);
+
+  // Fill a frame with data
+  read_tmp = ReadBMPUnsafe(argv[2], height_ref, width_ref,
+                           R_ref, G_ref, B_ref, 1200);
+  if (read_tmp != 0) {
+    printf("%s Loading image failed\n", argv[2]);
+    exit(1);
+  }
+
+  // Copy the image to the pixel data structure (indexed by the
+  // reference image's own width)
+  for (x = 0; x < height_ref; x++) {
+    for (y = 0; y < width_ref; y++) {
+      ref_pix[x][y].R = R_ref[x*width_ref+y];
+      ref_pix[x][y].G = G_ref[x*width_ref+y];
+      ref_pix[x][y].B = B_ref[x*width_ref+y];
+    }
+  }
+
+
 ////////////////////////////////////////////////////////////////
   // Hardware Function
   video_2dfilter_linebuffer_extended_constant(in_pix, out_pix);
@@ -65,10 +107,36 @@
   }

   // Write the image back to disk
-  int write_tmp = WriteBMP(argv[2], height, width,
+  int write_tmp = WriteBMP(argv[3], height, width,
                            R_out, G_out, B_out);
   if (write_tmp != 0) {
-    printf("WriteBMP %s failed\n", argv[2]);
+    printf("WriteBMP %s failed\n", argv[3]);
     exit(1);
   }
+
+  //
+  // Compare filtered image with golden reference
+  //
+  int sum_absdiff = 0;
+  int sum_absdiff_expected = 31000;
+
+  for (x = 0; x < height; x++) {
+    for (y = 0; y < width; y++) {
+      sum_absdiff += abs(out_pix[x][y].R - ref_pix[x][y].R);
+      sum_absdiff += abs(out_pix[x][y].G - ref_pix[x][y].G);
+      sum_absdiff += abs(out_pix[x][y].B - ref_pix[x][y].B);
+      printf("R = %d, G = %d, B = %d\n", out_pix[x][y].R, out_pix[x][y].G, out_pix[x][y].B);
+      printf("R_ref = %d, G_ref = %d, B_ref = %d\n", ref_pix[x][y].R, ref_pix[x][y].G, ref_pix[x][y].B);
+    }
+  }
+
+  printf("sum_absdiff = %d\n", sum_absdiff);
+
+  if (sum_absdiff < sum_absdiff_expected) {
+    printf("PASS\n");
+    return 0; // Success
+  } else {
+    printf("FAIL\n");
+    return 1; // Failure
+  }
 }
diff --git a/main.tex b/main.tex
index c8d5835..d36c289 100644
--- a/main.tex
+++ b/main.tex
@@ -164,8 +164,8 @@
 \newcommand{\sym}[1]{\texttt{#1}}
 \newcommand{\code}[1]{\texttt{#1}}
 \newcommand{\term}[1]{{\color{blue}\textit{#1}}\index{#1}} % FIXME: add an index/glossary here.
-\newcommand{\vivado}{Vivado\textsuperscript{\textregistered}}
-\newcommand{\VHLS}{Vivado\textsuperscript{\textregistered} HLS\xspace}
+\newcommand{\vivado}{Vitis\textsuperscript{\textregistered}}
+\newcommand{\VHLS}{Vitis\textsuperscript{\textregistered} HLS\xspace}
 \newcommand{\tabspace}{\vspace{1em}}
 \newcommand{\stevecomment}[1]{\pdfmargincomment[color=red,icon=Insert,author={steve}]}
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..f75adf6
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,14 @@
+Jinja2==3.1.4
+jinja2-cli==0.8.2
+kaleido==0.2.1
+MarkupSafe==2.1.5
+numpy==2.0.0
+packaging==24.1
+pandas==2.2.2
+plotly==5.22.0
+python-dateutil==2.9.0.post0
+pytz==2024.1
+PyYAML==6.0.1
+six==1.16.0
+tenacity==8.4.1
+tzdata==2024.1
diff --git a/scripts/Makefile b/scripts/Makefile
new file mode 100644
index 0000000..39ec53d
--- /dev/null
+++ b/scripts/Makefile
@@ -0,0 +1,37 @@
+# Define the compiler
+CXX=g++
+
+# Compiler flags
+CXXFLAGS=-std=c++11 `pkg-config --cflags opencv4`
+
+# Linker flags
+LDFLAGS=`pkg-config --libs opencv4`
+
+# Target executable names
+TARGET1=video_2dfilter_opencv
+TARGET2=compare_images_opencv
+
+# Source files for each target
+SOURCES1=video_2dfilter_opencv.cpp
+SOURCES2=compare_images_opencv.cpp
+
+# Object files for each target
+OBJECTS1=$(SOURCES1:.cpp=.o)
+OBJECTS2=$(SOURCES2:.cpp=.o)
+
+# Rule to create object files
+%.o: %.cpp
+	$(CXX) $(CXXFLAGS) -c $< -o $@
+
+# Default rule to make
+all: $(TARGET1) $(TARGET2)
+
+$(TARGET1): $(OBJECTS1)
+	$(CXX) $(OBJECTS1) -o $(TARGET1) $(LDFLAGS)
+
+$(TARGET2): $(OBJECTS2)
+	$(CXX) $(OBJECTS2) -o $(TARGET2) $(LDFLAGS)
+
+# Rule for cleaning files generated during compilation
+clean:
+	rm -f $(OBJECTS1) $(TARGET1) $(OBJECTS2) $(TARGET2)
\ No newline at end of file
diff --git a/scripts/__hls_csynth_report_template__.adoc.j2 b/scripts/__hls_csynth_report_template__.adoc.j2
new file mode 100644
index 0000000..a314c2a
--- /dev/null
+++ b/scripts/__hls_csynth_report_template__.adoc.j2
@@ -0,0 +1,68 @@
+{% set command = data.command[0] %}
+{% set script_name = command | regex('.*?(\S+\.tcl)') | basename %}
+{% set report_name = script_name | regex('(\S+)\.tcl') %}
+[[{{ report_name }}]]
+== {{ report_name }} Component Status Report
+
+This report provides a summary of the csynth compilation status of the `{{ script_name }} -- {{ data.top[0] | first_item("") }}` component.
+
+Compiler: `{{ data.hls_compiler[0] }} version {{ data.version[0] }}`
+
+Command: `{{ data.command[0] }}`
+
+Timestamp: `{{ data.user_host_timestamp[0] }}`
+
+Operating System: `{{ data.operating_system[0] }}`
+
+Device: `{{ data.device[0] }}`
+
+Part: `{{ data.part[0] | first_item("Unavailable") }}`
+
+Top: `{{ data.top[0] | first_item("Unavailable") }}`
+
+Flow Target: `{{ data.flow_target[0] | first_item("Unavailable") }}`
+
+Estimated Fmax: `{{ data.estimated_fmax|first_item("Unavailable") }}`
+
+Total Elapsed Time: `{{ data.total_elapsed_time|first_item("Unavailable") }}`
+
+{# print this error count line; defaults to 0 if the dictionary
+key errors_count does not exist #}
+Error count: `{{ data.errors_count|default("0") }}`
+
+{# print this warning count line; defaults to 0 if the dictionary
+key warnings_count does not exist #}
+Warning count: `{{ data.warnings_count|default("0") }}`
+
+{% if 'errors' in data %}
+.Error Messages
+|===
+{#- iterate over the error messages #}
+|Error Message
+{%- for error in data.errors %}
+|{{ error[0] }}
+{%- endfor %}
+|===
+{%- endif %}
+
+{% if 'warnings' in data %}
+.Warning Messages
+{#- iterate over the warning messages #}
+|===
+|Warning Message
+{%- for warning in data.warnings %}
+|{{ warning[0] }}
+{%- endfor %}
+|===
+{%- endif %}
+
+{% if 'pipelining_result' in data %}
+Pipelining Results:
+
+{#- iterate over the pipelining results #}
+{%- for result in data.pipelining_result %}
+{{ result[0] }}
+{%- endfor %}
+{%- endif %}
+
+<<<
diff --git a/scripts/__hls_log_data_extract__.yml b/scripts/__hls_log_data_extract__.yml
new file mode 100644
index 0000000..cd8c3a6
--- /dev/null
+++ b/scripts/__hls_log_data_extract__.yml
@@ -0,0 +1,123 @@
+hls_compiler_type:
+  vitis_hls:
+    pattern: "\\*\\*\\*\\*\\*\\* Vitis HLS - High-Level Synthesis"
+  vivado_hls:
+    pattern: "\\*\\*\\*\\*\\*\\* Vivado\\(TM\\) HLS - High-Level Synthesis"
+
+component_start:
+  vitis_hls:
+    pattern: "vitis-run --mode hls --tcl\\s+(\\S+)\\.tcl"
+  vivado_hls:
+    pattern: "vivado_hls -f\\s+(\\S+)\\.tcl \\|"
+
+component_end:
+  vitis_hls:
+    pattern: "INFO: \\[vitis-run 60-1662\\] Stopping dispatch session having empty uuid."
+  vivado_hls:
+    pattern: "INFO: \\[Common 17-206\\] Exiting vivado_hls at"
+
+component:
+
+  version:
+    vitis_hls:
+      pattern: "\\*\\*\\*\\*\\*\\* vitis-run (.*)"
+    vivado_hls:
+      pattern: "\\*\\*\\*\\*\\*\\* Vivado\\(TM\\) HLS - High-Level Synthesis from C, C\\+\\+ and SystemC (.*)"
+
+  command:
+    vitis_hls:
+      pattern: "INFO: \\[vitis-run 82-31\\] Launching vitis_hls: (.*)"
+    vivado_hls:
+      pattern: "(vivado_hls -f .*?) \\|"
+
+  hls_compiler:
+    vitis_hls:
+      pattern: "INFO: \\[vitis-run 82-31\\] Launching .*?: (\\S+)"
+    vivado_hls:
+      pattern: "INFO: \\[HLS 200-10\\] Running '.*/(vivado_hls)'"
+
+  user_host_timestamp:
+    pattern: "INFO: \\[HLS 200-10\\] (For user .*)"
+
+  operating_system:
+    pattern: "INFO: \\[HLS 200-10\\] On os (.*)"
+
+  top:
+    vitis_hls:
+      pattern: "INFO: \\[HLS 200-1510\\] Running: set_top (\\S+)"
+    vivado_hls:
+      pattern: "Running: set_top (\\S+)"
+
+  flow_target:
+    pattern: "INFO: \\[HLS 200-1505\\] Using default flow_target (\\S+)"
+
+  # type list will grab every instance of a pattern match for current_allocated_memory
+  # and store it in a list. Because there are 2 paren groups in the pattern, the
+  # generated list is a list of tuples.
+  current_allocated_memory:
+    type: list
+    pattern: "INFO: \\[HLS 200-111\\] (.*:) .*? current allocated memory: ([0-9.]+) MB."
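+
+  # Illustration (hypothetical log line, not from a real run): a line such as
+  #   INFO: [HLS 200-111] Elapsed time: 12.3 seconds; current allocated memory: 212.320 MB.
+  # would contribute the tuple ("Elapsed time:", "212.320") to the
+  # current_allocated_memory list.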
+
+  total_elapsed_time:
+    vitis_hls:
+      pattern: "INFO: \\[vitis-run 60-791\\] (Total elapsed time: .*s)"
+    vivado_hls:
+      pattern: "INFO: \\[HLS 200-112\\] (Total elapsed time: .*);"
+
+  estimated_fmax:
+    pattern: "INFO: \\[HLS 200-789\\] \\*\\*\\*\\* Estimated Fmax: ([0-9.]+ MHz)"
+
+  cp_achieved_post_synth:
+    pattern: "CP achieved post-synthesis:\\s+([0-9.]+)"
+
+  cp_achieved_post_impl:
+    pattern: "CP achieved post-implementation:\\s+([0-9.]+)"
+
+  csynth_run_stats:
+    pattern: "INFO: \\[HLS 200-2161\\] (Finished Command csynth_design.*)"
+
+  pipelining_result:
+    vitis_hls:
+      type: list
+      pattern: "(INFO: \\[HLS 200-1470\\] Pipelining result : .*)"
+    vivado_hls:
+      type: list
+      pattern:
+        - "\\[SCHED 204-61\\] (Pipelining loop .*)"
+        - "INFO: \\[SCHED 204-61\\] (Pipelining result : .*)"
+
+  part:
+    vitis_hls:
+      pattern: "INFO: \\[HLS 200-1510\\] Running: set_part (\\S+)"
+    vivado_hls:
+      pattern: "Running: set_part (\\S+)"
+
+  device:
+    vitis_hls:
+      pattern: "INFO: \\[HLS 200-1611\\] Setting target device to '(\\S+)'"
+    vivado_hls:
+      pattern: "INFO: \\[HLS 200-10\\] Setting target device to '(\\S+)'"
+
+  clock_period:
+    vitis_hls:
+      pattern: "INFO: \\[HLS 200-1510\\] Running: create_clock -period ([0-9.]+)"
+    vivado_hls:
+      pattern: "INFO: \\[SYN 201-201\\] Setting up clock 'default' with a period of (.*)ns\\."
+
+  # type counter will count the number of times the pattern is matched
+  warnings_count:
+    type: counter
+    pattern: "WARNING: .*"
+
+  warnings:
+    type: list
+    pattern: "(WARNING: .*)"
+
+  errors_count:
+    type: counter
+    pattern: "ERROR: .*"
+
+  errors:
+    type: list
+    pattern: "(ERROR: .*)"
diff --git a/scripts/compare_images_opencv.cpp b/scripts/compare_images_opencv.cpp
new file mode 100644
index 0000000..95fb1d8
--- /dev/null
+++ b/scripts/compare_images_opencv.cpp
@@ -0,0 +1,79 @@
+#include <opencv2/opencv.hpp>
+#include <iostream>
+
+using namespace cv;
+using namespace std;
+
+void zeroOutEdges(cv::Mat& image) {
+    // Assuming image is a valid cv::Mat object
+    int thickness = 1; // Thickness of the edge to be zeroed out
+
+    // Set top edge to zero
+    cv::rectangle(image, cv::Point(0, 0), cv::Point(image.cols, thickness - 1), cv::Scalar(0, 0, 0), cv::FILLED);
+
+    // Set bottom edge to zero
+    cv::rectangle(image, cv::Point(0, image.rows - thickness), cv::Point(image.cols, image.rows), cv::Scalar(0, 0, 0), cv::FILLED);
+
+    // Set left edge to zero
+    cv::rectangle(image, cv::Point(0, 0), cv::Point(thickness - 1, image.rows), cv::Scalar(0, 0, 0), cv::FILLED);
+
+    // Set right edge to zero
+    cv::rectangle(image, cv::Point(image.cols - thickness, 0), cv::Point(image.cols, image.rows), cv::Scalar(0, 0, 0), cv::FILLED);
+}
+
+// NOTE: There are small differences between the HLS Book 2d filter output
+// and the OpenCV 2d filter output. The differences are due to
+// quantization error.
+//
+int main(int argc, char** argv) {
+    if (argc != 3) {
+        cout << "Usage: " << argv[0] << " <image1> <image2>" << endl;
+        return -1;
+    }
+
+    // Load the two images
+    Mat img1 = imread(argv[1], IMREAD_GRAYSCALE);
+    Mat img2 = imread(argv[2], IMREAD_GRAYSCALE);
+
+    // Zero out the edges of the images.
+    // This is done to ignore any differences at the edges of the images.
+    // The OpenCV 2d filter can filter up to the edge of the image; the HLS
+    // Book example skips this edge case.
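+    //
+    // Illustration (hypothetical 4x4 grayscale image): zeroing a 1-pixel border
+    // turns
+    //   [ 9  9  9  9 ]        [ 0  0  0  0 ]
+    //   [ 9  5  5  9 ]  into  [ 0  5  5  0 ]
+    //   [ 9  5  5  9 ]        [ 0  5  5  0 ]
+    //   [ 9  9  9  9 ]        [ 0  0  0  0 ]
+    // so only interior pixels contribute to the comparison below.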
+    //
+    // Bail out before touching the pixel data if either image failed to load
+    if (img1.empty() || img2.empty()) {
+        cout << "Could not open or find one of the images" << endl;
+        return -1;
+    }
+
+    zeroOutEdges(img1);
+    zeroOutEdges(img2);
+
+    // send the images to std::out
+    cout << "Image 1:" << endl;
+    cout << img1 << endl;
+    cout << "Image 2:" << endl;
+    cout << img2 << endl;
+
+    // Ensure the images are of the same size and type
+    if (img1.size() != img2.size() || img1.type() != img2.type()) {
+        cout << "The images have different sizes or types and cannot be compared" << endl;
+        return -1;
+    }
+
+    // Calculate the absolute difference
+    Mat diff;
+    absdiff(img1, img2, diff);
+
+    // send the diff to std::out
+    cout << diff << endl;
+
+    // Sum the differences
+    //Scalar sumOfDifferences = sum(diff);
+    //double sumOfDifferences = sum(diff)[0] / (diff.rows * diff.cols * diff.channels());
+    double sumOfDifferences = sum(diff)[0];
+
+    // Output the sum of the absolute differences
+    cout << "Sum of absolute differences: " << sumOfDifferences << endl;
+
+    return 0;
+}
\ No newline at end of file
diff --git a/scripts/docToolchainConfig.groovy b/scripts/docToolchainConfig.groovy
new file mode 100644
index 0000000..bf07691
--- /dev/null
+++ b/scripts/docToolchainConfig.groovy
@@ -0,0 +1,544 @@
+outputPath = 'build'
+
+// If you want to use the Antora integration, set this to true.
+// This requires your project to be set up as an Antora module.
+// You can use the `downloadTemplate` task to bootstrap your project.
+//useAntoraIntegration = false
+
+// Path where docToolchain will search for the input files.
+// This path is appended to the docDir property specified in gradle.properties
+// or on the command line, and therefore must be relative to it.
+
+//inputPath = 'src/docs';
+inputPath = '.';
+
+// if you need to register custom Asciidoctor extensions, this is the right place.
+// configure the name and path to your extension, relative to the root of your project
+// (relative to dtcw). For example: 'src/ruby/asciidoctor-lists.rb'.
+// this is the same as the `requires`-list of the asciidoctor gradle plugin. The extensions will be
+// registered for the generateDeck, generateHtml, generatePdf and generateDocbook tasks only.
+// rubyExtensions = []
+
+// the pdfThemeDir config in this file is outdated.
+// please check http://doctoolchain.org/docToolchain/v2.0.x/020_tutorial/030_generateHTML.html#_pdf_style for further details
+// pdfThemeDir = './src/docs/pdfTheme'
+
+inputFiles = [
+    [file: 'output_vitis_hls.adoc', formats: ['pdf']],
+    //[file: 'arc42-template.adoc', formats: ['html','pdf']],
+    /** inputFiles **/
+]
+
+//folders in which asciidoc will find images.
+//these will be copied as resources to ./images +//folders are relative to inputPath +// Hint: If you define an imagepath in your documents like +// :imagesdir: ./whatsoever +// define it conditional like +// ifndef::imagesdir[:imagesdir: ./whatsoever] +// as doctoolchain defines :imagesdir: during generation +imageDirs = [ + /** imageDirs **/ +] + +// whether the build should fail when detecting broken image references +// if this config is set to true all images will be embedded +failOnMissingImages = true + +// these are directories (dirs) and files which Gradle monitors for a change +// in order to decide if the docs have to be re-build +taskInputsDirs = [ + "${inputPath}", +// "${inputPath}/src", +// "${inputPath}/images", + ] + +taskInputsFiles = [] + +//***************************************************************************************** + +// Configuration for customTasks +// create a new Task with ./dtcw createTask +customTasks = [ +/** customTasks **/ +] + + +//***************************************************************************************** + +//Configuration for microsite: generateSite + previewSite + +microsite = [:] + +// these properties will be set as jBake properties +// microsite.foo will be site.foo in jBake and can be used as config.site_foo in a template +// see https://jbake.org/docs/2.6.4/#configuration for how to configure jBake +// other properties listed here might be used in the jBake templates and thus are not +// documented in the jBake docs but hopefully in the template docs. +microsite.with { + /** start:microsite **/ + + // is your microsite deployed with a context path? + contextPath = '/' + // the folder of a site definition (theme) relative to the docDir+inputPath + //siteFolder = '../site' + + /** end:microsite **/ + + //project theme + //site folder relative to the docs folder + //see 'copyTheme' for more details + siteFolder = '../site' + + // the title of the microsite, displayed in the upper left corner + title = '##site-title##' + // the next items configure some links in the footer + // + // contact eMail + // example: mailto:bert@example.com + footerMail = '##footer-email##' + // + // twitter account url + footerTwitter = '##twitter-url##' + // + // Stackoverflow QA + footerSO = '##Stackoverflow-url##' + // + // Github Repository + footerGithub = '##Github-url##' + // + // Slack Channel + footerSlack = '##Slack-url##' + // + // Footer Text + // example: built with docToolchain and jBake
+    //          <br />theme: docsy
+    footerText = 'built with docToolchain and jBake<br />theme: docsy
' + // + // site title if no other title is given + title = 'docToolchain' + // + // the url to create an issue in github + // Example: https://github.com/docToolchain/docToolchain/issues/new + issueUrl = '##issue-url##' + // + // the base url for code files in github + // Example: https://github.com/doctoolchain/doctoolchain/edit/master/src/docs + branch = System.getenv("DTC_PROJECT_BRANCH")?:'-' + gitRepoUrl = '##git-repo-url##' + + // + // the location of the landing page + landingPage = 'landingpage.gsp' + // the menu of the microsite. A map of [code:'title'] entries to specify the order and title of the entries. + // the codes are autogenerated from the folder names or :jbake-menu: attribute entries from the .adoc file headers + // set a title to '-' in order to remove this menu entry. + menu = [:] + +//tag::additionalConverters[] +/** + +if you need support for additional markup converters, you can configure them here +you have three different types of script you can define: + +- groovy: just groovy code as string +- groovyFile: path to a groovy script +- bash: a bash command. It will receive the name of the file to be converted as first argument + +`groovy` and `groovyFile` will have access to the file and config object + +`dtcw:rstToHtml.py` is an internal script to convert restructuredText. +Needs `python3` and `docutils` installed. + +**/ + additionalConverters = [ + //'.one': [command: 'println "test"+file.canonicalPath', type: 'groovy'], + //'.two': [command: 'scripts/convert-md.groovy', type: 'groovyFile'], + //'.rst': [command: 'dtcw:rstToHtml.py', type: 'bash'], + ] +//end::additionalConverters[] + + // if you prefer another convention regarding the automatic generation + // of jBake headers, you can configure a script to modify them here + // the script has access to + // - file: the current object + // - sourceFolder: the copy of the docs-source on which the build operates + // default `/microsite/tmp/site/doc` + // - config: the config object (this file, but parsed) + // - headers: already parsed headers to be modified + /** + customConvention = """ + System.out.println file.canonicalPath + headers.title += " - from CustomConvention" + """.stripIndent() + **/ +} + +//***************************************************************************************** + +//Configuration for exportChangelog + +exportChangelog = [:] + +changelog.with { + + // Directory of which the exportChangelog task will export the changelog. + // It should be relative to the docDir directory provided in the + // gradle.properties file. + dir = 'src/docs' + + // Command used to fetch the list of changes. + // It should be a single command taking a directory as a parameter. + // You cannot use multiple commands with pipe between. + // This command will be executed in the directory specified by changelogDir + // it the environment inherited from the parent process. + // This command should produce asciidoc text directly. The exportChangelog + // task does not do any post-processing + // of the output of that command. 
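+    // For illustration, with the format string below each commit renders as
+    // three asciidoc table cells (hypothetical commit shown):
+    //   | 2024-06-25
+    //   | Jane Doe
+    //   | fix: compare filter output against golden reference
+    //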
+ // + // See also https://git-scm.com/docs/pretty-formats + cmd = 'git log --pretty=format:%x7c%x20%ad%x20%n%x7c%x20%an%x20%n%x7c%x20%s%x20%n --date=short' + +} + +//***************************************************************************************** + +//tag::confluenceConfig[] +//Configuration for publishToConfluence + +confluence = [:] + +/** +//tag::input-config[] + +*input* + +is an array of files to upload to Confluence with the ability +to configure a different parent page for each file. + +=== Attributes + +- `file`: absolute or relative path to the asciidoc generated html file to be exported +- `url`: absolute URL to an asciidoc generated html file to be exported +- `ancestorName` (optional): the name of the parent page in Confluence as string; + this attribute has priority over ancestorId, but if page with given name doesn't exist, + ancestorId will be used as a fallback +- `ancestorId` (optional): the id of the parent page in Confluence as string; leave this empty + if a new parent shall be created in the space + +The following four keys can also be used in the global section below + +- `spaceKey` (optional): page specific variable for the key of the confluence space to write to +- `subpagesForSections` (optional): The number of nested sub-pages to create. Default is '1'. + '0' means creating all on one page. + The following migration for removed configuration can be used. +** `allInOnePage = true` is the same as `subpagesForSections = 0` +** `allInOnePage = false && createSubpages = false` is the same as `subpagesForSections = 1` +** `allInOnePage = false && createSubpages = true` is the same as `subpagesForSections = 2` +- `pagePrefix` (optional): page specific variable, the pagePrefix will be a prefix for the page title and it's sub-pages + use this if you only have access to one confluence space but need to store several + pages with the same title - a different pagePrefix will make them unique +- `pageSuffix` (optional): same usage as prefix but appended to the title and it's subpages + +only 'file' or 'url' is allowed. If both are given, 'url' is ignored + +//end::input-config[] +**/ + +confluence.with { + input = [ + [ file: "build/html5/arc42-template-de.html" ], + ] + + // endpoint of the confluenceAPI (REST) to be used + // if you use Confluence Cloud, you can set this value to + // https://[yourServer] + // a working example is https://arc42-template.atlassian.net + // if you use Confluence Server, you may need to set a context: + // https://[yourServer]/[context] + // a working example is https://arc42-template.atlassian.net/wiki + api = 'https://[yourServer]/[context]' + + // requests per second for confluence API calls + rateLimit = 10 + + // if true API V1 only will be used. Default is true. + // useV1Api = true + + // if true, the new editor v2 will be used. Default is false. + // enforceNewEditor = false + + // Additionally, spaceKey, subpagesForSections, pagePrefix and pageSuffix can be globally defined here. 
The assignment in the input array has precedence + + // the key of the confluence space to write to + spaceKey = 'asciidoc' + + // if true, all pages will be created using the new editor v2 + // enforceNewEditor = false + + // variable to determine how many layers of sub pages should be created + subpagesForSections = 1 + + // the pagePrefix will be a prefix for each page title + // use this if you only have access to one confluence space but need to store several + // pages with the same title - a different pagePrefix will make them unique + pagePrefix = '' + + pageSuffix = '' + + // the comment used for the page version + pageVersionComment = '' + + /* + WARNING: It is strongly recommended to store credentials securely instead of commiting plain text values to your git repository!!! + + Tool expects credentials that belong to an account which has the right permissions to to create and edit confluence pages in the given space. + Credentials can be used in a form of: + - passed parameters when calling script (-PconfluenceUser=myUsername -PconfluencePass=myPassword) which can be fetched as a secrets on CI/CD or + - gradle variables set through gradle properties (uses the 'confluenceUser' and 'confluencePass' keys) + Often, same credentials are used for Jira & Confluence, in which case it is recommended to pass CLI parameters for both entities as + -Pusername=myUser -Ppassword=myPassword + */ + + //optional API-token to be added in case the credentials are needed for user and password exchange. + //apikey = "[API-token]" + + // HTML Content that will be included with every page published + // directly after the TOC. If left empty no additional content will be + // added + // extraPageContent = 'This is a generated page, do not edit! + extraPageContent = '' + + // enable or disable attachment uploads for local file references + enableAttachments = false + + // variable to limit number of pages retreived per REST-API call + pageLimit = 100 + + // default attachmentPrefix = attachment - All files to attach will require to be linked inside the document. + // attachmentPrefix = "attachment" + + + // Optional proxy configuration, only used to access Confluence + // schema supports http and https + // proxy = [host: 'my.proxy.com', port: 1234, schema: 'http'] + + // Optional: specify which Confluence OpenAPI Macro should be used to render OpenAPI definitions + // possible values: ["confluence-open-api", "open-api", true]. true is the same as "confluence-open-api" for backward compatibility + // useOpenapiMacro = "confluence-open-api" + + // for exportConfluence-Task + export = [ + srcDir: 'sample_data', + destDir: 'src/docs' + ] + +} +//end::confluenceConfig[] + +//***************************************************************************************** +//tag::exportEAConfig[] +//Configuration for the export script 'exportEA.vbs'. +// The following parameters can be used to change the default behaviour of 'exportEA'. +// All parameter are optionally. +// Parameter 'connection' allows to select a certain database connection by using the ConnectionString as used for +// directly connecting to the project database instead of looking for EAP/EAPX files inside and below the 'src' folder. +// Parameter 'packageFilter' is an array of package GUID's to be used for export. All images inside and in all packages below the package represented by its GUID are exported. +// A packageGUID, that is not found in the currently opened project, is silently skipped. 
+// PackageGUID of multiple project files can be mixed in case multiple projects have to be opened. + +exportEA.with { +// OPTIONAL: Set the connection to a certain project or comment it out to use all project files inside the src folder or its child folder. +// connection = "DBType=1;Connect=Provider=SQLOLEDB.1;Integrated Security=SSPI;Persist Security Info=False;Initial Catalog=[THE_DB_NAME_OF_THE_PROJECT];Data Source=[server_hosting_database.com];LazyLoad=1;" +// OPTIONAL: Add one or multiple packageGUIDs to be used for export. All packages are analysed, if no packageFilter is set. +// packageFilter = [ +// "{A237ECDE-5419-4d47-AECC-B836999E7AE0}", +// "{B73FA2FB-267D-4bcd-3D37-5014AD8806D6}" +// ] +// OPTIONAL: relative path to base 'docDir' to which the diagrams and notes are to be exported +// exportPath = "src/docs/" +// OPTIONAL: relative path to base 'docDir', in which Enterprise Architect project files are searched +// searchPath = "src/docs/" + +} +//end::exportEAConfig[] + +//tag::htmlSanityCheckConfig[] +htmlSanityCheck.with { + //sourceDir = "build/html5/site" + //checkingResultsDir = +} +//end::htmlSanityCheckConfig[] + +//tag::jiraConfig[] +// Configuration for Jira related tasks +jira = [:] + +jira.with { + + // endpoint of the JiraAPI (REST) to be used + api = 'https://your-jira-instance' + + // requests per second for jira API calls + rateLimit = 10 + + /* + WARNING: It is strongly recommended to store credentials securely instead of commiting plain text values to your git repository!!! + + Tool expects credentials that belong to an account which has the right permissions to read the JIRA issues for a given project. + Credentials can be used in a form of: + - passed parameters when calling script (-PjiraUser=myUsername -PjiraPass=myPassword) which can be fetched as a secrets on CI/CD or + - gradle variables set through gradle properties (uses the 'jiraUser' and 'jiraPass' keys) + Often, Jira & Confluence credentials are the same, in which case it is recommended to pass CLI parameters for both entities as + -Pusername=myUser -Ppassword=myPassword + */ + + // the key of the Jira project + project = 'PROJECTKEY' + + // the format of the received date time values to parse + dateTimeFormatParse = "yyyy-MM-dd'T'H:m:s.SSSz" // i.e. 2020-07-24'T'9:12:40.999 CEST + + // the format in which the date time should be saved to output + dateTimeFormatOutput = "dd.MM.yyyy HH:mm:ss z" // i.e. 24.07.2020 09:02:40 CEST + + // the label to restrict search to + label = + + // Legacy settings for Jira query. This setting is deprecated & support for it will soon be completely removed. Please use JiraRequests settings + //jql = "project='%jiraProject%' AND labels='%jiraLabel%' ORDER BY priority DESC, duedate ASC" + + // Base filename in which Jira query results should be stored + resultsFilename = 'JiraTicketsContent' + + saveAsciidoc = true // if true, asciidoc file will be created with *.adoc extension + saveExcel = true // if true, Excel file will be created with *.xlsx extension + + // Output folder for this task inside main outputPath + resultsFolder = 'JiraRequests' + + /* + List of requests to Jira API: + These are basically JQL expressions bundled with a filename in which results will be saved. + User can configure custom fields IDs and name those for column header, + i.e. 
customfield_10026:'Story Points' for Jira instance that has custom field with that name and will be saved in a coloumn named "Story Points" + */ + exports = [ + [ + filename:"File1_Done_issues", + jql:"project='%jiraProject%' AND status='Done' ORDER BY duedate ASC", + customfields: [customfield_10026:'Story Points'] + ], + [ + filename:'CurrentSprint', + jql:"project='%jiraProject%' AND Sprint in openSprints() ORDER BY priority DESC, duedate ASC", + customfields: [customfield_10026:'Story Points'] + ], + ] +} +//end::jiraConfig[] + +//tag::openApiConfig[] +// Configuration for OpenAPI related task +openApi = [:] + +// 'specFile' is the name of OpenAPI specification yaml file. Tool expects this file inside working dir (as a filename or relative path with filename) +// 'infoUrl' and 'infoEmail' are specification metadata about further info related to the API. By default this values would be filled by openapi-generator plugin placeholders +// + +openApi.with { + specFile = 'src/docs/petstore-v2.0.yaml' // i.e. 'petstore.yaml', 'src/doc/petstore.yaml' + infoUrl = 'https://my-api.company.com' + infoEmail = 'info@company.com' +} +//end::openApiConfig[] + +//tag::sprintChangelogConfig[] +// Sprint changelog configuration generate changelog lists based on tickets in sprints of an Jira instance. +// This feature requires at least Jira API & credentials to be properly set in Jira section of this configuration +sprintChangelog = [:] +sprintChangelog.with { + sprintState = 'closed' // it is possible to define multiple states, i.e. 'closed, active, future' + ticketStatus = "Done, Closed" // it is possible to define multiple ticket statuses, i.e. "Done, Closed, 'in Progress'" + + showAssignee = false + showTicketStatus = false + showTicketType = true + sprintBoardId = 12345 // Jira instance probably have multiple boards; here it can be defined which board should be used + + // Output folder for this task inside main outputPath + resultsFolder = 'Sprints' + + // if sprintName is not defined or sprint with that name isn't found, release notes will be created on for all sprints that match sprint state configuration + sprintName = 'PRJ Sprint 1' // if sprint with a given sprintName is found, release notes will be created just for that sprint + allSprintsFilename = 'Sprints_Changelogs' // Extension will be automatically added. +} +//end::sprintChangelogConfig[] + +//tag::collectIncludesConfig[] +collectIncludes = [:] + +collectIncludes.with { + + fileFilter = "adoc" // define which files are considered. default: "ad|adoc|asciidoc" + + minPrefixLength = "3" // define what minimum length the prefix. default: "3" + + maxPrefixLength = "3" // define what maximum length the prefix. default: "" + + separatorChar = "_" // define the allowed separators after prefix. default: "-_" + + cleanOutputFolder = true // should the output folder be emptied before generation? default: false + + excludeDirectories = [] // define additional directories that should not be traversed. + +} +//end::collectIncludesConfig[] + +//tag::structurizrConfig[] +// Configuration for Structurizr related tasks +structurizr = [:] + +structurizr.with { + + // Configure where `exportStructurizr` looks for the Structurizr model. + workspace = { + // The directory in which the Structurizr workspace file is located. + // path = 'src/docs/structurizr' + + // By default `exportStructurizr` looks for a file '${structurizr.workspace.path}/workspace.dsl'. + // You can customize this behavior with 'filename'. 
+        // Note that the workspace filename is provided without the '.dsl' extension.
+        // filename = 'workspace'
+    }
+
+    export = {
+        // Directory for the exported diagrams.
+        //
+        // WARNING: Do not put manually created/changed files into this directory.
+        // If a valid Structurizr workspace file is found the directory is deleted before the diagram files are generated.
+        // outputPath = 'src/docs/structurizr/diagrams'
+
+        // Format of the exported diagrams. Defaults to 'plantuml' if the parameter is not provided.
+        //
+        // The following formats are supported:
+        // - 'plantuml': the same as 'plantuml/structurizr'
+        // - 'plantuml/structurizr': exports views to PlantUML
+        // - 'plantuml/c4plantuml': exports views to PlantUML with https://github.com/plantuml-stdlib/C4-PlantUML
+        // format = 'plantuml'
+    }
+}
+//end::structurizrConfig[]
+
+//tag::openAIConfig[]
+// Configuration for openAI related tasks
+openAI = [:]
+
+openAI.with {
+    // This task requires a personal access token for openAI.
+    // Ensure to pass this token as a parameter when calling the task,
+    // using -PopenAI.token=xx-xxxxxxxxxxxxxx
+
+    //model = "text-davinci-003"
+    //maxToken = '500'
+    //temperature = '0.3'
+}
+//end::openAIConfig[]
diff --git a/scripts/gen_hls_component_log_data.py b/scripts/gen_hls_component_log_data.py
new file mode 100755
index 0000000..8ae3864
--- /dev/null
+++ b/scripts/gen_hls_component_log_data.py
@@ -0,0 +1,296 @@
+#!/usr/bin/env python3
+#
+# disable logging-fstring-interpolation / W1203
+# pylint: disable=W1203
+
+"""
+Scrape the HLS compiler log file and extract the data specified in the config yaml file to
+generate an HLS component compile status data yaml file.
+
+This script reads a YAML config file and an input file, and based on the provided
+configuration data, it splits the input file into sections and extracts the component
+compile data from each section. The extracted data is then written to an output file
+in YAML format.
+
+Usage:
+    python gen_hls_component_log_data.py -i <input_file> -o <output_file> -y <yaml_config_file>
+
+Arguments:
+    -i, --input_file: The input file containing the compiler log.
+    -o, --output_file: The output file to write the component compile log data in YAML format.
+    -y, --yaml_config_file: The YAML config file specifying the regex patterns for identifying
+                            the start and end of each section, as well as the patterns for
+                            extracting the required information.
+
+Example YAML Config File:
+    component_start:
+        pattern: "START_PATTERN"
+    component_end:
+        pattern: "END_PATTERN"
+    component:
+        KEY1:
+            pattern: "PATTERN1"
+        KEY2:
+            pattern: "PATTERN2"
+            type: "counter"
+        KEY3:
+            pattern: "PATTERN3"
+            type: "list"
+
+Note:
+    - The script assumes that the input file and the YAML config file exist.
+    - The script requires the PyYAML library to be installed.
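+
+Example invocation (the log file name is illustrative; the config file is the
+one shipped in scripts/):
+    python gen_hls_component_log_data.py -i vitis_hls.log \
+        -o hls_component_log_data.yml -y __hls_log_data_extract__.yml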
+ +""" + +import os +import sys +import argparse +import logging +import re +import yaml + +logger = logging.getLogger(__name__) + + +def read_yaml_config(yaml_config_file): + """ + Read YAML config file and load into a dictionary + """ + with open(yaml_config_file, "r", encoding="utf-8") as stream: + try: + data = yaml.safe_load(stream) + except yaml.YAMLError as exc: + print(exc) + sys.exit(1) + + logger.debug(f"YAML data: {data}") + return data + + +def read_input_file(input_file): + """ + Read input file and load into a dictionary + """ + + with open(input_file, "r", encoding="utf-8") as f: + data = f.read() + return data + + +def get_hls_compiler_type(input_data, yaml_data): + """ + Get HLS compiler type from input data + """ + hls_compiler_type = None + + # if input_data is a list of strings do nothing otherwise split into a list of strings + if not isinstance(input_data, list): + input_data = input_data.splitlines() + + for line in input_data: + if re.match(yaml_data["hls_compiler_type"]["vivado_hls"]["pattern"], line): + hls_compiler_type = "vivado_hls" + break + elif re.match(yaml_data["hls_compiler_type"]["vitis_hls"]["pattern"], line): + hls_compiler_type = "vitis_hls" + break + + return hls_compiler_type + + +# Given input file string and config data, split input file string to list of strings +# where each string starts with the component_start and component_end config patterns +def split_input_file_string_into_sections(input_data, config_data): + """ + Splits the input string into sections based on the provided configuration data. + + Args: + input_data (str): The input file string to be split into sections. + config_data (dict): The configuration data containing the regex patterns for identifying + the start and end of each section. + + Returns: + list: A list of sections, where each section is a string containing the lines between the + start and end patterns. + + """ + + sections = [] + + hls_compiler_type = get_hls_compiler_type(input_data, config_data) + + logger.debug( + f"component_start: {config_data['component_start'][hls_compiler_type]['pattern']}" + ) + logger.debug( + f"component_end: {config_data['component_end'][hls_compiler_type]['pattern']}" + ) + match_start = re.compile( + config_data["component_start"][hls_compiler_type]["pattern"] + ) + match_end = re.compile(config_data["component_end"][hls_compiler_type]["pattern"]) + inside_section = 0 + + for line in input_data.splitlines(): + logger.debug(f"inside_section: {inside_section} Line: {line}") + # if line contains component_start regex pattern, add it to sections + if match_start.search(line): + logger.debug(f"Matched start pattern: {line}") + match = match_start.search(line) + list_start = [match.group(1)] + sections.append(list_start) + sections[-1].append(line) + logger.debug(f"Sections: {sections}") + inside_section = 1 + elif match_end.search(line) and inside_section == 1: + sections[-1].append(line) + inside_section = 0 + elif inside_section == 1: + logger.debug(f"Inside section: {line}") + sections[-1].append(line) + + logger.debug(f"Sections: {sections}") + return sections + + +def extract_component_log_data(hls_compiler_type, log_file_string, config_data): + """ + Extract the component compile data from log file string. + + Args: + log_file_string (list): A list of strings containing the lines of the component compile log. + config_data (dict): The configuration data containing the regex patterns for identifying + the required information. 
+ + Returns: + dict: A dictionary containing the extracted information. + + """ + + component_data = {} + logger.debug(f"extract_component_log_data - log_file_string: {log_file_string}") + for line in log_file_string: + logger.debug(f"extract_component_log_data - Line: {line}") + for key, value in config_data["component"].items(): + # skip the special type keys for now + logger.debug(f"Key: {key} Value: {value}") + logger.debug(f"value keys: {value.keys()}") + logger.debug( + f"hls_compiler_type: {hls_compiler_type} Key: {key} Value: {value} line: {line}" + ) + if hls_compiler_type in value: + logger.debug( + f"hls_compiler_type {hls_compiler_type} found!! - Value: {value}" + ) + value = value[hls_compiler_type] + logger.debug(f"Value: {value}") + + # iterate over the pattern list (ensure it is a list) + for pattern in ( + value["pattern"] + if isinstance(value["pattern"], list) + else [value["pattern"]] + ): + logger.debug(f"Pattern: {pattern} - Line: {line}") + match = re.search(pattern, line) + if match: + logger.debug(f"Match: {match} -- line: {line}") + if "type" not in value: + # make a list of all of the group matches + component_data[key] = match.groups() + logger.debug(f"Component data {key}: {component_data[key]}") + else: + if value["type"] == "counter": + component_data[key] = component_data.get(key, 0) + 1 + elif value["type"] == "list": + component_data[key] = component_data.get(key, []) + component_data[key].append(match.groups()) + elif value["type"] == "dict": + component_data[key] = component_data.get(key, {}) + component_data[key][match.group(1)] = match.group(2) + + return component_data + + +def main(args): + """ + Main function for generating an HLS component compile report. + + Args: + args (Namespace): Command-line arguments. 
+
+    Returns:
+        None
+    """
+
+    yaml_data = read_yaml_config(args.yaml_config_file)
+    input_data = read_input_file(args.input_file)
+
+    hls_compiler_type = get_hls_compiler_type(input_data, yaml_data)
+
+    sections = split_input_file_string_into_sections(input_data, yaml_data)
+
+    components_data = {}
+
+    for section in sections:
+        component_data = extract_component_log_data(
+            hls_compiler_type, section, yaml_data
+        )
+        logger.debug(f"Section: {section} Component data: {component_data}")
+        component_data = {section[0]: component_data}
+
+        logger.debug(f"Component data: {component_data}")
+
+        components_data = components_data | component_data
+
+    logger.debug(f"Components data: {components_data}")
+
+    # write the components_data dictionary out to a yaml file
+    with open(args.output_file, "w", encoding="utf-8") as f:
+        yaml.dump(components_data, f)
+
+    logger.info(f"Generated HLS component compile yaml report: {args.output_file}")
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Generate HLS component compile status"
+    )
+    # add required argument with switches -i and --input_file to accept input file
+    parser.add_argument("-i", "--input_file", help="Input file", required=True)
+    # add required argument with switches -o and --output_file to accept output file
+    parser.add_argument("-o", "--output_file", help="Output file", required=True)
+    # add required argument with switches -y and --yaml_config_file to accept yaml config file
+    parser.add_argument(
+        "-y", "--yaml_config_file", help="YAML config file", required=True
+    )
+
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        help="Increase output verbosity",
+        action="store_const",
+        const=logging.DEBUG,
+        default=logging.INFO,
+    )
+
+    args = parser.parse_args()
+
+    # --verbose stores a logging level, so use it directly
+    logging.basicConfig(level=args.verbose)
+
+    # verify input file exists
+    if not os.path.exists(args.input_file):
+        print(f"Input file {args.input_file} does not exist")
+        sys.exit(1)
+    # verify yaml config file exists
+    if not os.path.exists(args.yaml_config_file):
+        print(f"YAML config file {args.yaml_config_file} does not exist")
+        sys.exit(1)
+
+    main(args)
diff --git a/scripts/gen_hls_csynth_script.py b/scripts/gen_hls_csynth_script.py
new file mode 100755
index 0000000..caa48db
--- /dev/null
+++ b/scripts/gen_hls_csynth_script.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python3
+#
+# disable logging-fstring-interpolation / W1203
+# pylint: disable=W1203
+#
+
+"""
+This script generates a Vivado HLS csynth script based on the provided input file and configuration.
+
+Usage:
+    python gen_vivado_hls_csynth_script.py -i <input_file>
+                                           -o <output_file>
+                                           [-v] [-c <config_file>] [-id <id_tag>]
+
+Arguments:
+    -i, --input: Input file name (required)
+    -o, --output: Output file name (required)
+    -v, --verbose: Increase output verbosity (optional)
+    -c, --config: Config file name (default: __hls_config__.ini)
+    -id, --id: ID tag when accessing multiple versions (optional)
+
+The script reads the provided input file and configuration file, and generates a Vivado
+HLS csynth script based on the specified parameters. The csynth script is then written
+to the output file.
+
+The input file should be a C/C++ file containing void functions. The configuration
+file should be in INI format, and should contain the modified values for the csynth
+script parameters. The csynth script parameters can be overridden for specific sections.
+Different configurations for the same top-level HLS C++ file can be selected
+using the ID tag.
For example, + +[huffman_encoding.cpp] +top = huffman_encoding +files = huffman_encoding.cpp \ + huffman_canonize_tree.cpp \ + huffman_compute_bit_length.cpp \ + huffman_create_codeword.cpp \ + huffman_create_tree.cpp \ + huffman_filter.cpp \ + huffman_sort.cpp +tb_files = huffman_encoding_test.cpp + +[huffman_encoding.cpp:test2] +top = huffman_encoding.cpp:top +files = huffman_encoding.cpp:files +tb_files = huffman_encoding_test2.cpp + +You can select huffman_coding.cpp:test2 with "-i huffman_encoding.cpp -id test2" + +Note also: You can use the colon separator to reference another value in the config file. + +Example: + python gen_vivado_hls_csynth_script.py -i input_file.cpp + -o output_file.tcl -v -c config.ini -id version1 +""" + +from pathlib import Path +import sys +import argparse +import logging +import re +import configparser +import textwrap + +logger = logging.getLogger(__name__) + + +def read_config_file(config_file): + """Read the config file and return the config object""" + config = configparser.ConfigParser() + config.read(config_file) + return config + + +def write_csynth_script( + input_file, + output_file, + template_string, + params, +): + "Write the csynth script to the output file" + + file_root = input_file.split(".")[0] + + # get the csynth script parameters + top = params["top"] + part = params["part"] + period = params["period"] + files = params["files"] + tb_files = params["tb_files"] + + # if tb_files in ini file is defined tb_files = none then don't + # add tb_files in the csynth script + if tb_files == "none" or tb_files == "{}": + tb_add_files = "" + else: + tb_add_files = f"add_files -tb [list {tb_files}]" + + with open(output_file, "w", encoding="utf-8") as file: + script_text = f"""\ + open_project {file_root}.proj -reset + add_files [list {files}] + {tb_add_files} + set_top {top} + puts "Running: set_top {top}" + open_solution solution -reset + set_part {part} + puts "Running: set_part {part}" + create_clock -period {period} + csynth_design + exit""" + logger.debug(f"Script text: {textwrap.dedent(script_text)}") + file.write(textwrap.dedent(script_text)) + + logging.debug(f"csynth script written to {output_file}") + + return + + +def get_csynth_script_parameters(input_file, config, id_tag=None): + """ + Get the csynth script parameters from the config file. + + Args: + input_file (str): The path to the input file. + config (ConfigParser): The configuration object containing the script parameters. + id_tag (str, optional): An optional tag to identify the specific section in the config file. + Defaults to None. + + Returns: + dict: A dictionary containing the csynth script parameters. 
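+
+    Example (illustrative, assuming only the [DEFAULTS] section applies): for
+    input_file "fir.cpp" the defaults yield
+
+        {"top": "fir", "part": <DEFAULTS part>, "period": <DEFAULTS period>,
+         "files": "fir.cpp", "tb_files": "fir-top.cpp"}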
+ + """ + # get the basename of the input file + file_basename = Path(input_file).name + logger.debug(f"basename Input file: {file_basename}") + + # get the base name of the input file without extension + file_rootname = input_file.split(".")[0] + logger.debug(f"Rootname Input file: {file_rootname}") + + # get the extension of the input file + file_suffix = input_file.split(".")[1] + logger.debug(f"Suffix Input file: {file_suffix}") + + config_dict = {section: dict(config[section]) for section in config.sections()} + logger.debug(f"config: {config_dict}") + # set the parameters for the csynth script using default values + parameters = { + "top": file_rootname, + "part": config["DEFAULTS"]["part"], + "period": config["DEFAULTS"]["period"], + "files": file_basename, + "tb_files": f"{file_rootname}-top.{file_suffix}", + } + + # update each parameter if it is defined in the config file and + # its option exists + if id_tag is not None: + file_basename_id = f"{file_basename}:{id_tag}" + else: + file_basename_id = file_basename + + # handle the case where a value in the config file references another + # value using a colon separator. + if config.has_section(file_basename_id): + for key in parameters: + if config.has_option(file_basename_id, key): + # if value contains colon found with regex then use the referenced value + if re.search(r":", config[file_basename_id][key]): + referenced_value = config[file_basename_id][key].split(":") + parameters[key] = config[referenced_value[0]][referenced_value[1]] + else: + parameters[key] = config[file_basename][key] + + return parameters + + +def get_tcl_template_string(template_file): + """Read the template file and return the template string""" + + # if template_file is None, return default template string else read from a file + if template_file is None: + template_string = """ + open_project {file_root}.proj -reset + add_files {files} + {tb_add_files} + set_top {top} + open_solution solution -reset + set_part {part} + create_clock -period {period} + csynth_design + """ + template_string = textwrap.dedent(template_string) + else: + with open(template_file, "r", encoding="utf-8") as file: + template_string = file.read() + + return template_string + + +def main(input_file, output_file, config_file, id_tag=None, template_file=None): + """Main function to process input file and write to output file""" + + logging.debug( + f"Processing input file {input_file} and writing to output file {output_file}" + ) + + # assert that the input file exists + input_file_path = Path(input_file) + assert input_file_path.exists(), f"Input file {input_file} not found" + + config_file_path = Path(config_file) + assert config_file_path.exists(), f"Config file {config_file} not found" + + # if template_file is not None, verify it exists + if template_file is not None: + template_file_path = Path(template_file) + assert template_file_path.exists(), f"Template file {template_file} not found" + + template_string = get_tcl_template_string(template_file) + + # read the config file + config = read_config_file(config_file) + + # get the csynth script parameters + params = get_csynth_script_parameters(input_file, config, id_tag) + + write_csynth_script( + input_file, + output_file, + template_string, + params, + ) + + output_file = Path(output_file) + assert output_file.exists(), f"Output file {output_file} not found" + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Process some files.") + parser.add_argument("-i", "--input", help="Input file name", 
required=True) + parser.add_argument("-o", "--output", help="Output file name", required=True) + # add optional verbose flag for logging with default value INFO and set to DEBUG if enabled. + parser.add_argument( + "-v", + "--verbose", + help="Increase output verbosity", + action="store_const", + const=logging.DEBUG, + default=logging.INFO, + ) + # add optional config ini file + parser.add_argument( + "-c", + "--config", + help="Config file name", + default="__hls_config__.ini", + ) + # add optional argument to specify id + parser.add_argument( + "-id", "--id", help="ID tag when accessing multiple versions", required=False + ) + # add optional argument to specify a tcl template file + parser.add_argument("-t", "--template", help="TCL template file", required=False) + + args = parser.parse_args() + + logging.basicConfig(level=args.verbose) + + # add info log message of the python command line + logging.info(f"Running: {' '.join(sys.argv)}") + + main(args.input, args.output, args.config, args.id, args.template) diff --git a/scripts/gen_hls_runner_script.py b/scripts/gen_hls_runner_script.py new file mode 100755 index 0000000..6b37ee4 --- /dev/null +++ b/scripts/gen_hls_runner_script.py @@ -0,0 +1,258 @@ +#!/usr/bin/env python3 +# +# disable logging-fstring-interpolation / W1203 +# pylint: disable=W1203 +# + +""" +This script generates a HLS flow Tcl script based on the provided input file and configuration. + +Usage: + python gen_hls_runner_script.py -i + -o + [-v] [-c ] [-id ] + +Arguments: + -i, --input: Input file name (required) + -o, --output: Output file name (required) + -v, --verbose: Increase output verbosity (optional) + -c, --config: Config file name (default: __hls_config__.ini) + -id, --id: ID tag when accessing multiple versions (optional) + -imp, --impl: include implementation run (optional) + +The script reads the provided input file and configuration file, and generates an +HLS Tcl script based on the specified parameters. The HLS Tcl script is then written +to the output file. The HLS flow can optionally include running the implementation as well. + +The input file should be a C/C++ file containing void functions. The configuration +file should be in INI format, and should contain the modified values for the HLS Tcl +script parameters. The HLS Tcl script parameters can be overridden for specific sections. +Different configurations for same top-level HLS C++ can be selected +using the ID tag. For example, + +[huffman_encoding.cpp] +top = huffman_encoding +files = huffman_encoding.cpp \ + huffman_canonize_tree.cpp \ + huffman_compute_bit_length.cpp \ + huffman_create_codeword.cpp \ + huffman_create_tree.cpp \ + huffman_filter.cpp \ + huffman_sort.cpp +tb_files = huffman_encoding_test.cpp + +[huffman_encoding.cpp:test2] +top = huffman_encoding.cpp:top +files = huffman_encoding.cpp:files +tb_files = huffman_encoding_test2.cpp + +You can select huffman_coding.cpp:test2 with "-i huffman_encoding.cpp -id test2" + +Note also: You can use the colon separator to reference another value in the config file. 
+ +Example: + python gen_vivado_hls_runner_script.py -i input_file.cpp + -o output_file.tcl -v -c config.ini -id version1 +""" + +from pathlib import Path +import sys +import argparse +import logging +import re +import configparser +import textwrap + +logger = logging.getLogger(__name__) + + +def read_config_file(config_file): + """Read the config file and return the config object""" + config = configparser.ConfigParser() + config.read(config_file) + return config + + +def write_script( + input_file, + output_file, + impl, + params, +): + "Write the HLS script to the output file" + + file_root = input_file.split(".")[0] + + # get the HLS Tcl script parameters + top = params["top"] + part = params["part"] + period = params["period"] + files = params["files"] + tb_files = params["tb_files"] + + # if tb_files in ini file is defined tb_files = none then don't + # add tb_files in the Tcl script + if tb_files in ("none", "{}"): + tb_add_files = "" + else: + tb_add_files = f"add_files -tb [list {tb_files}]" + + # if impl is true then add the implementation flow to the script + if impl: + impl_flow = f"""\ + export_design -description "{file_root}" -display_name "{file_root}" -flow impl -format syn_dcp -ipname {file_root} -library user -taxonomy user -vendor amd -version 1.0.0 -rtl verilog + """ + else: + impl_flow = "" + + with open(output_file, "w", encoding="utf-8") as file: + script_text = f"""\ + open_component {file_root}.comp -reset + add_files [list {files}] + {tb_add_files} + set_top {top} + puts "Running: set_top {top}" + set_part {part} + puts "Running: set_part {part}" + create_clock -period {period} + csynth_design + {impl_flow} + exit""" + logger.debug(f"Script text: {textwrap.dedent(script_text)}") + file.write(textwrap.dedent(script_text)) + + logging.debug(f"HLS Tcl script written to {output_file}") + + +def get_script_parameters(input_file, config, id_tag=None): + """ + Get the HLS script parameters from the config file. + + Args: + input_file (str): The path to the input file. + config (ConfigParser): The configuration object containing the script parameters. + id_tag (str, optional): An optional tag to identify the specific section in the config file. + Defaults to None. + + Returns: + dict: A dictionary containing the HLS Tcl script parameters. + + """ + # get the basename of the input file + file_basename = Path(input_file).name + logger.debug(f"basename Input file: {file_basename}") + + # get the base name of the input file without extension + file_rootname = input_file.split(".")[0] + logger.debug(f"Rootname Input file: {file_rootname}") + + # get the extension of the input file + file_suffix = input_file.split(".")[1] + logger.debug(f"Suffix Input file: {file_suffix}") + + config_dict = {section: dict(config[section]) for section in config.sections()} + logger.debug(f"config: {config_dict}") + # set the parameters for the Tcl script using default values + parameters = { + "top": file_rootname, + "part": config["DEFAULTS"]["part"], + "period": config["DEFAULTS"]["period"], + "files": file_basename, + "tb_files": f"{file_rootname}-top.{file_suffix}", + } + + # update each parameter if it is defined in the config file and + # its option exists + if id_tag is not None: + file_basename_id = f"{file_basename}:{id_tag}" + else: + file_basename_id = file_basename + + # handle the case where a value in the config file references another + # value using a colon separator. 
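+    # For example (mirroring the docstring above): "top = huffman_encoding.cpp:top"
+    # resolves to the "top" option of the [huffman_encoding.cpp] section.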
+
+    if config.has_section(file_basename_id):
+        for key in parameters:
+            if config.has_option(file_basename_id, key):
+                # if the value contains a colon, treat it as a
+                # section:option reference into the config file
+                if re.search(r":", config[file_basename_id][key]):
+                    referenced_value = config[file_basename_id][key].split(":")
+                    parameters[key] = config[referenced_value[0]][referenced_value[1]]
+                else:
+                    parameters[key] = config[file_basename_id][key]
+
+    return parameters
+
+
+def main(input_file, output_file, config_file, id_tag=None, impl=False):
+    """Main function to process input file and write to output file"""
+
+    logger.debug(
+        f"Processing input file {input_file} and writing to output file {output_file}"
+    )
+
+    # assert that the input and config files exist
+    input_file_path = Path(input_file)
+    assert input_file_path.exists(), f"Input file {input_file} not found"
+
+    config_file_path = Path(config_file)
+    assert config_file_path.exists(), f"Config file {config_file} not found"
+
+    # read the config file
+    config = read_config_file(config_file)
+
+    # get the script parameters
+    params = get_script_parameters(input_file, config, id_tag)
+
+    write_script(
+        input_file,
+        output_file,
+        impl,
+        params,
+    )
+
+    output_file = Path(output_file)
+    assert output_file.exists(), f"Output file {output_file} not found"
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Process some files.")
+    parser.add_argument("-i", "--input", help="Input file name", required=True)
+    parser.add_argument("-o", "--output", help="Output file name", required=True)
+    # add optional verbose flag for logging: default INFO, DEBUG when enabled.
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        help="Increase output verbosity",
+        action="store_const",
+        const=logging.DEBUG,
+        default=logging.INFO,
+    )
+    # add optional config ini file
+    parser.add_argument(
+        "-c",
+        "--config",
+        help="Config file name",
+        default="__hls_config__.ini",
+    )
+    # add optional argument to specify id
+    parser.add_argument(
+        "-id", "--id", help="ID tag when accessing multiple versions", required=False
+    )
+    # add optional argument to include the implementation flow in the generated Tcl script
+    parser.add_argument(
+        "-imp",
+        "--impl",
+        help="Include implementation in flow when generating Tcl script",
+        action="store_true",
+        default=False,
+        required=False,
+    )
+
+    args = parser.parse_args()
+
+    logging.basicConfig(level=args.verbose)
+
+    # add info log message of the python command line
+    logging.info(f"Running: {' '.join(sys.argv)}")
+
+    main(args.input, args.output, args.config, args.id, args.impl)
diff --git a/scripts/gen_template_report.py b/scripts/gen_template_report.py
new file mode 100755
index 0000000..9aefd45
--- /dev/null
+++ b/scripts/gen_template_report.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python3
+#
+# disable logging-fstring-interpolation / W1203
+# pylint: disable=W1203
+#
+#
+"""
+This script reads a yaml data file and a jinja2 template that accesses the data
+specified in the yaml file, and generates a file containing the rendered version
+of the template.
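+
+Usage (see the argument parser at the bottom of this script for the full
+option list):
+
+    python gen_template_report.py -i <input_yaml> -o <output_adoc>
+        -j <template_j2> [-k <key>] [-v] [-hd]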
+
+The jinja2 template file should access the data in the yaml file as a dictionary with the
+keyword data, so, for example,
+
+{{ data.key }}
+
+in the jinja2 template file would access the value of the key in the yaml file specified as
+
+key: value
+
+resulting in the rendered file containing
+
+value
+
+"""
+
+import os
+import sys
+import re
+import argparse
+import logging
+import yaml
+
+
+from jinja2 import Environment, FileSystemLoader
+
+logger = logging.getLogger(__name__)
+
+
+def read_yaml_file(yaml_config_file):
+    """
+    Read YAML config file and load into a dictionary
+    """
+    with open(yaml_config_file, "r", encoding="utf-8") as stream:
+        try:
+            # data = yaml.safe_load(stream)
+            data = yaml.load(stream, Loader=yaml.FullLoader)
+        except yaml.YAMLError as exc:
+            print(exc)
+            sys.exit(1)
+
+    logger.debug(f"YAML data: {data}")
+    return data
+
+
+def get_asciidoc_report(data, template_jinja2_file):
+    """
+    Generate ASCIIDOC report using Jinja2 template
+    """
+    logger.debug(f"data: {data}")
+    env = Environment(loader=FileSystemLoader(os.path.dirname(template_jinja2_file)))
+    env.filters["default"] = lambda value, default: value if value else default
+    env.filters["first_item"] = lambda value, default: value[0] if value else default
+    env.filters["basename"] = lambda value: os.path.basename(value)
+    env.filters["dirname"] = lambda value: os.path.dirname(value)
+    env.filters["regex"] = lambda value, pattern, group=1: (
+        re.search(pattern, value).group(group) if re.search(pattern, value) else ""
+    )
+    template = env.get_template(os.path.basename(template_jinja2_file))
+
+    # dump the contents of the template file for debugging
+    with open(template_jinja2_file, "r", encoding="utf-8") as f:
+        template_contents = f.read()
+
+    logger.debug(f"Jinja2 template file: {template_contents}")
+
+    asciidoc_report = template.render(data=data)
+    logger.debug(f"ASCIIDOC report: {asciidoc_report}")
+    return asciidoc_report
+
+
+def get_asciidoc_summary_all_header_file(data, filename="___summary_all_header.adoc"):
+    """
+    Generate the header of the summary report in ASCIIDOC format.
+
+    Parameters:
+    - data: The data dictionary containing the component information.
+    - filename: The name of the temporary file to store the header.
+
+    Returns:
+    - The filename of the temporary file.
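+
+    Note:
+        The file written here is itself a jinja2 template (it iterates over
+        data.items()), so the caller is expected to render it with the full
+        data dictionary, as main() below does.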
+ """ + # Create a temporary file to store the header of the summary report + with open(f"{filename}", "w", encoding="utf-8") as f: + f.write( + """ +:toc: preamble += HLS Component Status Report + +|=== +|Component Name | Errors | Warnings +{%- for key, value in data.items() %} +{%- set command = value.command[0] %} +{%- set script_name = command | regex('.*?(\\S+\\.tcl)') | basename %} +{%- set report_name = script_name | regex('(\\S+)\\.tcl') %} +| <<{{ report_name }}>> | {{ value.errors_count | default("0") }} | {{ value.warnings_count | default("0") }} +{%- endfor %} +|=== + +toc::[] + +""" + ) + return filename + + +def main(arg): + """ + Main function + """ + # read yaml config file + data = read_yaml_file(arg.input_file) + logger.debug(f"YAML data: {data}") + + logger.debug(f"Jinja2 template file: {arg.template_jinja2_file}") + + if arg.key == "all": + asciidoc_reports = [] + if arg.header: + asciidoc_reports.append(get_asciidoc_summary_all_header_file(data)) + for key in data: + data_key = data[key] + logger.debug(f"Data key: {data_key}") + + # generate ASCIIDOC report + asciidoc_report = get_asciidoc_report(data_key, arg.template_jinja2_file) + logger.debug(f"ASCIIDOC report: {asciidoc_report}") + + asciidoc_reports.append(asciidoc_report) + + asciidoc_report = "\n".join(asciidoc_reports) + else: + data_key = data[arg.key] + logger.debug(f"Data key: {data_key}") + # generate ASCIIDOC report + asciidoc_report = get_asciidoc_report(data_key, arg.template_jinja2_file) + logger.debug(f"ASCIIDOC report: {asciidoc_report}") + + # write ASCIIDOC report to output file + with open(arg.output_file, "w", encoding="utf-8") as f: + f.write(asciidoc_report) + + logger.info(f"ASCIIDOC report written to {arg.output_file}") + logger.debug(f"ASCIIDOC report: {asciidoc_report}") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate HLS component status report") + # add required argument with switches -i and --input_file to accept input file + parser.add_argument( + "-i", "--input_file", help="Input yaml data file", required=True + ) + # add required argument with switches -o and --output_file to accept output file + parser.add_argument( + "-o", "--output_file", help="Output ASCIIDOC file", required=True + ) + # add required argument with switches -y and --yaml_config_file to accept yaml config file + parser.add_argument( + "-j", + "--template_jinja2_file", + help="Jinja2 report template file", + required=True, + ) + # add argument with switches -y and --yaml_config_file to accept yaml config file + parser.add_argument( + "-k", + "--key", + help="generate report for component", + default="all", + ) + # add optional verbose argument + parser.add_argument( + "-v", + "--verbose", + help="Increase output verbosity", + action="store_const", + const=logging.DEBUG, + default=logging.INFO, + ) + # add optional boolean header argument to include header. 
+    # It defaults to false; passing -hd with no value sets it to true.
+    parser.add_argument(
+        "-hd",
+        "--header",
+        help="Include header",
+        # NOTE: type=bool is a known argparse pitfall (bool("False") is True),
+        # so parse an explicit value with a small helper instead.
+        type=lambda value: value.lower() in ("true", "1", "yes"),
+        default=False,
+        const=True,
+        nargs="?",
+    )
+
+    args = parser.parse_args()
+
+    logging.basicConfig(level=args.verbose)
+
+    # verify input file exists
+    if not os.path.exists(args.input_file):
+        print(f"Input file {args.input_file} does not exist")
+        sys.exit(1)
+    # verify template file exists
+    if not os.path.exists(args.template_jinja2_file):
+        print(f"Template file {args.template_jinja2_file} does not exist")
+        sys.exit(1)
+
+    main(args)
diff --git a/scripts/tests/conftest.py b/scripts/tests/conftest.py
new file mode 100644
index 0000000..4641798
--- /dev/null
+++ b/scripts/tests/conftest.py
@@ -0,0 +1,19 @@
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import pytest
+
+
+@pytest.fixture
+def test_file():
+    """Generate a test file for the test cases."""
+    # Setup: create a test file
+    with open("test_file.c", "w", encoding="utf-8") as file:
+        file.write("void test_function() {}\n")
+        file.write("int another_function() {}\n")
+        file.write("void test_function2() {}\n")
+    yield "test_file.c"
+    # Teardown: remove the test file
+    os.remove("test_file.c")
diff --git a/scripts/tests/test_gen_hls_component_log_data.py b/scripts/tests/test_gen_hls_component_log_data.py
new file mode 100644
index 0000000..5626465
--- /dev/null
+++ b/scripts/tests/test_gen_hls_component_log_data.py
@@ -0,0 +1,352 @@
+"""
+This module contains unit tests for the functions in the gen_hls_component_log_data module.
+
+The gen_hls_component_log_data module provides functions for reading input files, parsing
+YAML configuration files, and splitting input file strings into sections.
+
+The unit tests in this module verify the correctness of these functions by creating
+temporary files, writing data to them, and then using the functions to read and process
+the data.
+
+"""
+
+import os
+import tempfile
+import logging
+import pytest
+import yaml
+
+from gen_hls_component_log_data import *
+
+# get script directory
+script_dir = os.path.dirname(os.path.realpath(__file__))
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+
+
+def test_read_input_file():
+    """
+    Test case for the read_input_file function.
+
+    This test case creates a temporary file, writes some data to it, and then uses the
+    read_input_file function to read the data from the file.
+    It checks that the data read from the file is correct.
+
+    """
+    # Create a temporary file and write some data to it
+    with tempfile.NamedTemporaryFile(delete=False, mode="w", encoding="utf-8") as f:
+        f.write("Test data")
+        temp_file_name = f.name
+
+    # Use the function to read the data from the file
+    data = read_input_file(temp_file_name)
+
+    # Check that the data is correct
+    assert data == "Test data"
+
+
+def test_read_yaml_config():
+    """
+    Test case for the read_yaml_config function.
+
+    This test case creates a temporary file, writes some data to it using the yaml.dump function,
+    and then uses the read_yaml_config function to read the data from the file. It checks that
+    the data read from the file is correct.
+ + Returns: + None + """ + # Create a temporary file and write some data to it + with tempfile.NamedTemporaryFile(delete=False, mode="w", encoding="utf-8") as f: + yaml.dump({"key": "value"}, f) + temp_file_name = f.name + + # Use the function to read the data from the file + data = read_yaml_config(temp_file_name) + + # Check that the data is correct + assert data == {"key": "value"} + + +def test_get_hls_compiler_type(): + """ + Test function for the get_hls_compiler_type function. + + This function tests the get_hls_compiler_type function by providing different input data and + checking if the returned result matches the expected output. + + """ + + # Define the input data and the expected output + input_data = "vivado_hls" + yaml_data = { + "hls_compiler_type": { + "vivado_hls": {"pattern": "vivado_hls"}, + "vitis_hls": {"pattern": "vitis_hls"}, + } + } + expected_output = "vivado_hls" + + # Call the function with the input data + result = get_hls_compiler_type(input_data, yaml_data) + + # Assert that the result is as expected + assert result == expected_output + + input_data = "vitis_hls" + + expected_output = "vitis_hls" + + # Call the function with the input data + result = get_hls_compiler_type(input_data, yaml_data) + + # Assert that the result is as expected + assert result == expected_output + + +def test_split_input_file_string_into_sections(): + # Create a string that simulates the input data + input_data = """ +Makefile:35: block_fir-top.log fir-top.log matrix_vector_base-top.log matrix_vector_optimized-top.log matrix_vector_unroll_inner2-top.log video_2dfilter_boundary_condition-top.log video_2dfilter_linebuffer_extended_constant-top.log video_2dfilter_linebuffer_extended-top.log video_2dfilter_linebuffer-top.log video_2dfilter-top.log video_simple-top.log cordic_fixed-top.log cordic-top.log fft_stages_loop-top.log fft_stages-top.log fft_sw-top.log histogram_dependence-top.log histogram_opt1-top.log histogram_parallel-top.log insertion_cell_sort-top.log insertion_sort_parallel-top.log insertion_sort-top.log matrixmultiplication-top.log merge_sort_loop_merged-top.log merge_sort_parallel-top.log merge_sort_restructured-top.log merge_sort-top.log prefixsumBO-top.log prefixsumHW-top.log prefixsum_optimized-top.log prefixsumSW-top.log spmv2_restructured-top.log spmv2-top.log spmv_restructured-top.log spmv-top.log spmv_unrolled-top.log +/proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/../scripts/gen_vivado_hls_csynth_script.py -c /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/__hls_config__.ini -i block_fir.c -o block_fir.tcl +INFO:root:Running: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/../scripts/gen_vivado_hls_csynth_script.py -c /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/__hls_config__.ini -i block_fir.c -o block_fir.tcl +vitis-run --mode hls --tcl block_fir.tcl + +****** vitis-run v2024.1 (64-bit) + **** SW Build 5074859 on 2024-05-20-23:21:20 + **** Start of session at: Thu May 23 12:30:52 2024 + ** Copyright 1986-2022 Xilinx, Inc. All Rights Reserved. + ** Copyright 2022-2024 Advanced Micro Devices, Inc. All Rights Reserved. +INFO: [HLS 200-112] Total CPU user time: 7.08 seconds. Total CPU system time: 2.01 seconds. Total elapsed time: 16.53 seconds; peak allocated memory: 373.109 MB. +INFO: [vitis-run 60-791] Total elapsed time: 0h 0m 18s +INFO: [vitis-run 60-1662] Stopping dispatch session having empty uuid. 
+INFO:root:Running: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/../scripts/gen_vivado_hls_csynth_script.py -c /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/__hls_config__.ini -i fir.c -o fir.tcl +vitis-run --mode hls --tcl fir.tcl + +****** vitis-run v2024.1 (64-bit) + **** SW Build 5074859 on 2024-05-20-23:21:20 + **** Start of session at: Thu May 23 12:31:12 2024 + ** Copyright 1986-2022 Xilinx, Inc. All Rights Reserved. + ** Copyright 2022-2024 Advanced Micro Devices, Inc. All Rights Reserved. + +INFO: [vitis-run 82-31] Launching vitis_hls: vitis_hls -nolog -run tcl -f /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.tcl -work_dir /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples +INFO: [HLS 200-2161] Finished Command csynth_design Elapsed time: 00:00:10; Allocated memory: 47.531 MB. +INFO: [HLS 200-112] Total CPU user time: 6.83 seconds. Total CPU system time: 1.86 seconds. Total elapsed time: 15.29 seconds; peak allocated memory: 381.781 MB. +INFO: [vitis-run 60-791] Total elapsed time: 0h 0m 16s +INFO: [vitis-run 60-1662] Stopping dispatch session having empty uuid. + +****** Vitis HLS - High-Level Synthesis from C, C++ and OpenCL v2024.1 (64-bit) + """ + + config_data = read_yaml_config( + os.path.join(script_dir, "..", "__hls_csynth_log_data_extract__.yml") + ) + + # Use the function to split the input data into sections + sections = split_input_file_string_into_sections(input_data, config_data) + + # Check that the sections are correct + assert sections[0][0] == "block_fir" + assert len(sections[0]) == 11 + assert sections[1][0] == "fir" + assert len(sections[1]) == 14 + + +def test_split_input_file_into_sections(): + """ + Test function for splitting an input file into sections. + + This function reads a "real" input file and a default csynth configuration file, and then splits + the input file into sections based on the configuration data. It asserts that the number of + sections is equal to 47, and that each section has a length greater than 1. + """ + input_file_string = read_input_file(os.path.join(script_dir, "make_hls.log")) + config_data = read_yaml_config( + os.path.join(script_dir, "..", "__hls_csynth_log_data_extract__.yml") + ) + + sections = split_input_file_string_into_sections(input_file_string, config_data) + + assert len(sections) == 47 + + for each_section in sections: + assert len(each_section) > 1 + + +def test_extract_component_log_data(): + # input_file_string = read_input_file(os.path.join(script_dir, "make_hls.log")) + input_file_string = """ +/proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/../scripts/gen_vivado_hls_csynth_script.py -c /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/__hls_config__.ini -i fir.c -o fir.tcl +INFO:root:Running: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/../scripts/gen_vivado_hls_csynth_script.py -c /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/__hls_config__.ini -i fir.c -o fir.tcl +vitis-run --mode hls --tcl fir.tcl + +****** vitis-run v2024.1 (64-bit) + **** SW Build 5074859 on 2024-05-20-23:21:20 + **** Start of session at: Thu May 23 12:31:12 2024 + ** Copyright 1986-2022 Xilinx, Inc. All Rights Reserved. + ** Copyright 2022-2024 Advanced Micro Devices, Inc. All Rights Reserved. 
+ +INFO: [vitis-run 82-31] Launching vitis_hls: vitis_hls -nolog -run tcl -f /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.tcl -work_dir /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples + +****** Vitis HLS - High-Level Synthesis from C, C++ and OpenCL v2024.1 (64-bit) + **** SW Build 5069499 on May 21 2024 + **** IP Build 5075265 on Wed May 22 21:45:21 MDT 2024 + **** SharedData Build 5076995 on Wed May 22 18:29:18 MDT 2024 + **** Start of session at: Thu May 23 12:31:14 2024 + ** Copyright 1986-2022 Xilinx, Inc. All Rights Reserved. + ** Copyright 2022-2024 Advanced Micro Devices, Inc. All Rights Reserved. + +source /proj/xbuilds/SWIP/2024.1_0522_2023/installs/lin64/Vitis_HLS/2024.1/scripts/vitis_hls/hls.tcl -notrace +INFO: [HLS 200-10] For user 'mpettigr' on host 'xsjapps59' (Linux_x86_64 version 5.15.0-83-generic) on Thu May 23 12:31:16 PDT 2024 +INFO: [HLS 200-10] On os Ubuntu 22.04.3 LTS +INFO: [HLS 200-10] In directory '/proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples' +Sourcing Tcl script '/proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.tcl' +INFO: [HLS 200-1510] Running: open_project fir.proj -reset +INFO: [HLS 200-10] Creating and opening project '/proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj'. +INFO: [HLS 200-1510] Running: add_files fir.c +INFO: [HLS 200-10] Adding design file 'fir.c' to the project +INFO: [HLS 200-1510] Running: add_files -tb fir-top.c +INFO: [HLS 200-10] Adding test bench file 'fir-top.c' to the project +INFO: [HLS 200-1510] Running: set_top fir +INFO: [HLS 200-1510] Running: open_solution solution -reset +INFO: [HLS 200-10] Creating and opening solution '/proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution'. +INFO: [HLS 200-10] Cleaning up the solution database. +INFO: [HLS 200-1505] Using default flow_target 'vivado' +Resolution: For help on HLS 200-1505 see docs.xilinx.com/access/sources/dita/topic?Doc_Version=2024.1%20English&url=ug1448-hls-guidance&resourceid=200-1505.html +INFO: [HLS 200-1510] Running: set_part virtex7 +INFO: [HLS 200-1611] Setting target device to 'xc7v585t-ffg1761-2' +INFO: [HLS 200-1510] Running: create_clock -period 5 +INFO: [SYN 201-201] Setting up clock 'default' with a period of 5ns. +INFO: [HLS 200-1510] Running: csynth_design +INFO: [HLS 200-111] Finished File checks and directory preparation: CPU user time: 0.08 seconds. CPU system time: 0 seconds. Elapsed time: 0.09 seconds; current allocated memory: 334.250 MB. +INFO: [HLS 200-10] Analyzing design file 'fir.c' ... +INFO: [HLS 200-111] Finished Source Code Analysis and Preprocessing: CPU user time: 0.29 seconds. CPU system time: 0.65 seconds. Elapsed time: 1.47 seconds; current allocated memory: 335.816 MB. +INFO: [HLS 200-777] Using interface defaults for 'Vivado' flow target. +INFO: [HLS 200-1995] There were 37 instructions in the design after the 'Compile/Link' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 34 instructions in the design after the 'Unroll/Inline (step 1)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 27 instructions in the design after the 'Unroll/Inline (step 2)' phase of compilation. 
See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 24 instructions in the design after the 'Unroll/Inline (step 3)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 24 instructions in the design after the 'Unroll/Inline (step 4)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 24 instructions in the design after the 'Array/Struct (step 1)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 24 instructions in the design after the 'Array/Struct (step 2)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 24 instructions in the design after the 'Array/Struct (step 3)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 24 instructions in the design after the 'Array/Struct (step 4)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 25 instructions in the design after the 'Array/Struct (step 5)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 25 instructions in the design after the 'Performance (step 1)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 24 instructions in the design after the 'Performance (step 2)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 37 instructions in the design after the 'Performance (step 3)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 36 instructions in the design after the 'Performance (step 4)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 42 instructions in the design after the 'HW Transforms (step 1)' phase of compilation. 
See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 200-1995] There were 50 instructions in the design after the 'HW Transforms (step 2)' phase of compilation. See the Design Size Report for more details: /proj/xsjhdstaff2/mpettigr/projects/hls_book/pp4fpgas/examples/fir.proj/solution/syn/report/csynth_design_size.rpt +INFO: [HLS 214-376] automatically set the pipeline for Loop< VITIS_LOOP_8_1> at fir.c:8:18 +INFO: [HLS 214-376] automatically set the pipeline for Loop< VITIS_LOOP_13_2> at fir.c:13:19 +INFO: [HLS 214-421] Automatically partitioning small array 'fir.delay_line' completely based on array size. (fir.c:5:0) +INFO: [HLS 214-270] Inferring pragma 'array_partition type=complete dim=1' for array 'fir.delay_line' due to pipeline pragma (fir.c:5:0) +INFO: [HLS 214-248] Applying array_partition to 'fir.delay_line': Complete partitioning on dimension 1. (fir.c:5:0) +INFO: [HLS 200-111] Finished Compiling Optimization and Transform: CPU user time: 2.09 seconds. CPU system time: 0.5 seconds. Elapsed time: 7.3 seconds; current allocated memory: 345.379 MB. +INFO: [HLS 200-111] Finished Checking Pragmas: CPU user time: 0 seconds. CPU system time: 0 seconds. Elapsed time: 0 seconds; current allocated memory: 345.379 MB. +INFO: [HLS 200-10] Starting code transformations ... +INFO: [HLS 200-111] Finished Standard Transforms: CPU user time: 0.01 seconds. CPU system time: 0.01 seconds. Elapsed time: 0.02 seconds; current allocated memory: 345.496 MB. +INFO: [HLS 200-10] Checking synthesizability ... +INFO: [HLS 200-111] Finished Checking Synthesizability: CPU user time: 0.01 seconds. CPU system time: 0 seconds. Elapsed time: 0.01 seconds; current allocated memory: 345.496 MB. +INFO: [HLS 200-111] Finished Loop, function and other optimizations: CPU user time: 0.03 seconds. CPU system time: 0 seconds. Elapsed time: 0.03 seconds; current allocated memory: 367.074 MB. +INFO: [HLS 200-111] Finished Architecture Synthesis: CPU user time: 0.03 seconds. CPU system time: 0 seconds. Elapsed time: 0.03 seconds; current allocated memory: 376.492 MB. +INFO: [HLS 200-10] Starting hardware synthesis ... +INFO: [HLS 200-10] Synthesizing 'fir' ... +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [HLS 200-42] -- Implementing module 'fir_Pipeline_VITIS_LOOP_8_1' +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [SCHED 204-11] Starting scheduling ... +INFO: [SCHED 204-61] Pipelining loop 'VITIS_LOOP_8_1'. +INFO: [HLS 200-1470] Pipelining result : Target II = NA, Final II = 1, Depth = 1, loop 'VITIS_LOOP_8_1' +INFO: [SCHED 204-11] Finished scheduling. +INFO: [HLS 200-111] Finished Scheduling: CPU user time: 0.03 seconds. CPU system time: 0.03 seconds. Elapsed time: 0.09 seconds; current allocated memory: 377.453 MB. +INFO: [BIND 205-100] Starting micro-architecture generation ... +INFO: [BIND 205-101] Performing variable lifetime analysis. +INFO: [BIND 205-101] Exploring resource sharing. +INFO: [BIND 205-101] Binding ... +INFO: [BIND 205-100] Finished micro-architecture generation. +INFO: [HLS 200-111] Finished Binding: CPU user time: 0.01 seconds. CPU system time: 0.01 seconds. Elapsed time: 0.03 seconds; current allocated memory: 377.453 MB. 
+INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [HLS 200-42] -- Implementing module 'fir_Pipeline_VITIS_LOOP_13_2' +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [SCHED 204-11] Starting scheduling ... +INFO: [SCHED 204-61] Pipelining loop 'VITIS_LOOP_13_2'. +INFO: [HLS 200-1470] Pipelining result : Target II = NA, Final II = 1, Depth = 8, loop 'VITIS_LOOP_13_2' +INFO: [SCHED 204-11] Finished scheduling. +INFO: [HLS 200-111] Finished Scheduling: CPU user time: 0.04 seconds. CPU system time: 0 seconds. Elapsed time: 0.06 seconds; current allocated memory: 377.453 MB. +INFO: [BIND 205-100] Starting micro-architecture generation ... +INFO: [BIND 205-101] Performing variable lifetime analysis. +INFO: [BIND 205-101] Exploring resource sharing. +INFO: [BIND 205-101] Binding ... +INFO: [BIND 205-100] Finished micro-architecture generation. +INFO: [HLS 200-111] Finished Binding: CPU user time: 0.02 seconds. CPU system time: 0.01 seconds. Elapsed time: 0.03 seconds; current allocated memory: 377.453 MB. +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [HLS 200-42] -- Implementing module 'fir' +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [SCHED 204-11] Starting scheduling ... +INFO: [SCHED 204-11] Finished scheduling. +INFO: [HLS 200-111] Finished Scheduling: CPU user time: 0.03 seconds. CPU system time: 0 seconds. Elapsed time: 0.04 seconds; current allocated memory: 377.453 MB. +INFO: [BIND 205-100] Starting micro-architecture generation ... +INFO: [BIND 205-101] Performing variable lifetime analysis. +INFO: [BIND 205-101] Exploring resource sharing. +INFO: [BIND 205-101] Binding ... +INFO: [BIND 205-100] Finished micro-architecture generation. +INFO: [HLS 200-111] Finished Binding: CPU user time: 0.03 seconds. CPU system time: 0.05 seconds. Elapsed time: 0.09 seconds; current allocated memory: 377.453 MB. +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [HLS 200-10] -- Generating RTL for module 'fir_Pipeline_VITIS_LOOP_8_1' +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [RTGEN 206-100] Generating core module 'sparsemux_7_2_32_1_1': 1 instance(s). +INFO: [RTGEN 206-100] Finished creating RTL model for 'fir_Pipeline_VITIS_LOOP_8_1'. +INFO: [HLS 200-111] Finished Creating RTL model: CPU user time: 0.03 seconds. CPU system time: 0 seconds. Elapsed time: 0.05 seconds; current allocated memory: 377.453 MB. +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [HLS 200-10] -- Generating RTL for module 'fir_Pipeline_VITIS_LOOP_13_2' +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [HLS 200-1030] Apply Unified Pipeline Control on module 'fir_Pipeline_VITIS_LOOP_13_2' pipeline 'VITIS_LOOP_13_2' pipeline type 'loop pipeline' +INFO: [RTGEN 206-100] Generating core module 'mul_32s_32s_32_5_1': 1 instance(s). +INFO: [RTGEN 206-100] Generating core module 'sparsemux_9_2_32_1_1': 1 instance(s). +INFO: [RTGEN 206-100] Finished creating RTL model for 'fir_Pipeline_VITIS_LOOP_13_2'. +INFO: [HLS 200-111] Finished Creating RTL model: CPU user time: 0.05 seconds. CPU system time: 0.01 seconds. Elapsed time: 0.1 seconds; current allocated memory: 377.453 MB. 
+INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [HLS 200-10] -- Generating RTL for module 'fir' +INFO: [HLS 200-10] ---------------------------------------------------------------- +INFO: [RTGEN 206-500] Setting interface mode on port 'fir/input_r' to 'ap_none'. +INFO: [RTGEN 206-500] Setting interface mode on port 'fir/output_r' to 'ap_vld'. +INFO: [RTGEN 206-500] Setting interface mode on port 'fir/taps' to 'ap_memory'. +INFO: [RTGEN 206-500] Setting interface mode on function 'fir' to 'ap_ctrl_hs'. +WARNING: [RTGEN 206-101] Register 'fir_delay_line_0' is power-on initialization. +WARNING: [RTGEN 206-101] Register 'fir_delay_line_1' is power-on initialization. +WARNING: [RTGEN 206-101] Register 'fir_delay_line_2' is power-on initialization. +WARNING: [RTGEN 206-101] Register 'fir_delay_line_3' is power-on initialization. +INFO: [RTGEN 206-100] Finished creating RTL model for 'fir'. +INFO: [HLS 200-111] Finished Creating RTL model: CPU user time: 0.06 seconds. CPU system time: 0.01 seconds. Elapsed time: 0.1 seconds; current allocated memory: 377.453 MB. +INFO: [HLS 200-111] Finished Generating all RTL models: CPU user time: 0.17 seconds. CPU system time: 0.03 seconds. Elapsed time: 0.32 seconds; current allocated memory: 380.082 MB. +INFO: [HLS 200-111] Finished Updating report files: CPU user time: 0.21 seconds. CPU system time: 0.03 seconds. Elapsed time: 0.31 seconds; current allocated memory: 381.656 MB. +INFO: [VHDL 208-304] Generating VHDL RTL for fir. +INFO: [VLOG 209-307] Generating Verilog RTL for fir. +INFO: [HLS 200-790] **** Loop Constraint Status: All loop constraints were satisfied. +INFO: [HLS 200-789] **** Estimated Fmax: 353.48 MHz +INFO: [HLS 200-2161] Finished Command csynth_design Elapsed time: 00:00:10; Allocated memory: 47.531 MB. +INFO: [HLS 200-112] Total CPU user time: 6.83 seconds. Total CPU system time: 1.86 seconds. Total elapsed time: 15.29 seconds; peak allocated memory: 381.781 MB. +INFO: [vitis-run 60-791] Total elapsed time: 0h 0m 16s +INFO: [vitis-run 60-1662] Stopping dispatch session having empty uuid. +""" + config_data = read_yaml_config( + os.path.join(script_dir, "..", "__hls_csynth_log_data_extract__.yml") + ) + + sections = split_input_file_string_into_sections(input_file_string, config_data) + + hls_compiler_type = get_hls_compiler_type(sections[0], config_data) + logger.debug(f"hls_compiler_type: {hls_compiler_type}") + result = extract_component_log_data(hls_compiler_type, sections[0], config_data) + + logger.debug(f"result: {result}") + logger.debug(f"result['version']: {result['version']}") + + # Check if the function correctly extracted the data + assert result["version"][0].__contains__("2024.1") + assert result["command"][0].__contains__("vitis_hls") diff --git a/scripts/tests/test_gen_hls_csynth_script.py b/scripts/tests/test_gen_hls_csynth_script.py new file mode 100644 index 0000000..b2de449 --- /dev/null +++ b/scripts/tests/test_gen_hls_csynth_script.py @@ -0,0 +1,72 @@ +import os +import textwrap +from gen_hls_csynth_script import * + +import tempfile +from configparser import ConfigParser + +import logging + +logging.basicConfig(level=logging.DEBUG) +logger = logging.getLogger(__name__) + + +def test_read_config_file(): + """ + Test the read_config_file function. 
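+
+    The test writes a minimal [DEFAULTS] section (part and period) to a
+    temporary file and checks that read_config_file() returns those values.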
+ """ + # Create a temporary configuration file + with tempfile.NamedTemporaryFile(mode="w", delete=False) as tmpfile: + config_content = "[DEFAULTS]\npart = xc7z010clg400-1\nperiod = 10\n" + tmpfile.write(config_content) + tmpfile.flush() + + # Call the function with the temporary file + config = read_config_file(tmpfile.name) + + # Check that the config object has the expected content + assert config.get("DEFAULTS", "part") == "xc7z010clg400-1" + assert config.get("DEFAULTS", "period") == "10" + + +def test_write_csynth_script(): + """ + Test the write_csynth_script function. + """ + # Define parameters + input_file = "test_input.cpp" + params = { + "top": "test_top", + "part": "xc7z010clg400-1", + "period": "10", + "files": "test_input.cpp", + "tb_files": "none", + } + + # Create a temporary output file + with tempfile.NamedTemporaryFile(mode="w", delete=False) as tmpfile: + output_file = tmpfile.name + + # Call the function with the parameters and temporary output file + write_csynth_script(input_file, output_file, "", params) + + # Read the content of the output file + with open(output_file, "r") as file: + content = file.read() + + logger.debug(content) + # Define the expected script content + expected_content = """\ + open_project test_input.proj -reset + add_files [list test_input.cpp] + + set_top test_top + puts "Running: set_top test_top" + open_solution solution -reset + set_part xc7z010clg400-1 + puts "Running: set_part xc7z010clg400-1" + create_clock -period 10 + csynth_design + exit""" + # Check that the content matches the expected script + assert content.strip() == textwrap.dedent(expected_content) diff --git a/scripts/tests/test_gen_template_report.py b/scripts/tests/test_gen_template_report.py new file mode 100644 index 0000000..38fc736 --- /dev/null +++ b/scripts/tests/test_gen_template_report.py @@ -0,0 +1,97 @@ +import os +import subprocess +import pytest +from jinja2 import Environment, FileSystemLoader + +from gen_template_report import get_asciidoc_report + +# get script directory +script_dir = os.path.dirname(os.path.realpath(__file__)) + + +def test_get_asciidoc_report(): + """ + Test the get_asciidoc_report function. + + This function tests the functionality of the get_asciidoc_report function by + creating a mock jinja2 environment and template, generating a template + file using the mock data, and then calling the get_asciidoc_report function + with the mock data and template. The result is then asserted to be equal to + the expected value. + """ + + # Mock data and template + data = {"key": "value"} + template_file = "template.jinja2" + + # Create a mock jinja2 environment and template + env = Environment(loader=FileSystemLoader(".")) + template = env.from_string("{{ key }}") + template.stream(data).dump(template_file) + + # Call the function with the mock data and template + result = get_asciidoc_report(data, template_file) + + # Assert the result is as expected + assert result == "value" + + # Cleanup + os.remove(template_file) + + +def test_cli(): + """ + Test the command-line interface of the gen_template_report script. + + This function performs the following steps: + 1. Creates a temporary test input yaml file. + 2. Creates a temporary jinja2 template file to render from the input yaml file data. + 3. Runs the gen_template_report script with some input. + 4. Checks the exit code of the script. + 5. Verifies that the output file contains the expected content. + 6. Cleans up the temporary files. 
+
+    Raises:
+        AssertionError: If the exit code is not 0 or the output file does not contain
+            the expected content.
+    """
+    # create a temporary test input yaml file
+    with open("input.yaml", "w", encoding="utf-8") as f:
+        f.write("foo:\n")
+        f.write("  key: value")
+
+    # create a temporary jinja2 template file
+    # to render from the input yaml file data
+    with open("template.j2", "w", encoding="utf-8") as f:
+        f.write("{{ data.key }}")
+
+    # Run the script with some input
+    result = subprocess.run(
+        [
+            "python3",
+            os.path.join(script_dir, "..", "gen_template_report.py"),
+            "-i",
+            "input.yaml",
+            "-o",
+            "output.adoc",
+            "-j",
+            "template.j2",
+            "-v",
+        ],
+        capture_output=True,
+        text=True,
+        check=True,  # raise CalledProcessError if the script exits non-zero
+    )
+
+    # Check the exit code
+    assert result.returncode == 0
+
+    # check the contents of the output file
+    # verify that the output file contains the expected content
+    with open("output.adoc", encoding="utf-8") as f:
+        assert f.read() == "value"
+
+    # Cleanup
+    os.remove("input.yaml")
+    os.remove("template.j2")
+    os.remove("output.adoc")
diff --git a/scripts/video_2dfilter_opencv.cpp b/scripts/video_2dfilter_opencv.cpp
new file mode 100644
index 0000000..3ce46c9
--- /dev/null
+++ b/scripts/video_2dfilter_opencv.cpp
@@ -0,0 +1,108 @@
+#include <opencv2/opencv.hpp>
+#include <iostream>
+
+using namespace cv;
+using namespace std;
+
+void zeroOutEdges(cv::Mat& image) {
+    // Assuming image is a valid cv::Mat object
+    int thickness = 1; // Thickness of the edge to be zeroed out
+
+    // Set top edge to zero
+    cv::rectangle(image, cv::Point(0, 0), cv::Point(image.cols, thickness - 1), cv::Scalar(0, 0, 0), cv::FILLED);
+
+    // Set bottom edge to zero
+    cv::rectangle(image, cv::Point(0, image.rows - thickness), cv::Point(image.cols, image.rows), cv::Scalar(0, 0, 0), cv::FILLED);
+
+    // Set left edge to zero
+    cv::rectangle(image, cv::Point(0, 0), cv::Point(thickness - 1, image.rows), cv::Scalar(0, 0, 0), cv::FILLED);
+
+    // Set right edge to zero
+    cv::rectangle(image, cv::Point(image.cols - thickness, 0), cv::Point(image.cols, image.rows), cv::Scalar(0, 0, 0), cv::FILLED);
+}
+
+void applyCustom2DFilter(const Mat& inputImage, Mat& outputImage, cv::BorderTypes borderType, int borderConstantValue) {
+    // Define the kernel. Note that OpenCV expects a floating point matrix for the filter2D function.
+    Mat kernel = (Mat_<float>(3, 3) << 1, 2, 1,
+                                       2, 4, 2,
+                                       1, 2, 1);
+    // Normalize the kernel to ensure the brightness of the output image is similar to the input image.
+    kernel = kernel / 16.0;
+
+    // Apply the custom 2D filter.
+    // NOTE: the sixth filter2D argument is delta, a value added to every
+    // output pixel; filter2D does not expose a configurable border fill value.
+    if (borderType == cv::BORDER_CONSTANT) {
+        filter2D(inputImage, outputImage, -1, kernel, Point(-1, -1), borderConstantValue, borderType);
+    } else {
+        filter2D(inputImage, outputImage, -1, kernel, Point(-1, -1), 0, borderType);
+    }
+}
+
+int main(int argc, char** argv) {
+    cv::BorderTypes borderType = cv::BORDER_DEFAULT;
+    int borderConstantValue = 0;
+
+    if (argc < 3 || argc > 5) {
+        cout << "Usage: " << argv[0] << " <input image> <output image> [<border type>] [<border constant value>]" << endl;
+        return -1;
+    }
+
+    // If a border type is provided, use it. Otherwise, use the default border type.
+    // Convert the string to the matching cv::BorderTypes value.
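+    // Hypothetical invocation, using the test image names from examples/
+    // (the binary name depends on how this file is compiled):
+    //   ./video_2dfilter_opencv test_20x20.bmp test_20x20_filtered_opencv.bmp BORDER_CONSTANT 0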
+    if (argc >= 4) {
+        string borderTypeString = argv[3];
+        if (borderTypeString == "BORDER_CONSTANT") {
+            borderType = cv::BORDER_CONSTANT;
+            if (argc == 5) {
+                // convert argv[4] to an integer
+                borderConstantValue = atoi(argv[4]);
+            } else {
+                borderConstantValue = 0;
+            }
+        } else if (borderTypeString == "BORDER_REPLICATE") {
+            borderType = cv::BORDER_REPLICATE;
+        } else if (borderTypeString == "BORDER_REFLECT") {
+            borderType = cv::BORDER_REFLECT;
+        } else if (borderTypeString == "BORDER_REFLECT_101") {
+            borderType = cv::BORDER_REFLECT_101;
+        } else if (borderTypeString == "BORDER_TRANSPARENT") {
+            borderType = cv::BORDER_TRANSPARENT;
+        } else if (borderTypeString == "BORDER_REFLECT101") {
+            borderType = cv::BORDER_REFLECT_101;
+        } else if (borderTypeString == "BORDER_DEFAULT") {
+            borderType = cv::BORDER_DEFAULT;
+        } else if (borderTypeString == "BORDER_ISOLATED") {
+            borderType = cv::BORDER_ISOLATED;
+        } else if (borderTypeString == "BORDER_WRAP") {
+            borderType = cv::BORDER_WRAP;
+        } else {
+            cout << "Invalid border type. Using default border type." << endl;
+        }
+    }
+
+    // Read the image file
+    Mat image = imread(argv[1]);
+    if (image.empty()) {
+        cout << "Could not open or find the image" << endl;
+        return -1;
+    }
+
+    Mat filteredImage;
+    applyCustom2DFilter(image, filteredImage, borderType, borderConstantValue);
+
+    // Zero out the edges of the filtered image.
+    // This is done to ignore any differences in the edges of the images:
+    // the OpenCV 2D filter can filter up to the edge of the image, while the
+    // HLS book example skips this edge case.
+    //
+    //zeroOutEdges(filteredImage);
+
+    // Write the filtered image to disk
+    bool writeSuccess = imwrite(argv[2], filteredImage);
+    if (!writeSuccess) {
+        cout << "Failed to write the image" << endl;
+        return -1;
+    }
+
+    cout << "Image processed and saved successfully" << endl;
+    return 0;
+}
\ No newline at end of file