Skip to content

Commit

Permalink
Add support for quantized_bits(keep_neg=False) and quantized_relu(slope=0)
Browse files Browse the repository at this point in the history
  • Loading branch information
Aba committed Sep 17, 2023
1 parent 3adc71f commit 3bca141
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 11 deletions.
4 changes: 2 additions & 2 deletions c/model.h
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
#define N_BUNDLES 7
Bundle_t bundles [N_BUNDLES] = {
{.n=8, .l=2, .kw=11, .coe=2, .coe_tl=2, .r_ll=8, .h=16, .w=8, .w_kw2=3, .t=8, .p=3, .cm=1, .cm_p0=1, .w_bpt=140, .w_bpt_p0=140, .x_bpt=840, .x_bpt_p0=840, .is_bias=1, .b_offset=0, .b_val_shift=9, .b_bias_shift=0, .ca_nzero=1, .ca_shift=15, .ca_pl_scale=3, .x_header=414341061322735616, .x_header_p0=414341061322735616, .w_header=414587437826703360, .w_header_p0=414341061322735616 },
{.n=8, .l=2, .kw=11, .coe=2, .coe_tl=2, .r_ll=8, .h=16, .w=8, .w_kw2=3, .t=8, .p=3, .cm=1, .cm_p0=1, .w_bpt=140, .w_bpt_p0=140, .x_bpt=840, .x_bpt_p0=840, .is_bias=1, .b_offset=0, .b_val_shift=9, .b_bias_shift=0, .ca_nzero=0, .ca_shift=12, .ca_pl_scale=0, .x_header=414341061322735616, .x_header_p0=414341061322735616, .w_header=414587437826703360, .w_header_p0=414341061322735616 },
{.n=8, .l=2, .kw=1, .coe=24, .coe_tl=0, .r_ll=8, .h=16, .w=8, .w_kw2=8, .t=1, .p=1, .cm=20, .cm_p0=16, .w_bpt=200, .w_bpt_p0=200, .x_bpt=13320, .x_bpt_p0=13320, .is_bias=0, .b_offset=16, .b_val_shift=0, .b_bias_shift=0, .ca_nzero=1, .ca_shift=3, .ca_pl_scale=0, .x_header=8700964375684448256, .x_header_p0=8700964375684448256, .w_header=8701210795138088960, .w_header_p0=8700964375684448256 },
{.n=8, .l=2, .kw=7, .coe=3, .coe_tl=4, .r_ll=8, .h=16, .w=8, .w_kw2=5, .t=6, .p=8, .cm=2, .cm_p0=2, .w_bpt=176, .w_bpt_p0=176, .x_bpt=1672, .x_bpt_p0=1672, .is_bias=1, .b_offset=16, .b_val_shift=9, .b_bias_shift=0, .ca_nzero=1, .ca_shift=12, .ca_pl_scale=0, .x_header=846686625550303232, .x_header_p0=846686625550303232, .w_header=846933027824074752, .w_header_p0=846686625550303232 },
{.n=8, .l=2, .kw=5, .coe=4, .coe_tl=4, .r_ll=8, .h=16, .w=8, .w_kw2=6, .t=4, .p=4, .cm=4, .cm_p0=4, .w_bpt=248, .w_bpt_p0=248, .x_bpt=3336, .x_bpt_p0=3336, .is_bias=0, .b_offset=34, .b_val_shift=0, .b_bias_shift=0, .ca_nzero=1, .ca_shift=6, .ca_pl_scale=3, .x_header=1927550536119222272, .x_header_p0=1927550536119222272, .w_header=1927796989932601344, .w_header_p0=1927550536119222272 },
{.n=8, .l=2, .kw=3, .coe=8, .coe_tl=8, .r_ll=8, .h=16, .w=8, .w_kw2=7, .t=3, .p=3, .cm=6, .cm_p0=4, .w_bpt=224, .w_bpt_p0=152, .x_bpt=5000, .x_bpt_p0=3336, .is_bias=1, .b_offset=34, .b_val_shift=9, .b_bias_shift=0, .ca_nzero=1, .ca_shift=15, .ca_pl_scale=3, .x_header=3008414446688141312, .x_header_p0=1855492942081294336, .w_header=3008660883321651200, .w_header_p0=1855492942081294336 },
{.n=8, .l=2, .kw=3, .coe=8, .coe_tl=8, .r_ll=8, .h=16, .w=8, .w_kw2=7, .t=3, .p=3, .cm=6, .cm_p0=4, .w_bpt=224, .w_bpt_p0=152, .x_bpt=5000, .x_bpt_p0=3336, .is_bias=1, .b_offset=34, .b_val_shift=9, .b_bias_shift=0, .ca_nzero=0, .ca_shift=12, .ca_pl_scale=0, .x_header=3008414446688141312, .x_header_p0=1855492942081294336, .w_header=3008660883321651200, .w_header_p0=1855492942081294336 },
{.n=8, .l=2, .kw=1, .coe=24, .coe_tl=2, .r_ll=8, .h=16, .w=8, .w_kw2=8, .t=3, .p=2, .cm=20, .cm_p0=4, .w_bpt=248, .w_bpt_p0=56, .x_bpt=16648, .x_bpt_p0=3336, .is_bias=0, .b_offset=58, .b_val_shift=0, .b_bias_shift=0, .ca_nzero=1, .ca_shift=6, .ca_pl_scale=3, .x_header=11006807384898142208, .x_header_p0=1783435348043366400, .w_header=11007053838711521280, .w_header_p0=1783435348043366400 },
{.n=1, .l=1, .kw=1, .coe=24, .coe_tl=0, .r_ll=8, .h=8, .w=1, .w_kw2=1, .t=1, .p=320, .cm=20, .cm_p0=20, .w_bpt=248, .w_bpt_p0=248, .x_bpt=138, .x_bpt_p0=138, .is_bias=1, .b_offset=58, .b_val_shift=9, .b_bias_shift=0, .ca_nzero=1, .ca_shift=15, .ca_pl_scale=3, .x_header=10952754293765046272, .x_header_p0=10952754293765046272, .w_header=10952754456973803520, .w_header_p0=10952754293765046272 }
};
Expand Down
12 changes: 7 additions & 5 deletions test/py/bundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,17 +68,21 @@ def __init__(self,
def extract_act(signature):
ilayer = QActivation(signature)
d = ilayer.quantizer.get_config()
sign_bit = d['keep_negative'] if 'keep_negative' in d else (d['negative_slope'] !=0 if 'negative_slope' in d else (0))
sign_bit = 1 # We always use signed integers
int_bit = d['integer'] if 'integer' in d else 0
frac = d['bits']-int_bit-sign_bit

if isinstance(ilayer.quantizer, quantized_bits):
if not d['keep_negative']:
d['keep_negative'] = True
ilayer.quantizer.keep_negative = True
print("Note: Only signed integers are allowed. Therefore, keep_negative is changed to True")
return { 'layer':ilayer, 'type':'quant', 'bits':d['bits'], 'frac':frac, 'plog_slope': 0, 'non_zero':1}
elif 'relu' in str(ilayer.quantizer.__class__) and ilayer.quantizer.negative_slope != 0:
elif 'relu' in str(ilayer.quantizer.__class__):
slope = ilayer.quantizer.negative_slope
if slope == 0:
assert ilayer.quantizer.bits != 1, "Error: Cannot use bits=1 with Relu. Use leaky_relu. Reason: Qkeras keeps relu signed"
ilayer.quantizer.bits -= 1
ilayer.quantizer.bits = ilayer.quantizer.bits-1
non_zero = 1*(slope != 0)
log_slope = np.log2(slope) if non_zero else 0
assert int(log_slope) == log_slope and log_slope <= 0, f"Error: negative_slope:{slope} of leaky_relu has to be a negative power of two. eg.0.125"
Expand Down Expand Up @@ -301,11 +305,9 @@ def apply_act(act_dict):

act_dict['shift_bits'] = shift_bits
self.proc['int'], self.proc['bits'], self.proc['frac'] = x, bits, frac
print(f'----------------------- shift:{shift_bits}, plog:{plog_slope}, nzero:{non_zero}')

apply_act(self.core['act'])
assert np.all(self.proc['int'] == self.core['tensor'].numpy() * 2**self.proc['frac']), f"Core + act output of bundle {self.idx} is not fixed point"

self.o_exp = self.proc['int']

if self.add is not None:
Expand Down
8 changes: 4 additions & 4 deletions test/py/param_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from bundle import Bundle
from qkeras import *
from tensorflow.keras.layers import Input
# keras.utils.set_random_seed(0)
keras.utils.set_random_seed(0)


# Simulator: xsim on windows, verilator otherwise
Expand Down Expand Up @@ -191,11 +191,11 @@ def test_dnn_engine(COMPILE):

input_shape = (8,16,8,3) # (XN, XH, XW, CI)
model_config = [
Config(11, 16, True , f'quantized_relu({c.X_BITS},0,negative_slope=0.125)'),
Config(1 , 16, False, f'quantized_bits({c.K_BITS},0,False,True,1)'),
Config(11, 16, True , f'quantized_relu({c.X_BITS},0,negative_slope=0)'),
Config(1 , 16, False, f'quantized_bits({c.K_BITS},0,False,False,1)'),
Config(7 , 16, True , f'quantized_bits({c.K_BITS},0,False,True,1)'),
Config(5 , 16, False, f'quantized_relu({c.X_BITS},0,negative_slope=0.125)'),
Config(3 , 24, True , f'quantized_relu({c.X_BITS},0,negative_slope=0.125)'),
Config(3 , 24, True , f'quantized_relu({c.X_BITS},0,negative_slope=0)'),
Config(1 , 50, False, f'quantized_relu({c.X_BITS},0,negative_slope=0.125)', flatten=True),
Config(1 , 10, True , f'quantized_relu({c.X_BITS},0,negative_slope=0.125)', dense= True),
]
Expand Down

0 comments on commit 3bca141

Please sign in to comment.