From f9279d71f381aefd2bbe66968717b108cb324bfa Mon Sep 17 00:00:00 2001 From: Francesco Conti Date: Wed, 17 Apr 2024 19:19:21 +0000 Subject: [PATCH] Generate stimuli a bit less randomly Using a normal distribution for weights and a bit tighter constraints for bias/scale, we get results that are distributed a bit more similarly to a "real" distribution. In the future, we need to make this controllable from the outside so that we can perform some tests with these base settings and others with more targeted ones. --- test/NnxTestClasses.py | 33 ++++++++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/test/NnxTestClasses.py b/test/NnxTestClasses.py index edf227b..41d5131 100644 --- a/test/NnxTestClasses.py +++ b/test/NnxTestClasses.py @@ -193,6 +193,12 @@ def load_if_exist(filename: str) -> Optional[torch.Tensor]: class NnxTestGenerator: _DEFAULT_SEED = 0 + _DEFAULT_WEIGHT_MEAN = 0.5 # as we use torch.floor(), this makes the generation unbiased + _DEFAULT_WEIGHT_STDEV = 0.27 + _DEFAULT_SCALE_MAX_BIT_32BIT = 17 + _DEFAULT_SCALE_MAX_BIT_16BIT = 11 + _DEFAULT_SCALE_MAX_BIT_8BIT = 5 + _DEFAULT_BIAS_MAX_BIT = 18 @staticmethod def _calculate_global_shift( @@ -204,8 +210,15 @@ def _calculate_global_shift( return torch.ceil(torch.log2(s / target_s)).type(torch.int32) @staticmethod - def _random_data(_type: IntegerType, shape: Tuple): - return torch.randint(_type.min, _type.max, size=shape) + def _random_data(_type: IntegerType, shape: Tuple, extremes: Tuple = None): + if extremes is None: + return torch.randint(_type.min, _type.max, size=shape) + else: + return torch.randint(max(_type.min, extremes[0]), min(_type.max, extremes[1]), size=shape) + + @staticmethod + def _random_data_normal(_type: IntegerType, shape: Tuple, mean: float = 0.5, std: float=0.27): + return torch.floor(torch.clip(torch.normal(mean, std, size=shape), _type.min, _type.max)).type(torch.int64) @staticmethod def from_conf( @@ -236,7 +249,11 @@ def from_conf( ) if 
weight is None: - weight = NnxTestGenerator._random_data( + weight_mean = NnxTestGenerator._DEFAULT_WEIGHT_MEAN + weight_std = NnxTestGenerator._DEFAULT_WEIGHT_STDEV * ((1<<(conf.weight_type._bits-1))-1) + weight = NnxTestGenerator._random_data_normal( + mean = weight_mean, + std = weight_std, _type=conf.weight_type, shape=weight_shape, ) @@ -244,13 +261,19 @@ if conf.has_norm_quant: if scale is None: assert conf.scale_type is not None + # same limits as in old NE16 generator + scale_extremes = (1, (1<