
Commit f9a2412
update keras activation parsing, especially leaky relu (#1085)
* update keras activation parsing, especially leaky relu

* fix assert in pytest as in oneAPI branch
jmitrevs authored Oct 25, 2024
1 parent 4518537 commit f9a2412
Showing 3 changed files with 12 additions and 14 deletions.
7 changes: 6 additions & 1 deletion hls4ml/converters/keras/core.py
@@ -67,7 +67,8 @@ def parse_activation_layer(keras_layer, input_names, input_shapes, data_reader):
         layer['class_name'] = 'ELU'  # always use ELU type for elu, even if passed as activation
 
     if layer['class_name'] == 'LeakyReLU':
-        layer['activ_param'] = keras_layer['config'].get('alpha', 0.3)
+        # the name changes for version 3
+        layer['activ_param'] = keras_layer['config'].get('negative_slope', keras_layer['config'].get('alpha', 0.3))
     elif layer['class_name'] == 'ThresholdedReLU':
         layer['activ_param'] = keras_layer['config'].get('theta', 1.0)
     elif layer['class_name'] == 'ELU':
@@ -83,6 +84,10 @@ def parse_activation_layer(keras_layer, input_names, input_shapes, data_reader):
         layer['class_name'] = 'HardActivation'
     if layer['class_name'] == 'Softmax':
         layer['axis'] = keras_layer['config'].get('axis', -1)
+    if layer['class_name'] == 'Activation' and layer['activation'] == 'leaky_relu':
+        layer['class_name'] = 'LeakyReLU'
+        # The parameter name changes for API v3; the default is different than in LeakyReLU layer
+        layer['activ_param'] = keras_layer['config'].get('negative_slope', keras_layer['config'].get('alpha', 0.2))
 
     return layer, [shape for shape in input_shapes[0]]

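For reference, a minimal self-contained sketch of the version-tolerant lookup introduced above (the config dicts here are hypothetical): Keras 2 serializes the LeakyReLU slope as 'alpha', Keras 3 renames it to 'negative_slope', so the parser tries the new key first, then the old one, then Keras's 0.3 layer default.

    # Minimal sketch of the nested .get() fallback; config dicts are hypothetical.
    def get_leaky_slope(config, default=0.3):
        # Prefer the Keras 3 key, then the Keras 2 key, then the framework default.
        return config.get('negative_slope', config.get('alpha', default))

    assert get_leaky_slope({'negative_slope': 0.1}) == 0.1  # Keras 3 config
    assert get_leaky_slope({'alpha': 0.1}) == 0.1           # Keras 2 config
    assert get_leaky_slope({}) == 0.3                       # neither key present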
18 changes: 5 additions & 13 deletions hls4ml/converters/keras_to_hls.py
@@ -297,26 +297,18 @@ def parse_keras_model(model_arch, reader):
         layer_list.append(layer)
         if 'activation' in layer and layer['class_name'] not in activation_layers + recurrent_layers:  # + qkeras_layers:
             act_layer = {}
+            act_details = layer['activation']
             # Workaround for QKeras activations passed as an argument
-            if isinstance(layer['activation'], dict):
-                act_details = layer['activation']
+            if isinstance(act_details, dict):
                 act_layer['class_name'] = 'QActivation'
                 act_layer['config'] = {
                     'name': layer['name'] + '_' + act_details['class_name'],
                     'activation': act_details,
                 }
-                act_layer, output_shape = layer_handlers['QActivation'](act_layer, None, [output_shape], reader)
             else:
-                act_layer['name'] = layer['name'] + '_' + layer['activation']
-                act_layer['activation'] = layer['activation']
-                if 'activ_param' in layer:
-                    act_layer['activ_param'] = layer['activ_param']
-                    act_layer['class_name'] = layer['activation']
-                elif layer['activation'] == 'softmax':
-                    act_layer['class_name'] = 'Softmax'
-                    act_layer['axis'] = -1
-                else:
-                    act_layer['class_name'] = 'Activation'
+                act_layer['class_name'] = 'Activation'
+                act_layer['config'] = {'name': layer['name'] + '_' + act_details, 'activation': act_details}
+            act_layer, output_shape = layer_handlers[act_layer['class_name']](act_layer, None, [output_shape], reader)
             inputs_map[layer['name']] = act_layer['name']
             if output_layers is not None and layer['name'] in output_layers:
                 output_layers = [act_layer['name'] if name == layer['name'] else name for name in output_layers]
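The net effect of this refactor is that every string activation now flows through the registered 'Activation' handler instead of being special-cased here; the remapping of 'softmax', 'leaky_relu', and the rest lives in parse_activation_layer (core.py above). A sketch of the synthetic layer dict the new else branch builds, for a hypothetical Dense layer named 'dense1' configured with activation='leaky_relu':

    # Hypothetical synthetic layer handed to layer_handlers['Activation'].
    act_layer = {
        'class_name': 'Activation',
        'config': {'name': 'dense1_leaky_relu', 'activation': 'leaky_relu'},
    }
    # parse_activation_layer then rewrites class_name to 'LeakyReLU' and reads
    # negative_slope/alpha from the config with a 0.2 default.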
1 change: 1 addition & 0 deletions test/pytest/test_activations.py
@@ -19,6 +19,7 @@
     [
         (ReLU(), 'relu'),
         (LeakyReLU(alpha=1.5), 'leaky_relu'),
+        (Activation('leaky_relu'), 'leaky_relu_act'),
         (ThresholdedReLU(theta=0.75), 'threshold_relu'),
         (ELU(alpha=1.25), 'elu'),
         (Activation('selu'), 'selu'),
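The added parametrize entry exercises the string spelling of leaky ReLU alongside the existing dedicated-layer entry. A sketch of the two spellings (imports assumed to match the test module; under Keras 3 the layer keyword becomes negative_slope):

    from tensorflow.keras.layers import Activation, LeakyReLU

    dedicated = LeakyReLU(alpha=1.5)      # dedicated layer; Keras default slope 0.3
    as_string = Activation('leaky_relu')  # string activation; default slope 0.2

The differing defaults (0.3 vs 0.2) are exactly the discrepancy the parser change in core.py accounts for.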
