Commit b625b992 by Siju, committed by Tianqi Chen

keras.layers.ReLU added (#1530)

parent e678a4d4
@@ -74,7 +74,9 @@ def _convert_activation(insym, keras_layer, _):
 def _convert_advanced_activation(insym, keras_layer, symtab):
     act_type = type(keras_layer).__name__
-    if act_type == 'LeakyReLU':
+    if act_type == 'ReLU':
+        return _sym.relu(insym)
+    elif act_type == 'LeakyReLU':
         return _sym.leaky_relu(insym, alpha=keras_layer.alpha)
     elif act_type == 'ELU':
         alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
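With no constructor arguments, keras.layers.ReLU is plain rectification, so mapping it to _sym.relu is a direct translation. A minimal NumPy sketch (illustrative only, not frontend code) of the operation the new branch emits:

import numpy as np

def relu_reference(x):
    # Element-wise max(x, 0): what keras.layers.ReLU() computes with default
    # arguments, and what the new 'ReLU' branch maps onto _sym.relu.
    return np.maximum(x, 0.0)

print(relu_reference(np.array([-2.0, 0.0, 3.5])))   # [0.  0.  3.5]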
@@ -382,6 +384,7 @@ def _default_skip(insym, keras_layer, _): # pylint: disable=unused-argument
 _convert_map = {
     'Dense'                    : _convert_dense,
     'Activation'               : _convert_activation,
+    'ReLU'                     : _convert_advanced_activation,
     'LeakyReLU'                : _convert_advanced_activation,
     'PReLU'                    : _convert_advanced_activation,
     'ELU'                      : _convert_advanced_activation,
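The registration above is what routes the layer: the frontend looks up type(keras_layer).__name__ in _convert_map and calls the matching converter. A self-contained sketch of that dispatch pattern, using hypothetical stand-ins rather than the real frontend symbols:

class ReLU:                                   # hypothetical stand-in for keras.layers.ReLU
    pass

def _convert_advanced_activation(layer):      # simplified stand-in for the real converter
    return "relu symbol for " + type(layer).__name__

_convert_map = {'ReLU': _convert_advanced_activation}   # mirrors the entry added above

layer = ReLU()
# The table is keyed on the layer's class name, so a keras.layers.ReLU
# instance now resolves to the advanced-activation converter.
print(_convert_map[type(layer).__name__](layer))        # relu symbol for ReLU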
@@ -138,6 +138,7 @@ def test_forward_activations():
     weights = np.random.rand(1, 32, 32, 3)
     act_funcs = [keras.layers.Activation('softmax'),
                  keras.layers.Activation('softplus'),
+                 keras.layers.ReLU(),
                  keras.layers.LeakyReLU(alpha=0.3),
                  keras.layers.PReLU(weights=weights, alpha_initializer="zero"),
                  keras.layers.ELU(alpha=0.5),
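For completeness, a sketch of exercising the new path end to end, assuming Keras >= 2.2 (where keras.layers.ReLU was introduced) and the NNVM frontend API of this era (nnvm.frontend.from_keras); the test above drives the same conversion through its own helper:

import keras
import nnvm.frontend

# Single-op model: the only layer is the newly supported keras.layers.ReLU.
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.ReLU()(data)
keras_model = keras.models.Model(data, x)

# 'ReLU' now resolves through _convert_map to _convert_advanced_activation,
# which emits _sym.relu for the input symbol.
sym, params = nnvm.frontend.from_keras(keras_model)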