Commit 31223fa2 by Pariksheet Pinjari, committed by Tianqi Chen

[NNVM]Activations support added in Keras Frontend (#1210)

* [NNVM]Activations support added in Keras Frontend

* Helper for ELU added

* All activations test cases clubbed to one

parent dcf18a3c
@@ -23,6 +23,10 @@ def _get_pad_pair(input1d, kernel1d, stride1d):
     pad_after = pad - pad_before
     return [pad_before, pad_after]

+def _get_elu(insym, alpha):
+    """ A helper method for elu.
+    """
+    return -alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym)

 def _convert_activation(insym, keras_layer, _):
     if isinstance(keras_layer, str):
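For context (not part of the patch): `_get_elu` expresses ELU entirely with primitives the NNVM symbol API already has (`relu`, `exp`), via the identity elu(x) = -alpha * relu(1 - exp(x)) + relu(x). A minimal NumPy sketch, purely illustrative, to sanity-check that rewrite:

```python
import numpy as np

def relu(x):
    return np.maximum(x, 0.0)

def elu_reference(x, alpha):
    # Textbook ELU: x for x > 0, alpha * (exp(x) - 1) otherwise.
    return np.where(x > 0, x, alpha * (np.exp(x) - 1))

def elu_via_relu(x, alpha):
    # The rewrite used by _get_elu: for x > 0 the first term vanishes,
    # for x <= 0 it reduces to alpha * (exp(x) - 1).
    return -alpha * relu(1 - np.exp(x)) + relu(x)

x = np.linspace(-5, 5, 101)
assert np.allclose(elu_reference(x, 0.5), elu_via_relu(x, 0.5))
```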
@@ -50,27 +54,43 @@ def _convert_activation(insym, keras_layer, _):
     elif act_type == 'softplus':
         return _sym.log(_sym.__add_scalar__(_sym.exp(insym), scalar=1))
     elif act_type == 'elu':
-        raise NotImplementedError('elu not implemented')
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
+        return _get_elu(insym, alpha)
+    elif act_type == 'selu':
+        # Alpha, Gamma values obtained from https://arxiv.org/abs/1706.02515
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1.6732
+        gamma = keras_layer.gamma if hasattr(keras_layer, "gamma") else 1.0507
+        return gamma * _get_elu(insym, alpha)
     elif act_type == 'relu6':
         return _sym.clip(insym, a_min=0, a_max=6)
     elif act_type == 'softsign':
-        raise NotImplementedError('softsign not implemented')
+        return insym / (1 + (_sym.relu(insym) + _sym.relu(_sym.negative(insym))))
     elif act_type == 'hard_sigmoid':
-        raise NotImplementedError('hard_sigmoid not implemented')
+        transformX = (0.2 * insym) + 0.5
+        return _sym.clip(transformX, a_min=0, a_max=1)
     else:
         raise TypeError("Unsupported activation type : {}".format(act_type))

-def _convert_advanced_activation(insym, keras_layer, _):
+def _convert_advanced_activation(insym, keras_layer, symtab):
     act_type = type(keras_layer).__name__
     if act_type == 'LeakyReLU':
         return _sym.leaky_relu(insym, alpha=keras_layer.alpha)
     elif act_type == 'ELU':
-        raise NotImplementedError('ELU not implemented')
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
+        return _get_elu(insym, alpha)
     elif act_type == 'PReLU':
-        raise NotImplementedError('PReLU not implemented')
+        assert hasattr(keras_layer, "alpha"), "alpha required for PReLU."
+        _check_data_format(keras_layer)
+        size = len(keras_layer.alpha.shape)
+        return -symtab.new_const(keras_layer.get_weights()[0] \
+                                 .transpose(np.roll(range(size), 1))) \
+               * _sym.relu(-insym) + _sym.relu(insym)
     elif act_type == 'ThresholdedReLU':
-        raise NotImplementedError('ThresholdedReLU not implemented')
+        theta = keras_layer.theta if hasattr(keras_layer, "theta") else 1.0
+        theta_tensor = _sym.full_like(insym[0], fill_value=float(theta))
+        return _sym.elemwise_mul(insym[0],
+                                 _sym.greater(insym[0], theta_tensor, out_type="float32"))
     else:
         raise TypeError("Unsupported advanced activation type : {}".format(act_type))
...
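The new branches above all rewrite Keras activations in terms of a handful of primitive ops (`relu`, `clip`, `negative`, `greater`, `elemwise_mul`); the PReLU conversion additionally rolls the channel axis of the learned alpha weights to the front (e.g. HWC to CHW) before creating the constant. The NumPy sketch below is illustrative only, not part of the patch, and spells out the algebra behind each rewrite:

```python
import numpy as np

def relu(x):
    return np.maximum(x, 0.0)

# softsign(x) = x / (1 + |x|); |x| is expressed as relu(x) + relu(-x).
def softsign(x):
    return x / (1 + (relu(x) + relu(-x)))

# Keras' hard_sigmoid: a piecewise-linear approximation of the sigmoid.
def hard_sigmoid(x):
    return np.clip(0.2 * x + 0.5, 0.0, 1.0)

# SELU is a scaled ELU; alpha/gamma constants from https://arxiv.org/abs/1706.02515.
def selu(x, alpha=1.6732, gamma=1.0507):
    return gamma * np.where(x > 0, x, alpha * (np.exp(x) - 1))

# PReLU: alpha * x for x < 0, x otherwise, i.e. -alpha * relu(-x) + relu(x).
def prelu(x, alpha):
    return -alpha * relu(-x) + relu(x)

# ThresholdedReLU: pass x through only where x > theta, implemented as an
# element-wise multiply with a {0, 1} comparison mask.
def thresholded_relu(x, theta=1.0):
    return x * (x > theta).astype("float32")

x = np.linspace(-5, 5, 101).astype("float32")
assert np.allclose(softsign(x), x / (1 + np.abs(x)))
assert np.allclose(prelu(x, 0.25), np.where(x < 0, 0.25 * x, x))
```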
@@ -61,34 +61,6 @@ def test_forward_elemwise_add():
     keras_model = keras.models.Model(data, y)
     verify_keras_frontend(keras_model)

-def test_forward_softmax():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.Activation('softmax')(data)
-    x = keras.layers.Concatenate()([x, x])
-    x = keras.layers.GlobalMaxPooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
-def test_forward_softrelu():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.Activation('softplus')(data)
-    x = keras.layers.Concatenate()([x, x])
-    x = keras.layers.GlobalMaxPooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
-def test_forward_leaky_relu():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.LeakyReLU(alpha=0.3)(data)
-    x = keras.layers.Add()([x, x])
-    x = keras.layers.GlobalAveragePooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
 def test_forward_dense():
     data = keras.layers.Input(shape=(32,32,3))
     x = keras.layers.MaxPooling2D(pool_size=(2,2))(data)
@@ -127,16 +99,6 @@ def test_forward_upsample():
     keras_model = keras.models.Model(data, x)
     verify_keras_frontend(keras_model)

-def test_forward_relu6():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.Activation(keras.applications.mobilenet.relu6)(data)
-    x = keras.layers.Concatenate()([x, x])
-    x = keras.layers.GlobalMaxPooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
 def test_forward_reshape():
     data = keras.layers.Input(shape=(32,32,3))
     x = keras.layers.Reshape(target_shape=(32,32,3))(data)
@@ -168,6 +130,27 @@ def test_forward_mobilenet():
                                                           input_shape=(224,224,3), classes=1000)
     verify_keras_frontend(keras_model)

+def test_forward_activations():
+    data = keras.layers.Input(shape=(32,32,3))
+    weights = np.random.rand(1, 32, 32, 3)
+    act_funcs = [keras.layers.Activation('softmax'),
+                 keras.layers.Activation('softplus'),
+                 keras.layers.LeakyReLU(alpha=0.3),
+                 keras.layers.Activation(keras.applications.mobilenet.relu6),
+                 keras.layers.PReLU(weights=weights, alpha_initializer="zero"),
+                 keras.layers.ELU(alpha=0.5),
+                 keras.layers.Activation('selu'),
+                 keras.layers.ThresholdedReLU(theta=0.5),
+                 keras.layers.Activation('softsign'),
+                 keras.layers.Activation('hard_sigmoid'),
+                 keras.layers.Activation('sigmoid'),
+                 keras.layers.Activation('tanh'),
+                 keras.layers.Activation('linear')]
+    for act_func in act_funcs:
+        x = act_func(data)
+        x = keras.layers.GlobalMaxPooling2D()(x)
+        keras_model = keras.models.Model(data, x)
+        verify_keras_frontend(keras_model)
+
 def test_forward_multi_inputs():
     data1 = keras.layers.Input(shape=(32,32,3))
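Not part of the patch, but for orientation: a rough sketch of how one of these conversions gets exercised end to end, assuming the NNVM-era API (`nnvm.frontend.from_keras`, `nnvm.compiler.build`, `tvm.contrib.graph_runtime`) and that graph inputs are named after the Keras input layers. The actual checks live in the `verify_keras_frontend` helper used by the tests above.

```python
import numpy as np
import keras
import nnvm
import nnvm.compiler
import tvm
from tvm.contrib import graph_runtime

# A tiny model using one of the newly supported activations; GlobalMaxPooling2D
# keeps the output 2-D so no layout transpose is needed on the output side.
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Activation('selu')(data)
x = keras.layers.GlobalMaxPooling2D()(x)
keras_model = keras.models.Model(data, x)

in_data = np.random.uniform(size=(1, 32, 32, 3)).astype('float32')
keras_out = keras_model.predict(in_data)

# Import into NNVM; the Keras frontend works in NCHW, so transpose the input.
sym, params = nnvm.frontend.from_keras(keras_model)
nchw = in_data.transpose(0, 3, 1, 2)
shape_dict = {keras_model.input_names[0]: nchw.shape}  # assumed input naming
graph, lib, params = nnvm.compiler.build(sym, 'llvm', shape_dict, params=params)

# Run the compiled module and compare against Keras' own output.
module = graph_runtime.create(graph, lib, tvm.cpu(0))
module.set_input(**params)
module.set_input(keras_model.input_names[0], nchw)
module.run()
tvm_out = module.get_output(0).asnumpy()
np.testing.assert_allclose(keras_out, tvm_out, rtol=1e-5, atol=1e-5)
```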
@@ -204,16 +187,12 @@ def test_forward_reuse_layers():
 if __name__ == '__main__':
     test_forward_elemwise_add()
-    test_forward_softmax()
-    test_forward_softrelu()
-    test_forward_leaky_relu()
+    test_forward_activations()
     test_forward_dense()
     test_forward_transpose_conv()
     test_forward_separable_conv()
     test_forward_upsample()
-    test_forward_relu6()
     test_forward_reshape()
     test_forward_vgg16()
     test_forward_xception()
     test_forward_resnet50()
...