Commit be968fef by MORITA Kazutaka, committed by Tianqi Chen

[Keras] ReLU6 support (#481)

parent 8db42c41
@@ -52,7 +52,7 @@ def _convert_activation(insym, keras_layer, _):
     elif act_type == 'elu':
         raise NotImplementedError('elu not implemented')
     elif act_type == 'relu6':
-        raise NotImplementedError('relu6 not implemented')
+        return _sym.clip(insym, a_min=0, a_max=6)
     elif act_type == 'softsign':
         raise NotImplementedError('softsign not implemented')
     elif act_type == 'hard_sigmoid':
...
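For reference, ReLU6 is the ReLU variant used by MobileNet that additionally caps activations at 6, i.e. relu6(x) = min(max(x, 0), 6). That is exactly a clip to the interval [0, 6], which is why the frontend can lower it to _sym.clip. A minimal NumPy sketch of the equivalence (the relu6 below is a hand-written stand-in for illustration, not the frontend code):

    import numpy as np

    def relu6(x):
        # relu6(x) = min(max(x, 0), 6)
        return np.minimum(np.maximum(x, 0.0), 6.0)

    x = np.random.uniform(-10.0, 10.0, size=(4, 4))
    # np.clip(x, 0, 6) computes the same thing the new converter emits
    # via _sym.clip(insym, a_min=0, a_max=6).
    assert np.allclose(relu6(x), np.clip(x, 0.0, 6.0))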
@@ -116,6 +116,15 @@ def test_forward_upsample():
     verify_keras_frontend(keras_model)
 
+def test_forward_relu6():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Activation(keras.applications.mobilenet.relu6)(data)
+    x = keras.layers.Concatenate()([x, x])
+    x = keras.layers.GlobalMaxPooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
 def test_forward_vgg16():
     keras_model = keras.applications.vgg16.VGG16(include_top=True, weights=None,
                                                  input_shape=(224,224,3), classes=1000)
@@ -142,6 +151,7 @@ if __name__ == '__main__':
     test_forward_transpose_conv()
     test_forward_separable_conv()
     test_forward_upsample()
+    test_forward_relu6()
     test_forward_vgg16()
     test_forward_xception()
...
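As a quick sanity check outside the test harness, one can build the same Keras model as test_forward_relu6 and confirm every output lands in [0, 6]; a sketch, assuming a Keras version that still ships keras.applications.mobilenet.relu6 (the same symbol the test uses):

    import numpy as np
    import keras

    # Same construction as test_forward_relu6, minus the frontend comparison.
    data = keras.layers.Input(shape=(32, 32, 3))
    x = keras.layers.Activation(keras.applications.mobilenet.relu6)(data)
    x = keras.layers.GlobalMaxPooling2D()(x)
    model = keras.models.Model(data, x)

    out = model.predict(np.random.uniform(-10.0, 10.0, size=(1, 32, 32, 3)))
    # All activations must be capped to [0, 6] if relu6 behaves like clip.
    assert out.min() >= 0.0 and out.max() <= 6.0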