Commit a8574e7b by Tatsuya Nishiyama, committed by Yizhi Liu

[FRONTEND][TENSORFLOW] Add Pad and PadV2 support (#1545)

* [FRONTEND][TENSORFLOW] Add Pad and PadV2 support

* Add assertion to _pad, and fix test case for pad.
parent fb252448
@@ -607,6 +607,26 @@ def _LSTMBlockCell():
    return _impl

def _pad(name):
    def _impl(inputs, attr, params):
        padlist_key = inputs[1].list_output_names()[0]
        if padlist_key in params:
            padlist = params.pop(padlist_key).asnumpy()
        else:
            raise RuntimeError("Required parameter {} not found.".format(padlist_key))
        paddings = tuple([tuple(l) for l in padlist])
        attr['pad_width'] = paddings
        attr['pad_value'] = 0
        new_inputs = [inputs[0]]
        if name == 'PadV2':
            constant_values = params.pop(inputs[2].list_output_names()[0]).asnumpy()
            attr['pad_value'] = constant_values[0]
        return AttrCvt(
            op_name='pad',
            ignores=['Tpaddings'])(new_inputs, attr)
    return _impl
# compatible operators that do NOT require any conversion.
_identity_list = []
@@ -649,6 +669,8 @@ _convert_map = {
    'GatherV2'      : _gather_v2(),
    'StridedSlice'  : _stridedSlice(),
    'LRN'           : _lrn(),
    'Pad'           : _pad('Pad'),
    'PadV2'         : _pad('PadV2'),
}
# _convert_map_rnn defines maps of rnn operator name to
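The conversion comes down to translating TensorFlow's constant paddings input (and, for PadV2, the constant_values input) into the pad_width and pad_value attributes passed to the NNVM pad operator. A minimal standalone sketch of that attribute convention, shown here with numpy.pad purely for illustration; the array and values below are made up and not part of the commit:

import numpy as np

# pad_width is one (before, after) pair per axis, taken from the TF 'paddings'
# input; pad_value is 0 for Pad and the scalar 'constant_values' for PadV2.
x = np.arange(6, dtype=np.float32).reshape(2, 3)
pad_width = ((1, 1), (2, 2))
pad_value = 1.0
y = np.pad(x, pad_width, mode='constant', constant_values=pad_value)
print(y.shape)  # (4, 7): each axis grows by before + after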
@@ -704,6 +704,45 @@ def test_forward_resize_bilinear():
    _test_resize_bilinear((4, 16, 32, 32), [50, 50], False)
    _test_resize_bilinear((6, 32, 64, 64), [20, 20], True)
#######################################################################
# Pad
# ---
def _test_pad(input_shape, paddings, mode, **kwargs):
    """ One iteration of pad operation with given shape"""

    x = np.arange(np.prod(input_shape), dtype=np.float32).reshape(input_shape)

    with tf.Graph().as_default():
        in_data = constant_op.constant(x, shape=input_shape, dtype='float32')
        pad_values = constant_op.constant(paddings)
        pad = tf.pad(in_data, paddings=pad_values, mode=mode, **kwargs)

        if mode == 'CONSTANT':
            if 'constant_values' in kwargs:
                out_node = 'PadV2'
                out_name = 'PadV2:0'
            else:
                out_node = 'Pad'
                out_name = 'Pad:0'

        with tf.Session() as sess:
            graph_def = tf.graph_util.convert_variables_to_constants(
                sess,
                sess.graph.as_graph_def(add_shapes=True),
                [out_node],
            )
            tf_output = run_tf_graph(sess, x, 'Const:0', out_name)
            tvm_output = run_tvm_graph(graph_def, x.astype('float32'),
                                       "Const", tf_output.shape, 'float32')

            np.testing.assert_allclose(tf_output, tvm_output)

            sess.close()


def test_forward_pad():
    """ Pad """
    _test_pad((2, 3), [[1, 1], [2, 2]], mode="CONSTANT")
    _test_pad((2, 3), [[1, 1], [2, 2]], mode="CONSTANT", constant_values=1.0)
#######################################################################
# Inception V3
@@ -936,6 +975,7 @@ if __name__ == '__main__':
    test_forward_mobilenet()
    test_forward_variable()
    test_forward_resize_bilinear()
    test_forward_pad()
    test_forward_lstm()
    test_forward_stridedslice()
    test_forward_gather()
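The test derives the expected output node name from how tf.pad is lowered: with the default constant_values=0 it creates a Pad op, while a non-zero constant_values makes it create a PadV2 op, and the default node names match those op types. A quick standalone check of that assumption against the TensorFlow 1.x API (illustrative only, not part of the commit):

import tensorflow as tf

# List the nodes tf.pad produces with and without constant_values.
with tf.Graph().as_default() as g:
    x = tf.constant([[1.0, 2.0], [3.0, 4.0]])
    tf.pad(x, [[1, 1], [2, 2]])                       # lowered to a 'Pad' node
    tf.pad(x, [[1, 1], [2, 2]], constant_values=2.0)  # lowered to a 'PadV2' node
    print([(n.name, n.op) for n in g.as_graph_def().node])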