Commit b5f46c42 by Siju Committed by Tianqi Chen

yolo reorg op for relay (#1941)

parent 0edb332f
......@@ -135,6 +135,7 @@ This level enables additional math and transform operators.
tvm.relay.vision.multibox_prior
tvm.relay.vision.multibox_transform_loc
tvm.relay.vision.nms
tvm.relay.vision.yolo_reorg
**Level 10: Temporary Operators**
......@@ -251,6 +252,7 @@ Level 5 Definitions
.. autofunction:: tvm.relay.vision.multibox_prior
.. autofunction:: tvm.relay.vision.multibox_transform_loc
.. autofunction:: tvm.relay.vision.nms
.. autofunction:: tvm.relay.vision.yolo_reorg
Level 10 Definitions
......
......@@ -98,6 +98,17 @@ struct ROIAlignAttrs : public tvm::AttrsNode<ROIAlignAttrs> {
}
};
/*! \brief Attributes used in yolo reorg operators */
struct YoloReorgAttrs : public tvm::AttrsNode<YoloReorgAttrs> {
Integer stride;
TVM_DECLARE_ATTRS(YoloReorgAttrs, "relay.attrs.YoloReorgAttrs") {
TVM_ATTR_FIELD(stride)
.set_default(1)
.describe("Stride value for yolo reorg");
}
};
} // namespace relay
} // namespace tvm
#endif // TVM_RELAY_ATTRS_VISION_H_
......@@ -5,5 +5,7 @@ from __future__ import absolute_import as _abs
from .multibox import *
from .nms import *
from .rcnn import *
from .yolo import *
from . import _multibox
from . import _rcnn
from . import _yolo
#pylint: disable=invalid-name, unused-argument
"""Backend compiler related feature registration"""
from __future__ import absolute_import
from ..op import register_schedule, register_pattern
from ..op import schedule_injective, OpPattern
# reorg is a pure data shuffle (no reduction), so register it as an
# injective op and reuse the generic injective schedule.
register_pattern("vision.yolo_reorg", OpPattern.INJECTIVE)
register_schedule("vision.yolo_reorg", schedule_injective)
"""Yolo operations."""
from . import _make
def yolo_reorg(data, stride):
"""Yolo reorg operation used in darknet models.
This layer shuffles the input tensor values based on the stride value.
Along with the shuffling, it does the shape transform.
If '(n, c, h, w)' is the data shape and 's' is stride, output shape is '(n, c*s*s, h/s, w/s)'
Example: data(1, 4, 2, 2) = [[[[ 0 1] [ 2 3]]
[[ 4 5] [ 6 7]]
[[ 8 9] [10 11]]
[[12 13] [14 15]]]]
stride = 2
ret(1, 16, 1, 1) = [[[[ 0]] [[ 2]] [[ 8]] [[10]]
[[ 1]] [[ 3]] [[ 9]] [[11]]
[[ 4]] [[ 6]] [[12]] [[14]]
[[ 5]] [[ 7]] [[13]] [[15]]]]
Note: stride=1 has no significance for reorg operation.
Parameters
----------
data : relay.Expr
The input data tensor.
stride : int
The stride value for reorganisation.
Returns
-------
ret : relay.Expr
The computed result.
"""
return _make.yolo_reorg(data, stride)
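
A minimal usage sketch of the new Python API (not part of the commit; the shape is illustrative), using only calls that the tests added below also exercise:

    from tvm import relay

    # Build a reorg call on a 4-D tensor and inspect the inferred output type.
    x = relay.var("x", relay.TensorType((1, 4, 6, 6), "float32"))
    y = relay.vision.yolo_reorg(x, stride=2)
    yy = relay.ir_pass.infer_type(y)
    # Expected: a TensorType with shape (1, 16, 3, 3) and dtype float32,
    # i.e. (n, c*s*s, h/s, w/s) for stride s = 2.
    print(yy.checked_type)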
/*!
* Copyright (c) 2018 by Contributors
* \file yolo.cc
* \brief Yolo related operators
*/
#include <tvm/relay/op.h>
#include <tvm/relay/attrs/vision.h>
#include <topi/vision/reorg.h>
#include <vector>
#include "../op_common.h"
#include "../type_relations.h"
namespace tvm {
namespace relay {
TVM_REGISTER_NODE_TYPE(YoloReorgAttrs);
/*!
* \brief YoloReorgRel Output type and shape relation evaluation function.
* \param types The types of input and output.
* \param num_inputs Number of input types in the args.
* \param attrs The additional attributes of the operator.
* \param reporter The reporter to report the solution to.
* \return False if the relation cannot be resolved, true if it has been resolved.
*/
bool YoloReorgRel(const Array<Type>& types,
int num_inputs,
const Attrs& attrs,
const TypeReporter& reporter) {
CHECK_EQ(types.size(), 2);
const auto* data = types[0].as<TensorTypeNode>();
if (data == nullptr) return false;
const YoloReorgAttrs* param = attrs.as<YoloReorgAttrs>();
CHECK(param != nullptr);
CHECK(data->shape.size() == 4) << "Yolo reorg supports only 4-dimensional input.";
std::vector<IndexExpr>&& oshape = AsVector(data->shape);
oshape[1] = oshape[1] * param->stride * param->stride;
oshape[2] = oshape[2] / param->stride;
oshape[3] = oshape[3] / param->stride;
reporter->Assign(types[1], TensorTypeNode::make(oshape, data->dtype));
return true;
}
Expr MakeYoloReorg(Expr data,
Integer stride) {
auto attrs = make_node<YoloReorgAttrs>();
attrs->stride = stride;
static const Op& op = Op::Get("vision.yolo_reorg");
return CallNode::make(op, {data}, Attrs(attrs), {});
}
TVM_REGISTER_API("relay.op.vision._make.yolo_reorg")
.set_body([](const TVMArgs& args, TVMRetValue* rv) {
runtime::detail::unpack_call<Expr, 2>(MakeYoloReorg, args, rv);
});
RELAY_REGISTER_OP("vision.yolo_reorg")
.describe(R"doc("Yolo reorg operation. This layer reorganize the output.
Its function is mostly shape transform.")doc" TVM_ADD_FILELINE)
.add_argument("data", "Tensor", "The input tensor.")
.set_num_inputs(1)
.set_support_level(5)
.set_attrs_type_key("relay.attrs.YoloReorgAttrs")
.add_type_rel("YoloReorg", YoloReorgRel)
.set_attr<FTVMCompute>("FTVMCompute", [](const Attrs& attrs,
const Array<Tensor>& inputs,
const Type& out_type,
const Target& target) {
const auto* params = attrs.as<YoloReorgAttrs>();
CHECK(params != nullptr);
return Array<Tensor>{ topi::vision::reorg(inputs[0], params->stride) };
});
} // namespace relay
} // namespace tvm
......@@ -7,7 +7,6 @@ from tvm import relay
from tvm.relay.testing import ctx_list
import topi.testing
def test_resize_infer_type():
n, c, h, w = tvm.var("n"), tvm.var("c"), tvm.var("h"), tvm.var("w")
x = relay.var("x", relay.TensorType((n, c, h, w), "int8"))
......@@ -307,6 +306,40 @@ def test_roi_align():
verify_roi_align((4, 4, 16, 16), (32, 5), pooled_size=7, spatial_scale=0.5, sample_ratio=2)
def test_yolo_reorg_infer_shape():
def verify_yolo_reorg(shape, stride, out_shape):
x = relay.var("x", relay.TensorType(shape, "float32"))
z = relay.vision.yolo_reorg(x, stride=stride)
zz = relay.ir_pass.infer_type(z)
assert "stride=" in z.astext()
assert zz.checked_type == relay.ty.TensorType(out_shape, "float32")
n, c, h, w = tvm.var("n"), tvm.var("c"), tvm.var("h"), tvm.var("w")
verify_yolo_reorg((n, c, 20, 20), 10, (n, c*10*10, 2, 2))
verify_yolo_reorg((n, c, h, w), 2, (n, c*2*2, h/2, w/2))
def test_yolo_reorg():
def verify_yolo_reorg(shape, stride):
x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32")
ref_res = topi.testing.reorg_python(x_data, stride)
x = relay.var("x", relay.TensorType(shape, "float32"))
z = relay.vision.yolo_reorg(x, stride=stride)
zz = relay.ir_pass.infer_type(z)
assert "stride=" in z.astext()
assert zz.checked_type == relay.ty.TensorType(ref_res.shape, "float32")
func = relay.Function([x], z)
for target, ctx in ctx_list():
for kind in ["graph", "debug"]:
intrp = relay.create_executor(kind, ctx=ctx, target=target)
op_res = intrp.evaluate(func)(x_data)
tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)
verify_yolo_reorg((1, 100, 20, 20), 10)
verify_yolo_reorg((1, 4, 6, 6), 2)
if __name__ == "__main__":
test_resize_infer_type()
test_resize()
......@@ -314,3 +347,5 @@ if __name__ == "__main__":
test_multibox_transform_loc()
test_nms()
test_roi_align()
test_yolo_reorg_infer_shape()
test_yolo_reorg()