Commit 4d05fd96 by Siju, committed by Tianqi Chen

[RELAY][OP] log_softmax op (#1857)

parent 7bafca4e
......@@ -30,6 +30,7 @@ This level enables fully connected multi-layer perceptron.
tvm.relay.expand_dims
tvm.relay.concatenate
tvm.relay.nn.softmax
tvm.relay.nn.log_softmax
tvm.relay.subtract
tvm.relay.multiply
tvm.relay.divide
......@@ -114,6 +115,7 @@ Level 1 Definitions
.. autofunction:: tvm.relay.sigmoid
.. autofunction:: tvm.relay.concatenate
.. autofunction:: tvm.relay.nn.softmax
.. autofunction:: tvm.relay.nn.log_softmax
Level 2 Definitions
......
......@@ -108,6 +108,28 @@ def softmax(data, axis):
    return _make.softmax(data, axis)


def log_softmax(data, axis):
    r"""Computes log softmax.

    .. math::

        \text{log_softmax}(x)_i = \log \frac{\exp(x_i)}{\sum_j \exp(x_j)}

    .. note::
        This operator can be optimized away for inference.

    Parameters
    ----------
    data: relay.Expr
        The input data to the operator.

    axis: int
        The axis to sum over when computing log softmax.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.log_softmax(data, axis)


def max_pool2d(data,
               pool_size=(1, 1),
               strides=(1, 1),
......
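
The frontend wrapper above only forwards its arguments to `_make.log_softmax`; the numerics live in the op's lowering. As a concrete reference point (not part of this patch, and the helper name `log_softmax_ref` is just for this illustration), the NumPy sketch below shows what the operator computes, written in the usual numerically stable form x_i - m - log(sum_j exp(x_j - m)) with m = max_j x_j, which is algebraically equal to the formula in the docstring.

    # Illustration only, not part of this change: a NumPy reference for
    # nn.log_softmax, using the numerically stable rewrite of the docstring
    # formula: log_softmax(x)_i = (x_i - m) - log(sum_j exp(x_j - m)), m = max(x).
    import numpy as np

    def log_softmax_ref(x, axis=-1):
        m = np.max(x, axis=axis, keepdims=True)
        shifted = x - m
        return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

    x = np.array([[1.0, 2.0, 3.0]])
    print(log_softmax_ref(x, axis=1))  # approx. [[-2.4076, -1.4076, -0.4076]]
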
......@@ -41,6 +41,35 @@ RELAY_REGISTER_OP("nn.softmax")
.set_support_level(1)
.add_type_rel("Identity", IdentityRel);
TVM_REGISTER_API("relay.op.nn._make.log_softmax")
.set_body([](const TVMArgs& args, TVMRetValue* rv) {
    auto make_func = [](Expr data, int axis) {
      auto attrs = make_node<SoftmaxAttrs>();
      attrs->axis = axis;
      static const Op& op = Op::Get("nn.log_softmax");
      return CallNode::make(op, {data}, Attrs(attrs), {});
    };
    runtime::detail::unpack_call<Expr, 2>(make_func, args, rv);
  });

RELAY_REGISTER_OP("nn.log_softmax")
.describe(R"code(Computes log softmax.

.. math:: \text{log_softmax}(x)_i = \log \frac{\exp(x_i)}{\sum_j \exp(x_j)}

.. note::
    This operator can be optimized away for inference.

- **data**: The input data

)code" TVM_ADD_FILELINE)
.set_num_inputs(1)
.add_argument("data", "Tensor", "The input tensor.")
.set_support_level(1)
.add_type_rel("Identity", IdentityRel);
// BatchFlatten
bool BatchFlattenRel(const Array<Type>& types,
                     int num_inputs,
......
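
The registration repeats the docstring's note that nn.log_softmax can be optimized away for inference. The reason is that both exp-normalization and log are monotonic along the class axis, so a classifier's final argmax is unchanged if the op is removed. A small NumPy check (illustration only, not part of this change; the `softmax` helper is defined here just for the demonstration):

    # Illustration only: argmax is invariant under softmax and log_softmax,
    # which is why the op can be dropped from a pure-inference classifier.
    import numpy as np

    logits = np.random.randn(4, 10).astype("float32")

    def softmax(x, axis=-1):
        e = np.exp(x - x.max(axis=axis, keepdims=True))
        return e / e.sum(axis=axis, keepdims=True)

    probs = softmax(logits, axis=1)
    assert (np.argmax(logits, axis=1) == np.argmax(probs, axis=1)).all()
    assert (np.argmax(logits, axis=1) == np.argmax(np.log(probs), axis=1)).all()
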
......@@ -54,6 +54,18 @@ def test_softmax():
    assert ftype.ret_type == relay.ty.TensorType((n, d), "float32")


def test_log_softmax():
    ib = relay.ir_builder.IRBuilder()
    n, d = tvm.var("n"), tvm.var("d")
    x = ib.param("x", relay.ty.TensorType((n, d), "float32"))
    with ib.function(x) as func:
        ib.ret(relay.nn.log_softmax(x, axis=1))
    ib.ret(func)

    func = relay.ir_pass.infer_type(ib.env, func.to_func())
    ftype = func.checked_type
    assert ftype.ret_type == relay.ty.TensorType((n, d), "float32")


def test_unary_op():
    for op in [relay.exp,
               relay.log,
......@@ -162,5 +174,6 @@ if __name__ == "__main__":
    test_expand_dims_infer_type()
    test_concatenate_infer_type()
    test_softmax()
    test_log_softmax()
    test_binary_op()
    test_binary_broadcast_op()