Commit 262345fa by Tianqi Chen Committed by GitHub

[TOPI] dense API to remove redundant use_bias (#476)

parent 833855e7
......@@ -4,7 +4,7 @@ import tvm
from .. import tag
def dense(data, weight, bias=None):
    """Applies a linear transformation: :math:`Y = XW^T + b`.

    Parameters
    ----------
    data : tvm.Tensor
        2-D with shape [batch, in_dim]

    weight : tvm.Tensor
        2-D with shape [out_dim, in_dim]

    bias : tvm.Tensor, optional
        1-D with shape [out_dim]

    Returns
    -------
    output : tvm.Tensor
        2-D with shape [batch, out_dim]
    """
    assert len(data.shape) == 2 and len(weight.shape) == 2, \
        "only support 2-dim dense"
    # Explicit None check: a tvm.Tensor must not be evaluated for truthiness
    # (PEP 8 -- "Comparisons to singletons like None should always be done
    # with is or is not"), and tensor __bool__ semantics are not defined here.
    if bias is not None:
        assert len(bias.shape) == 1
    batch, in_dim = data.shape
    out_dim, _ = weight.shape
    k = tvm.reduce_axis((0, in_dim), name='k')
    # Y[i, j] = sum_k data[i, k] * weight[j, k]; weight is stored
    # as [out_dim, in_dim], so no transpose is materialized.
    matmul = tvm.compute((batch, out_dim), \
                         lambda i, j: tvm.sum(data[i, k] * weight[j, k], axis=k), \
                         tag='dense')
    if bias is not None:
        # Broadcast-add the bias vector over the batch dimension.
        matmul = tvm.compute((batch, out_dim), \
                             lambda i, j: matmul[i, j] + bias[j], \
                             tag=tag.BROADCAST)
    return matmul
......@@ -10,7 +10,7 @@ def verify_dense(batch, in_dim, out_dim, use_bias=True):
A = tvm.placeholder((batch, in_dim), name='A')
B = tvm.placeholder((out_dim, in_dim), name='B')
C = tvm.placeholder((out_dim,), name='C')
D = topi.nn.dense(A, B, C, use_bias=use_bias)
D = topi.nn.dense(A, B, C if use_bias else None)
D = topi.nn.relu(D)
s = topi.cuda.schedule_dense(D)
dtype = A.dtype
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment