wenyuanbo / tic · Commit b71edd76

Relay Op sprint (part 2) - Level 1 - log_softmax (#2128)

Authored Nov 19, 2018 by Animesh Jain; committed by Tianqi Chen on Nov 19, 2018.
Parent: 81da33f8

Showing 3 changed files with 32 additions and 6 deletions:

- python/tvm/relay/op/nn/_nn.py: +9 / -1
- src/relay/op/nn/nn.cc: +12 / -1
- tests/python/relay/test_op_level1.py: +11 / -4
python/tvm/relay/op/nn/_nn.py (view file @ b71edd76)

```diff
@@ -9,7 +9,6 @@ from ..op import OpPattern, schedule_injective
 reg.register_schedule("nn.relu", schedule_injective)
 reg.register_pattern("nn.relu", OpPattern.ELEMWISE)
 
-
 @reg.register_schedule("nn.softmax")
 def schedule_softmax(_, outputs, target):
     """Schedule definition of softmax"""
@@ -19,6 +18,15 @@ def schedule_softmax(_, outputs, target):
 reg.register_pattern("nn.softmax", OpPattern.OPAQUE)
 
 
+@reg.register_schedule("nn.log_softmax")
+def schedule_log_softmax(_, outputs, target):
+    """Schedule definition of log_softmax"""
+    with target:
+        return topi.generic.schedule_softmax(outputs)
+
+reg.register_pattern("nn.log_softmax", OpPattern.OPAQUE)
+
+
 # dense
 @reg.register_compute("nn.dense")
 def compute_dense(attrs, inputs, out_type, target):
```
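The new schedule simply delegates to the generic softmax schedule, since log_softmax reduces over the same last axis and can reuse the identical loop structure; registering the op as OpPattern.OPAQUE keeps it out of operator fusion. As a minimal sketch of what that delegation amounts to when driven through topi directly (the placeholder shape and the llvm target here are illustrative assumptions, not part of the commit):

```python
# Illustrative sketch only: lower topi.nn.log_softmax with the same
# generic schedule this commit registers for the Relay op.
import tvm
import topi

x = tvm.placeholder((10, 4), name="x")      # assumed 2-D input
y = topi.nn.log_softmax(x)                  # compute the Relay op dispatches to
with tvm.target.create("llvm"):             # assumed target
    s = topi.generic.schedule_softmax([y])  # schedule nn.log_softmax reuses
f = tvm.build(s, [x, y], "llvm")
```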
src/relay/op/nn/nn.cc (view file @ b71edd76)

```diff
@@ -291,7 +291,18 @@ RELAY_REGISTER_OP("nn.log_softmax")
 .set_num_inputs(1)
 .add_argument("data", "Tensor", "The input tensor.")
 .set_support_level(1)
-.add_type_rel("Identity", IdentityRel);
+.add_type_rel("Identity", IdentityRel)
+.set_attr<FTVMCompute>("FTVMCompute", [](const Attrs& attrs,
+                                         const Array<Tensor>& inputs,
+                                         const Type& out_type,
+                                         const Target& target) {
+  const auto* param = attrs.as<SoftmaxAttrs>();
+  CHECK(param != nullptr);
+  CHECK(param->axis == -1 ||
+        param->axis == static_cast<int32_t>(inputs[0].ndim()) - 1)
+    << "log_softmax currently only works on last dimension";
+  return Array<Tensor>{ topi::nn::log_softmax(inputs[0]) };
+});
 
 
 // BatchFlatten
```
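The registered compute checks that the requested axis is the last dimension before lowering to topi::nn::log_softmax. For reference, the quantity being computed is log(softmax(x)), usually evaluated with the max-subtraction trick for numerical stability; here is a hedged NumPy sketch of those semantics (my own reference implementation, not code from the commit or from topi):

```python
import numpy as np

def log_softmax_ref(x):
    """Reference log_softmax over the last axis:
    (x - max(x)) - log(sum(exp(x - max(x)))), which equals x - log(sum(exp(x)))."""
    shifted = x - np.max(x, axis=-1, keepdims=True)
    return shifted - np.log(np.sum(np.exp(shifted), axis=-1, keepdims=True))
```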
tests/python/relay/test_op_level1.py (view file @ b71edd76)

```diff
@@ -137,12 +137,19 @@ def test_softmax():
 
 
 def test_log_softmax():
-    n, d = tvm.var("n"), tvm.var("d")
-    x = relay.var("x", shape=(n, d))
-    y = relay.nn.log_softmax(x, axis=0)
+    shape = (10, 4)
+    x = relay.var("x", shape=shape)
+    y = relay.nn.log_softmax(x, axis=1)
     assert "nn.log_softmax" in y.astext()
     yy = relay.ir_pass.infer_type(y)
-    assert yy.checked_type == relay.TensorType((n, d))
+    assert yy.checked_type == relay.TensorType(shape)
+    func = relay.Function([x], y)
+    x_data = np.random.uniform(size=shape).astype("float32")
+    ref_res = topi.testing.log_softmax_python(x_data)
+    for target, ctx in ctx_list():
+        intrp = relay.create_executor("graph", ctx=ctx, target=target)
+        op_res = intrp.evaluate(func)(x_data)
+        np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)
 
 
 def test_concatenate():
```
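The reworked test drops the symbolic (n, d) shape in favor of a concrete (10, 4) one so the expression can actually be executed and compared against topi.testing.log_softmax_python. For orientation, the helpers the test body leans on would come from imports along these lines (the exact import paths in this era of TVM are an assumption on my part):

```python
# Assumed imports for the test body above; ctx_list is the TVM test helper
# that yields (target, context) pairs for every enabled backend.
import numpy as np
import tvm
import topi
import topi.testing
from tvm import relay
from tvm.relay.testing.config import ctx_list  # import path assumed
```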