Commit 8fe38eef
Authored Nov 29, 2018 by Siju; committed by Tianqi Chen on Nov 29, 2018
[Relay]collapse_sum and broadcast_to compute & schedule (#2180)
parent f522b0f8
Showing 3 changed files with 62 additions and 11 deletions:

  python/tvm/relay/op/_transform.py        +4   -2
  src/relay/op/tensor/transform.cc         +26  -2
  tests/python/relay/test_op_level10.py    +32  -7
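For orientation before the per-file diffs: the commit adds compute (and schedule) registrations for the existing Relay ops collapse_sum_like and broadcast_to_like. Below is a minimal NumPy sketch of the reference behavior, mirroring the reference results the new tests further down compute with np.sum and np.broadcast_to; the variable names here are illustrative only, not part of the change.

import numpy as np

# collapse_sum_like(x, y): sum x down to y's shape. With y of shape (4, 5, 6),
# the leading axis of x is the broadcast axis that gets summed away.
x = np.random.uniform(size=(3, 4, 5, 6)).astype("float32")
collapse_ref = np.sum(x, 0)                       # shape (4, 5, 6)

# broadcast_to_like(a, b): replicate a until it has b's shape. With a of shape
# (4, 1, 6) and b of shape (3, 4, 5, 6), this is plain NumPy broadcasting.
a = np.random.uniform(size=(4, 1, 6)).astype("float32")
broadcast_ref = np.broadcast_to(a, (3, 4, 5, 6))  # shape (3, 4, 5, 6)

print(collapse_ref.shape, broadcast_ref.shape)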
python/tvm/relay/op/_transform.py
@@ -2,13 +2,15 @@
 # pylint: disable=invalid-name
 from __future__ import absolute_import
 from . import op as _reg
+from ._reduce import _schedule_reduce

 schedule_injective = _reg.schedule_injective
 schedule_broadcast = _reg.schedule_injective

-_reg.register_schedule("squeeze", schedule_injective)
+_reg.register_schedule("collapse_sum_like", _schedule_reduce)
+_reg.register_schedule("broadcast_to_like", schedule_broadcast)
 _reg.register_schedule("expand_dims", schedule_broadcast)
+_reg.register_schedule("squeeze", schedule_injective)
 _reg.register_schedule("reshape", schedule_injective)
 _reg.register_schedule("reshape_like", schedule_injective)
 _reg.register_schedule("full", schedule_injective)
src/relay/op/tensor/transform.cc
@@ -9,6 +9,8 @@
 #include <tvm/ir.h>
 #include <topi/transform.h>
 #include <topi/elemwise.h>
+#include <topi/broadcast.h>
+#include <topi/reduction.h>
 #include <vector>
 #include "../op_common.h"
 #include "../../../arithmetic/compute_expr.h"
@@ -1017,6 +1019,15 @@ Expr MakeCollapseSumLike(Expr data,
   return CallNode::make(op, {data, collapse_type}, Attrs(), {});
 }

+Array<Tensor> CollapseSumLikeCompute(const Attrs& attrs,
+                                     const Array<Tensor>& inputs,
+                                     const Type& out_type,
+                                     const Target& target) {
+  const auto* out_ttype = out_type.as<TensorTypeNode>();
+  CHECK(out_ttype != nullptr);
+  return { topi::collapse_sum(inputs[0], out_ttype->shape) };
+}
+
 TVM_REGISTER_API("relay.op._make.collapse_sum_like")
 .set_body([](const TVMArgs& args, TVMRetValue* rv) {
     runtime::detail::unpack_call<Expr, 2>(MakeCollapseSumLike, args, rv);
@@ -1029,7 +1040,9 @@ RELAY_REGISTER_OP("collapse_sum_like")
.
add_argument
(
"data"
,
"Tensor"
,
"The input tensor."
)
.
add_argument
(
"collapse_type"
,
"Tensor"
,
"Provide the type to collapse to."
)
.
set_support_level
(
10
)
.
add_type_rel
(
"CollapseSumLike"
,
CollapseSumLikeRel
);
.
add_type_rel
(
"CollapseSumLike"
,
CollapseSumLikeRel
)
.
set_attr
<
FTVMCompute
>
(
"FTVMCompute"
,
CollapseSumLikeCompute
)
.
set_attr
<
TOpPattern
>
(
"TOpPattern"
,
kCommReduce
);
// BroadCastToLike: <A, B> -> B where BroadCast(A, B) = B
bool
BroadCastToLikeRel
(
const
Array
<
Type
>&
types
,
@@ -1047,6 +1060,15 @@ Expr MakeBroadCastToLike(Expr data,
   return CallNode::make(op, {data, broadcast_type}, Attrs(), {});
 }

+Array<Tensor> BroadCastToLikeCompute(const Attrs& attrs,
+                                     const Array<Tensor>& inputs,
+                                     const Type& out_type,
+                                     const Target& target) {
+  const auto* out_ttype = out_type.as<TensorTypeNode>();
+  CHECK(out_ttype != nullptr);
+  return { topi::broadcast_to(inputs[0], out_ttype->shape) };
+}
+
 TVM_REGISTER_API("relay.op._make.broadcast_to_like")
 .set_body([](const TVMArgs& args, TVMRetValue* rv) {
     runtime::detail::unpack_call<Expr, 2>(MakeBroadCastToLike, args, rv);
@@ -1059,7 +1081,9 @@ RELAY_REGISTER_OP("broadcast_to_like")
.
add_argument
(
"data"
,
"Tensor"
,
"The input tensor."
)
.
add_argument
(
"broadcast_type"
,
"Tensor"
,
"Provide the type to broadcast to."
)
.
set_support_level
(
10
)
.
add_type_rel
(
"BroadCastToLike"
,
BroadCastToLikeRel
);
.
add_type_rel
(
"BroadCastToLike"
,
BroadCastToLikeRel
)
.
set_attr
<
FTVMCompute
>
(
"FTVMCompute"
,
BroadCastToLikeCompute
)
.
set_attr
<
TOpPattern
>
(
"TOpPattern"
,
kBroadcast
);
// strided_slice
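The two compute functions above just hand the output type's shape to topi::broadcast_to and topi::collapse_sum, and the TOpPattern tags (kBroadcast, kCommReduce) classify the ops for Relay's fusion machinery as a broadcast and a reduction respectively. The broadcast direction is ordinary NumPy-style broadcasting; for the collapse direction, a reasonable mental model (an assumption about collapse_sum's semantics, not something this diff spells out) is "sum over every axis that broadcasting would have added or stretched", which also covers targets with size-1 axes such as the (4, 1, 6) type the original test asserted. A hypothetical NumPy reference, with an illustrative helper name:

import numpy as np

def collapse_sum_ref(data, target_shape):
    """Hypothetical NumPy reference for collapse_sum: sum `data` down to
    `target_shape`, undoing the axes that broadcasting would have added
    or stretched. The name and helper are for illustration only."""
    # Sum away leading axes that target_shape does not have.
    while data.ndim > len(target_shape):
        data = data.sum(axis=0)
    # Sum (with keepdims) over axes where the target size is 1.
    for axis, size in enumerate(target_shape):
        if size == 1 and data.shape[axis] != 1:
            data = data.sum(axis=axis, keepdims=True)
    return data

x = np.random.uniform(size=(3, 4, 5, 6)).astype("float32")
out = collapse_sum_ref(x, (4, 1, 6))
print(out.shape)  # (4, 1, 6), the shape the original test's type assertion used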
tests/python/relay/test_op_level10.py
@@ -6,19 +6,44 @@ from tvm import relay
 from tvm.relay.testing import ctx_list

 def test_collapse_sum_like():
-    x = relay.Var("x", relay.ty.TensorType((3, 4, 5, 6), "int8"))
-    y = relay.Var("y", relay.ty.TensorType((4, 1, 6), "int8"))
+    shape = (3, 4, 5, 6)
+    shape_like = (4, 5, 6)
+    dtype = "float32"
+    x = relay.Var("x", relay.ty.TensorType(shape, dtype))
+    y = relay.Var("y", relay.ty.TensorType(shape_like, dtype))
     z = relay.collapse_sum_like(x, y)
     zz = relay.ir_pass.infer_type(z)
-    assert zz.checked_type == relay.ty.TensorType((4, 1, 6), "int8")
+    assert zz.checked_type == relay.ty.TensorType(shape_like, dtype)
+
+    func = relay.Function([x, y], z)
+    x = np.random.uniform(size=shape).astype(dtype)
+    y = np.random.uniform(size=shape_like).astype(dtype)
+    ref_res = np.sum(x, 0)
+
+    for target, ctx in ctx_list():
+        for kind in ["graph", "debug"]:
+            intrp = relay.create_executor(kind, ctx=ctx, target=target)
+            op_res = intrp.evaluate(func)(x, y)
+            tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)

 def test_broadcast_to_like():
-    x = relay.Var("x", relay.ty.TensorType((3, 4, 5, 6), "int8"))
-    y = relay.Var("y", relay.ty.TensorType((4, 1, 6), "int8"))
-    z = relay.broadcast_to_like(y, x)
+    shape = (4, 1, 6)
+    shape_like = (3, 4, 5, 6)
+    dtype = "float32"
+    x = relay.Var("x", relay.ty.TensorType(shape, dtype))
+    y = relay.Var("y", relay.ty.TensorType(shape_like, dtype))
+    z = relay.broadcast_to_like(x, y)
     zz = relay.ir_pass.infer_type(z)
-    assert zz.checked_type == relay.ty.TensorType((3, 4, 5, 6), "int8")
+    assert zz.checked_type == relay.ty.TensorType(shape_like, dtype)
+
+    func = relay.Function([x, y], z)
+    x = np.random.uniform(size=shape).astype(dtype)
+    y = np.random.uniform(size=shape_like).astype(dtype)
+    ref_res = np.broadcast_to(x, shape_like)
+
+    for target, ctx in ctx_list():
+        for kind in ["graph", "debug"]:
+            intrp = relay.create_executor(kind, ctx=ctx, target=target)
+            op_res = intrp.evaluate(func)(x, y)
+            tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)

 def np_slice_like(np_data, np_shape_like, axis=None):