Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
T
tic
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
wenyuanbo
tic
Commits
1ad6a2af
Commit
1ad6a2af
authored
Oct 25, 2019
by
雾雨魔理沙
Committed by
Thierry Moreau
Oct 25, 2019
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
[Relay] crossentropy_with_logits and its gradient (#4075)
* save * lint
parent
493c98d3
Hide whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
73 additions
and
4 deletions
+73
-4
python/tvm/relay/op/_reduce.py
+1
-0
python/tvm/relay/op/_tensor_grad.py
+9
-0
python/tvm/relay/op/nn/_nn.py
+9
-0
python/tvm/relay/op/nn/nn.py
+19
-0
src/relay/op/nn/nn.cc
+24
-1
tests/python/relay/test_op_grad_level10.py
+11
-3
No files found.
python/tvm/relay/op/_reduce.py
View file @
1ad6a2af
...
...
@@ -37,3 +37,4 @@ _reg.register_schedule("prod", _schedule_reduce)
# All of these ops reduce over their input, so they share the generic
# reduce schedule; register them in one pass instead of repeating the call.
for _reduce_op in ("mean",
                   "variance",
                   "nn.cross_entropy",
                   "nn.cross_entropy_with_logits"):
    _reg.register_schedule(_reduce_op, _schedule_reduce)
python/tvm/relay/op/_tensor_grad.py
View file @
1ad6a2af
...
...
@@ -449,3 +449,12 @@ def cross_entropy_grad(orig, grad):
batch_size
=
take
(
shape
,
const
(
0
,
dtype
=
'int32'
),
axis
=
0
)
grad
=
grad
/
batch_size
.
astype
(
'float32'
)
return
[
-
grad
*
y
/
x
,
-
grad
*
log
(
x
)]
@register_gradient("nn.cross_entropy_with_logits")
def cross_entropy_with_logits_grad(orig, grad):
    """Gradient of nn.cross_entropy_with_logits.

    The forward op computes -sum(x * y) / batch_size, so the partials are
    d/dx = -grad * y / batch_size and d/dy = -grad * x / batch_size.
    """
    predictions, targets = orig.args
    pred_shape = shape_of(predictions)
    # Dynamic batch size: first element of the (symbolic) shape of x.
    batch_size = take(pred_shape, const(0, dtype='int32'), axis=0)
    # NOTE(review): scaling assumes float32 outputs — TODO confirm for other dtypes.
    scaled = grad / batch_size.astype('float32')
    return [-(scaled * targets), -(scaled * predictions)]
python/tvm/relay/op/nn/_nn.py
View file @
1ad6a2af
...
...
@@ -770,3 +770,12 @@ reg.register_pattern("nn.cross_entropy", OpPattern.OPAQUE)
def compute_cross_entropy(attrs, inputs, out_dtype, target):
    """TOPI compute for nn.cross_entropy.

    Applies log to the predictions itself, then averages
    -log(pred) * target over the batch (first axis of the predictions).
    """
    predictions, targets = inputs
    log_pred = topi.log(predictions)
    total = -topi.sum(log_pred * targets)
    return [total / predictions.shape[0]]
# OPAQUE — same pattern as nn.cross_entropy above; presumably the batch-wide
# reduction makes the op unsuitable for fusion (verify against the fusion pass).
reg.register_pattern("nn.cross_entropy_with_logits", OpPattern.OPAQUE)
@reg.register_compute("nn.cross_entropy_with_logits")
def compute_cross_entropy_with_logits(attrs, inputs, out_dtype, target):
    """TOPI compute for nn.cross_entropy_with_logits.

    Unlike compute_cross_entropy, no log is applied here: the predictions
    are consumed as-is and -x * y is averaged over the batch.
    """
    logits, labels = inputs
    summed = -topi.sum(logits * labels)
    return [summed / logits.shape[0]]
python/tvm/relay/op/nn/nn.py
View file @
1ad6a2af
...
...
@@ -1807,3 +1807,22 @@ def cross_entropy(predictions, targets):
The computed result.
"""
return
_make
.
cross_entropy
(
predictions
,
targets
)
def cross_entropy_with_logits(predictions, targets):
    """CrossEntropy with logits.

    Unlike :py:func:`cross_entropy`, the op does not apply log to the
    predictions itself; they are expected to already be logits.

    Parameters
    ----------
    predictions : tvm.relay.Expr
      The predictions.

    targets : tvm.relay.Expr
      The targets.

    Returns
    -------
    result : tvm.relay.Expr
      The computed result.
    """
    return _make.cross_entropy_with_logits(predictions, targets)
src/relay/op/nn/nn.cc
View file @
1ad6a2af
...
...
@@ -910,7 +910,7 @@ bool CrossEntropyRel(const Array<Type>& types,
return
true
;
}
// Positional relay function to create cross_entropy operator used by frontend FFI.
Expr MakeCrossEntropy(Expr predictions, Expr targets) {
  // Look up the registered op once and build a call with empty attributes.
  static const Op& cross_entropy_op = Op::Get("nn.cross_entropy");
  Array<Expr> call_args = {predictions, targets};
  return CallNode::make(cross_entropy_op, call_args, Attrs(), {});
}
...
...
@@ -933,5 +933,28 @@ Do log on the data - do not accept logits.
.
add_type_rel
(
"CrossEntropy"
,
CrossEntropyRel
);
// Positional relay function to create cross_entropy_with_logits operator used by frontend FFI.
Expr MakeCrossEntropyWithLogits(Expr predictions, Expr targets) {
  // Look up the registered op once and build a call with empty attributes.
  static const Op& logits_op = Op::Get("nn.cross_entropy_with_logits");
  Array<Expr> call_args = {predictions, targets};
  return CallNode::make(logits_op, call_args, Attrs(), {});
}
// Expose the constructor to the Python frontend as
// relay.op.nn._make.cross_entropy_with_logits (called by nn.py's wrapper).
TVM_REGISTER_API("relay.op.nn._make.cross_entropy_with_logits")
.set_body_typed(MakeCrossEntropyWithLogits);
RELAY_REGISTER_OP("nn.cross_entropy_with_logits")
.describe(R"code(
Computes cross entropy given predictions and targets.
Accept logits.
)code" TVM_ADD_FILELINE)
.set_num_inputs(2)
// NOTE(review): the argument descriptions say "1D Tensor", but the Python
// tests feed (2, 5) inputs — confirm against CrossEntropyRel's shape checks.
.add_argument("x", "1D Tensor", "Predictions.")
.add_argument("y", "1D Tensor", "Targets.")
.set_support_level(10)
// Reuses the same type relation as nn.cross_entropy: both ops accept the
// same input shapes and produce the same output type.
.add_type_rel("CrossEntropy", CrossEntropyRel);
}
// namespace relay
}
// namespace tvm
tests/python/relay/test_op_grad_level10.py
View file @
1ad6a2af
...
...
@@ -14,15 +14,23 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import
pytest
from
tvm
import
relay
from
tvm.relay.testing
import
check_grad
def test_cross_entropy_grad():
    """Numerically check the gradient of nn.cross_entropy via check_grad.

    Fix: the original body bound ``x``/``y`` with shape (1, 5) and then
    immediately rebound them with shape (2, 5); the first pair was dead
    code and is removed here.
    """
    x = relay.var("x", shape=(2, 5))
    y = relay.var("y", shape=(2, 5))
    # mean=1 keeps the random predictions away from 0, where log(x) blows up.
    check_grad(relay.Function([x, y], relay.op.nn.cross_entropy(x, y)),
               eps=0.01, scale=0.1, mean=1)
def test_cross_entropy_with_logits_grad():
    """Numerically check the gradient of nn.cross_entropy_with_logits."""
    logits = relay.var("x", shape=(2, 5))
    labels = relay.var("y", shape=(2, 5))
    func = relay.Function(
        [logits, labels],
        relay.op.nn.cross_entropy_with_logits(logits, labels))
    check_grad(func, eps=0.01, scale=0.1, mean=1)
if __name__ == "__main__":
    # Fix: the original called test_cross_entropy_grad() directly and then
    # ran pytest.main([__file__]), which executed that test a second time.
    # pytest discovers and runs every test in this file on its own.
    pytest.main([__file__])
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment