Commit b5ff88b0 by Siju Committed by Tianqi Chen

[Darknet] softmax temperature in frontend (#1429)

parent 3b476ffe
......@@ -260,6 +260,9 @@ def _darknet_reshape(inputs, attrs):
def _darknet_softmax_output(inputs, attrs):
    """Process the softmax operation.

    Parameters
    ----------
    inputs : list
        Symbol inputs; inputs[0] is the tensor the softmax is applied to.
    attrs : dict
        Darknet layer attributes; may contain 'temperature' (stored as a
        string by the attribute extractor) and 'multi_output'.
    """
    # Temperature defaults to 1, i.e. a plain softmax with no scaling.
    temperature = attrs.get('temperature', 1)
    if temperature != 1:
        # softmax with temperature T is softmax(x / T); apply the division
        # up front so the plain softmax op can be used below.
        inputs[0] = inputs[0] / float(temperature)
    op_name, new_attrs = 'softmax', {}
    if _darknet_parse_bool_str(attrs, 'multi_output'):
        new_attrs['axis'] = 1
    # NOTE(review): diff fragment — the op construction/return that consumes
    # op_name/new_attrs is outside this hunk.
......@@ -529,6 +532,8 @@ def _get_darknet_attrs(net, layer_num):
elif LAYERTYPE.SOFTMAX == layer.type:
attr.update({'axis' : 1})
attr.update({'use_flatten' : True})
if layer.temperature:
attr.update({'temperature' : str(layer.temperature)})
elif LAYERTYPE.SHORTCUT == layer.type:
add_layer = net.layers[layer.index]
......
......@@ -267,6 +267,28 @@ def test_forward_elu():
test_forward(net)
LIB.free_network(net)
def test_forward_softmax():
    """Exercise a single-layer softmax network through the darknet frontend."""
    net = LIB.make_network(1)
    softmax_layer = LIB.make_softmax_layer(1, 75, 1)
    # Default temperature of 1 => ordinary softmax (no input scaling).
    softmax_layer.temperature = 1
    net.layers[0] = softmax_layer
    net.w = 5
    net.h = 5
    LIB.resize_network(net, net.w, net.h)
    test_forward(net)
    LIB.free_network(net)
def test_forward_softmax_temperature():
    """Exercise a softmax layer with a non-default temperature (0.8)."""
    net = LIB.make_network(1)
    softmax_layer = LIB.make_softmax_layer(1, 75, 1)
    # Non-unit temperature: frontend must divide the input by 0.8 first.
    softmax_layer.temperature = 0.8
    net.layers[0] = softmax_layer
    net.w = 5
    net.h = 5
    LIB.resize_network(net, net.w, net.h)
    test_forward(net)
    LIB.free_network(net)
if __name__ == '__main__':
test_forward_resnet50()
test_forward_alexnet()
......@@ -279,6 +301,8 @@ if __name__ == '__main__':
test_forward_shortcut()
test_forward_dense()
test_forward_dense_batchnorm()
test_forward_softmax()
test_forward_softmax_temperature()
test_forward_reorg()
test_forward_region()
test_forward_elu()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment