Commit 9f6e0c59 authored by Pariksheet Pinjari, committed by Tianqi Chen

Fixed issue #483, removing enum dependency (#485)

parent a349f5ec
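
In short, the commit swaps enum.IntEnum for plain classes that act as namespaces of integer constants, dropping the import of the enum module (which is not part of the Python 2 standard library). A minimal sketch of the before/after pattern, for illustration only and using just the two constants visible in the diff below:

# Before the change: LAYERTYPE subclassed enum.IntEnum, which pulls in the
# 'enum' module (a separate backport package on Python 2).
#
#     from enum import IntEnum
#     class LAYERTYPE(IntEnum):
#         CONVOLUTIONAL = 0
#         DECONVOLUTIONAL = 1

# After the change: a plain class holds the same integer constants, so no
# extra import is needed and the values behave as ordinary ints.
class LAYERTYPE(object):
    """Darknet LAYERTYPE Class constant."""
    CONVOLUTIONAL = 0
    DECONVOLUTIONAL = 1

assert LAYERTYPE.CONVOLUTIONAL == 0                 # plain int comparison
assert isinstance(LAYERTYPE.DECONVOLUTIONAL, int)   # no enum member wrapper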
@@ -3,12 +3,11 @@ DarkNet symbol frontend.
 """
 from __future__ import absolute_import as _abs
-from enum import IntEnum
 import numpy as np
 import tvm
 from .. import symbol as _sym

-class LAYERTYPE(IntEnum):
+class LAYERTYPE(object):
     """Darknet LAYERTYPE Class constant."""
     CONVOLUTIONAL = 0
     DECONVOLUTIONAL = 1
@@ -36,7 +35,7 @@ class LAYERTYPE(IntEnum):
     REORG = 23
     BLANK = 24

-class ACTIVATION(IntEnum):
+class ACTIVATION(object):
     """Darknet ACTIVATION Class constant."""
     LOGISTIC = 0
     RELU = 1
@@ -323,33 +322,31 @@ def _darknet_op_not_support(inputs, attrs):
     raise NotImplementedError(err)

 _DARKNET_CONVERT_MAP = {
-    'CONVOLUTIONAL'   : _darknet_conv2d,
-    'DECONVOLUTIONAL' : _darknet_conv2d_transpose,
-    'CONNECTED'       : _darknet_dense,
-    'MAXPOOL'         : _darknet_maxpooling,
-    'SOFTMAX'         : _darknet_softmax_output,
-    'DROPOUT'         : _darknet_dropout,
-    'AVGPOOL'         : _darknet_avgpooling,
-    'BATCHNORM'       : _darknet_batch_norm,
-    'RESHAPE'         : _darknet_reshape,
-    'ROUTE'           : _darknet_route,
-    'REORG'           : _darknet_reorg,
-    'REGION'          : _darknet_region,
-    'ACTIVATION'      : _darknet_activations,
-    'SHORTCUT'        : _darknet_shortcut,
-    'DETECTION'       : _darknet_op_not_support,
-    'CROP'            : _darknet_op_not_support,
-    'COST'            : _darknet_op_not_support,
-    'NORMALIZATION'   : _darknet_op_not_support,
-    'LOCAL'           : _darknet_op_not_support,
-    'ACTIVE'          : _darknet_op_not_support,
-    'RNN'             : _darknet_op_not_support,
-    'GRU'             : _darknet_op_not_support,
-    'LSTM'            : _darknet_op_not_support,
-    'CRNN'            : _darknet_op_not_support,
-    'NETWORK'         : _darknet_op_not_support,
-    'XNOR'            : _darknet_op_not_support,
-    'BLANK'           : _darknet_op_not_support,
+    LAYERTYPE.CONVOLUTIONAL   : _darknet_conv2d,
+    LAYERTYPE.DECONVOLUTIONAL : _darknet_conv2d_transpose,
+    LAYERTYPE.CONNECTED       : _darknet_dense,
+    LAYERTYPE.MAXPOOL         : _darknet_maxpooling,
+    LAYERTYPE.SOFTMAX         : _darknet_softmax_output,
+    LAYERTYPE.DROPOUT         : _darknet_dropout,
+    LAYERTYPE.AVGPOOL         : _darknet_avgpooling,
+    LAYERTYPE.BATCHNORM       : _darknet_batch_norm,
+    LAYERTYPE.ROUTE           : _darknet_route,
+    LAYERTYPE.REORG           : _darknet_reorg,
+    LAYERTYPE.REGION          : _darknet_region,
+    LAYERTYPE.SHORTCUT        : _darknet_shortcut,
+    LAYERTYPE.DETECTION       : _darknet_op_not_support,
+    LAYERTYPE.CROP            : _darknet_op_not_support,
+    LAYERTYPE.COST            : _darknet_op_not_support,
+    LAYERTYPE.NORMALIZATION   : _darknet_op_not_support,
+    LAYERTYPE.LOCAL           : _darknet_op_not_support,
+    LAYERTYPE.ACTIVE          : _darknet_op_not_support,
+    LAYERTYPE.RNN             : _darknet_op_not_support,
+    LAYERTYPE.GRU             : _darknet_op_not_support,
+    LAYERTYPE.LSTM            : _darknet_op_not_support,
+    LAYERTYPE.CRNN            : _darknet_op_not_support,
+    LAYERTYPE.NETWORK         : _darknet_op_not_support,
+    LAYERTYPE.XNOR            : _darknet_op_not_support,
+    LAYERTYPE.BLANK           : _darknet_op_not_support,
 }

 def _darknet_convert_symbol(op_name, inputs, attrs):
@@ -376,7 +373,7 @@ def _darknet_convert_symbol(op_name, inputs, attrs):
     if op_name in _DARKNET_CONVERT_MAP:
         sym, out_name = _DARKNET_CONVERT_MAP[op_name](inputs, attrs)
     else:
-        _darknet_raise_not_supported('Operator: ' + op_name)
+        _darknet_raise_not_supported('Operator type ' + str(op_name))
     if out_name is None:
         out_name = sym.list_output_names()[0].replace('_output', '')
     return out_name, sym
@@ -397,10 +394,6 @@ def _read_memory_buffer(shape, data, dtype):
         data_np[i] = data[i]
     return data_np.reshape(shape)

-def _get_darknet_layername(layer_type):
-    """Get the layer name from the darknet enums."""
-    return str((LAYERTYPE(layer_type))).replace('LAYERTYPE.', '')
-
 def _get_convolution_weights(layer, opname, params, dtype):
     """Get the convolution layer weights and biases."""
     if layer.nweights == 0:
@@ -460,8 +453,6 @@ def _get_darknet_attrs(net, layer_num):
     attr = {}
     use_flatten = True
     layer = net.layers[layer_num]
-    op_name = _get_darknet_layername(layer.type)
-
     if LAYERTYPE.CONVOLUTIONAL == layer.type:
         attr.update({'layout' : 'NCHW'})
         attr.update({'pad' : str(layer.pad)})
@@ -551,10 +542,10 @@ def _get_darknet_attrs(net, layer_num):
         attr.update({'background' : layer.background})
         attr.update({'softmax' : layer.softmax})
     else:
-        err = "Darknet layer {} is not supported in nnvm.".format(op_name)
+        err = "Darknet layer type {} is not supported in nnvm.".format(layer.type)
         raise NotImplementedError(err)

-    return op_name, attr
+    return layer.type, attr

 def _get_tvm_params_name(opname, arg_name):
     """Makes the params name for the k,v pair."""
@@ -8,7 +8,6 @@ These are utility functions used for testing and tutorial file.
 """
 from __future__ import division
 import math
-from enum import IntEnum
 import numpy as np
 import cv2
 from cffi import FFI
@@ -91,7+90,7 @@ def load_image(image, resize_width, resize_height):
     img = load_image_color(image)
     return _letterbox_image(img, resize_width, resize_height)

-class LAYERTYPE(IntEnum):
+class LAYERTYPE(object):
     """Darknet LAYERTYPE Class constant."""
     CONVOLUTIONAL = 0
     DECONVOLUTIONAL = 1
@@ -119,7 +118,7 @@ class LAYERTYPE(IntEnum):
     REORG = 23
     BLANK = 24

-class ACTIVATION(IntEnum):
+class ACTIVATION(object):
     """Darknet ACTIVATION Class constant."""
     LOGISTIC = 0
     RELU = 1
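
For reference, a self-contained sketch of how the converter dispatch works after this commit: the layer type read from the Darknet structs (exposed through the cffi bindings) is already a plain integer, so it can be compared against the LAYERTYPE constants and used to key _DARKNET_CONVERT_MAP directly, which is what lets _get_darknet_layername and the string keys go away. The handlers below are stubs for illustration, not the real converters, and layer_type stands in for layer.type.

class LAYERTYPE(object):
    """Darknet LAYERTYPE Class constant (subset for illustration)."""
    CONVOLUTIONAL = 0
    DECONVOLUTIONAL = 1

def _darknet_conv2d(inputs, attrs):
    # Stub standing in for the real conv2d converter.
    return 'conv2d', attrs

def _darknet_conv2d_transpose(inputs, attrs):
    # Stub standing in for the real transposed-convolution converter.
    return 'conv2d_transpose', attrs

_DARKNET_CONVERT_MAP = {
    LAYERTYPE.CONVOLUTIONAL   : _darknet_conv2d,
    LAYERTYPE.DECONVOLUTIONAL : _darknet_conv2d_transpose,
}

# layer.type arrives as a plain int, so it works both for equality checks
# (as in _get_darknet_attrs) and as a dictionary key (as in
# _darknet_convert_symbol), with no string or enum round-trip.
layer_type = 0                   # e.g. a convolutional layer
assert LAYERTYPE.CONVOLUTIONAL == layer_type
op, attrs = _DARKNET_CONVERT_MAP[layer_type](None, {'layout': 'NCHW'})
print(op, attrs)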