unified error handling in nnvm and relay
markrogersjr committed Mar 15, 2019
1 parent f8ac138 commit d6521af
Showing 18 changed files with 341 additions and 355 deletions.
7 changes: 7 additions & 0 deletions nnvm/python/nnvm/frontend/__init__.py
@@ -7,3 +7,10 @@
 from .darknet import from_darknet
 from .tensorflow import from_tensorflow
 from .caffe2 import from_caffe2
+from .common import raise_not_supported, get_nnvm_op, required_attr, \
+                    warn_not_used, parse_tshape, parse_bool_str
+from tvm.error_handling import raise_attribute_required, \
+                               raise_attribute_invalid, \
+                               raise_operator_unimplemented, \
+                               raise_attribute_unimplemented, \
+                               warn_not_used
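The tvm.error_handling module itself is outside this excerpt. A rough sketch of what the imported helpers plausibly look like, inferred only from the call sites in the hunks below (the bodies, exception types, and message wording are assumptions, not code from the commit):

# Hypothetical sketch of tvm/error_handling.py; signatures inferred from
# the call sites in this diff, bodies and wording are guesses.
import warnings

def raise_operator_unimplemented(op_name):
    # Replaces ad-hoc NotImplementedError('{} not implemented') raises.
    raise NotImplementedError('Operator {} is not implemented.'.format(op_name))

def raise_attribute_required(key, op_name):
    # Called by required_attr() in nnvm/frontend/common.py below.
    raise AttributeError('Required attribute {} not found in operator {}.'.format(key, op_name))

def raise_attribute_invalid(value, attr_name, op_name):
    # Replaces ad-hoc RuntimeError raises for unsupported attribute values.
    raise AttributeError('Invalid value {} for attribute {} of operator {}.'.format(value, attr_name, op_name))

def raise_attribute_unimplemented(key, op_name):
    raise NotImplementedError('Attribute {} of operator {} is not supported.'.format(key, op_name))

def warn_not_used(attr_name, op_name):
    # Non-fatal path for attributes a frontend silently ignores.
    warnings.warn('Attribute {} of operator {} is ignored.'.format(attr_name, op_name))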
9 changes: 3 additions & 6 deletions nnvm/python/nnvm/frontend/caffe2.py
@@ -73,8 +73,7 @@ def get_converter(cls):

         if hasattr(cls, '_impl'):
             return getattr(cls, '_impl')
-        raise NotImplementedError('{} not implemented'.format(
-            cls.__name__))
+        raise_operator_unimplemented(cls.__name__)


 _caffe2_internal_args = {
@@ -176,8 +175,7 @@ def _get_axis_from_order_str(order):
                 return 1
             if order == 'NHWC':
                 return 3
-            raise RuntimeError(
-                "Unsupported storage order: {} in caffe2".format(order))
+            raise_attribute_invalid(order, 'storage order', 'concat')

         return AttrCvt(
             op_name='concatenate',
@@ -427,8 +425,7 @@ def _convert_operator(self,
             # Add a sanitizing step to convert all byte strings in args to strings
             sym = convert_map[op_type](inputs, args, self._params)
         else:
-            raise NotImplementedError(
-                "Operator {} not implemented.".format(op_type))
+            raise_operator_unimplemented(op_type)
         return sym

16 changes: 15 additions & 1 deletion nnvm/python/nnvm/frontend/common.py
@@ -7,9 +7,23 @@
 def get_nnvm_op(op_name):
     op = getattr(_sym, op_name)
     if not op:
-        raise RuntimeError("Unable to map op_name {} to nnvm.sym".format(op_name))
+        raise_operator_unimplemented(op_name)
     return op

+def required_attr(attr, key, op_name):
+    assert isinstance(attr, dict)
+    if key not in attr:
+        raise_attribute_required(key, op_name)
+    return attr[key]
+
+def parse_tshape(tshape):
+    """Parse tshape in string."""
+    return [int(x.strip()) for x in tshape.strip('()').split(',')]
+
+def parse_bool_str(attr, key, default='False'):
+    """Parse bool string to boolean."""
+    return attr.get(key, default).strip().lower() in ['true', '1', 't', 'y', 'yes']
+
 class Renamer(object):
     """A simply renamer for operators.
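Taken together, the new common.py helpers give every NNVM frontend one vocabulary for attribute handling. A short usage sketch; the conv2d attribute dict here is invented for illustration:

# Illustration only: exercising the helpers added to nnvm/frontend/common.py.
from nnvm.frontend import required_attr, parse_tshape, parse_bool_str

attr = {'kernel': '(3, 3)', 'no_bias': 'True'}

kernel = parse_tshape(required_attr(attr, 'kernel', 'conv2d'))  # -> [3, 3]
no_bias = parse_bool_str(attr, 'no_bias')                       # -> True
strides = required_attr(attr, 'strides', 'conv2d')  # raises via raise_attribute_required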
23 changes: 12 additions & 11 deletions nnvm/python/nnvm/frontend/coreml.py
@@ -83,7 +83,7 @@ def BatchnormLayerParams(op, insym, symtab):
     """Get layer of batchnorm parameter"""
     # this changes the symbol
     if op.instanceNormalization:
-        raise NotImplementedError("instance normalization not implemented")
+        raise_operator_unimplemented('instance normalization')
     else:
         params = {'gamma':symtab.new_const(list(op.gamma.floatValue)),
                   'beta':symtab.new_const(list(op.beta.floatValue)),
@@ -136,7 +136,7 @@ def ActivationParams(op, insym, symtab):
         betasym = symtab.new_const(beta)
         return _sym.broadcast_mul(_sym.log(_sym.broadcast_add(
             _sym.exp(insym), betasym)), alphasym)
-    raise NotImplementedError('%s not implemented' % whichActivation)
+    raise_operator_unimplemented(whichActivation)

 def ScaleLayerParams(op, insym, symtab):
     """Scale layer params."""
@@ -158,7 +158,7 @@ def PoolingLayerParams(op, insym, symtab):
             return _sym.global_max_pool2d(insym)
         if op.type == 1:
             return _sym.global_avg_pool2d(insym)
-        raise NotImplementedError("Only max and average pooling implemented")
+        raise_operator_unimplemented('pooling (not max or average)')

     else:
         params = {'pool_size':list(op.kernelSize),
@@ -178,7 +178,8 @@ def PoolingLayerParams(op, insym, symtab):
             params['padding'] = padding
             params['ceil_mode'] = True
         else:
-            raise NotImplementedError("Other convolution padding not implemented")
+            raise_attribute_invalid(op.WhichOneof('PoolingPaddingType'),
+                                    'PoolingPaddingType', 'pooling')

         # consume padding layer
         if symtab.in_padding:
@@ -190,7 +191,7 @@ def PoolingLayerParams(op, insym, symtab):
             return _sym.max_pool2d(insym, **params)
         if op.type == 1:
             return _sym.avg_pool2d(insym, **params)
-        raise NotImplementedError("Only max and average pooling implemented")
+        raise_operator_unimplemented('pooling (not max or average)')

 def SoftmaxLayerParams(op, insym, symtab):
     return _sym.softmax(_sym.flatten(insym))
@@ -229,7 +230,7 @@ def ConcatLayerParams(op, insyms, symtab):
     if not isinstance(insyms, list):
         insyms = [insyms]
     if op.sequenceConcat:
-        raise NotImplementedError("Sequence Concat not supported")
+        raise_operator_unimplemented('sequence concat')
     ret = _sym.concatenate(*insyms, axis=1)
     return ret

@@ -243,14 +244,14 @@ def PaddingLayerParams(op, insym, symtab):
     if op.WhichOneof('PaddingType') == 'constant':
         constant = op.constant
         if constant.value != 0:
-            raise NotImplementedError("Padding value {} not supported.".format(constant.value))
+            raise_attribute_invalid(constant.value, 'padding value', 'padding')
         padding = [b.startEdgeSize for b in op.paddingAmounts.borderAmounts]
         padding2 = [b.endEdgeSize for b in op.paddingAmounts.borderAmounts]
         for i, j in zip(padding, padding2):
             assert i == j
         symtab.set_padding(padding)
     else:
-        raise NotImplementedError("Only constant padding is supported now.")
+        raise_operator_unimplemented('non-constant padding')
     return insym

 def PermuteLayerParams(op, insym, symtab):
@@ -259,8 +260,8 @@ def PermuteLayerParams(op, insym, symtab):

 def UpsampleLayerParams(op, insym, symtab):
     if op.scalingFactor[0] != op.scalingFactor[1]:
-        raise NotImplementedError("Upsampling only supported with same \
-            height and width scaling factor.")
+        raise_attribute_invalid(op.scalingFactor, 'scaling factors',
+                                'upsample')
     interpolationMode = 'NEAREST_NEIGHBOR' if op.mode == 0 else 'BILINEAR'
     return _sym.upsampling(insym, scale=op.scalingFactor[0], method=interpolationMode)

@@ -341,7 +342,7 @@ def coreml_op_to_nnvm(op, inname, outname, symtab):
     """
     classname = type(op).__name__
     if classname not in _convert_map:
-        raise NotImplementedError("%s is not supported" % (classname))
+        raise_operator_unimplemented(classname)
     if isinstance(inname, string_types):
         insym = symtab.get_var(inname)
     else:
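The caffe2.py and coreml.py hunks above converge on the same table-driven dispatch: look the operator up in a conversion map and fail through one shared helper. Schematically (a condensed paraphrase, not code from the diff):

# Condensed paraphrase of the dispatch pattern both frontends now share.
from tvm.error_handling import raise_operator_unimplemented

def convert_operator(op_type, inputs, attrs, convert_map):
    if op_type not in convert_map:
        # Single, uniform failure path instead of per-frontend messages.
        raise_operator_unimplemented(op_type)
    return convert_map[op_type](inputs, attrs)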