Unified error handling in NNVM and Relay frontends (apache#2828)
markrogersjr authored and MarisaKirisame committed Apr 9, 2019
1 parent e021565 commit 823c327
Showing 15 changed files with 471 additions and 371 deletions.
13 changes: 6 additions & 7 deletions nnvm/python/nnvm/frontend/caffe2.py
@@ -3,7 +3,7 @@
from __future__ import absolute_import as _abs
import tvm
from nnvm import symbol as _sym
from nnvm.frontend.common import get_nnvm_op, Renamer, AttrConverter as AttrCvt
from .common import get_nnvm_op
from .onnx_caffe2_utils import dimension_picker, dimension_constraint, infer_channels, revert_caffe2_pad
from . import onnx

@@ -73,8 +73,8 @@ def get_converter(cls):

if hasattr(cls, '_impl'):
return getattr(cls, '_impl')
raise NotImplementedError('{} not implemented'.format(
cls.__name__))
raise tvm.error.OpNotImplemented(
'Operator {} is not implemented in frontend Caffe2.'.format(cls.__name__))


_caffe2_internal_args = {
@@ -176,8 +176,7 @@ def _get_axis_from_order_str(order):
return 1
if order == 'NHWC':
return 3
raise RuntimeError(
"Unsupported storage order: {} in caffe2".format(order))
raise tvm.error.OpAttributeInvalid('Value {} in attribute {} of operator {} is not valid.'.format(order, 'order', 'Concat'))

return AttrCvt(
op_name='concatenate',
@@ -427,8 +426,8 @@ def _convert_operator(self,
# Add a sanitizing step to convert all byte strings in args to strings
sym = convert_map[op_type](inputs, args, self._params)
else:
raise NotImplementedError(
"Operator {} not implemented.".format(op_type))
raise tvm.error.OpNotImplemented(
'Operator {} is not supported in frontend Caffe2.'.format(op_type))
return sym


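The caffe2.py hunks above all follow the same two cases: an operator with no registered converter raises `tvm.error.OpNotImplemented`, and an attribute value outside the supported range raises `tvm.error.OpAttributeInvalid`. A condensed sketch of that pattern, with a hypothetical `convert_map` standing in for the frontend's converter table:

```python
# Condensed sketch of the pattern in the caffe2.py hunks above
# (convert_map is a stand-in for the frontend's converter table).
import tvm


def convert_operator(op_type, inputs, args, convert_map):
    """Dispatch a Caffe2 op to its converter or fail with a structured error."""
    if op_type not in convert_map:
        raise tvm.error.OpNotImplemented(
            'Operator {} is not supported in frontend Caffe2.'.format(op_type))
    return convert_map[op_type](inputs, args)


def axis_from_order(order):
    """Map a Caffe2 storage order to a concat axis, rejecting unknown values."""
    if order == 'NCHW':
        return 1
    if order == 'NHWC':
        return 3
    raise tvm.error.OpAttributeInvalid(
        'Value {} in attribute {} of operator {} is not valid.'.format(
            order, 'order', 'Concat'))
```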
18 changes: 17 additions & 1 deletion nnvm/python/nnvm/frontend/common.py
@@ -7,9 +7,25 @@
def get_nnvm_op(op_name):
op = getattr(_sym, op_name)
if not op:
raise RuntimeError("Unable to map op_name {} to nnvm.sym".format(op_name))
raise OpNotImplemented(
'Operator {} is not supported.'.format(op_name))
return op

def required_attr(attr, key, op_name):
assert isinstance(attr, dict)
if key not in attr:
raise OpAttributeRequired(
'Required attribute {} not found in operator {}'.format(key, op_name))
return attr[key]

def parse_tshape(tshape):
"""Parse tshape in string."""
return [int(x.strip()) for x in tshape.strip('()').split(',')]

def parse_bool_str(attr, key, default='False'):
"""Parse bool string to boolean."""
return attr.get(key, default).strip().lower() in ['true', '1', 't', 'y', 'yes']

class Renamer(object):
"""A simply renamer for operators.
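The helpers added to common.py are meant to be shared by the NNVM frontends: `required_attr` turns a missing attribute into an `OpAttributeRequired` error, while `parse_tshape` and `parse_bool_str` normalize stringly-typed attributes. A sketch of how a converter might call them; the `reshape` operator and its attribute names are made-up examples, and it assumes this NNVM tree is importable:

```python
# Illustrative use of the shared helpers from common.py above.
# The 'reshape' operator and its attribute names are made-up examples.
from nnvm.frontend.common import parse_bool_str, parse_tshape, required_attr


def convert_reshape(inputs, attrs):
    # required_attr raises OpAttributeRequired if 'shape' is absent from attrs.
    shape = parse_tshape(required_attr(attrs, 'shape', 'reshape'))
    # parse_bool_str treats 'true', '1', 't', 'y', 'yes' (any case) as True.
    reverse = parse_bool_str(attrs, 'reverse', default='False')
    return shape, reverse


# e.g. convert_reshape(None, {'shape': '(1, 224, 224, 3)'}) -> ([1, 224, 224, 3], False)
```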
34 changes: 21 additions & 13 deletions nnvm/python/nnvm/frontend/coreml.py
@@ -2,11 +2,10 @@
"""CoreML frontend."""
from __future__ import absolute_import as _abs
import numpy as np

import tvm
from .common import SymbolTable
from .. import symbol as _sym
from .._base import string_types
from .common import SymbolTable

__all__ = ['from_coreml']

@@ -83,7 +82,8 @@ def BatchnormLayerParams(op, insym, symtab):
"""Get layer of batchnorm parameter"""
# this changes the symbol
if op.instanceNormalization:
raise NotImplementedError("instance normalization not implemented")
msg = 'Operator "instance normalization" is not supported in frontend CoreML.'
raise tvm.error.OpNotImplemented(msg)
else:
params = {'gamma':symtab.new_const(list(op.gamma.floatValue)),
'beta':symtab.new_const(list(op.beta.floatValue)),
@@ -136,7 +136,8 @@ def ActivationParams(op, insym, symtab):
betasym = symtab.new_const(beta)
return _sym.broadcast_mul(_sym.log(_sym.broadcast_add(
_sym.exp(insym), betasym)), alphasym)
raise NotImplementedError('%s not implemented' % whichActivation)
raise tvm.error.OpNotImplemented(
'Operator {} is not supported in frontend CoreML.'.format(whichActivation))

def ScaleLayerParams(op, insym, symtab):
"""Scale layer params."""
@@ -158,7 +159,8 @@ def PoolingLayerParams(op, insym, symtab):
return _sym.global_max_pool2d(insym)
if op.type == 1:
return _sym.global_avg_pool2d(insym)
raise NotImplementedError("Only max and average pooling implemented")
raise tvm.error.OpNotImplemented(
'Operator pooling (not max or average) is not supported in frontend CoreML.')

else:
params = {'pool_size':list(op.kernelSize),
@@ -178,7 +180,8 @@ def PoolingLayerParams(op, insym, symtab):
params['padding'] = padding
params['ceil_mode'] = True
else:
raise NotImplementedError("Other convolution padding not implemented")
msg = 'Value {} in attribute PoolingPaddingType of operator Pooling is not valid.'
raise tvm.error.OpAttributeInvalid(msg.format(op.WhichOneof('PoolingPaddingType')))

# consume padding layer
if symtab.in_padding:
@@ -190,7 +193,8 @@
return _sym.max_pool2d(insym, **params)
if op.type == 1:
return _sym.avg_pool2d(insym, **params)
raise NotImplementedError("Only max and average pooling implemented")
msg = 'Operator pooling (not max or average) is not supported in frontend CoreML.'
raise tvm.error.OpNotImplemented(msg)

def SoftmaxLayerParams(op, insym, symtab):
return _sym.softmax(_sym.flatten(insym))
@@ -229,7 +233,8 @@ def ConcatLayerParams(op, insyms, symtab):
if not isinstance(insyms, list):
insyms = [insyms]
if op.sequenceConcat:
raise NotImplementedError("Sequence Concat not supported")
raise tvm.error.OpNotImplemented(
'Operator Sequence Concat is not supported in frontend CoreML.')
ret = _sym.concatenate(*insyms, axis=1)
return ret

@@ -243,14 +248,16 @@ def PaddingLayerParams(op, insym, symtab):
if op.WhichOneof('PaddingType') == 'constant':
constant = op.constant
if constant.value != 0:
raise NotImplementedError("Padding value {} not supported.".format(constant.value))
msg = 'Value {} in attribute "padding value" of operator Padding is not valid.'
raise tvm.error.OpAttributeInvalid(msg.format(constant.value))
padding = [b.startEdgeSize for b in op.paddingAmounts.borderAmounts]
padding2 = [b.endEdgeSize for b in op.paddingAmounts.borderAmounts]
for i, j in zip(padding, padding2):
assert i == j
symtab.set_padding(padding)
else:
raise NotImplementedError("Only constant padding is supported now.")
raise tvm.error.OpNotImplemented(
'Operator "non-constant padding" is not supported in frontend CoreML.')
return insym

def PermuteLayerParams(op, insym, symtab):
@@ -259,8 +266,8 @@

def UpsampleLayerParams(op, insym, symtab):
if op.scalingFactor[0] != op.scalingFactor[1]:
raise NotImplementedError("Upsampling only supported with same \
height and width scaling factor.")
raise tvm.error.OpAttributeInvalid(
'Height and width scaling factors of Upsample operator must be equal.')
interpolationMode = 'NEAREST_NEIGHBOR' if op.mode == 0 else 'BILINEAR'
return _sym.upsampling(insym, scale=op.scalingFactor[0], method=interpolationMode)

@@ -341,7 +348,8 @@ def coreml_op_to_nnvm(op, inname, outname, symtab):
"""
classname = type(op).__name__
if classname not in _convert_map:
raise NotImplementedError("%s is not supported" % (classname))
raise tvm.error.OpNotImplemented(
'Operator {} is not supported in frontend CoreML.'.format(classname))
if isinstance(inname, string_types):
insym = symtab.get_var(inname)
else:
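From the caller's side, the value of the unified errors is that frontend failures can be caught by type instead of by string matching. A hedged usage sketch around the CoreML importer; loading the model with coremltools and the 'model.mlmodel' path are assumptions, not part of this diff:

```python
# Catching the structured frontend errors at the call site.
# The coremltools load and the 'model.mlmodel' path are placeholders.
import coremltools
import tvm
import nnvm.frontend

mlmodel = coremltools.models.MLModel('model.mlmodel')
try:
    sym, params = nnvm.frontend.from_coreml(mlmodel)
except tvm.error.OpNotImplemented as err:
    # A layer in the model has no converter in the CoreML frontend.
    print('Unsupported operator:', err)
except tvm.error.OpAttributeInvalid as err:
    # A layer is supported, but one of its attribute values is not.
    print('Invalid attribute value:', err)
```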