Skip to content

Commit

Permalink
Better default serving output error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
Rocketknight1 committed May 17, 2023
1 parent bb3d9e5 commit 7fd9a88
Show file tree
Hide file tree
Showing 2 changed files with 63 additions and 2 deletions.
4 changes: 2 additions & 2 deletions src/transformers/modeling_tf_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1287,10 +1287,10 @@ def serving_output(self, output):
and getattr(self.config, "add_cross_attention", False)
):
output[key] = None
if output[key] is not None:
if isinstance(output[key], (tuple, list)):
try:
output[key] = tf.convert_to_tensor(output[key])
except ValueError:
except (ValueError, tf.errors.InvalidArgumentError):
pass # Layers may not have the same dimensions
return output

Expand Down
61 changes: 61 additions & 0 deletions src/transformers/models/funnel/modeling_tf_funnel.py
Original file line number Diff line number Diff line change
Expand Up @@ -1129,6 +1129,15 @@ def call(
training=training,
)

def serving_output(self, output):
    """Convert a model output for TF serving signatures.

    ``hidden_states`` and ``attentions`` are deliberately NOT run through
    ``tf.convert_to_tensor`` — their per-layer entries have different
    dimensions, so they cannot be stacked into a single tensor.
    """
    fields = {
        "last_hidden_state": output.last_hidden_state,
        "hidden_states": output.hidden_states,
        "attentions": output.attentions,
    }
    return TFBaseModelOutput(**fields)


@add_start_docstrings(
"The bare Funnel Transformer Model transformer outputting raw hidden-states without any specific head on top.",
Expand Down Expand Up @@ -1168,6 +1177,15 @@ def call(
training=training,
)

def serving_output(self, output):
    """Build the serving-time output object.

    The ``hidden_states`` and ``attentions`` tuples are passed through
    as-is rather than converted with ``tf.convert_to_tensor``, because
    their elements are all of different dimensions.
    """
    last_hidden_state = output.last_hidden_state
    hidden_states = output.hidden_states
    attentions = output.attentions
    return TFBaseModelOutput(
        last_hidden_state=last_hidden_state,
        hidden_states=hidden_states,
        attentions=attentions,
    )


@add_start_docstrings(
"""
Expand Down Expand Up @@ -1234,6 +1252,13 @@ def call(
attentions=discriminator_hidden_states.attentions,
)

def serving_output(self, output):
    """Repackage the pretraining output for serving.

    Skips ``tf.convert_to_tensor`` on ``hidden_states``/``attentions``:
    the tuple entries have mismatched dimensions and cannot be stacked.
    """
    return TFFunnelForPreTrainingOutput(
        logits=output.logits,
        hidden_states=output.hidden_states,
        attentions=output.attentions,
    )


@add_start_docstrings("""Funnel Model with a `language modeling` head on top.""", FUNNEL_START_DOCSTRING)
class TFFunnelForMaskedLM(TFFunnelPreTrainedModel, TFMaskedLanguageModelingLoss):
Expand Down Expand Up @@ -1301,6 +1326,11 @@ def call(
attentions=outputs.attentions,
)

def serving_output(self, output: TFMaskedLMOutput) -> TFMaskedLMOutput:
    """Return a serving-friendly masked-LM output.

    ``hidden_states`` and ``attentions`` are left unconverted (no
    ``tf.convert_to_tensor``) since their entries differ in dimensions.
    """
    return TFMaskedLMOutput(
        logits=output.logits,
        hidden_states=output.hidden_states,
        attentions=output.attentions,
    )


@add_start_docstrings(
"""
Expand Down Expand Up @@ -1369,6 +1399,13 @@ def call(
attentions=outputs.attentions,
)

def serving_output(self, output: TFSequenceClassifierOutput) -> TFSequenceClassifierOutput:
    """Repackage classifier output for serving.

    Note: ``hidden_states`` and ``attentions`` stay as tuples — their
    per-layer shapes differ, so ``tf.convert_to_tensor`` is not applied.
    """
    fields = {
        "logits": output.logits,
        "hidden_states": output.hidden_states,
        "attentions": output.attentions,
    }
    return TFSequenceClassifierOutput(**fields)


@add_start_docstrings(
"""
Expand Down Expand Up @@ -1453,6 +1490,13 @@ def call(
attentions=outputs.attentions,
)

def serving_output(self, output: TFMultipleChoiceModelOutput) -> TFMultipleChoiceModelOutput:
    """Produce the multiple-choice output used by serving signatures.

    ``hidden_states`` and ``attentions`` are forwarded unchanged rather
    than converted with ``tf.convert_to_tensor``, because their entries
    are all of different dimensions.
    """
    logits = output.logits
    return TFMultipleChoiceModelOutput(
        logits=logits,
        hidden_states=output.hidden_states,
        attentions=output.attentions,
    )


@add_start_docstrings(
"""
Expand Down Expand Up @@ -1523,6 +1567,13 @@ def call(
attentions=outputs.attentions,
)

def serving_output(self, output: TFTokenClassifierOutput) -> TFTokenClassifierOutput:
    """Return the token-classification output for serving.

    The ``hidden_states``/``attentions`` tuples are not stackable into a
    single tensor (mismatched dimensions), so no conversion is done.
    """
    return TFTokenClassifierOutput(
        logits=output.logits,
        hidden_states=output.hidden_states,
        attentions=output.attentions,
    )


@add_start_docstrings(
"""
Expand Down Expand Up @@ -1605,3 +1656,13 @@ def call(
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)

def serving_output(self, output: TFQuestionAnsweringModelOutput) -> TFQuestionAnsweringModelOutput:
    """Repackage the QA output for TF serving.

    ``hidden_states`` and ``attentions`` keep their tuple form — the
    entries have different dimensions and cannot be converted into one
    tensor with ``tf.convert_to_tensor``.
    """
    fields = {
        "start_logits": output.start_logits,
        "end_logits": output.end_logits,
        "hidden_states": output.hidden_states,
        "attentions": output.attentions,
    }
    return TFQuestionAnsweringModelOutput(**fields)

0 comments on commit 7fd9a88

Please sign in to comment.