From fb3da1f021be8799902f5055753065aa314ed49a Mon Sep 17 00:00:00 2001
From: Ubuntu
Date: Sat, 8 Apr 2023 23:26:38 +0000
Subject: [PATCH] Make temp style

---
 .../modeling_tf_efficientformer.py            | 27 +++++++------------
 1 file changed, 9 insertions(+), 18 deletions(-)

diff --git a/src/transformers/models/efficientformer/modeling_tf_efficientformer.py b/src/transformers/models/efficientformer/modeling_tf_efficientformer.py
index 2bd07e7bceefd5..f204fdfe38ae80 100644
--- a/src/transformers/models/efficientformer/modeling_tf_efficientformer.py
+++ b/src/transformers/models/efficientformer/modeling_tf_efficientformer.py
@@ -16,36 +16,24 @@
 
 import itertools
 from dataclasses import dataclass
-from multiprocessing import context
-from typing import Optional, Tuple, Union
+from typing import Tuple
 
 import tensorflow as tf
 
-from ...activations_tf import get_tf_activation
-
-from ...modeling_tf_outputs import (
-    TFBaseModelOutput,
-    TFBaseModelOutputWithPooling,
-    TFImageClassifierOutput,
-)
-
 from ...modeling_tf_utils import (
     TFPreTrainedModel,
     TFSequenceClassificationLoss,
     get_initializer,
-    keras_serializable,
-    unpack_inputs,
 )
 from ...tf_utils import shape_list, stable_softmax
 from ...utils import (
     ModelOutput,
-    add_code_sample_docstrings,
     add_start_docstrings,
-    add_start_docstrings_to_model_forward,
     logging,
 )
 from .configuration_efficientformer import EfficientFormerConfig
+
 
 logger = logging.get_logger(__name__)
 
 # General docstring
@@ -254,20 +242,20 @@ class TFEfficientFormerConvMlp(tf.keras.layers.Layer):
         pass
 
 
-# Copied from transformers.models.convnext.modeling_tf_convnext.TFConvNextDropPath
+# Copied from transformers.models.convnext.modeling_tf_convnext.TFConvNextDropPath with ConvNext->EfficientFormer
 class TFEfficientFormerDropPath(tf.keras.layers.Layer):
     """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
 
     References: (1) github.com:rwightman/pytorch-image-models
     """
 
-    def __init__(self, drop_prob, **kwargs):
+    def __init__(self, drop_path, **kwargs):
         super().__init__(**kwargs)
-        self.drop_prob = drop_prob
+        self.drop_path = drop_path
 
     def call(self, x, training=None):
         if training:
-            keep_prob = 1 - self.drop_prob
+            keep_prob = 1 - self.drop_path
             shape = (tf.shape(x)[0],) + (1,) * (len(tf.shape(x)) - 1)
             random_tensor = keep_prob + tf.random.uniform(shape, 0, 1)
             random_tensor = tf.floor(random_tensor)
@@ -369,9 +357,12 @@ class TFEfficientFormerForImageClassificationWithTeacherOutput(ModelOutput):
     EfficientFormer Model transformer with image classification heads on top (a linear layer on top of the final hidden
     state of the [CLS] token and a linear layer on top of the final hidden state of the distillation token) e.g. for
     ImageNet.
+
+    This model supports inference-only. Fine-tuning with distillation (i.e. with a teacher) is not yet supported.
+
     """,
     EFFICIENTFORMER_START_DOCSTRING,