diff --git a/merlin/models/tokenizers/__init__.py b/merlin/models/tokenizers/__init__.py
index 5fac9f3f54..d955878e32 100644
--- a/merlin/models/tokenizers/__init__.py
+++ b/merlin/models/tokenizers/__init__.py
@@ -1,2 +1,2 @@
-from merlin.models.tokenizers.tokenizer import Tokenizer  # noqa: F401
 from merlin.models.tokenizers.sentencepiece import SentencePieceTokenizer  # noqa: F401
+from merlin.models.tokenizers.tokenizer import Tokenizer  # noqa: F401
diff --git a/merlin/models/tokenizers/sentencepiece.py b/merlin/models/tokenizers/sentencepiece.py
index d886bec916..436d1484b8 100644
--- a/merlin/models/tokenizers/sentencepiece.py
+++ b/merlin/models/tokenizers/sentencepiece.py
@@ -11,7 +11,7 @@ class SentencePieceTokenizer(Tokenizer):
     [1] https://github.com/google/sentencepiece
     """

-    def __init__(self, *, processor: "SentencePieceTrainer") -> None:
+    def __init__(self, *, processor: "SentencePieceTrainer") -> None:  # noqa: F821
         require_sentencepiece()
         self.processor = processor
