From beab770c8f2c33be23159a041aa40515ac658407 Mon Sep 17 00:00:00 2001
From: hankcs
Date: Thu, 21 Dec 2023 18:19:23 -0800
Subject: [PATCH] Mirror tokenizers from our file servers

fix: https://github.com/hankcs/HanLP/issues/1865
---
 hanlp/components/parsers/biaffine/biaffine_dep.py | 4 ++--
 hanlp/version.py                                   | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/hanlp/components/parsers/biaffine/biaffine_dep.py b/hanlp/components/parsers/biaffine/biaffine_dep.py
index 366eded8f..5870eb313 100644
--- a/hanlp/components/parsers/biaffine/biaffine_dep.py
+++ b/hanlp/components/parsers/biaffine/biaffine_dep.py
@@ -5,7 +5,7 @@
 from collections import Counter
 from typing import Union, Any, List
 
-from hanlp.layers.transformers.pt_imports import AutoTokenizer, PreTrainedTokenizer, AutoModel_
+from hanlp.layers.transformers.pt_imports import PreTrainedTokenizer, AutoModel_, AutoTokenizer_
 import torch
 from hanlp.utils.torch_util import lengths_to_mask
 from torch import nn
@@ -282,7 +282,7 @@ def build_optimizer(self, epochs, trn, gradient_accumulation, **kwargs):
     def build_transformer_tokenizer(self):
         transformer = self.config.transformer
         if transformer:
-            transformer_tokenizer: PreTrainedTokenizer = AutoTokenizer.from_pretrained(transformer, use_fast=True)
+            transformer_tokenizer: PreTrainedTokenizer = AutoTokenizer_.from_pretrained(transformer, use_fast=True)
         else:
             transformer_tokenizer = None
         self.transformer_tokenizer = transformer_tokenizer
diff --git a/hanlp/version.py b/hanlp/version.py
index 2461aca7b..d3a74c0d7 100644
--- a/hanlp/version.py
+++ b/hanlp/version.py
@@ -2,7 +2,7 @@
 # Author: hankcs
 # Date: 2019-12-28 19:26
 
-__version__ = '2.1.0-beta.54'
+__version__ = '2.1.0-beta.55'
 """HanLP version"""
 