From a6e9161045ac5eb3092df4a34ab00e930dc1c4e9 Mon Sep 17 00:00:00 2001 From: Ting Date: Sat, 5 Jul 2025 08:19:19 +0800 Subject: [PATCH] Fix NameError in ErnieBotTokenizer: call spec_init via self (#2718) --- fastdeploy/input/ernie_tokenizer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/fastdeploy/input/ernie_tokenizer.py b/fastdeploy/input/ernie_tokenizer.py index 68915399b..f51c730fd 100644 --- a/fastdeploy/input/ernie_tokenizer.py +++ b/fastdeploy/input/ernie_tokenizer.py @@ -143,7 +143,7 @@ class ErnieBotTokenizer(PretrainedTokenizer): def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (string) in a single string.""" - spec_init() + self.spec_init() current_sub_tokens = [] out_string = "" # prev_is_special = False @@ -216,7 +216,7 @@ class ErnieBotTokenizer(PretrainedTokenizer): # if isinstance(t, AddedToken) # ) - spec_init() + self.spec_init() text, kwargs = self.prepare_for_tokenization(text, **kwargs) # TODO: should this be in the base class?