fix: 修复openai计算token错误 (fix OpenAI token-count calculation error)

This commit is contained in:
shaohuzhang1 2024-08-29 17:21:19 +08:00 committed by shaohuzhang1
parent b0931c8612
commit 3d1b3ea8d5

View File

@ -46,14 +46,14 @@ class OpenAIChatModel(MaxKBBaseModel, ChatOpenAI):
def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
    """Count the tokens used by a list of chat messages.

    Tries the provider-accurate count from the ChatOpenAI superclass first;
    if that raises (e.g. unknown model name for tiktoken), falls back to the
    local tokenizer managed by TokenizerManage.

    :param messages: chat messages to measure.
    :return: total token count across all messages.
    """
    try:
        # Single call — the previous version invoked super() twice,
        # discarding the first (identical) result.
        return super().get_num_tokens_from_messages(messages)
    except Exception:
        # Fallback: encode each message's buffer string with the local
        # tokenizer and sum the lengths.
        tokenizer = TokenizerManage.get_tokenizer()
        return sum(len(tokenizer.encode(get_buffer_string([m]))) for m in messages)
def get_num_tokens(self, text: str) -> int:
    """Count the tokens in a plain text string.

    Tries the provider-accurate count from the ChatOpenAI superclass first;
    if that raises, falls back to the local tokenizer from TokenizerManage.

    :param text: text to measure.
    :return: token count for the text.
    """
    try:
        # Single call — the previous version invoked super() twice,
        # discarding the first (identical) result.
        return super().get_num_tokens(text)
    except Exception:
        tokenizer = TokenizerManage.get_tokenizer()
        return len(tokenizer.encode(text))