fix: 修复openai计算token错误
This commit is contained in:
parent
b0931c8612
commit
3d1b3ea8d5
@ -46,14 +46,14 @@ class OpenAIChatModel(MaxKBBaseModel, ChatOpenAI):
|
||||
|
||||
def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
    """Return the total number of tokens across *messages*.

    Tries the provider tokenizer from the parent class (ChatOpenAI) first.
    If that raises — e.g. an unrecognized model name for tiktoken — fall
    back to the local tokenizer and sum the per-message token counts.

    :param messages: chat messages to count tokens for
    :return: total token count as an int
    """
    try:
        # Single call: the original computed this twice (once discarded),
        # doubling the tokenization cost.
        return super().get_num_tokens_from_messages(messages)
    except Exception:
        # Best-effort fallback when the OpenAI tokenizer is unavailable.
        tokenizer = TokenizerManage.get_tokenizer()
        return sum(len(tokenizer.encode(get_buffer_string([m]))) for m in messages)
||||
def get_num_tokens(self, text: str) -> int:
    """Return the number of tokens in *text*.

    Tries the provider tokenizer from the parent class (ChatOpenAI) first;
    on any failure falls back to the local tokenizer.

    :param text: raw text to count tokens for
    :return: token count as an int
    """
    try:
        # Single call: the original invoked super().get_num_tokens(text)
        # twice (first result discarded), doing the work twice.
        return super().get_num_tokens(text)
    except Exception:
        # Best-effort fallback when the OpenAI tokenizer is unavailable.
        tokenizer = TokenizerManage.get_tokenizer()
        return len(tokenizer.encode(text))
Loading…
Reference in New Issue
Block a user