From 3aa2e6a04d02613a49cdf362163dea1c42dcc658 Mon Sep 17 00:00:00 2001 From: lanvent Date: Fri, 16 Jun 2023 00:51:29 +0800 Subject: [PATCH] fix: calculate tokens correctly for *0613 models --- bot/chatgpt/chat_gpt_session.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/chatgpt/chat_gpt_session.py b/bot/chatgpt/chat_gpt_session.py index c4afa8c..3c2657c 100644 --- a/bot/chatgpt/chat_gpt_session.py +++ b/bot/chatgpt/chat_gpt_session.py @@ -57,9 +57,9 @@ def num_tokens_from_messages(messages, model): """Returns the number of tokens used by a list of messages.""" import tiktoken - if model in ["gpt-3.5-turbo-0301", "gpt-3.5-turbo-0613", "gpt-35-turbo", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613"]: + if model in ["gpt-3.5-turbo-0301", "gpt-35-turbo"]: return num_tokens_from_messages(messages, model="gpt-3.5-turbo") - elif model in ["gpt-4-0314", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613"]: + elif model in ["gpt-4-0314", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613"]: return num_tokens_from_messages(messages, model="gpt-4") try: