
fix: calculate tokens correctly for *0613 models

master
lanvent 1 year ago
parent
commit
3aa2e6a04d
1 file changed with 2 additions and 2 deletions

+2 −2  bot/chatgpt/chat_gpt_session.py

@@ -57,9 +57,9 @@ def num_tokens_from_messages(messages, model):
     """Returns the number of tokens used by a list of messages."""
     import tiktoken

-    if model in ["gpt-3.5-turbo-0301", "gpt-3.5-turbo-0613", "gpt-35-turbo", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613"]:
+    if model in ["gpt-3.5-turbo-0301", "gpt-35-turbo"]:
         return num_tokens_from_messages(messages, model="gpt-3.5-turbo")
-    elif model in ["gpt-4-0314", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613"]:
+    elif model in ["gpt-4-0314", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613"]:
         return num_tokens_from_messages(messages, model="gpt-4")

     try:
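
The counting logic below this hunk is not shown in the diff. The rationale for moving the *-0613 and 16k gpt-3.5 models into the gpt-4 branch is that those models use the same per-message overhead as gpt-4 (3 tokens per message, +1 when a name field is present) rather than the older gpt-3.5-turbo-0301 scheme (4 per message, -1 for a name). A minimal sketch of that scheme, assuming the rest of the file follows the OpenAI cookbook pattern; the function name count_chat_tokens is hypothetical and not taken from chat_gpt_session.py:

# Sketch of the token-counting scheme the two branches feed into
# (OpenAI cookbook pattern, not the exact body of chat_gpt_session.py).
import tiktoken

def count_chat_tokens(messages, model="gpt-4"):
    """Count tokens for a list of {"role": ..., "content": ...} messages."""
    try:
        encoding = tiktoken.encoding_for_model(model)
    except KeyError:
        encoding = tiktoken.get_encoding("cl100k_base")

    if model == "gpt-3.5-turbo-0301":
        tokens_per_message = 4   # every message: <|start|>{role/name}\n{content}<|end|>\n
        tokens_per_name = -1     # role is omitted when a name is present
    else:
        # gpt-4 and the *-0613 / 16k gpt-3.5 models share this overhead,
        # which is why the commit routes them through the gpt-4 branch.
        tokens_per_message = 3
        tokens_per_name = 1

    num_tokens = 0
    for message in messages:
        num_tokens += tokens_per_message
        for key, value in message.items():
            num_tokens += len(encoding.encode(value))
            if key == "name":
                num_tokens += tokens_per_name
    num_tokens += 3  # every reply is primed with <|start|>assistant<|message|>
    return num_tokens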

