chat_gpt_bot.py

# encoding:utf-8

from bot.bot import Bot
from bot.chatgpt.chat_gpt_session import ChatGPTSession
from bot.openai.open_ai_image import OpenAIImage
from bot.session_manager import Session, SessionManager
from bridge.context import ContextType
from bridge.reply import Reply, ReplyType
from config import conf, load_config
from common.log import logger
from common.token_bucket import TokenBucket
from common.expired_dict import ExpiredDict
import openai
import time

# OpenAI chat model API (available)
class ChatGPTBot(Bot, OpenAIImage):
    def __init__(self):
        super().__init__()
        openai.api_key = conf().get('open_ai_api_key')
        if conf().get('open_ai_api_base'):
            openai.api_base = conf().get('open_ai_api_base')
        proxy = conf().get('proxy')
        if proxy:
            openai.proxy = proxy
        if conf().get('rate_limit_chatgpt'):
            self.tb4chatgpt = TokenBucket(conf().get('rate_limit_chatgpt', 20))
        self.sessions = SessionManager(ChatGPTSession, model=conf().get("model") or "gpt-3.5-turbo")
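
    # Illustrative config snippet (values are placeholders, not taken from the original
    # source) showing the keys this constructor reads via conf():
    #   {"open_ai_api_key": "sk-xxxx", "open_ai_api_base": "https://api.openai.com/v1",
    #    "proxy": "", "rate_limit_chatgpt": 20, "model": "gpt-3.5-turbo"}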

    def reply(self, query, context=None):
        # acquire reply content
        if context.type == ContextType.TEXT:
            logger.info("[OPEN_AI] query={}".format(query))
            session_id = context['session_id']
            reply = None
            clear_memory_commands = conf().get('clear_memory_commands', ['#清除记忆'])
            if query in clear_memory_commands:
                self.sessions.clear_session(session_id)
                reply = Reply(ReplyType.INFO, '记忆已清除')
            elif query == '#清除所有':
                self.sessions.clear_all_session()
                reply = Reply(ReplyType.INFO, '所有人记忆已清除')
            elif query == '#更新配置':
                load_config()
                reply = Reply(ReplyType.INFO, '配置已更新')
            if reply:
                return reply
            session = self.sessions.session_query(query, session_id)
            logger.debug("[OPEN_AI] session query={}".format(session.messages))
            # if context.get('stream'):
            #     # reply in stream
            #     return self.reply_text_stream(query, new_query, session_id)
            reply_content = self.reply_text(session, session_id, 0)
            logger.debug("[OPEN_AI] new_query={}, session_id={}, reply_cont={}, completion_tokens={}".format(session.messages, session_id, reply_content["content"], reply_content["completion_tokens"]))
            if reply_content['completion_tokens'] == 0 and len(reply_content['content']) > 0:
                reply = Reply(ReplyType.ERROR, reply_content['content'])
            elif reply_content["completion_tokens"] > 0:
                self.sessions.session_reply(reply_content["content"], session_id, reply_content["total_tokens"])
                reply = Reply(ReplyType.TEXT, reply_content["content"])
            else:
                reply = Reply(ReplyType.ERROR, reply_content['content'])
                logger.debug("[OPEN_AI] reply {} used 0 tokens.".format(reply_content))
            return reply
        elif context.type == ContextType.IMAGE_CREATE:
            ok, retstring = self.create_img(query, 0)
            reply = None
            if ok:
                reply = Reply(ReplyType.IMAGE_URL, retstring)
            else:
                reply = Reply(ReplyType.ERROR, retstring)
            return reply
        else:
            reply = Reply(ReplyType.ERROR, 'Bot不支持处理{}类型的消息'.format(context.type))
            return reply

    def compose_args(self):
        return {
            "model": conf().get("model") or "gpt-3.5-turbo",  # name of the chat model
            "temperature": conf().get('temperature', 0.9),  # in [0,1]; larger values make replies less deterministic
            # "max_tokens": 4096,  # maximum number of tokens in the reply
            "top_p": 1,
            "frequency_penalty": conf().get('frequency_penalty', 0.0),  # in [-2,2]; larger values favor more varied content
            "presence_penalty": conf().get('presence_penalty', 0.0),  # in [-2,2]; larger values favor more varied content
        }
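
    # Illustrative result (an assumption based on the defaults above, not taken from
    # the original source): with no overrides in the config, compose_args() returns
    #   {"model": "gpt-3.5-turbo", "temperature": 0.9, "top_p": 1,
    #    "frequency_penalty": 0.0, "presence_penalty": 0.0}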

    def reply_text(self, session: ChatGPTSession, session_id, retry_count=0) -> dict:
        '''
        call openai's ChatCompletion to get the answer
        :param session: a conversation session
        :param session_id: session id
        :param retry_count: retry count
        :return: dict with "content" and "completion_tokens" keys (plus "total_tokens" on success)
        '''
        try:
            if conf().get('rate_limit_chatgpt') and not self.tb4chatgpt.get_token():
                return {"completion_tokens": 0, "content": "提问太快啦,请休息一下再问我吧"}
            response = openai.ChatCompletion.create(
                messages=session.messages, **self.compose_args()
            )
            # logger.info("[ChatGPT] reply={}, total_tokens={}".format(response.choices[0]['message']['content'], response["usage"]["total_tokens"]))
            return {"total_tokens": response["usage"]["total_tokens"],
                    "completion_tokens": response["usage"]["completion_tokens"],
                    "content": response.choices[0]['message']['content']}
        except openai.error.RateLimitError as e:
            # rate limit exception
            logger.warn(e)
            if retry_count < 1:
                time.sleep(5)
                logger.warn("[OPEN_AI] RateLimit exceed, 第{}次重试".format(retry_count + 1))
                return self.reply_text(session, session_id, retry_count + 1)
            else:
                return {"completion_tokens": 0, "content": "提问太快啦,请休息一下再问我吧"}
        except openai.error.APIConnectionError as e:
            # api connection exception
            logger.warn(e)
            logger.warn("[OPEN_AI] APIConnection failed")
            return {"completion_tokens": 0, "content": "我连接不到你的网络"}
        except openai.error.Timeout as e:
            logger.warn(e)
            logger.warn("[OPEN_AI] Timeout")
            return {"completion_tokens": 0, "content": "我没有收到你的消息"}
        except Exception as e:
            # unknown exception
            logger.exception(e)
            self.sessions.clear_session(session_id)
            return {"completion_tokens": 0, "content": "请再问我一次吧"}


class AzureChatGPTBot(ChatGPTBot):
    def __init__(self):
        super().__init__()
        openai.api_type = "azure"
        openai.api_version = "2023-03-15-preview"

    def compose_args(self):
        args = super().compose_args()
        args["engine"] = args["model"]  # Azure expects an "engine" (deployment name) instead of "model"
        del args["model"]
        return args
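

# --- Usage sketch (illustrative addition, not part of the original module) ---
# Minimal driver showing how reply() above is expected to be called. In the real
# project the Context object is built by the bridge layer; the _DemoContext class
# below is a hypothetical stand-in that provides only what reply() actually reads:
# a `type` attribute and a 'session_id' item. It also assumes Reply exposes
# `type` and `content` attributes, matching how Reply is constructed above, and
# that the project config has already been loaded.
if __name__ == "__main__":
    class _DemoContext(dict):
        def __init__(self, ctx_type, **kwargs):
            super().__init__(**kwargs)
            self.type = ctx_type

    bot = ChatGPTBot()
    ctx = _DemoContext(ContextType.TEXT, session_id="demo_session")
    demo_reply = bot.reply("你好", ctx)
    logger.info("[DEMO] reply type={}, content={}".format(demo_reply.type, demo_reply.content))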