From f98b43514e03fa571bd071e4324e0a55815035a7 Mon Sep 17 00:00:00 2001
From: resphinas <2934218525@qq.com>
Date: Mon, 28 Aug 2023 17:18:00 +0800
Subject: [PATCH] Add claude.ai bot support

---
 bot/bot_factory.py          |   4 ++++
 bot/claude/claude_ai_bot.py | 195 ++++++++++++++++++++++++++++++++++++++++++++
 bridge/bridge.py            |   2 ++
 common/const.py             |   5 +++--
 config-template.json        |   2 ++
 config.py                   |   4 +++-
 requirements.txt            |   1 +
 7 files changed, 210 insertions(+), 3 deletions(-)
 create mode 100644 bot/claude/claude_ai_bot.py

diff --git a/bot/bot_factory.py b/bot/bot_factory.py
index 513eb78..da12f95 100644
--- a/bot/bot_factory.py
+++ b/bot/bot_factory.py
@@ -39,4 +39,8 @@ def create_bot(bot_type):
     elif bot_type == const.LINKAI:
         from bot.linkai.link_ai_bot import LinkAIBot
         return LinkAIBot()
+
+    elif bot_type == const.CLAUDEAI:
+        from bot.claude.claude_ai_bot import ClaudeAIBot
+        return ClaudeAIBot()
     raise RuntimeError
diff --git a/bot/claude/claude_ai_bot.py b/bot/claude/claude_ai_bot.py
new file mode 100644
index 0000000..bb1cbd8
--- /dev/null
+++ b/bot/claude/claude_ai_bot.py
@@ -0,0 +1,195 @@
+import re
+import time
+import json
+import uuid
+
+from curl_cffi import requests
+from bot.bot import Bot
+from bot.chatgpt.chat_gpt_session import ChatGPTSession
+from bot.openai.open_ai_image import OpenAIImage
+from bot.session_manager import SessionManager
+from bridge.context import Context, ContextType
+from bridge.reply import Reply, ReplyType
+from common.log import logger
+from config import conf
+
+
+class ClaudeAIBot(Bot, OpenAIImage):
+    # authentication failed
+    AUTH_FAILED_CODE = 401
+    NO_QUOTA_CODE = 406
+
+    def __init__(self):
+        super().__init__()
+        self.sessions = SessionManager(ChatGPTSession, model=conf().get("model") or "gpt-3.5-turbo")
+        self.claude_api_cookie = conf().get("claude_api_cookie")
+        self.proxy = conf().get("proxy")
+        self.proxies = {
+            "http": self.proxy,
+            "https": self.proxy
+        }
+        self.org_uuid = self.get_organization_id()
+        self.con_uuid = None
+        self.get_uuid()
+
+    def generate_uuid(self):
+        # claude.ai conversation ids are standard UUID4 strings
+        return str(uuid.uuid4())
+
+    def get_uuid(self):
+        # reuse a configured conversation uuid if present, otherwise start a new conversation
+        if conf().get("claude_uuid") is not None:
+            self.con_uuid = conf().get("claude_uuid")
+        else:
+            self.con_uuid = self.generate_uuid()
+            self.create_new_chat()
+
+    def get_organization_id(self):
+        url = "https://claude.ai/api/organizations"
+        headers = {
+            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
+            'Accept-Language': 'en-US,en;q=0.5',
+            'Referer': 'https://claude.ai/chats',
+            'Content-Type': 'application/json',
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-origin',
+            'Connection': 'keep-alive',
+            'Cookie': f'{self.claude_api_cookie}'
+        }
+        response = requests.get(url, headers=headers, impersonate="chrome110", proxies=self.proxies)
+        try:
+            res = json.loads(response.text)
+            return res[0]['uuid']
+        except Exception as e:
+            logger.error(f"[CLAUDEAI] fail to fetch organization id, response={response.text}, error={e}")
+            return None
+
+    def reply(self, query, context: Context = None) -> Reply:
+        if context.type == ContextType.TEXT:
+            return self._chat(query, context)
+        elif context.type == ContextType.IMAGE_CREATE:
+            ok, res = self.create_img(query, 0)
+            if ok:
+                reply = Reply(ReplyType.IMAGE_URL, res)
+            else:
+                reply = Reply(ReplyType.ERROR, res)
+            return reply
+        else:
+            reply = Reply(ReplyType.ERROR, "Bot不支持处理{}类型的消息".format(context.type))
+            return reply
+
+    def create_new_chat(self):
+        url = f"https://claude.ai/api/organizations/{self.org_uuid}/chat_conversations"
+        payload = json.dumps({"uuid": self.con_uuid, "name": ""})
+        headers = {
+            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
+            'Accept-Language': 'en-US,en;q=0.5',
+            'Referer': 'https://claude.ai/chats',
+            'Content-Type': 'application/json',
+            'Origin': 'https://claude.ai',
+            'DNT': '1',
+            'Connection': 'keep-alive',
+            'Cookie': self.claude_api_cookie,
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-origin',
+            'TE': 'trailers'
+        }
+        response = requests.post(url, headers=headers, data=payload, impersonate="chrome110", proxies=self.proxies)
+        # returns the JSON of the newly created conversation
+        return response.json()
+
+    def _chat(self, query, context, retry_count=0) -> Reply:
+        """
+        Send a chat request to claude.ai.
+        :param query: prompt text
+        :param context: conversation context
+        :param retry_count: current retry depth
+        :return: reply
+        """
+        if retry_count >= 2:
+            # exit from retry after 2 attempts
+            logger.warn("[CLAUDEAI] failed after maximum number of retry times")
+            return Reply(ReplyType.ERROR, "请再问我一次吧")
+
+        try:
+            session_id = context["session_id"]
+            session = self.sessions.session_query(query, session_id)
+            model = conf().get("model") or "gpt-3.5-turbo"
+            # remove system message
+            if session.messages[0].get("role") == "system":
+                if model == "wenxin":
+                    session.messages.pop(0)
+
+            logger.info(f"[CLAUDEAI] query={query}")
+
+            # do http request
+            base_url = "https://claude.ai"
+            payload = json.dumps({
+                "completion": {
+                    "prompt": f"{query}",
+                    "timezone": "Asia/Kolkata",
+                    "model": "claude-2"
+                },
+                "organization_uuid": f"{self.org_uuid}",
+                "conversation_uuid": f"{self.con_uuid}",
+                "text": f"{query}",
+                "attachments": []
+            })
+            headers = {
+                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
+                'Accept': 'text/event-stream, text/event-stream',
+                'Accept-Language': 'en-US,en;q=0.5',
+                'Referer': 'https://claude.ai/chats',
+                'Content-Type': 'application/json',
+                'Origin': 'https://claude.ai',
+                'DNT': '1',
+                'Connection': 'keep-alive',
+                'Cookie': f'{self.claude_api_cookie}',
+                'Sec-Fetch-Dest': 'empty',
+                'Sec-Fetch-Mode': 'cors',
+                'Sec-Fetch-Site': 'same-origin',
+                'TE': 'trailers'
+            }
+
+            res = requests.post(base_url + "/api/append_message", headers=headers, data=payload,
+                                impersonate="chrome110", proxies=self.proxies, timeout=400)
+
+            if res.status_code == 200 or "permission" in res.text:
+                # success: the endpoint streams SSE lines, join the completion chunks
+                decoded_data = res.content.decode("utf-8")
+                decoded_data = re.sub('\n+', '\n', decoded_data).strip()
+                data_strings = decoded_data.split('\n')
+                completions = []
+                for data_string in data_strings:
+                    json_str = data_string[6:].strip()
+                    data = json.loads(json_str)
+                    if 'completion' in data:
+                        completions.append(data['completion'])
+
+                reply_content = ''.join(completions)
+                # the web endpoint does not report token usage, so a fixed count is recorded
+                logger.info(f"[CLAUDEAI] reply={reply_content}, total_tokens=100")
+                self.sessions.session_reply(reply_content, session_id, 100)
+                return Reply(ReplyType.TEXT, reply_content)
+            else:
+                response = res.json()
+                error = response.get("error")
+                logger.error(f"[CLAUDEAI] chat failed, status_code={res.status_code}, "
+                             f"msg={error.get('message')}, type={error.get('type')}, detail: {res.text}, uuid: {self.con_uuid}")
+
+                if res.status_code >= 500:
+                    # server error, need retry
+                    time.sleep(2)
+                    logger.warn(f"[CLAUDEAI] do retry, times={retry_count}")
+                    return self._chat(query, context, retry_count + 1)
+
+                return Reply(ReplyType.ERROR, "提问太快啦,请休息一下再问我吧")
+
+        except Exception as e:
+            logger.exception(e)
+            # retry
+            time.sleep(2)
+            logger.warn(f"[CLAUDEAI] do retry, times={retry_count}")
+            return self._chat(query, context, retry_count + 1)
diff --git a/bridge/bridge.py b/bridge/bridge.py
index 2022438..4a0ef4f 100644
--- a/bridge/bridge.py
+++ b/bridge/bridge.py
@@ -29,6 +29,8 @@ class Bridge(object):
             self.btype["chat"] = const.XUNFEI
         if conf().get("use_linkai") and conf().get("linkai_api_key"):
             self.btype["chat"] = const.LINKAI
+        if model_type in ["claude"]:
+            self.btype["chat"] = const.CLAUDEAI
         self.bots = {}
 
     def get_bot(self, typename):
diff --git a/common/const.py b/common/const.py
index 505ab71..ce3eac4 100644
--- a/common/const.py
+++ b/common/const.py
@@ -5,7 +5,8 @@ BAIDU = "baidu"
 XUNFEI = "xunfei"
 CHATGPTONAZURE = "chatGPTOnAzure"
 LINKAI = "linkai"
+CLAUDEAI = "claude"
 
-VERSION = "1.3.0"
-MODEL_LIST = ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "wenxin", "xunfei"]
+VERSION = "1.3.0"
+MODEL_LIST = ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "wenxin", "xunfei", "claude"]
 
diff --git a/config-template.json b/config-template.json
index f01633d..003fbb6 100644
--- a/config-template.json
+++ b/config-template.json
@@ -2,6 +2,8 @@
   "open_ai_api_key": "YOUR API KEY",
   "model": "gpt-3.5-turbo",
   "channel_type": "wx",
   "proxy": "",
   "hot_reload": false,
+  "claude_api_cookie": "YOUR CLAUDE COOKIE",
+  "claude_uuid": "",
   "single_chat_prefix": [
diff --git a/config.py b/config.py
index 5853b0d..cb043d1 100644
--- a/config.py
+++ b/config.py
@@ -120,7 +120,9 @@ available_setting = {
     "use_linkai": False,
     "linkai_api_key": "",
     "linkai_app_code": "",
-    "linkai_api_base": "https://api.link-ai.chat" # linkAI服务地址,若国内无法访问或延迟较高可改为 https://api.link-ai.tech
+    "linkai_api_base": "https://api.link-ai.chat", # linkAI服务地址,若国内无法访问或延迟较高可改为 https://api.link-ai.tech
+    "claude_api_cookie": "",
+    "claude_uuid": ""
 }
 
 
diff --git a/requirements.txt b/requirements.txt
index 0863125..4d5bfe4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,3 +7,4 @@ chardet>=5.1.0
 Pillow
 pre-commit
 web.py
+curl_cffi
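
For reference, a minimal sketch of the config.json settings this patch reads (placeholder values only, not real credentials; "claude_api_cookie" is assumed to be the Cookie header of a logged-in claude.ai browser session):

    {
        "model": "claude",
        "channel_type": "wx",
        "claude_api_cookie": "YOUR CLAUDE COOKIE"
    }

Setting "model" to "claude" is what routes chat requests to ClaudeAIBot via the bridge.py change above. "claude_uuid" is optional; when it is not set, the bot generates a UUID and opens a new claude.ai conversation on startup.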