@@ -39,4 +39,8 @@ def create_bot(bot_type):
    elif bot_type == const.LINKAI:
        from bot.linkai.link_ai_bot import LinkAIBot
        return LinkAIBot()
    elif bot_type == const.CLAUDEAI:
        from bot.claude.claude_ai_bot import ClaudeAIBot
        return ClaudeAIBot()
    raise RuntimeError
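For reference, a minimal sketch of how a caller would obtain the new bot through this factory; the module paths are assumptions based on the imports shown in the diff rather than verified against the repository:

from common import const
from bot.bot_factory import create_bot

# Note that ClaudeAIBot.__init__ immediately calls the claude.ai API to resolve the
# organization uuid, so a valid claude_api_cookie must already be configured.
claude_bot = create_bot(const.CLAUDEAI)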
@@ -106,6 +106,10 @@ class ChatGPTBot(Bot, OpenAIImage):
            reply = Reply(ReplyType.ERROR, "Bot不支持处理{}类型的消息".format(context.type))
            return reply
    def reply_text(self, session: ChatGPTSession, api_key=None, args=None, retry_count=0) -> dict:
        """
        call openai's ChatCompletion to get the answer
@@ -0,0 +1,229 @@
import re
import time
import json
import uuid
from curl_cffi import requests
from bot.bot import Bot
from bot.chatgpt.chat_gpt_session import ChatGPTSession
from bot.openai.open_ai_image import OpenAIImage
from bot.session_manager import SessionManager
from bridge.context import Context, ContextType
from bridge.reply import Reply, ReplyType
from common.log import logger
from config import conf
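# ClaudeAIBot drives the claude.ai web interface rather than an official API: it
# authenticates with a browser cookie (claude_api_cookie), resolves the account's
# organization uuid, creates or reuses a conversation uuid, and posts prompts to
# /api/append_message, assembling the SSE response into a single reply.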
class ClaudeAIBot(Bot, OpenAIImage):
    # authentication failed
    AUTH_FAILED_CODE = 401
    NO_QUOTA_CODE = 406
    def __init__(self):
        super().__init__()
        # reuse the ChatGPT session structure to keep per-user conversation history
        self.sessions = SessionManager(ChatGPTSession, model=conf().get("model") or "gpt-3.5-turbo")
        self.claude_api_cookie = conf().get("claude_api_cookie")
        self.proxy = conf().get("proxy")
        self.proxies = {
            "http": self.proxy,
            "https": self.proxy
        }
        self.org_uuid = self.get_organization_id()
        self.con_uuid = None
        self.get_uuid()
    def generate_uuid(self):
        # str(uuid.uuid4()) already yields the canonical 8-4-4-4-12 form
        return str(uuid.uuid4())
    def get_uuid(self):
        if conf().get("claude_uuid") is not None:
            self.con_uuid = conf().get("claude_uuid")
        else:
            self.con_uuid = self.generate_uuid()
            self.create_new_chat()
    def get_organization_id(self):
        url = "https://claude.ai/api/organizations"
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
            'Accept-Language': 'en-US,en;q=0.5',
            'Referer': 'https://claude.ai/chats',
            'Content-Type': 'application/json',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin',
            'Connection': 'keep-alive',
            'Cookie': f'{self.claude_api_cookie}'
        }
        try:
            response = requests.get(url, headers=headers, impersonate="chrome110", proxies=self.proxies)
            res = json.loads(response.text)
            return res[0]['uuid']
        except Exception as e:
            logger.error(f"[CLAUDE] failed to get organization id: {e}")
            return None
    def reply(self, query, context: Context = None) -> Reply:
        if context.type == ContextType.TEXT:
            return self._chat(query, context)
        elif context.type == ContextType.IMAGE_CREATE:
            ok, res = self.create_img(query, 0)
            if ok:
                reply = Reply(ReplyType.IMAGE_URL, res)
            else:
                reply = Reply(ReplyType.ERROR, res)
            return reply
        else:
            reply = Reply(ReplyType.ERROR, "Bot不支持处理{}类型的消息".format(context.type))
            return reply
    def create_new_chat(self):
        url = f"https://claude.ai/api/organizations/{self.org_uuid}/chat_conversations"
        payload = json.dumps({"uuid": self.con_uuid, "name": ""})
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
            'Accept-Language': 'en-US,en;q=0.5',
            'Referer': 'https://claude.ai/chats',
            'Content-Type': 'application/json',
            'Origin': 'https://claude.ai',
            'DNT': '1',
            'Connection': 'keep-alive',
            'Cookie': self.claude_api_cookie,
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin',
            'TE': 'trailers'
        }
        response = requests.post(url, headers=headers, data=payload, impersonate="chrome110", proxies=self.proxies)
        # returns the JSON description of the newly created conversation
        return response.json()
    def _chat(self, query, context, retry_count=0) -> Reply:
        """
        Send a chat request to claude.ai.
        :param query: the prompt text
        :param context: conversation context
        :param retry_count: current (recursive) retry count
        :return: reply
        """
        if retry_count >= 2:
            # exit after 2 retries
            logger.warn("[CLAUDEAI] failed after maximum number of retry times")
            return Reply(ReplyType.ERROR, "请再问我一次吧")
        try:
            session_id = context["session_id"]
            session = self.sessions.session_query(query, session_id)
            model = conf().get("model") or "gpt-3.5-turbo"
            # remove system message
            if session.messages[0].get("role") == "system":
                if model == "wenxin":
                    session.messages.pop(0)
            logger.info(f"[CLAUDEAI] query={query}")
            # do http request
            base_url = "https://claude.ai"
            payload = json.dumps({
                "completion": {
                    "prompt": f"{query}",
                    "timezone": "Asia/Kolkata",
                    "model": "claude-2"
                },
                "organization_uuid": f"{self.org_uuid}",
                "conversation_uuid": f"{self.con_uuid}",
                "text": f"{query}",
                "attachments": []
            })
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
                'Accept': 'text/event-stream, text/event-stream',
                'Accept-Language': 'en-US,en;q=0.5',
                'Referer': 'https://claude.ai/chats',
                'Content-Type': 'application/json',
                'Origin': 'https://claude.ai',
                'DNT': '1',
                'Connection': 'keep-alive',
                'Cookie': f'{self.claude_api_cookie}',
                'Sec-Fetch-Dest': 'empty',
                'Sec-Fetch-Mode': 'cors',
                'Sec-Fetch-Site': 'same-origin',
                'TE': 'trailers'
            }
            res = requests.post(base_url + "/api/append_message", headers=headers, data=payload, impersonate="chrome110", proxies=self.proxies, timeout=400)
            if res.status_code == 200 or "permission" in res.text:
                # execute success
                decoded_data = res.content.decode("utf-8")
                decoded_data = re.sub('\n+', '\n', decoded_data).strip()
                data_strings = decoded_data.split('\n')
                completions = []
                for data_string in data_strings:
                    # each SSE line looks like "data: {...}"; skip anything else
                    if not data_string.startswith("data:"):
                        continue
                    json_str = data_string[6:].strip()
                    data = json.loads(json_str)
                    if 'completion' in data:
                        completions.append(data['completion'])
                reply_content = ''.join(completions)
                # the web endpoint does not report token usage, so 100 is a placeholder count
                logger.info(f"[CLAUDE] reply={reply_content}, total_tokens=100")
                self.sessions.session_reply(reply_content, session_id, 100)
                return Reply(ReplyType.TEXT, reply_content)
            else:
                response = res.json()
                error = response.get("error") or {}
                logger.error(f"[CLAUDE] chat failed, status_code={res.status_code}, "
                             f"msg={error.get('message')}, type={error.get('type')}, detail: {res.text}, uuid: {self.con_uuid}")
                if res.status_code >= 500:
                    # server error, need retry
                    time.sleep(2)
                    logger.warn(f"[CLAUDE] do retry, times={retry_count}")
                    return self._chat(query, context, retry_count + 1)
                return Reply(ReplyType.ERROR, "提问太快啦,请休息一下再问我吧")
        except Exception as e:
            logger.exception(e)
            # retry
            time.sleep(2)
            logger.warn(f"[CLAUDE] do retry, times={retry_count}")
            return self._chat(query, context, retry_count + 1)
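The /api/append_message endpoint answers with a server-sent-event stream; the sketch below isolates the parsing idea used in _chat above as a standalone helper (the sample payload is illustrative, not a captured response):

import json
import re

def parse_claude_sse(raw_text):
    # Collapse blank lines, keep only "data: {...}" events, and join the incremental
    # "completion" fields into one reply string, mirroring the loop in _chat.
    lines = re.sub('\n+', '\n', raw_text).strip().split('\n')
    completions = []
    for line in lines:
        if not line.startswith("data:"):
            continue
        event = json.loads(line[len("data:"):].strip())
        if "completion" in event:
            completions.append(event["completion"])
    return "".join(completions)

sample = 'data: {"completion": "Hello"}\n\ndata: {"completion": ", world"}'
print(parse_claude_sse(sample))  # -> Hello, world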
@@ -29,6 +29,8 @@ class Bridge(object):
            self.btype["chat"] = const.XUNFEI
        if conf().get("use_linkai") and conf().get("linkai_api_key"):
            self.btype["chat"] = const.LINKAI
        if model_type in ["claude"]:
            self.btype["chat"] = const.CLAUDEAI
        self.bots = {}
    def get_bot(self, typename):
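With this routing in place, setting "model": "claude" in the config makes the bridge hand chat traffic to the new bot. A hedged illustration (Bridge and get_bot already exist in the repo; the exact call pattern here is an assumption):

from bridge.bridge import Bridge

bridge = Bridge()                  # reads conf(); btype["chat"] becomes const.CLAUDEAI
chat_bot = bridge.get_bot("chat")  # lazily instantiates ClaudeAIBot via the bot factory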
@@ -1 +0,0 @@
Subproject commit 827e8eddf87b73f310464e3d6c0509e5b6e2ba67
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager" inherit-compiler-output="true">
    <exclude-output />
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/translate" isTestSource="false" />
      <excludeFolder url="file://$MODULE_DIR$/venv" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
@@ -5,7 +5,8 @@ BAIDU = "baidu"
XUNFEI = "xunfei"
CHATGPTONAZURE = "chatGPTOnAzure"
LINKAI = "linkai"
CLAUDEAI = "claude"
VERSION = "1.3.0"
MODEL_LIST = ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "wenxin", "xunfei", "claude"]
@@ -13,14 +13,14 @@ def _reset_logger(log):
    console_handle.setFormatter(
        logging.Formatter(
            "[%(levelname)s][%(asctime)s][%(filename)s:%(lineno)d] - %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )
    )
    file_handle = logging.FileHandler("run.log", encoding="utf-8")
    file_handle.setFormatter(
        logging.Formatter(
            "[%(levelname)s][%(asctime)s][%(filename)s:%(lineno)d] - %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )
    )
    log.addHandler(file_handle)
@@ -2,8 +2,12 @@
  "open_ai_api_key": "YOUR API KEY",
  "model": "gpt-3.5-turbo",
  "channel_type": "wx",
  "proxy": "",
  "claude_api_cookie": "YOUR CLAUDE COOKIE",
  "claude_uuid": "",
  "hot_reload": false,
  "single_chat_prefix": [
    "bot",
    "@bot"
@@ -14,6 +18,7 @@
  ],
  "group_name_white_list": [
    "ChatGPT测试群",
    "ChatGPT测试群2"
  ],
  "group_chat_in_one_session": [
@@ -120,7 +120,9 @@ available_setting = {
    "use_linkai": False,
    "linkai_api_key": "",
    "linkai_app_code": "",
    "linkai_api_base": "https://api.link-ai.chat",  # LinkAI service address; if it is unreachable or slow, https://api.link-ai.tech can be used instead
    "claude_api_cookie": "",
    "claude_uuid": ""
}
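Both new settings default to empty strings and are read through conf(), mirroring the lookups in ClaudeAIBot:

from config import conf

cookie = conf().get("claude_api_cookie")  # browser cookie used to authenticate against claude.ai
con_uuid = conf().get("claude_uuid")      # optional fixed conversation uuid; generated when empty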
@@ -349,7 +349,11 @@ def upload_chunk_file(core, fileDir, fileSymbol, fileSize,
        ('id', (None, 'WU_FILE_0')),
        ('name', (None, fileName)),
        ('type', (None, fileType)),
        ('lastModifiedDate', (None, time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (CST)'))),
        ('size', (None, str(fileSize))),
        ('chunks', (None, None)),
        ('chunk', (None, None)),
@@ -351,6 +351,7 @@ def upload_chunk_file(core, fileDir, fileSymbol, fileSize,
        ('name', (None, fileName)),
        ('type', (None, fileType)),
        ('lastModifiedDate', (None, time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (CST)'))),
        ('size', (None, str(fileSize))),
        ('chunks', (None, None)),
        ('chunk', (None, None)),
@@ -7,3 +7,7 @@ chardet>=5.1.0
Pillow
pre-commit
web.py
curl_cffi
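curl_cffi is the only new dependency; it is pulled in for its browser impersonation support, which every request in claude_ai_bot.py relies on. A minimal standalone check (the cookie value is a placeholder):

from curl_cffi import requests

# impersonate="chrome110" reproduces a Chrome 110 TLS fingerprint, matching the calls in the bot.
resp = requests.get(
    "https://claude.ai/api/organizations",
    headers={"Cookie": "sessionKey=YOUR_SESSION_KEY"},
    impersonate="chrome110",
)
print(resp.status_code)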