Browse Source

claude_bot

master
resphinas 1 year ago
parent
commit
f98b43514e
13 changed files with 274 additions and 7 deletions
  1. +4
    -0
      bot/bot_factory.py
  2. +4
    -0
      bot/chatgpt/chat_gpt_bot.py
  3. +229
    -0
      bot/claude/claude_ai_bot.py
  4. +2
    -0
      bridge/bridge.py
  5. +0
    -1
      chatgpt-on-wechat
  6. +12
    -0
      chatgpt-on-wechat-master.iml
  7. +3
    -2
      common/const.py
  8. +2
    -2
      common/log.py
  9. +6
    -1
      config-template.json
  10. +3
    -1
      config.py
  11. +4
    -0
      lib/itchat/async_components/messages.py
  12. +1
    -0
      lib/itchat/components/messages.py
  13. +4
    -0
      requirements.txt

+ 4
- 0
bot/bot_factory.py View File

@@ -39,4 +39,8 @@ def create_bot(bot_type):
elif bot_type == const.LINKAI:
from bot.linkai.link_ai_bot import LinkAIBot
return LinkAIBot()

elif bot_type == const.CLAUDEAI:
from bot.claude.claude_ai_bot import ClaudeAIBot
return ClaudeAIBot()
raise RuntimeError

+ 4
- 0
bot/chatgpt/chat_gpt_bot.py View File

@@ -106,6 +106,10 @@ class ChatGPTBot(Bot, OpenAIImage):
reply = Reply(ReplyType.ERROR, "Bot不支持处理{}类型的消息".format(context.type))
return reply

def reply_text(self, session: ChatGPTSession, api_key=None, args=None, retry_count=0) -> dict:
"""
call openai's ChatCompletion to get the answer


+ 229
- 0
bot/claude/claude_ai_bot.py View File

@@ -0,0 +1,229 @@
import re
import time
import json
import uuid

from curl_cffi import requests
from bot.bot import Bot
from bot.chatgpt.chat_gpt_session import ChatGPTSession
from bot.openai.open_ai_image import OpenAIImage
from bot.session_manager import SessionManager
from bridge.context import Context, ContextType
from bridge.reply import Reply, ReplyType
from common.log import logger
from config import conf


class ClaudeAIBot(Bot, OpenAIImage):
    """Bot backed by the claude.ai web interface.

    Authenticates with a browser cookie (``claude_api_cookie``) and uses
    curl_cffi's ``impersonate`` to present a Chrome TLS fingerprint so
    Cloudflare accepts the requests.
    """

    # HTTP status codes claude.ai uses for auth / quota failures
    AUTH_FAILED_CODE = 401  # authentication failed
    NO_QUOTA_CODE = 406

    def __init__(self):
        super().__init__()
        self.sessions = SessionManager(ChatGPTSession, model=conf().get("model") or "gpt-3.5-turbo")
        self.claude_api_cookie = conf().get("claude_api_cookie")
        self.proxy = conf().get("proxy")
        self.proxies = {
            "http": self.proxy,
            "https": self.proxy,
        }
        self.org_uuid = self.get_organization_id()
        self.con_uuid = None
        self.get_uuid()

    def _headers(self, extra=None):
        """Build the browser-like headers shared by all claude.ai requests.

        :param extra: optional dict of additional headers merged on top
        :return: headers dict
        """
        headers = {
            'User-Agent':
                'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
            'Accept-Language': 'en-US,en;q=0.5',
            'Referer': 'https://claude.ai/chats',
            'Content-Type': 'application/json',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin',
            'Connection': 'keep-alive',
            'Cookie': self.claude_api_cookie,
        }
        if extra:
            headers.update(extra)
        return headers

    def generate_uuid(self):
        """Return a fresh conversation uuid in canonical 8-4-4-4-12 form."""
        # str(uuid.uuid4()) is already hyphen-formatted; the original re-sliced
        # it into the byte-identical string, so the slicing was dropped.
        return str(uuid.uuid4())

    def get_uuid(self):
        """Resolve the conversation uuid.

        Reuses the configured ``claude_uuid`` when present; otherwise generates
        a new uuid and registers the conversation on the server.
        """
        claude_uuid = conf().get("claude_uuid")
        if claude_uuid is not None:
            self.con_uuid = claude_uuid
        else:
            self.con_uuid = self.generate_uuid()
            self.create_new_chat()

    def get_organization_id(self):
        """Fetch the uuid of the first organization bound to the cookie account.

        :return: organization uuid string
        :raises Exception: when the request fails or the response is not the
            expected JSON (typically an expired cookie or a Cloudflare block)
        """
        # NOTE: the original file defined this method twice; the second copy
        # shadowed the first and raised UnboundLocalError on failure. Merged
        # into one implementation with explicit error logging.
        url = "https://claude.ai/api/organizations"
        response = None
        try:
            response = requests.get(url, headers=self._headers(), impersonate="chrome110", proxies=self.proxies)
            res = json.loads(response.text)
            return res[0]['uuid']
        except Exception as e:
            body = response.text if response is not None else None
            logger.error(f"[CLAUDEAI] fetch organization id failed: {e}, response={body}")
            raise

    def reply(self, query, context: Context = None) -> Reply:
        """Dispatch an incoming message.

        Text goes to Claude via :meth:`_chat`; image-creation requests go to
        the inherited OpenAI image backend; anything else is rejected.
        """
        if context.type == ContextType.TEXT:
            return self._chat(query, context)
        elif context.type == ContextType.IMAGE_CREATE:
            ok, res = self.create_img(query, 0)
            if ok:
                reply = Reply(ReplyType.IMAGE_URL, res)
            else:
                reply = Reply(ReplyType.ERROR, res)
            return reply
        else:
            reply = Reply(ReplyType.ERROR, "Bot不支持处理{}类型的消息".format(context.type))
            return reply

    def create_new_chat(self):
        """Create a server-side conversation with uuid ``self.con_uuid``.

        :return: JSON of the newly created conversation information
        """
        url = f"https://claude.ai/api/organizations/{self.org_uuid}/chat_conversations"
        payload = json.dumps({"uuid": self.con_uuid, "name": ""})
        headers = self._headers({'Origin': 'https://claude.ai', 'DNT': '1', 'TE': 'trailers'})
        response = requests.post(url, headers=headers, data=payload, impersonate="chrome110", proxies=self.proxies)
        return response.json()

    def _chat(self, query, context, retry_count=0) -> Reply:
        """
        发起对话请求
        :param query: 请求提示词
        :param context: 对话上下文
        :param retry_count: 当前递归重试次数
        :return: 回复
        """
        if retry_count >= 2:
            # exit from retry 2 times
            logger.warning("[CLAUDEAI] failed after maximum number of retry times")
            return Reply(ReplyType.ERROR, "请再问我一次吧")

        try:
            session_id = context["session_id"]
            session = self.sessions.session_query(query, session_id)
            model = conf().get("model") or "gpt-3.5-turbo"
            # remove system message: the wenxin model does not accept a system role
            if session.messages[0].get("role") == "system":
                if model == "wenxin":
                    session.messages.pop(0)

            logger.info(f"[CLAUDEAI] query={query}")

            # do http request
            base_url = "https://claude.ai"
            payload = json.dumps({
                "completion": {
                    "prompt": f"{query}",
                    "timezone": "Asia/Kolkata",
                    "model": "claude-2"
                },
                "organization_uuid": f"{self.org_uuid}",
                "conversation_uuid": f"{self.con_uuid}",
                "text": f"{query}",
                "attachments": []
            })
            headers = self._headers({
                'Accept': 'text/event-stream, text/event-stream',
                'Origin': 'https://claude.ai',
                'DNT': '1',
                'TE': 'trailers',
            })

            res = requests.post(base_url + "/api/append_message", headers=headers, data=payload,
                                impersonate="chrome110", proxies=self.proxies, timeout=400)

            # (the original also accepted any body containing the typo
            # "pemission" as success — a dead check, removed)
            if res.status_code == 200:
                # success: body is an SSE stream of `data: {...}` lines whose
                # `completion` fields concatenate into the full answer
                decoded_data = res.content.decode("utf-8")
                decoded_data = re.sub('\n+', '\n', decoded_data).strip()
                completions = []
                for data_string in decoded_data.split('\n'):
                    json_str = data_string[6:].strip()  # strip the "data: " prefix
                    if not json_str:
                        continue
                    try:
                        data = json.loads(json_str)
                    except json.JSONDecodeError:
                        # skip keep-alive / malformed stream lines instead of aborting
                        continue
                    if 'completion' in data:
                        completions.append(data['completion'])

                reply_content = ''.join(completions)
                # token usage is not reported by this endpoint; 100 is a placeholder
                logger.info(f"[CLAUDE] reply={reply_content}, total_tokens=100")
                self.sessions.session_reply(reply_content, session_id, 100)
                return Reply(ReplyType.TEXT, reply_content)

            else:
                # error body may not be JSON and "error" may be missing — guard both
                try:
                    error = res.json().get("error") or {}
                except Exception:
                    error = {}
                logger.error(f"[CLAUDE] chat failed, status_code={res.status_code}, "
                             f"msg={error.get('message')}, type={error.get('type')}, "
                             f"detail: {res.text}, uuid: {self.con_uuid}")

                if res.status_code >= 500:
                    # server error, need retry
                    time.sleep(2)
                    logger.warning(f"[CLAUDE] do retry, times={retry_count}")
                    return self._chat(query, context, retry_count + 1)

                return Reply(ReplyType.ERROR, "提问太快啦,请休息一下再问我吧")

        except Exception as e:
            logger.exception(e)
            # retry
            time.sleep(2)
            logger.warning(f"[CLAUDE] do retry, times={retry_count}")
            return self._chat(query, context, retry_count + 1)

+ 2
- 0
bridge/bridge.py View File

@@ -29,6 +29,8 @@ class Bridge(object):
self.btype["chat"] = const.XUNFEI
if conf().get("use_linkai") and conf().get("linkai_api_key"):
self.btype["chat"] = const.LINKAI
if model_type in ["claude"]:
self.btype["chat"] = const.CLAUDEAI
self.bots = {}

def get_bot(self, typename):


+ 0
- 1
chatgpt-on-wechat

@@ -1 +0,0 @@
Subproject commit 827e8eddf87b73f310464e3d6c0509e5b6e2ba67

+ 12
- 0
chatgpt-on-wechat-master.iml View File

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/translate" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

+ 3
- 2
common/const.py View File

@@ -5,7 +5,8 @@ BAIDU = "baidu"
XUNFEI = "xunfei"
CHATGPTONAZURE = "chatGPTOnAzure"
LINKAI = "linkai"

VERSION = "1.3.0"

MODEL_LIST = ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "wenxin", "xunfei"]
CLAUDEAI = "claude"
VERSION = "1.3.0"
MODEL_LIST = ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "wenxin", "xunfei", "claude"]

+ 2
- 2
common/log.py View File

@@ -13,14 +13,14 @@ def _reset_logger(log):
console_handle.setFormatter(
logging.Formatter(
"[%(levelname)s][%(asctime)s][%(filename)s:%(lineno)d] - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
datefmt="%Y-%m-%d %H:%M:%S",
)
)
file_handle = logging.FileHandler("run.log", encoding="utf-8")
file_handle.setFormatter(
logging.Formatter(
"[%(levelname)s][%(asctime)s][%(filename)s:%(lineno)d] - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
datefmt="%Y-%m-%d %H:%M:%S",
)
)
log.addHandler(file_handle)


+ 6
- 1
config-template.json View File

@@ -2,8 +2,12 @@
"open_ai_api_key": "YOUR API KEY",
"model": "gpt-3.5-turbo",
"channel_type": "wx",
"proxy": "",
"proxy": "http://127.0.0.1:33210",
"hot_reload": false,
"model": "claude",
"channel_type": "wx",
"claude_api_cookie": "YOUR CLAUDE COOKIE",
"hot_reload": true,
"single_chat_prefix": [
"bot",
"@bot"
@@ -14,6 +18,7 @@
],
"group_name_white_list": [
"ChatGPT测试群",
"高中数学应用题",
"ChatGPT测试群2"
],
"group_chat_in_one_session": [


+ 3
- 1
config.py View File

@@ -120,7 +120,9 @@ available_setting = {
"use_linkai": False,
"linkai_api_key": "",
"linkai_app_code": "",
"linkai_api_base": "https://api.link-ai.chat" # linkAI服务地址,若国内无法访问或延迟较高可改为 https://api.link-ai.tech
"linkai_api_base": "https://api.link-ai.chat", # linkAI服务地址,若国内无法访问或延迟较高可改为 https://api.link-ai.tech
"claude_api_cookie":"",
"claude_uuid":""
}




+ 4
- 0
lib/itchat/async_components/messages.py View File

@@ -349,7 +349,11 @@ def upload_chunk_file(core, fileDir, fileSymbol, fileSize,
('id', (None, 'WU_FILE_0')),
('name', (None, fileName)),
('type', (None, fileType)),
('lastModifiedDate', (None, time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (CST)'))),
('size', (None, str(fileSize))),
('chunks', (None, None)),
('chunk', (None, None)),


+ 1
- 0
lib/itchat/components/messages.py View File

@@ -351,6 +351,7 @@ def upload_chunk_file(core, fileDir, fileSymbol, fileSize,
('name', (None, fileName)),
('type', (None, fileType)),
('lastModifiedDate', (None, time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (CST)'))),
('size', (None, str(fileSize))),
('chunks', (None, None)),
('chunk', (None, None)),


+ 4
- 0
requirements.txt View File

@@ -7,3 +7,7 @@ chardet>=5.1.0
Pillow
pre-commit
web.py
curl_cffi

Loading…
Cancel
Save