Browse source

feat: wechat group reply

master
zhayujie 1 year ago
parent
commit
a1f45344b6
12 changed files with 77 additions and 25 deletions
  1. +2 -1    .gitignore
  2. BIN      bot/__pycache__/bot.cpython-36.pyc
  3. BIN      bot/__pycache__/bot_factory.cpython-36.pyc
  4. BIN      bot/baidu/__pycache__/baidu_unit_bot.cpython-36.pyc
  5. +1 -1    bot/bot_factory.py
  6. +27 -15  bot/chatgpt/chat_gpt_bot.py
  7. BIN      bridge/__pycache__/bridge.cpython-36.pyc
  8. +1 -0    bridge/bridge.py
  9. BIN      channel/__pycache__/channel.cpython-36.pyc
  10. BIN     channel/__pycache__/channel_factory.cpython-36.pyc
  11. BIN     channel/wechat/__pycache__/wechat_channel.cpython-36.pyc
  12. +46 -8  channel/wechat/wechat_channel.py

+2 -1   .gitignore   View file

@@ -1,4 +1,5 @@
.DS_Store
.idea
__pycache__
__pycache__/
venv
*.pyc

BIN   bot/__pycache__/bot.cpython-36.pyc   View file


BIN   bot/__pycache__/bot_factory.cpython-36.pyc   View file


BIN   bot/baidu/__pycache__/baidu_unit_bot.cpython-36.pyc   View file


+1 -1   bot/bot_factory.py   View file

@@ -16,4 +16,4 @@ def create_bot(bot_type):
        return BaiduUnitBot()
    elif bot_type == 'chatGPT':
        return ChatGPTBot()
    raise RuntimeError
    raise RuntimeError
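The bot_factory change above appears to only add the missing trailing newline; the dispatch itself is unchanged: a string key selects the bot class, and any other key raises a bare RuntimeError. A minimal, self-contained sketch of how callers are expected to use it (the stub bot classes, the error message, and the __main__ usage here are illustrative, not the repo's modules):

```python
# Sketch of the factory dispatch shown above. The stub classes stand in for the
# real BaiduUnitBot / ChatGPTBot; only the reply() interface is modeled.

class BaiduUnitBot:
    def reply(self, query, context=None):
        return "baidu: " + query


class ChatGPTBot:
    def reply(self, query, context=None):
        return "chatgpt: " + query


def create_bot(bot_type):
    # String key -> bot instance; unknown keys fail loudly, as in the diff
    # (the message below is added for readability; the diff raises it bare).
    if bot_type == 'baidu':
        return BaiduUnitBot()
    elif bot_type == 'chatGPT':
        return ChatGPTBot()
    raise RuntimeError("unknown bot_type: {}".format(bot_type))


if __name__ == '__main__':
    bot = create_bot('chatGPT')
    # The real ChatGPTBot in this commit expects a context dict with 'from_user_id'.
    print(bot.reply("hello", {'from_user_id': 'user-123'}))
```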

+27 -15   bot/chatgpt/chat_gpt_bot.py   View file

@@ -4,7 +4,7 @@ from revChatGPT.revChatGPT import Chatbot

config = {
"Authorization": "<Your Bearer Token Here>", # This is optional
"session_token": "eyJhbGciOiJkaXIiLCJlbmMiOiJBMjU2R0NNIn0.._BniuRSwMqdGn8t2.Hv_C4nsWF1FWnVGxNs19YZOGLDIWFlyC3FeLlTE9HUEgV1M8IFmhz1aH0j40vutVTzoEv3JDSEZN8je4kzewXE5tcKqhQF_Jh0SSHNL52OdRe_tqu_aE61L39NkvxEkmMRP2teXmWtiBIGtrPX3B77IbHIyqx9Kbk17awwl_WJokr39KlWhHE-Mpf9AwPvOW4UPi2csIxVNsVFW6MjxffRL8QZAqhXaeVlAnpjlpfJrK1B9TRoH7wkj0Q4nQ4MMhenvKXQHhOaa2tfTC79BeOBr8d60b2HPGCiOZuYc4vCqehXRl4VA7_JF9EKEmhgBQAPp0J1LFTC-3i0ZVAUX4zud8UEAsAwkjknQx7BHx9_QUOGil5I4U84WWzsGKxNNj1Jhi3z877V6d90CrTQax_yuuNzV9wb7j_RYx5EbO1MKmOPvL1iksa7xm0SYWsK2fYUID2vW2cE0nAavnGyi7g2-Lf83ARyWAmU8puuEweuEbsS_-1-IP6b3rtapOoZWfWmW74Mm6mmdxBIldrKHTDK8rJanjbp5Dho4YdJaTlAo7Mlg6fwpFk5i-2_QPr-s2E141hLCAhbKFQ6OTKfv3yugF1OoOG0IsXy49yuq3ezznDhtArYgZ6mj-DfHaQakuYrw9aHsfHebiVZ6tuwr_lZr73AuwneD0wGc1sk5nTYqFGn-E9ZC3kgng5Sa_CrOg2RhuNPsuDDsDuJ22ANilQprmDR0N8J-2ASlqkyf_af23QWwaEmjj8pcDrg_FWs7-6Qdju4pSFcLbtbAFh9ORi5SD463LxGxHsRPNCL9LfUZFrVFYVfTKk3mGtb7XQ80p7L1O7_ix6UkiastENknqH-cwk6HCW8VKf2ASp28NoEhKCjW0CHfGT4eHQxxzh2wrb2FWmw7zAXd8_c48o1LY6GOgqBWnp4npMYRwA2yki0cuKR5eVrKHlZZU4g5hSQ5F_Pq0qDZsCrKFCUJ19C3ndqG0XZF70jUfJtTMdUgFh4viDVpVS4FvL35VDTygYnTz7DCRHc9uLlsG8e-7wfCHRcyL8U4HL1KTB2JIZnNMOAHaeiTxAc9PeLxhqSHuG7LttdHPWE0OBUZmMbArR9BzjHp4L3mL-oK8xGil9vuw43TWxpPpcbP_EzuZIBgWlTB4LZg19WmrRq_Ll1eLjXKeCEzoxisyicIdQ8q01iPKZOfVQZCISmgG8Ey6qm8DAk84tJ44p7IWXeE5Ho-SIdW1aR9H7U02ycvtBM24aRs30o_xwqfKfDInrAUsGeJAkA4iKkmUtzaHHYlBZVAITG7-FlsFBgDJQStG7w_4d-JqfqltO6YrbZG2A5EYX8RV5SFgNzEluTp_ZY0A65kThVLvVnldWGb0gkBiC8r-UzYKGgQQJwG-WqnyOvQ5oNMqUErqWaqYg8Z1u7nvZlFG_WeqwfF711huA2eMLIyV3gvdhxSVO9j2IfNsxQoMOIeUcXF_d64q95RgM2Cv5bw-kwRSqumejPhGghjDwv01et6zeHnseoEItHkyfKHw443rqBvitMlERQcvIfF07j0Pgm2-7fBiGFAstS-6swFnydmlKev9ypAKE-vFgH_R4s5R0cy_aYQr16rpmX0pRaDfEA-kDrq4u1PFUijuRQiOG0UN3Ih4E7HPZtKvCqQreUbceihgWsh0_u-Ygh6dpN7-ALf4En6e--FyRePScJ1RUHByJZ_r5R9tPgdFDmC2N38ybXA9CUjHh7uVZtlC5Gb78He5kRqTCKGV-lVqBzubaI-VO-oQfEyui81I3e1XpAAQySITT6d1HtM9HyEk0_CMQ83N5mpUAKaCWT-Pqzby-GmKfet-ct07NiH-8yvvgThMEAl78bk-W8MBjQB6fagHjR93OWMr6usow4noL5sf1gVa1izd1_OLbroUY8kFHN7BCryHsy-WmFnfUzNb6qS-7HsX4qsoBAHmQMlYRj-B9RMa7QC_53f8laQPMnT4c1M1d1yITarmtL9n6cL0Ku096w3c-4735yk-mYpds0KaAclU9_7NjcznEcY3NHRr0LxwxoV6-ClHFO6Ccqgxyk-Kng0J3oGYwgY4QBPuSCphvf9PmbhABeLILx0i-_eeI3gOfAKbZUGDweftoG1Lw7qgB14KOX1fig69PFxp8dGApMOTJvQeXWN7ISPlDZRfmRkSbO5siDsTS4WS01qcgdc_2osmN8aMyB_bP-FG4v3CF8z6Cpp6x-iIyXdHS_riNGydbf5-Et1e5nlYTRAVW4Cgf-P_AAZH82PGIpWsPWO3Lsisli1IdVSHi5Vxfj8OzzE0aC5CuXzvCv18KwCyDgHZ2AZeqGkBHPyptmXyIPspaSlRtq7H7j5w57deaOmJVjHM.iOPoBiz9LmbJxxXkoKyzfQ"
"session_token": "eyJhbGciOiJkaXIiLCJlbmMiOiJBMjU2R0NNIn0..n8p94letJscz9y64.vgWOg1sSU7Wkoxbs81kTB_9rGgXQVmc6i9LgBHzo-EsUatVV-PsGKiAc9g8gaTaAf1pM_QW53ECH7b2Ge80ie2Q-EAsx-qdiLdwfggwob3dXk3zTQmK5pL8_aVNQ1YoMzQNciBXUbHdZhwzZrXPEsXr5eocjNm2fD5LcTR4cBwH4LRo9Z4AZsFBg9SJ9miQLLBdtkBmjWNQfwM_OHhlAKOYAT_aR1e3E0M6M173xbyvsBLwzQ5ol6Fu5ui7to6SSYejen518pm9vDkV3QRaJ6u0W9t8OEbnaTOCv-JR_7_UNgD5dBlRnj4nyh38vj9yGpW9fm6D7HDmUtF5X2RlmECdQBxsJI-Xk0fETqgjsPGC7O2kNJduW2ukwzMN9KaEVlONQYwfZ73TA6-5jXkvkD3rq3qnsWFGoju5GP11RpbgXKxeHBOZzslja6xPQVPSbkwEVsON_JTyOKcCrzP-vlPXWm51ZyE5hYsjhf3h1UonlpxQRuqM9EnKiajetV8gdbBF5QkhdSoGYQhwPZ1sdUQ197GmpWdgMQgnh9VBcpVP-GWB_yAM4Yj4AAJ79Jh4hkUy2YrGaSu3X3jqBZBfy3SXymcGZZpEHU8-jTovw4wFctVpK3l1fAGHppGpbS0mTciXh0-Vw5F1unIYe6v-y-vEPGmhx8wjukruSlagW1vvklHVsj8zIg0cUEU112w18MuxqzpFMa1LBr8Lr6hbODJX_I67QV1LvyVdFGygxtKzOj9sm-ZZFibv2vrXoIGWRutxlmxz1BiCWldFUjNw9S4oSEDe_mk3WommuY7MCt15Ufa4JOmerZD-P2aK-cylfwFgQJhl8Xx2bwSh8feY404p_pgIcaN8Oex5qSkXiz_MChNPbLSysDB7eXPaUDRWP1kVVaye6MLAmOT8RtAsIWjO3Ky6z5j4nE2FtUMNFMiG79iLFj6NSZ4l-8xdpYgvdsEh9pUfAO5zY_WqOwxb1v4ckKIII_3ZZQTz8t3KePJc4jqpvvmFa30agldub9nYglcDS95zDLrboqYF2opNNuVlDrZsXzJHH-t43S5H34dnUY8jENn_qe83DcgV6FJaUI-iln5h_Z0xvRNXi-HFXwavwC2i0otHsUO60fvRThrmv6pgzYVXqbEY4JO_pWbiH_qdxJ4Ky1LKGRHcfw9Kmn32qZhQptbSNPyP3irrC8_DKATbi2q-0gQ314Loc2VJGphvW9qmsBC8MXjdyZMYqZcXHcoed-E5_JgDWPaSfdrrpfP_ct1uis-x-yIdylLK8wQ7XHuCxfHVUDePqbLw59XUYOO7TVRi5tPvoh8-J0fvYHwGqLUSULAEV0AfggYOIMODHgq_tCLHOZMVEKzvGspPtvBtZUU64F-l4XWX9AwRqmWD654lhV3BR1Ea7FEVh1Mb1trntcd6JGMERqHVFoLxlbTk7_BFWGwfjPSs-8uzR7MBYVXMmscmICzHp_EJbG1zqmrZfszN_TS5pXB5sKKoe8bEJpDznCa3GlyQxlftMYFtyTXGmi8N7M-ZxPBHX0SIlsuunaKoNAKTqJM5rezn1iXZYANkDgom115sc6rpILNgTUQMzJawYmnVNjtfdLEHRBpPymPohlMwCPTst3Fh0xNaUUg7lEAIfFWRnl-lAxxqpTb1z1nz_6fqaNXEa4be9cxr23LLnz7XESan4nQSvusewWKdOO3HP4MoubrCE1rfVY68vG6mg3eV78dMy40_8ee40p0eYVdRSCBXJg-QSFCd8no0BCCFI0QhQ_rc3Al6boIaGun6UhsTSsHudSAUHuwRS1FpsxHo7X2jI80avFUuNtZhRCgNSru81hfozNhf7X5K46Fq5MQpRY32CMxTYu6hSfSdlvtbnVvaFiLLxqAg73Gr7L7Je2BAy1Feg6b13JMkEWObDnA1GhdWVs6p81MSB4Id2vFKpUaIjuJDuW2PTnE0BQCHtUhxbgac9yqyx3L7IlURrCvaAs31LtU9KsPJhE-_vjss3rKMY51xBKMxThwP7UP90mXm9llUnFzdho9jERYEPigJMSyZBSHTiETU-fHg3JcQbGs3ncrTdd_EDieEEgugYcxkJtt4QuUuiRoK3jTf8T0UbqEZWihtp_quSuyWXHSdtY2XbzHrY3cWeUhdmTsg6VdWQVM7R2BlXj0tbRN3mhpggLXVIeJskp8h7MLOo90DlwX8j2sSWTggVjVLmL39dk2MV0orevquoLmZNg2vP_dYs-w7nJLRI1jdvYw3T87JlEV09gRL_YCzdr5vBx1PcAEkSe6E5W0qI6rXXNZ7DpXoCHAlVVuxgtW_nz7zkCv1twhYYTRCpI6TdIVCOcMU_D06-WYNgflzzqz_Cm2J5lHu4.UeCj_srwxF4V4x92z7lAjA"
}
chatbot = Chatbot(config)
user_session = dict()
@@ -13,28 +13,40 @@ last_session_refresh = time.time()

class ChatGPTBot(Bot):
    def reply(self, query, context=None):

        from_user_id = context['from_user_id']
        print("[GPT]query={}, user_id={}, session={}".format(query, from_user_id, user_session))

        now = time.time()
        global last_session_refresh
        if now - last_session_refresh > 60 * 8:
            print('[GPT]session refresh, now={}, last={}'.format(now, last_session_refresh))
            chatbot.refresh_session()
            last_session_refresh = now

        if from_user_id in user_session:
            if time.time() - user_session[from_user_id]['last_reply_time'] < 60 * 3:
            if time.time() - user_session[from_user_id]['last_reply_time'] < 60 * 5:
                chatbot.conversation_id = user_session[from_user_id]['conversation_id']
                chatbot.parent_id = user_session[from_user_id]['parent_id']
            else:
                chatbot.reset_chat()
        else:
            chatbot.reset_chat()

        now = time.time()
        global last_session_refresh
        if now - last_session_refresh > 60 * 10:
            chatbot.refresh_session()
            last_session_refresh = now
        print("[GPT]convId={}, parentId={}".format(chatbot.conversation_id, chatbot.parent_id))

        res = chatbot.get_chat_response(query, output="text")

        user_cache = dict()
        user_cache['last_reply_time'] = time.time()
        user_cache['conversation_id'] = res['conversation_id']
        user_cache['parent_id'] = res['parent_id']
        user_session[from_user_id] = user_cache

        print("[GPT]user={}, convId={}, content={}", from_user_id, res['conversation_id'], res['message'])
        return res['message']
        try:
            res = chatbot.get_chat_response(query, output="text")
            print("[GPT]userId={}, res={}".format(from_user_id, res))

            user_cache = dict()
            user_cache['last_reply_time'] = time.time()
            user_cache['conversation_id'] = res['conversation_id']
            user_cache['parent_id'] = res['parent_id']
            user_session[from_user_id] = user_cache
            return res['message']
        except Exception as e:
            print(e)
            return None
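In the rewritten reply() above, the commit moves the session refresh to the top of the method (every 8 minutes instead of 10, with a log line), widens the per-user conversation window from 3 to 5 minutes, and wraps the revChatGPT call in try/except so a failure returns None instead of crashing the handler thread. A self-contained sketch of that caching pattern, with a fake Chatbot standing in for revChatGPT (the constant names, FakeChatbot, and the __main__ call are illustrative, not the repo's):

```python
import time

SESSION_EXPIRES_IN = 60 * 5   # reuse a user's conversation for 5 minutes, as in the new code
REFRESH_INTERVAL = 60 * 8     # refresh the ChatGPT web session every 8 minutes

user_session = {}             # from_user_id -> {last_reply_time, conversation_id, parent_id}
last_session_refresh = time.time()


class FakeChatbot:
    """Stand-in for revChatGPT's Chatbot; only the fields the diff touches are modeled."""
    conversation_id = None
    parent_id = None

    def refresh_session(self):
        pass

    def reset_chat(self):
        self.conversation_id = None
        self.parent_id = None

    def get_chat_response(self, query, output="text"):
        return {'conversation_id': 'conv-1', 'parent_id': 'msg-1', 'message': 'echo: ' + query}


chatbot = FakeChatbot()


def reply(query, from_user_id):
    global last_session_refresh
    now = time.time()
    if now - last_session_refresh > REFRESH_INTERVAL:
        chatbot.refresh_session()
        last_session_refresh = now

    cached = user_session.get(from_user_id)
    if cached and now - cached['last_reply_time'] < SESSION_EXPIRES_IN:
        # Continue the user's existing ChatGPT conversation.
        chatbot.conversation_id = cached['conversation_id']
        chatbot.parent_id = cached['parent_id']
    else:
        # Stale or unknown user: start a fresh conversation.
        chatbot.reset_chat()

    try:
        res = chatbot.get_chat_response(query, output="text")
    except Exception as e:
        print(e)
        return None  # caller checks for None and skips sending

    user_session[from_user_id] = {
        'last_reply_time': time.time(),
        'conversation_id': res['conversation_id'],
        'parent_id': res['parent_id'],
    }
    return res['message']


if __name__ == '__main__':
    print(reply("hello", "user-123"))
```

Note that the diff keeps a single module-level chatbot shared by all users, and the channel dispatches replies on a thread pool, so conversation_id/parent_id are re-pointed immediately before each call; concurrent requests from different users can still interleave on that shared object.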

BIN   bridge/__pycache__/bridge.cpython-36.pyc   View file


+1 -0   bridge/bridge.py   View file

@@ -7,3 +7,4 @@ class Bridge(object):

    def fetch_reply_content(self, query, context):
        return bot_factory.create_bot("chatGPT").reply(query, context)
        # return bot_factory.create_bot("baidu").reply(query, context)
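bridge.py keeps the Baidu path around as a commented-out one-liner, so switching bots is a one-line edit. For orientation, a sketch of the layering this commit relies on, channel -> bridge -> bot factory; the Channel.build_reply_content -> Bridge hop is assumed here, since the Channel base class is not part of this diff:

```python
# Simplified sketch of the assumed call chain; the real classes live in
# channel/, bridge/ and bot/, and the Bridge delegates to bot_factory.

class Bridge:
    def fetch_reply_content(self, query, context):
        # In the repo: bot_factory.create_bot("chatGPT").reply(query, context)
        return "reply to: " + query


class Channel:
    def build_reply_content(self, query, context=None):
        return Bridge().fetch_reply_content(query, context)


class WechatChannel(Channel):
    def _do_send(self, send_msg, reply_user_id):
        context = {'from_user_id': reply_user_id}
        content = self.build_reply_content(send_msg, context)
        if content:                    # None means the bot failed; stay silent
            print("[bot] " + content)  # the real channel calls itchat.send(...)


if __name__ == '__main__':
    WechatChannel()._do_send("hello", "user-123")
```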

BIN   channel/__pycache__/channel.cpython-36.pyc   View file


BIN   channel/__pycache__/channel_factory.cpython-36.pyc   View file


BIN   channel/wechat/__pycache__/wechat_channel.cpython-36.pyc   View file


+46 -8   channel/wechat/wechat_channel.py   View file

@@ -9,11 +9,17 @@ from concurrent.futures import ThreadPoolExecutor

thead_pool = ThreadPoolExecutor(max_workers=8)

@itchat.msg_register([TEXT])
def handler_receive_msg(msg):

@itchat.msg_register(TEXT)
def handler_single_msg(msg):
    WechatChannel().handle(msg)


@itchat.msg_register(TEXT, isGroupChat=True)
def handler_group_msg(msg):
    WechatChannel().handle_group(msg)


class WechatChannel(Channel):
    def __init__(self):
        pass
@@ -26,19 +32,51 @@ class WechatChannel(Channel):
        itchat.run()

    def handle(self, msg):
        # print("handle: ", msg)
        print("[WX]receive msg: " + json.dumps(msg, ensure_ascii=False))
        from_user_id = msg['FromUserName']
        other_user_id = msg['User']['UserName']
        if from_user_id == other_user_id:
            thead_pool.submit(self._do_send, msg['Text'], from_user_id)
        content = msg['Text']
        if from_user_id == other_user_id and (content.lower().startswith('bot') or content.lower().startswith('@bot')):
            str_list = content.split('bot', 1)
            if len(str_list) == 2:
                content = str_list[1].strip()
            thead_pool.submit(self._do_send, content, from_user_id)

    def handle_group(self, msg):
        group_white_list = ['学就完事了', '小宝群', '全天乱斗模式', '戒赌吧', '命苦还要快乐','攒钱让姐妹当小三的组织']
        print("[WX]receive group msg: " + json.dumps(msg, ensure_ascii=False))
        group_id = msg['User']['UserName']
        group_name = msg['User'].get('NickName', None)
        if not group_name:
            return ""
        origin_content = msg['Content']
        content = msg['Content']
        content_list = content.split(' ', 1)
        context_special_list = content.split('\u2005', 1)
        if len(context_special_list) == 2:
            content = context_special_list[1]
        elif len(content_list) == 2:
            content = content_list[1]

        if group_name in group_white_list and (msg['IsAt'] or origin_content.lower().startswith('@bot')):
            thead_pool.submit(self._do_send_group, content, msg)

    def send(self, msg, receiver):
        # time.sleep(random.randint(1, 3))
        print(msg, receiver)
        itchat.send(msg + " [bot]", toUserName=receiver)
        print('[WX] sendMsg={}, receiver={}'.format(msg, receiver))
        itchat.send(msg, toUserName=receiver)

    def _do_send(self, send_msg, reply_user_id):
        context = dict()
        context['from_user_id'] = reply_user_id
        self.send(super().build_reply_content(send_msg, context), reply_user_id)
        content = super().build_reply_content(send_msg, context)
        if content:
            self.send("[bot] " + content, reply_user_id)

    def _do_send_group(self, content, msg):
        context = dict()
        context['from_user_id'] = msg['ActualUserName']
        reply_text = super().build_reply_content(content, context)
        reply_text = '@' + msg['ActualNickName'] + ' ' + reply_text
        if reply_text:
            self.send(reply_text, msg['User']['UserName'])
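The new group path above only answers when the group's NickName is in the hard-coded whitelist and the bot is @-mentioned (msg['IsAt']) or the text starts with "@bot". WeChat appears to separate the @-nickname from the message body with U+2005 (a four-per-em space), which is why the handler tries splitting on '\u2005' before falling back to a regular space. A self-contained sketch of that trigger and extraction logic (GROUP_WHITE_LIST, extract_group_query, and the sample msg dict are illustrative names, not the repo's):

```python
GROUP_WHITE_LIST = ['test group']  # stands in for the hard-coded whitelist above


def extract_group_query(msg):
    """Return the text to forward to the bot, or None if the bot should stay silent."""
    group_name = msg['User'].get('NickName')
    if not group_name or group_name not in GROUP_WHITE_LIST:
        return None

    origin_content = msg['Content']
    # Only reply when the bot is @-mentioned or the message starts with "@bot".
    if not (msg['IsAt'] or origin_content.lower().startswith('@bot')):
        return None

    # Strip the "@SomeBot" mention: WeChat inserts U+2005 between the mention
    # and the body, so split on that first and fall back to a normal space.
    parts = origin_content.split('\u2005', 1)
    if len(parts) != 2:
        parts = origin_content.split(' ', 1)
    return parts[1] if len(parts) == 2 else origin_content


if __name__ == '__main__':
    msg = {
        'User': {'NickName': 'test group', 'UserName': '@@group-id'},
        'Content': '@bot\u2005hello there',
        'IsAt': True,
        'ActualNickName': 'alice',
    }
    print(extract_group_query(msg))  # -> "hello there"
```

One detail worth noting in the diff itself: _do_send_group prepends '@' + msg['ActualNickName'] to reply_text before the if reply_text: check, so a None reply from build_reply_content would raise a TypeError there rather than being skipped the way _do_send skips it.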
