@@ -44,7 +44,7 @@ def create_bot(bot_type):
         from bot.claude.claude_ai_bot import ClaudeAIBot
         return ClaudeAIBot()
-    elif bot_type == const.TONGYI:
+    elif bot_type == const.QWEN:
         from bot.tongyi.tongyi_qwen_bot import TongyiQwenBot
         return TongyiQwenBot()
     raise RuntimeError
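
The factory dispatches on the constants defined in const.py, so the rename has to land in both files together. A minimal sketch of the resulting call path, assuming the module paths used in this project (common.const, bot.bot_factory):

from common import const                # assumed module path
from bot.bot_factory import create_bot  # assumed module path

bot = create_bot(const.QWEN)            # now reaches the elif branch above and returns TongyiQwenBot()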
@@ -20,14 +20,14 @@ from config import conf, load_config
 class TongyiQwenBot(Bot):
     def __init__(self):
         super().__init__()
-        self.access_key_id = conf().get("tongyi_access_key_id")
-        self.access_key_secret = conf().get("tongyi_access_key_secret")
-        self.agent_key = conf().get("tongyi_agent_key")
-        self.app_id = conf().get("tongyi_app_id")
-        self.node_id = conf().get("tongyi_node_id")
+        self.access_key_id = conf().get("qwen_access_key_id")
+        self.access_key_secret = conf().get("qwen_access_key_secret")
+        self.agent_key = conf().get("qwen_agent_key")
+        self.app_id = conf().get("qwen_app_id")
+        self.node_id = conf().get("qwen_node_id") or ""
         self.api_key_client = broadscope_bailian.AccessTokenClient(access_key_id=self.access_key_id, access_key_secret=self.access_key_secret)
         self.api_key_expired_time = self.set_api_key()
-        self.sessions = SessionManager(BaiduWenxinSession, model=conf().get("model") or "tongyi")
+        self.sessions = SessionManager(BaiduWenxinSession, model=conf().get("model") or "qwen")
         self.temperature = conf().get("temperature", 0.2)  # value in [0, 1]; larger values make replies more random
         self.top_p = conf().get("top_p", 1)
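
Beyond the key rename, qwen_node_id now falls back to an empty string, so an unset entry no longer leaves self.node_id as None. A tiny sketch of that effect, assuming conf() behaves like a plain dict:

settings = {}                                   # qwen_node_id not configured
node_id = settings.get("qwen_node_id") or ""    # "" instead of None
assert node_id == ""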
@@ -35,8 +35,8 @@ class Bridge(object):
             self.btype["text_to_voice"] = const.LINKAI
         if model_type in ["claude"]:
             self.btype["chat"] = const.CLAUDEAI
-        if model_type in ["tongyi"]:
-            self.btype["chat"] = const.TONGYI
+        if model_type in [const.QWEN]:
+            self.btype["chat"] = const.QWEN
         self.bots = {}
         self.chat_bots = {}
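
Replacing the "tongyi" literal with const.QWEN ties the bridge routing to the shared constant. A condensed sketch of the effect, with the surrounding Bridge fields simplified to a bare dict:

QWEN = "qwen"                  # mirrors the constant introduced in this diff
model_type = QWEN              # what conf().get("model") would return for Qwen
btype = {"chat": "chatGPT"}    # simplified default routing table
if model_type in [QWEN]:
    btype["chat"] = QWEN
assert btype["chat"] == "qwen"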
@@ -6,7 +6,7 @@ XUNFEI = "xunfei"
 CHATGPTONAZURE = "chatGPTOnAzure"
 LINKAI = "linkai"
 CLAUDEAI = "claude"
-TONGYI = "tongyi"
+QWEN = "qwen"
 # model
 GPT35 = "gpt-3.5-turbo"
@@ -17,7 +17,7 @@ WHISPER_1 = "whisper-1"
 TTS_1 = "tts-1"
 TTS_1_HD = "tts-1-hd"
-MODEL_LIST = ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "wenxin", "wenxin-4", "xunfei", "claude", "gpt-4-turbo", GPT4_TURBO_PREVIEW, "tongyi"]
+MODEL_LIST = ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "wenxin", "wenxin-4", "xunfei", "claude", "gpt-4-turbo", GPT4_TURBO_PREVIEW, QWEN]
 # channel
 FEISHU = "feishu"
@@ -16,7 +16,7 @@ available_setting = {
     "open_ai_api_base": "https://api.openai.com/v1",
     "proxy": "",  # proxy used for openai requests
     # chatgpt model; when use_azure_chatgpt is true, this is the model deployment name on Azure
-    "model": "gpt-3.5-turbo",  # also supports gpt-4, gpt-4-turbo, wenxin, xunfei, tongyi
+    "model": "gpt-3.5-turbo",  # also supports gpt-4, gpt-4-turbo, wenxin, xunfei, qwen
     "use_azure_chatgpt": False,  # whether to use Azure chatgpt
     "azure_deployment_id": "",  # Azure model deployment name
     "azure_api_version": "",  # Azure api version
@@ -68,11 +68,11 @@ available_setting = {
     "claude_api_cookie": "",
     "claude_uuid": "",
     # Tongyi Qianwen API; see https://help.aliyun.com/document_detail/2587494.html for how to obtain these keys
-    "tongyi_access_key_id": "",
-    "tongyi_access_key_secret": "",
-    "tongyi_agent_key": "",
-    "tongyi_app_id": "",
-    "tongyi_node_id": "",  # id used by the flow-orchestration model; if tongyi_node_id is not used, keep it as an empty string
+    "qwen_access_key_id": "",
+    "qwen_access_key_secret": "",
+    "qwen_agent_key": "",
+    "qwen_app_id": "",
+    "qwen_node_id": "",  # id used by the flow-orchestration model; if qwen_node_id is not used, keep it as an empty string
     # general wework settings
     "wework_smart": True,  # whether wework uses the already-logged-in WeCom client; False means running multiple instances
     # voice settings
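
Anyone upgrading an existing deployment needs to rename the five tongyi_* entries in their own configuration to the qwen_* keys above. A hypothetical settings fragment after the change (key names taken from this diff; the file name and exact format of the user config are not shown here):

qwen_settings = {
    "model": "qwen",
    "qwen_access_key_id": "YOUR_ACCESS_KEY_ID",
    "qwen_access_key_secret": "YOUR_ACCESS_KEY_SECRET",
    "qwen_agent_key": "YOUR_AGENT_KEY",
    "qwen_app_id": "YOUR_APP_ID",
    "qwen_node_id": "",  # keep empty unless a flow-orchestration node id is actually used
}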