diff --git a/bridge/bridge.py b/bridge/bridge.py
index 6733701..596c67c 100644
--- a/bridge/bridge.py
+++ b/bridge/bridge.py
@@ -19,38 +19,42 @@ class Bridge(object):
             "translate": conf().get("translate", "baidu"),
         }
         # Pick the configured model here
-        model_type = conf().get("model") or const.GPT35
-        if model_type in ["text-davinci-003"]:
-            self.btype["chat"] = const.OPEN_AI
-        if conf().get("use_azure_chatgpt", False):
-            self.btype["chat"] = const.CHATGPTONAZURE
-        if model_type in ["wenxin", "wenxin-4"]:
-            self.btype["chat"] = const.BAIDU
-        if model_type in ["xunfei"]:
-            self.btype["chat"] = const.XUNFEI
-        if model_type in [const.QWEN]:
-            self.btype["chat"] = const.QWEN
-        if model_type in [const.QWEN_TURBO, const.QWEN_PLUS, const.QWEN_MAX]:
-            self.btype["chat"] = const.QWEN_DASHSCOPE
-        if model_type in [const.GEMINI]:
-            self.btype["chat"] = const.GEMINI
-        if model_type in [const.ZHIPU_AI]:
-            self.btype["chat"] = const.ZHIPU_AI
-        if model_type and model_type.startswith("claude-3"):
-            self.btype["chat"] = const.CLAUDEAPI
+        bot_type = conf().get("bot_type")
+        if bot_type:
+            self.btype["chat"] = bot_type
+        else:
+            model_type = conf().get("model") or const.GPT35
+            if model_type in ["text-davinci-003"]:
+                self.btype["chat"] = const.OPEN_AI
+            if conf().get("use_azure_chatgpt", False):
+                self.btype["chat"] = const.CHATGPTONAZURE
+            if model_type in ["wenxin", "wenxin-4"]:
+                self.btype["chat"] = const.BAIDU
+            if model_type in ["xunfei"]:
+                self.btype["chat"] = const.XUNFEI
+            if model_type in [const.QWEN]:
+                self.btype["chat"] = const.QWEN
+            if model_type in [const.QWEN_TURBO, const.QWEN_PLUS, const.QWEN_MAX]:
+                self.btype["chat"] = const.QWEN_DASHSCOPE
+            if model_type in [const.GEMINI]:
+                self.btype["chat"] = const.GEMINI
+            if model_type in [const.ZHIPU_AI]:
+                self.btype["chat"] = const.ZHIPU_AI
+            if model_type and model_type.startswith("claude-3"):
+                self.btype["chat"] = const.CLAUDEAPI

-        if model_type in ["claude"]:
-            self.btype["chat"] = const.CLAUDEAI
+            if model_type in ["claude"]:
+                self.btype["chat"] = const.CLAUDEAI

-        if model_type in ["moonshot-v1-8k", "moonshot-v1-32k", "moonshot-v1-128k"]:
-            self.btype["chat"] = const.MOONSHOT
+            if model_type in ["moonshot-v1-8k", "moonshot-v1-32k", "moonshot-v1-128k"]:
+                self.btype["chat"] = const.MOONSHOT

-        if conf().get("use_linkai") and conf().get("linkai_api_key"):
-            self.btype["chat"] = const.LINKAI
-            if not conf().get("voice_to_text") or conf().get("voice_to_text") in ["openai"]:
-                self.btype["voice_to_text"] = const.LINKAI
-            if not conf().get("text_to_voice") or conf().get("text_to_voice") in ["openai", const.TTS_1, const.TTS_1_HD]:
-                self.btype["text_to_voice"] = const.LINKAI
+            if conf().get("use_linkai") and conf().get("linkai_api_key"):
+                self.btype["chat"] = const.LINKAI
+                if not conf().get("voice_to_text") or conf().get("voice_to_text") in ["openai"]:
+                    self.btype["voice_to_text"] = const.LINKAI
+                if not conf().get("text_to_voice") or conf().get("text_to_voice") in ["openai", const.TTS_1, const.TTS_1_HD]:
+                    self.btype["text_to_voice"] = const.LINKAI

         self.bots = {}
         self.chat_bots = {}
diff --git a/config.py b/config.py
index bff03bb..600091e 100644
--- a/config.py
+++ b/config.py
@@ -18,6 +18,7 @@ available_setting = {
     "proxy": "",  # proxy used for openai requests
     # chatgpt model; when use_azure_chatgpt is true, this is the model deployment name on Azure
     "model": "gpt-3.5-turbo",  # supports ChatGPT, Claude, Gemini, Wenxin, Tongyi Qianwen, Kimi, Xunfei Spark, Zhipu, LinkAI, etc.; see common/const.py for the exact model names
+    "bot_type": "",  # optional; when using a third-party service compatible with the OpenAI API format, set this to "chatGPT". See common/const.py for the available bot_type values; if left empty, the bot is inferred from the model name
     "use_azure_chatgpt": False,  # whether to use Azure's chatgpt
     "azure_deployment_id": "",  # Azure model deployment name
     "azure_api_version": "",  # Azure API version
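Note: the patch makes an explicitly configured bot_type take precedence over the model-name heuristic in Bridge.__init__. Below is a minimal sketch of the resulting selection order, using hypothetical values; settings and resolve_chat_type are illustrative names, not part of the patch, and the model-name fallback chain is elided.

    # Hypothetical settings illustrating the new branch: when "bot_type" is set,
    # the chat backend is taken directly from it, regardless of "model".
    settings = {
        "model": "my-proxy-model",  # a name the model-based checks would not recognize
        "bot_type": "chatGPT",      # force the OpenAI-compatible bot, as the new config comment describes
    }

    def resolve_chat_type(cfg: dict, default: str = "chatGPT") -> str:
        # Mirrors the patched logic: an explicit bot_type wins; otherwise the
        # original model-name checks (omitted here) would decide the backend.
        bot_type = cfg.get("bot_type")
        if bot_type:
            return bot_type
        return default  # stand-in for the model_type fallback chain

    print(resolve_chat_type(settings))  # -> "chatGPT"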