diff --git a/api/db/init_data.py b/api/db/init_data.py index 89e0cf47023..36c25a793fc 100644 --- a/api/db/init_data.py +++ b/api/db/init_data.py @@ -152,6 +152,11 @@ def init_superuser(): "logo": "", "tags": "TEXT EMBEDDING, TEXT RE-RANK", "status": "1", +},{ + "name": "MiniMax", + "logo": "", + "tags": "LLM,TEXT EMBEDDING", + "status": "1", } # { # "name": "文心一言", @@ -536,6 +541,49 @@ def init_llm_factory(): "max_tokens": 2048, "model_type": LLMType.RERANK.value }, + # ------------------------ MiniMax ----------------------- + { + "fid": factory_infos[13]["name"], + "llm_name": "abab6.5-chat", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": LLMType.CHAT.value + }, + { + "fid": factory_infos[13]["name"], + "llm_name": "abab6.5s-chat", + "tags": "LLM,CHAT,245k", + "max_tokens": 245760, + "model_type": LLMType.CHAT.value + }, + { + "fid": factory_infos[13]["name"], + "llm_name": "abab6.5t-chat", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": LLMType.CHAT.value + }, + { + "fid": factory_infos[13]["name"], + "llm_name": "abab6.5g-chat", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": LLMType.CHAT.value + }, + { + "fid": factory_infos[13]["name"], + "llm_name": "abab5.5-chat", + "tags": "LLM,CHAT,16k", + "max_tokens": 16384, + "model_type": LLMType.CHAT.value + }, + { + "fid": factory_infos[13]["name"], + "llm_name": "abab5.5s-chat", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": LLMType.CHAT.value + }, ] for info in factory_infos: try: diff --git a/rag/llm/__init__.py b/rag/llm/__init__.py index a97cc976262..c3d58f871c8 100644 --- a/rag/llm/__init__.py +++ b/rag/llm/__init__.py @@ -51,7 +51,8 @@ "Xinference": XinferenceChat, "Moonshot": MoonshotChat, "DeepSeek": DeepSeekChat, - "BaiChuan": BaiChuanChat + "BaiChuan": BaiChuanChat, + "MiniMax": MiniMaxChat } diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py index b15dd092a0d..2dc63fd99ce 100644 --- a/rag/llm/chat_model.py +++ b/rag/llm/chat_model.py 
@@ -464,3 +464,11 @@ def chat_streamly(self, system, history, gen_conf): except Exception as e: yield ans + "\n**ERROR**: " + str(e) yield tk_count + + +class MiniMaxChat(Base): + def __init__(self, key, model_name="abab6.5s-chat", + base_url="https://api.minimax.chat/v1/text/chatcompletion_v2"): + if not base_url: + base_url="https://api.minimax.chat/v1/text/chatcompletion_v2" + super().__init__(key, model_name, base_url)