diff --git a/api/apps/llm_app.py b/api/apps/llm_app.py index 638db1851c..2e117cadfa 100644 --- a/api/apps/llm_app.py +++ b/api/apps/llm_app.py @@ -57,8 +57,8 @@ def set_api_key(): mdl = ChatModel[factory]( req["api_key"], llm.llm_name, base_url=req.get("base_url")) try: - m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}], { - "temperature": 0.9}) + m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}], + {"temperature": 0.9,'max_tokens':50}) if not tc: raise Exception(m) except Exception as e: diff --git a/api/db/init_data.py b/api/db/init_data.py index e1e79057de..1f4fd35a93 100644 --- a/api/db/init_data.py +++ b/api/db/init_data.py @@ -89,904 +89,29 @@ def init_superuser(): tenant["embd_id"])) -factory_infos = [{ - "name": "OpenAI", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", - "status": "1", -}, { - "name": "Tongyi-Qianwen", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", - "status": "1", -}, { - "name": "ZHIPU-AI", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", - "status": "1", -}, - { - "name": "Ollama", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", - "status": "1", -}, { - "name": "Moonshot", - "logo": "", - "tags": "LLM,TEXT EMBEDDING", - "status": "1", -}, { - "name": "FastEmbed", - "logo": "", - "tags": "TEXT EMBEDDING", - "status": "1", -}, { - "name": "Xinference", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION,TEXT RE-RANK", - "status": "1", -},{ - "name": "Youdao", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", - "status": "1", -},{ - "name": "DeepSeek", - "logo": "", - "tags": "LLM", - "status": "1", -},{ - "name": "VolcEngine", - "logo": "", - "tags": "LLM, TEXT EMBEDDING", - "status": "1", -},{ - "name": "BaiChuan", - "logo": "", - "tags": "LLM,TEXT EMBEDDING", - "status": "1", -},{ - "name": "Jina", - "logo": "", - "tags": "TEXT EMBEDDING, TEXT RE-RANK", - "status": "1", -},{ - "name": "BAAI", - "logo": "", - "tags": "TEXT EMBEDDING, TEXT RE-RANK", - "status": "1", -},{ - "name": "MiniMax", - "logo": "", - "tags": "LLM,TEXT EMBEDDING", - "status": "1", -},{ - "name": "Mistral", - "logo": "", - "tags": "LLM,TEXT EMBEDDING", - "status": "1", -},{ - "name": "Azure-OpenAI", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", - "status": "1", -},{ - "name": "Bedrock", - "logo": "", - "tags": "LLM,TEXT EMBEDDING", - "status": "1", -},{ - "name": "Gemini", - "logo": "", - "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT", - "status": "1", -}, -{ - "name": "Groq", - "logo": "", - "tags": "LLM", - "status": "1", -} - # { - # "name": "文心一言", - # "logo": "", - # "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", - # "status": "1", - # }, -] - - def init_llm_factory(): - llm_infos = [ - # ---------------------- OpenAI ------------------------ - { - "fid": factory_infos[0]["name"], - "llm_name": "gpt-4o", - "tags": "LLM,CHAT,128K", - "max_tokens": 128000, - "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "gpt-3.5-turbo", - "tags": "LLM,CHAT,4K", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "gpt-3.5-turbo-16k-0613", - "tags": "LLM,CHAT,16k", - "max_tokens": 16385, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "text-embedding-ada-002", - "tags": "TEXT EMBEDDING,8K", - "max_tokens": 8191, - "model_type": 
LLMType.EMBEDDING.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "text-embedding-3-small", - "tags": "TEXT EMBEDDING,8K", - "max_tokens": 8191, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "text-embedding-3-large", - "tags": "TEXT EMBEDDING,8K", - "max_tokens": 8191, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "whisper-1", - "tags": "SPEECH2TEXT", - "max_tokens": 25 * 1024 * 1024, - "model_type": LLMType.SPEECH2TEXT.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "gpt-4", - "tags": "LLM,CHAT,8K", - "max_tokens": 8191, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "gpt-4-turbo", - "tags": "LLM,CHAT,8K", - "max_tokens": 8191, - "model_type": LLMType.CHAT.value - },{ - "fid": factory_infos[0]["name"], - "llm_name": "gpt-4-32k", - "tags": "LLM,CHAT,32K", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[0]["name"], - "llm_name": "gpt-4-vision-preview", - "tags": "LLM,CHAT,IMAGE2TEXT", - "max_tokens": 765, - "model_type": LLMType.IMAGE2TEXT.value - }, - # ----------------------- Qwen ----------------------- - { - "fid": factory_infos[1]["name"], - "llm_name": "qwen-turbo", - "tags": "LLM,CHAT,8K", - "max_tokens": 8191, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[1]["name"], - "llm_name": "qwen-plus", - "tags": "LLM,CHAT,32K", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[1]["name"], - "llm_name": "qwen-max-1201", - "tags": "LLM,CHAT,6K", - "max_tokens": 5899, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[1]["name"], - "llm_name": "text-embedding-v2", - "tags": "TEXT EMBEDDING,2K", - "max_tokens": 2048, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[1]["name"], - "llm_name": "paraformer-realtime-8k-v1", - "tags": "SPEECH2TEXT", - "max_tokens": 25 * 1024 * 1024, - "model_type": LLMType.SPEECH2TEXT.value - }, { - "fid": factory_infos[1]["name"], - "llm_name": "qwen-vl-max", - "tags": "LLM,CHAT,IMAGE2TEXT", - "max_tokens": 765, - "model_type": LLMType.IMAGE2TEXT.value - }, - # ---------------------- ZhipuAI ---------------------- - { - "fid": factory_infos[2]["name"], - "llm_name": "glm-3-turbo", - "tags": "LLM,CHAT,", - "max_tokens": 128 * 1000, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[2]["name"], - "llm_name": "glm-4", - "tags": "LLM,CHAT,", - "max_tokens": 128 * 1000, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[2]["name"], - "llm_name": "glm-4v", - "tags": "LLM,CHAT,IMAGE2TEXT", - "max_tokens": 2000, - "model_type": LLMType.IMAGE2TEXT.value - }, - { - "fid": factory_infos[2]["name"], - "llm_name": "embedding-2", - "tags": "TEXT EMBEDDING", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, - # ------------------------ Moonshot ----------------------- - { - "fid": factory_infos[4]["name"], - "llm_name": "moonshot-v1-8k", - "tags": "LLM,CHAT,", - "max_tokens": 7900, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[4]["name"], - "llm_name": "moonshot-v1-32k", - "tags": "LLM,CHAT,", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[4]["name"], - "llm_name": "moonshot-v1-128k", - "tags": "LLM,CHAT", - "max_tokens": 128 * 1000, - "model_type": LLMType.CHAT.value - }, - # ------------------------ FastEmbed ----------------------- - { - "fid": factory_infos[5]["name"], - 
"llm_name": "BAAI/bge-small-en-v1.5", - "tags": "TEXT EMBEDDING,", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[5]["name"], - "llm_name": "BAAI/bge-small-zh-v1.5", - "tags": "TEXT EMBEDDING,", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, { - }, { - "fid": factory_infos[5]["name"], - "llm_name": "BAAI/bge-base-en-v1.5", - "tags": "TEXT EMBEDDING,", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, { - }, { - "fid": factory_infos[5]["name"], - "llm_name": "BAAI/bge-large-en-v1.5", - "tags": "TEXT EMBEDDING,", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[5]["name"], - "llm_name": "sentence-transformers/all-MiniLM-L6-v2", - "tags": "TEXT EMBEDDING,", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[5]["name"], - "llm_name": "nomic-ai/nomic-embed-text-v1.5", - "tags": "TEXT EMBEDDING,", - "max_tokens": 8192, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[5]["name"], - "llm_name": "jinaai/jina-embeddings-v2-small-en", - "tags": "TEXT EMBEDDING,", - "max_tokens": 2147483648, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[5]["name"], - "llm_name": "jinaai/jina-embeddings-v2-base-en", - "tags": "TEXT EMBEDDING,", - "max_tokens": 2147483648, - "model_type": LLMType.EMBEDDING.value - }, - # ------------------------ Youdao ----------------------- - { - "fid": factory_infos[7]["name"], - "llm_name": "maidalun1020/bce-embedding-base_v1", - "tags": "TEXT EMBEDDING,", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, - { - "fid": factory_infos[7]["name"], - "llm_name": "maidalun1020/bce-reranker-base_v1", - "tags": "RE-RANK, 512", - "max_tokens": 512, - "model_type": LLMType.RERANK.value - }, - # ------------------------ DeepSeek ----------------------- - { - "fid": factory_infos[8]["name"], - "llm_name": "deepseek-chat", - "tags": "LLM,CHAT,", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[8]["name"], - "llm_name": "deepseek-coder", - "tags": "LLM,CHAT,", - "max_tokens": 16385, - "model_type": LLMType.CHAT.value - }, - # ------------------------ VolcEngine ----------------------- - { - "fid": factory_infos[9]["name"], - "llm_name": "Skylark2-pro-32k", - "tags": "LLM,CHAT,32k", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[9]["name"], - "llm_name": "Skylark2-pro-4k", - "tags": "LLM,CHAT,4k", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, - # ------------------------ BaiChuan ----------------------- - { - "fid": factory_infos[10]["name"], - "llm_name": "Baichuan2-Turbo", - "tags": "LLM,CHAT,32K", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[10]["name"], - "llm_name": "Baichuan2-Turbo-192k", - "tags": "LLM,CHAT,192K", - "max_tokens": 196608, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[10]["name"], - "llm_name": "Baichuan3-Turbo", - "tags": "LLM,CHAT,32K", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[10]["name"], - "llm_name": "Baichuan3-Turbo-128k", - "tags": "LLM,CHAT,128K", - "max_tokens": 131072, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[10]["name"], - "llm_name": "Baichuan4", - "tags": "LLM,CHAT,128K", - "max_tokens": 131072, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[10]["name"], - "llm_name": 
"Baichuan-Text-Embedding", - "tags": "TEXT EMBEDDING", - "max_tokens": 512, - "model_type": LLMType.EMBEDDING.value - }, - # ------------------------ Jina ----------------------- - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-reranker-v1-base-en", - "tags": "RE-RANK,8k", - "max_tokens": 8196, - "model_type": LLMType.RERANK.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-reranker-v1-turbo-en", - "tags": "RE-RANK,8k", - "max_tokens": 8196, - "model_type": LLMType.RERANK.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-reranker-v1-tiny-en", - "tags": "RE-RANK,8k", - "max_tokens": 8196, - "model_type": LLMType.RERANK.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-colbert-v1-en", - "tags": "RE-RANK,8k", - "max_tokens": 8196, - "model_type": LLMType.RERANK.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-embeddings-v2-base-en", - "tags": "TEXT EMBEDDING", - "max_tokens": 8196, - "model_type": LLMType.EMBEDDING.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-embeddings-v2-base-de", - "tags": "TEXT EMBEDDING", - "max_tokens": 8196, - "model_type": LLMType.EMBEDDING.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-embeddings-v2-base-es", - "tags": "TEXT EMBEDDING", - "max_tokens": 8196, - "model_type": LLMType.EMBEDDING.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-embeddings-v2-base-code", - "tags": "TEXT EMBEDDING", - "max_tokens": 8196, - "model_type": LLMType.EMBEDDING.value - }, - { - "fid": factory_infos[11]["name"], - "llm_name": "jina-embeddings-v2-base-zh", - "tags": "TEXT EMBEDDING", - "max_tokens": 8196, - "model_type": LLMType.EMBEDDING.value - }, - # ------------------------ BAAI ----------------------- - { - "fid": factory_infos[12]["name"], - "llm_name": "BAAI/bge-large-zh-v1.5", - "tags": "TEXT EMBEDDING,", - "max_tokens": 1024, - "model_type": LLMType.EMBEDDING.value - }, - { - "fid": factory_infos[12]["name"], - "llm_name": "BAAI/bge-reranker-v2-m3", - "tags": "RE-RANK,2k", - "max_tokens": 2048, - "model_type": LLMType.RERANK.value - }, - # ------------------------ Minimax ----------------------- - { - "fid": factory_infos[13]["name"], - "llm_name": "abab6.5", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[13]["name"], - "llm_name": "abab6.5s", - "tags": "LLM,CHAT,245k", - "max_tokens": 245760, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[13]["name"], - "llm_name": "abab6.5t", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[13]["name"], - "llm_name": "abab6.5g", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[13]["name"], - "llm_name": "abab5.5s", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, - # ------------------------ Mistral ----------------------- - { - "fid": factory_infos[14]["name"], - "llm_name": "open-mixtral-8x22b", - "tags": "LLM,CHAT,64k", - "max_tokens": 64000, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[14]["name"], - "llm_name": "open-mixtral-8x7b", - "tags": "LLM,CHAT,32k", - "max_tokens": 32000, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[14]["name"], - "llm_name": "open-mistral-7b", - "tags": "LLM,CHAT,32k", - "max_tokens": 32000, - "model_type": LLMType.CHAT.value - }, - { - "fid": 
factory_infos[14]["name"], - "llm_name": "mistral-large-latest", - "tags": "LLM,CHAT,32k", - "max_tokens": 32000, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[14]["name"], - "llm_name": "mistral-small-latest", - "tags": "LLM,CHAT,32k", - "max_tokens": 32000, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[14]["name"], - "llm_name": "mistral-medium-latest", - "tags": "LLM,CHAT,32k", - "max_tokens": 32000, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[14]["name"], - "llm_name": "codestral-latest", - "tags": "LLM,CHAT,32k", - "max_tokens": 32000, - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[14]["name"], - "llm_name": "mistral-embed", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.EMBEDDING - }, - # ------------------------ Azure OpenAI ----------------------- - # Please ensure the llm_name is the same as the name in Azure - # OpenAI deployment name (e.g., azure-gpt-4o). And the llm_name - # must different from the OpenAI llm_name - # - # Each model must be deployed in the Azure OpenAI service, otherwise, - # you will receive an error message 'The API deployment for - # this resource does not exist' - { - "fid": factory_infos[15]["name"], - "llm_name": "azure-gpt-4o", - "tags": "LLM,CHAT,128K", - "max_tokens": 128000, - "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-gpt-35-turbo", - "tags": "LLM,CHAT,4K", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-gpt-35-turbo-16k", - "tags": "LLM,CHAT,16k", - "max_tokens": 16385, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-text-embedding-ada-002", - "tags": "TEXT EMBEDDING,8K", - "max_tokens": 8191, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-text-embedding-3-small", - "tags": "TEXT EMBEDDING,8K", - "max_tokens": 8191, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-text-embedding-3-large", - "tags": "TEXT EMBEDDING,8K", - "max_tokens": 8191, - "model_type": LLMType.EMBEDDING.value - },{ - "fid": factory_infos[15]["name"], - "llm_name": "azure-whisper-1", - "tags": "SPEECH2TEXT", - "max_tokens": 25 * 1024 * 1024, - "model_type": LLMType.SPEECH2TEXT.value - }, - { - "fid": factory_infos[15]["name"], - "llm_name": "azure-gpt-4", - "tags": "LLM,CHAT,8K", - "max_tokens": 8191, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-gpt-4-turbo", - "tags": "LLM,CHAT,8K", - "max_tokens": 8191, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-gpt-4-32k", - "tags": "LLM,CHAT,32K", - "max_tokens": 32768, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[15]["name"], - "llm_name": "azure-gpt-4-vision-preview", - "tags": "LLM,CHAT,IMAGE2TEXT", - "max_tokens": 765, - "model_type": LLMType.IMAGE2TEXT.value - }, - # ------------------------ Bedrock ----------------------- - { - "fid": factory_infos[16]["name"], - "llm_name": "ai21.j2-ultra-v1", - "tags": "LLM,CHAT,8k", - "max_tokens": 8191, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "ai21.j2-mid-v1", - "tags": "LLM,CHAT,8k", - "max_tokens": 8191, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": 
"cohere.command-text-v14", - "tags": "LLM,CHAT,4k", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "cohere.command-light-text-v14", - "tags": "LLM,CHAT,4k", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "cohere.command-r-v1:0", - "tags": "LLM,CHAT,128k", - "max_tokens": 128 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "cohere.command-r-plus-v1:0", - "tags": "LLM,CHAT,128k", - "max_tokens": 128000, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "anthropic.claude-v2", - "tags": "LLM,CHAT,100k", - "max_tokens": 100 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "anthropic.claude-v2:1", - "tags": "LLM,CHAT,200k", - "max_tokens": 200 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "anthropic.claude-3-sonnet-20240229-v1:0", - "tags": "LLM,CHAT,200k", - "max_tokens": 200 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "anthropic.claude-3-5-sonnet-20240620-v1:0", - "tags": "LLM,CHAT,200k", - "max_tokens": 200 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "anthropic.claude-3-haiku-20240307-v1:0", - "tags": "LLM,CHAT,200k", - "max_tokens": 200 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "anthropic.claude-3-opus-20240229-v1:0", - "tags": "LLM,CHAT,200k", - "max_tokens": 200 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "anthropic.claude-instant-v1", - "tags": "LLM,CHAT,100k", - "max_tokens": 100 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "amazon.titan-text-express-v1", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "amazon.titan-text-premier-v1:0", - "tags": "LLM,CHAT,32k", - "max_tokens": 32 * 1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "amazon.titan-text-lite-v1", - "tags": "LLM,CHAT,4k", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "meta.llama2-13b-chat-v1", - "tags": "LLM,CHAT,4k", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "meta.llama2-70b-chat-v1", - "tags": "LLM,CHAT,4k", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "meta.llama3-8b-instruct-v1:0", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "meta.llama3-70b-instruct-v1:0", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "mistral.mistral-7b-instruct-v0:2", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "mistral.mixtral-8x7b-instruct-v0:1", - "tags": "LLM,CHAT,4k", - "max_tokens": 4096, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "mistral.mistral-large-2402-v1:0", - "tags": 
"LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "mistral.mistral-small-2402-v1:0", - "tags": "LLM,CHAT,8k", - "max_tokens": 8192, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "amazon.titan-embed-text-v2:0", - "tags": "TEXT EMBEDDING", - "max_tokens": 8192, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "cohere.embed-english-v3", - "tags": "TEXT EMBEDDING", - "max_tokens": 2048, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[16]["name"], - "llm_name": "cohere.embed-multilingual-v3", - "tags": "TEXT EMBEDDING", - "max_tokens": 2048, - "model_type": LLMType.EMBEDDING.value - }, { - "fid": factory_infos[17]["name"], - "llm_name": "gemini-1.5-pro-latest", - "tags": "LLM,CHAT,1024K", - "max_tokens": 1024*1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[17]["name"], - "llm_name": "gemini-1.5-flash-latest", - "tags": "LLM,CHAT,1024K", - "max_tokens": 1024*1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[17]["name"], - "llm_name": "gemini-1.0-pro", - "tags": "LLM,CHAT,30K", - "max_tokens": 30*1024, - "model_type": LLMType.CHAT.value - }, { - "fid": factory_infos[17]["name"], - "llm_name": "gemini-1.0-pro-vision-latest", - "tags": "LLM,IMAGE2TEXT,12K", - "max_tokens": 12*1024, - "model_type": LLMType.IMAGE2TEXT.value - }, { - "fid": factory_infos[17]["name"], - "llm_name": "text-embedding-004", - "tags": "TEXT EMBEDDING", - "max_tokens": 2048, - "model_type": LLMType.EMBEDDING.value - }, - # ------------------------ Groq ----------------------- - { - "fid": factory_infos[18]["name"], - "llm_name": "gemma-7b-it", - "tags": "LLM,CHAT,15k", - "max_tokens": 8192, - - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[18]["name"], - "llm_name": "gemma2-9b-it", - "tags": "LLM,CHAT,15k", - "max_tokens": 8192, - - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[18]["name"], - "llm_name": "llama3-70b-8192", - "tags": "LLM,CHAT,6k", - "max_tokens": 8192, - - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[18]["name"], - "llm_name": "llama3-8b-8192", - "tags": "LLM,CHAT,30k", - "max_tokens": 8192, - - "model_type": LLMType.CHAT.value - }, - { - "fid": factory_infos[18]["name"], - "llm_name": "mixtral-8x7b-32768", - "tags": "LLM,CHAT,5k", - "max_tokens": 32768, - - "model_type": LLMType.CHAT.value - } - ] - for info in factory_infos: + factory_llm_infos = json.load( + open( + os.path.join(get_project_base_directory(), "conf", "llm_factories.json"), + "r", + ) + ) + for factory_llm_info in factory_llm_infos["factory_llm_infos"]: + llm_infos = factory_llm_info.pop("llm") try: - LLMFactoriesService.save(**info) - except Exception as e: - pass - - LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")]) - for info in llm_infos: - try: - LLMService.save(**info) + LLMFactoriesService.save(**factory_llm_info) except Exception as e: pass + for llm_info in llm_infos: + llm_info["fid"] = factory_llm_info["name"] + try: + LLMService.save(**llm_info) + except Exception as e: + pass + try: + LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")]) + except Exception as e: + pass LLMFactoriesService.filter_delete([LLMFactories.name == "Local"]) LLMService.filter_delete([LLM.fid == "Local"]) diff --git a/conf/llm_factories.json b/conf/llm_factories.json new file mode 100644 index 0000000000..446b1eeb80 
--- /dev/null +++ b/conf/llm_factories.json @@ -0,0 +1,1884 @@ +{ + "factory_llm_infos": [ + { + "name": "OpenAI", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", + "status": "1", + "llm": [ + { + "llm_name": "gpt-4o", + "tags": "LLM,CHAT,128K", + "max_tokens": 128000, + "model_type": "chat,image2text" + }, + { + "llm_name": "gpt-3.5-turbo", + "tags": "LLM,CHAT,4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "gpt-3.5-turbo-16k-0613", + "tags": "LLM,CHAT,16k", + "max_tokens": 16385, + "model_type": "chat" + }, + { + "llm_name": "text-embedding-ada-002", + "tags": "TEXT EMBEDDING,8K", + "max_tokens": 8191, + "model_type": "embedding" + }, + { + "llm_name": "text-embedding-3-small", + "tags": "TEXT EMBEDDING,8K", + "max_tokens": 8191, + "model_type": "embedding" + }, + { + "llm_name": "text-embedding-3-large", + "tags": "TEXT EMBEDDING,8K", + "max_tokens": 8191, + "model_type": "embedding" + }, + { + "llm_name": "whisper-1", + "tags": "SPEECH2TEXT", + "max_tokens": 26214400, + "model_type": "speech2text" + }, + { + "llm_name": "gpt-4", + "tags": "LLM,CHAT,8K", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "gpt-4-turbo", + "tags": "LLM,CHAT,8K", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "gpt-4-32k", + "tags": "LLM,CHAT,32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "gpt-4-vision-preview", + "tags": "LLM,CHAT,IMAGE2TEXT", + "max_tokens": 765, + "model_type": "image2text" + } + ] + }, + { + "name": "Tongyi-Qianwen", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", + "status": "1", + "llm": [ + { + "llm_name": "qwen-turbo", + "tags": "LLM,CHAT,8K", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "qwen-plus", + "tags": "LLM,CHAT,32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "qwen-max-1201", + "tags": "LLM,CHAT,6K", + "max_tokens": 5899, + "model_type": "chat" + }, + { + "llm_name": "text-embedding-v2", + "tags": "TEXT EMBEDDING,2K", + "max_tokens": 2048, + "model_type": "embedding" + }, + { + "llm_name": "paraformer-realtime-8k-v1", + "tags": "SPEECH2TEXT", + "max_tokens": 26214400, + "model_type": "speech2text" + }, + { + "llm_name": "qwen-vl-max", + "tags": "LLM,CHAT,IMAGE2TEXT", + "max_tokens": 765, + "model_type": "image2text" + } + ] + }, + { + "name": "ZHIPU-AI", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", + "status": "1", + "llm": [ + { + "llm_name": "glm-3-turbo", + "tags": "LLM,CHAT,", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "glm-4", + "tags": "LLM,CHAT,", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "glm-4v", + "tags": "LLM,CHAT,IMAGE2TEXT", + "max_tokens": 2000, + "model_type": "image2text" + }, + { + "llm_name": "embedding-2", + "tags": "TEXT EMBEDDING", + "max_tokens": 512, + "model_type": "embedding" + } + ] + }, + { + "name": "Ollama", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", + "status": "1", + "llm": [] + }, + { + "name": "Moonshot", + "logo": "", + "tags": "LLM,TEXT EMBEDDING", + "status": "1", + "llm": [ + { + "llm_name": "moonshot-v1-8k", + "tags": "LLM,CHAT,", + "max_tokens": 7900, + "model_type": "chat" + }, + { + "llm_name": "moonshot-v1-32k", + "tags": "LLM,CHAT,", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "moonshot-v1-128k", + "tags": "LLM,CHAT", + "max_tokens": 128000, + "model_type": "chat" + } + ] + }, + { + "name": "FastEmbed", + "logo": "", + 
"tags": "TEXT EMBEDDING", + "status": "1", + "llm": [ + { + "llm_name": "BAAI/bge-small-en-v1.5", + "tags": "TEXT EMBEDDING,", + "max_tokens": 512, + "model_type": "embedding" + }, + { + "llm_name": "BAAI/bge-small-zh-v1.5", + "tags": "TEXT EMBEDDING,", + "max_tokens": 512, + "model_type": "embedding" + }, + { + "llm_name": "BAAI/bge-base-en-v1.5", + "tags": "TEXT EMBEDDING,", + "max_tokens": 512, + "model_type": "embedding" + }, + { + "llm_name": "BAAI/bge-large-en-v1.5", + "tags": "TEXT EMBEDDING,", + "max_tokens": 512, + "model_type": "embedding" + }, + { + "llm_name": "sentence-transformers/all-MiniLM-L6-v2", + "tags": "TEXT EMBEDDING,", + "max_tokens": 512, + "model_type": "embedding" + }, + { + "llm_name": "nomic-ai/nomic-embed-text-v1.5", + "tags": "TEXT EMBEDDING,", + "max_tokens": 8192, + "model_type": "embedding" + }, + { + "llm_name": "jinaai/jina-embeddings-v2-small-en", + "tags": "TEXT EMBEDDING,", + "max_tokens": 2147483648, + "model_type": "embedding" + }, + { + "llm_name": "jinaai/jina-embeddings-v2-base-en", + "tags": "TEXT EMBEDDING,", + "max_tokens": 2147483648, + "model_type": "embedding" + } + ] + }, + { + "name": "Xinference", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION,TEXT RE-RANK", + "status": "1", + "llm": [] + }, + { + "name": "Youdao", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", + "status": "1", + "llm": [ + { + "llm_name": "maidalun1020/bce-embedding-base_v1", + "tags": "TEXT EMBEDDING,", + "max_tokens": 512, + "model_type": "embedding" + }, + { + "llm_name": "maidalun1020/bce-reranker-base_v1", + "tags": "RE-RANK, 512", + "max_tokens": 512, + "model_type": "rerank" + } + ] + }, + { + "name": "DeepSeek", + "logo": "", + "tags": "LLM", + "status": "1", + "llm": [ + { + "llm_name": "deepseek-chat", + "tags": "LLM,CHAT,", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "deepseek-coder", + "tags": "LLM,CHAT,", + "max_tokens": 16385, + "model_type": "chat" + } + ] + }, + { + "name": "VolcEngine", + "logo": "", + "tags": "LLM, TEXT EMBEDDING", + "status": "1", + "llm": [ + { + "llm_name": "Skylark2-pro-32k", + "tags": "LLM,CHAT,32k", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "Skylark2-pro-4k", + "tags": "LLM,CHAT,4k", + "max_tokens": 4096, + "model_type": "chat" + } + ] + }, + { + "name": "BaiChuan", + "logo": "", + "tags": "LLM,TEXT EMBEDDING", + "status": "1", + "llm": [ + { + "llm_name": "Baichuan2-Turbo", + "tags": "LLM,CHAT,32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "Baichuan2-Turbo-192k", + "tags": "LLM,CHAT,192K", + "max_tokens": 196608, + "model_type": "chat" + }, + { + "llm_name": "Baichuan3-Turbo", + "tags": "LLM,CHAT,32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "Baichuan3-Turbo-128k", + "tags": "LLM,CHAT,128K", + "max_tokens": 131072, + "model_type": "chat" + }, + { + "llm_name": "Baichuan4", + "tags": "LLM,CHAT,128K", + "max_tokens": 131072, + "model_type": "chat" + }, + { + "llm_name": "Baichuan-Text-Embedding", + "tags": "TEXT EMBEDDING", + "max_tokens": 512, + "model_type": "embedding" + } + ] + }, + { + "name": "Jina", + "logo": "", + "tags": "TEXT EMBEDDING, TEXT RE-RANK", + "status": "1", + "llm": [ + { + "llm_name": "jina-reranker-v1-base-en", + "tags": "RE-RANK,8k", + "max_tokens": 8196, + "model_type": "rerank" + }, + { + "llm_name": "jina-reranker-v1-turbo-en", + "tags": "RE-RANK,8k", + "max_tokens": 8196, + "model_type": "rerank" + }, + { + "llm_name": "jina-reranker-v1-tiny-en", + "tags": 
"RE-RANK,8k", + "max_tokens": 8196, + "model_type": "rerank" + }, + { + "llm_name": "jina-colbert-v1-en", + "tags": "RE-RANK,8k", + "max_tokens": 8196, + "model_type": "rerank" + }, + { + "llm_name": "jina-embeddings-v2-base-en", + "tags": "TEXT EMBEDDING", + "max_tokens": 8196, + "model_type": "embedding" + }, + { + "llm_name": "jina-embeddings-v2-base-de", + "tags": "TEXT EMBEDDING", + "max_tokens": 8196, + "model_type": "embedding" + }, + { + "llm_name": "jina-embeddings-v2-base-es", + "tags": "TEXT EMBEDDING", + "max_tokens": 8196, + "model_type": "embedding" + }, + { + "llm_name": "jina-embeddings-v2-base-code", + "tags": "TEXT EMBEDDING", + "max_tokens": 8196, + "model_type": "embedding" + }, + { + "llm_name": "jina-embeddings-v2-base-zh", + "tags": "TEXT EMBEDDING", + "max_tokens": 8196, + "model_type": "embedding" + } + ] + }, + { + "name": "BAAI", + "logo": "", + "tags": "TEXT EMBEDDING, TEXT RE-RANK", + "status": "1", + "llm": [ + { + "llm_name": "BAAI/bge-large-zh-v1.5", + "tags": "TEXT EMBEDDING,", + "max_tokens": 1024, + "model_type": "embedding" + }, + { + "llm_name": "BAAI/bge-reranker-v2-m3", + "tags": "RE-RANK,2k", + "max_tokens": 2048, + "model_type": "rerank" + } + ] + }, + { + "name": "Minimax", + "logo": "", + "tags": "LLM,TEXT EMBEDDING", + "status": "1", + "llm": [ + { + "llm_name": "abab6.5", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "abab6.5s", + "tags": "LLM,CHAT,245k", + "max_tokens": 245760, + "model_type": "chat" + }, + { + "llm_name": "abab6.5t", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "abab6.5g", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "abab5.5s", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + } + ] + }, + { + "name": "Mistral", + "logo": "", + "tags": "LLM,TEXT EMBEDDING", + "status": "1", + "llm": [ + { + "llm_name": "open-mixtral-8x22b", + "tags": "LLM,CHAT,64k", + "max_tokens": 64000, + "model_type": "chat" + }, + { + "llm_name": "open-mixtral-8x7b", + "tags": "LLM,CHAT,32k", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "open-mistral-7b", + "tags": "LLM,CHAT,32k", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "mistral-large-latest", + "tags": "LLM,CHAT,32k", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "mistral-small-latest", + "tags": "LLM,CHAT,32k", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "mistral-medium-latest", + "tags": "LLM,CHAT,32k", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "codestral-latest", + "tags": "LLM,CHAT,32k", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "mistral-embed", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "embedding" + } + ] + }, + { + "name": "Azure-OpenAI", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", + "status": "1", + "llm": [ + { + "llm_name": "azure-gpt-4o", + "tags": "LLM,CHAT,128K", + "max_tokens": 128000, + "model_type": "chat,image2text" + }, + { + "llm_name": "azure-gpt-35-turbo", + "tags": "LLM,CHAT,4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "azure-gpt-35-turbo-16k", + "tags": "LLM,CHAT,16k", + "max_tokens": 16385, + "model_type": "chat" + }, + { + "llm_name": "azure-text-embedding-ada-002", + "tags": "TEXT EMBEDDING,8K", + "max_tokens": 8191, + "model_type": "embedding" + }, + { + "llm_name": 
"azure-text-embedding-3-small", + "tags": "TEXT EMBEDDING,8K", + "max_tokens": 8191, + "model_type": "embedding" + }, + { + "llm_name": "azure-text-embedding-3-large", + "tags": "TEXT EMBEDDING,8K", + "max_tokens": 8191, + "model_type": "embedding" + }, + { + "llm_name": "azure-whisper-1", + "tags": "SPEECH2TEXT", + "max_tokens": 26214400, + "model_type": "speech2text" + }, + { + "llm_name": "azure-gpt-4", + "tags": "LLM,CHAT,8K", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "azure-gpt-4-turbo", + "tags": "LLM,CHAT,8K", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "azure-gpt-4-32k", + "tags": "LLM,CHAT,32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "azure-gpt-4-vision-preview", + "tags": "LLM,CHAT,IMAGE2TEXT", + "max_tokens": 765, + "model_type": "image2text" + } + ] + }, + { + "name": "Bedrock", + "logo": "", + "tags": "LLM,TEXT EMBEDDING", + "status": "1", + "llm": [ + { + "llm_name": "ai21.j2-ultra-v1", + "tags": "LLM,CHAT,8k", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "ai21.j2-mid-v1", + "tags": "LLM,CHAT,8k", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "cohere.command-text-v14", + "tags": "LLM,CHAT,4k", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "cohere.command-light-text-v14", + "tags": "LLM,CHAT,4k", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "cohere.command-r-v1:0", + "tags": "LLM,CHAT,128k", + "max_tokens": 131072, + "model_type": "chat" + }, + { + "llm_name": "cohere.command-r-plus-v1:0", + "tags": "LLM,CHAT,128k", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "anthropic.claude-v2", + "tags": "LLM,CHAT,100k", + "max_tokens": 102400, + "model_type": "chat" + }, + { + "llm_name": "anthropic.claude-v2:1", + "tags": "LLM,CHAT,200k", + "max_tokens": 204800, + "model_type": "chat" + }, + { + "llm_name": "anthropic.claude-3-sonnet-20240229-v1:0", + "tags": "LLM,CHAT,200k", + "max_tokens": 204800, + "model_type": "chat" + }, + { + "llm_name": "anthropic.claude-3-5-sonnet-20240620-v1:0", + "tags": "LLM,CHAT,200k", + "max_tokens": 204800, + "model_type": "chat" + }, + { + "llm_name": "anthropic.claude-3-haiku-20240307-v1:0", + "tags": "LLM,CHAT,200k", + "max_tokens": 204800, + "model_type": "chat" + }, + { + "llm_name": "anthropic.claude-3-opus-20240229-v1:0", + "tags": "LLM,CHAT,200k", + "max_tokens": 204800, + "model_type": "chat" + }, + { + "llm_name": "anthropic.claude-instant-v1", + "tags": "LLM,CHAT,100k", + "max_tokens": 102400, + "model_type": "chat" + }, + { + "llm_name": "amazon.titan-text-express-v1", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "amazon.titan-text-premier-v1:0", + "tags": "LLM,CHAT,32k", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "amazon.titan-text-lite-v1", + "tags": "LLM,CHAT,4k", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "meta.llama2-13b-chat-v1", + "tags": "LLM,CHAT,4k", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "meta.llama2-70b-chat-v1", + "tags": "LLM,CHAT,4k", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "meta.llama3-8b-instruct-v1:0", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta.llama3-70b-instruct-v1:0", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "mistral.mistral-7b-instruct-v0:2", + "tags": "LLM,CHAT,8k", + 
"max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "mistral.mixtral-8x7b-instruct-v0:1", + "tags": "LLM,CHAT,4k", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "mistral.mistral-large-2402-v1:0", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "mistral.mistral-small-2402-v1:0", + "tags": "LLM,CHAT,8k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "amazon.titan-embed-text-v2:0", + "tags": "TEXT EMBEDDING", + "max_tokens": 8192, + "model_type": "embedding" + }, + { + "llm_name": "cohere.embed-english-v3", + "tags": "TEXT EMBEDDING", + "max_tokens": 2048, + "model_type": "embedding" + }, + { + "llm_name": "cohere.embed-multilingual-v3", + "tags": "TEXT EMBEDDING", + "max_tokens": 2048, + "model_type": "embedding" + } + ] + }, + { + "name": "Gemini", + "logo": "", + "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT", + "status": "1", + "llm": [ + { + "llm_name": "gemini-1.5-pro-latest", + "tags": "LLM,CHAT,1024K", + "max_tokens": 1048576, + "model_type": "chat" + }, + { + "llm_name": "gemini-1.5-flash-latest", + "tags": "LLM,CHAT,1024K", + "max_tokens": 1048576, + "model_type": "chat" + }, + { + "llm_name": "gemini-1.0-pro", + "tags": "LLM,CHAT,30K", + "max_tokens": 30720, + "model_type": "chat" + }, + { + "llm_name": "gemini-1.0-pro-vision-latest", + "tags": "LLM,IMAGE2TEXT,12K", + "max_tokens": 12288, + "model_type": "image2text" + }, + { + "llm_name": "text-embedding-004", + "tags": "TEXT EMBEDDING", + "max_tokens": 2048, + "model_type": "embedding" + } + ] + }, + { + "name": "Groq", + "logo": "", + "tags": "LLM", + "status": "1", + "llm": [ + { + "llm_name": "gemma-7b-it", + "tags": "LLM,CHAT,15k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "gemma2-9b-it", + "tags": "LLM,CHAT,15k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "llama3-70b-8192", + "tags": "LLM,CHAT,6k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "llama3-8b-8192", + "tags": "LLM,CHAT,30k", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "mixtral-8x7b-32768", + "tags": "LLM,CHAT,5k", + "max_tokens": 32768, + "model_type": "chat" + } + ] + }, + { + "name": "OpenRouter", + "logo": "", + "tags": "LLM,IMAGE2TEXT", + "status": "1", + "llm": [ + { + "llm_name": "nousresearch/hermes-2-theta-llama-3-8b", + "tags": "LLM CHAT 16K", + "max_tokens": 16384, + "model_type": "chat" + }, + { + "llm_name": "alpindale/magnum-72b", + "tags": "LLM CHAT 16K", + "max_tokens": 16384, + "model_type": "chat" + }, + { + "llm_name": "google/gemma-2-9b-it", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "google/gemma-2-9b-it:free", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "sao10k/l3-stheno-8b", + "tags": "LLM CHAT 32K", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "openrouter/flavor-of-the-week", + "tags": "LLM CHAT 32K", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "ai21/jamba-instruct", + "tags": "LLM CHAT 250K", + "max_tokens": 256000, + "model_type": "chat" + }, + { + "llm_name": "nvidia/nemotron-4-340b-instruct", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-3.5-sonnet", + "tags": "LLM IMAGE2TEXT 195K", + "max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "anthropic/claude-3.5-sonnet:beta", + "tags": "LLM IMAGE2TEXT 195K", + 
"max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "sao10k/l3-euryale-70b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "microsoft/phi-3-medium-4k-instruct", + "tags": "LLM CHAT 4K", + "max_tokens": 4000, + "model_type": "chat" + }, + { + "llm_name": "cognitivecomputations/dolphin-mixtral-8x22b", + "tags": "LLM CHAT 64K", + "max_tokens": 65536, + "model_type": "chat" + }, + { + "llm_name": "qwen/qwen-2-72b-instruct", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "openchat/openchat-8b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-7b-instruct", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-7b-instruct-v0.3", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/hermes-2-pro-llama-3-8b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "microsoft/phi-3-mini-128k-instruct", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "microsoft/phi-3-mini-128k-instruct:free", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "microsoft/phi-3-medium-128k-instruct", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "microsoft/phi-3-medium-128k-instruct:free", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "neversleep/llama-3-lumimaid-70b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "google/gemini-flash-1.5", + "tags": "LLM IMAGE2TEXT 2734K", + "max_tokens": 2800000, + "model_type": "image2text" + }, + { + "llm_name": "perplexity/llama-3-sonar-small-32k-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "perplexity/llama-3-sonar-small-32k-online", + "tags": "LLM CHAT 28K", + "max_tokens": 28000, + "model_type": "chat" + }, + { + "llm_name": "perplexity/llama-3-sonar-large-32k-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "perplexity/llama-3-sonar-large-32k-online", + "tags": "LLM CHAT 28K", + "max_tokens": 28000, + "model_type": "chat" + }, + { + "llm_name": "deepseek/deepseek-chat", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "deepseek/deepseek-coder", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4o", + "tags": "LLM IMAGE2TEXT 125K", + "max_tokens": 128000, + "model_type": "image2text" + }, + { + "llm_name": "openai/gpt-4o-2024-05-13", + "tags": "LLM IMAGE2TEXT 125K", + "max_tokens": 128000, + "model_type": "image2text" + }, + { + "llm_name": "meta-llama/llama-3-8b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-3-70b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-guard-2-8b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "liuhaotian/llava-yi-34b", + "tags": "LLM IMAGE2TEXT 4K", + "max_tokens": 4096, + "model_type": "image2text" + }, + { + "llm_name": "allenai/olmo-7b-instruct", + "tags": "LLM CHAT 2K", + "max_tokens": 2048, + 
"model_type": "chat" + }, + { + "llm_name": "qwen/qwen-110b-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "qwen/qwen-72b-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "qwen/qwen-32b-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "qwen/qwen-14b-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "qwen/qwen-7b-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "qwen/qwen-4b-chat", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-3-8b-instruct:free", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "neversleep/llama-3-lumimaid-8b", + "tags": "LLM CHAT 24K", + "max_tokens": 24576, + "model_type": "chat" + }, + { + "llm_name": "neversleep/llama-3-lumimaid-8b:extended", + "tags": "LLM CHAT 24K", + "max_tokens": 24576, + "model_type": "chat" + }, + { + "llm_name": "snowflake/snowflake-arctic-instruct", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "fireworks/firellava-13b", + "tags": "LLM IMAGE2TEXT 4K", + "max_tokens": 4096, + "model_type": "image2text" + }, + { + "llm_name": "lynn/soliloquy-l3", + "tags": "LLM CHAT 24K", + "max_tokens": 24576, + "model_type": "chat" + }, + { + "llm_name": "sao10k/fimbulvetr-11b-v2", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-3-8b-instruct:extended", + "tags": "LLM CHAT 16K", + "max_tokens": 16384, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-3-8b-instruct:nitro", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-3-70b-instruct:nitro", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-3-8b-instruct", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-3-70b-instruct", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mixtral-8x22b-instruct", + "tags": "LLM CHAT 64K", + "max_tokens": 65536, + "model_type": "chat" + }, + { + "llm_name": "microsoft/wizardlm-2-8x22b", + "tags": "LLM CHAT 64K", + "max_tokens": 65536, + "model_type": "chat" + }, + { + "llm_name": "microsoft/wizardlm-2-7b", + "tags": "LLM CHAT 32K", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "undi95/toppy-m-7b:nitro", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mixtral-8x22b", + "tags": "LLM CHAT 64K", + "max_tokens": 65536, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4-turbo", + "tags": "LLM IMAGE2TEXT 125K", + "max_tokens": 128000, + "model_type": "image2text" + }, + { + "llm_name": "google/gemini-pro-1.5", + "tags": "LLM IMAGE2TEXT 2734K", + "max_tokens": 2800000, + "model_type": "image2text" + }, + { + "llm_name": "cohere/command-r-plus", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "databricks/dbrx-instruct", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "sophosympatheia/midnight-rose-70b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + 
"llm_name": "cohere/command", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "cohere/command-r", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-3-haiku", + "tags": "LLM IMAGE2TEXT 195K", + "max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "anthropic/claude-3-haiku:beta", + "tags": "LLM IMAGE2TEXT 195K", + "max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "google/gemma-7b-it:nitro", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mixtral-8x7b-instruct:nitro", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-7b-instruct:nitro", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-2-70b-chat:nitro", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "gryphe/mythomax-l2-13b:nitro", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-3-opus", + "tags": "LLM IMAGE2TEXT 195K", + "max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "anthropic/claude-3-sonnet", + "tags": "LLM IMAGE2TEXT 195K", + "max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "anthropic/claude-3-opus:beta", + "tags": "LLM IMAGE2TEXT 195K", + "max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "anthropic/claude-3-sonnet:beta", + "tags": "LLM IMAGE2TEXT 195K", + "max_tokens": 200000, + "model_type": "image2text" + }, + { + "llm_name": "mistralai/mistral-large", + "tags": "LLM CHAT 32K", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "google/gemma-7b-it", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "google/gemma-7b-it:free", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-hermes-2-mistral-7b-dpo", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/codellama-70b-instruct", + "tags": "LLM CHAT 2K", + "max_tokens": 2048, + "model_type": "chat" + }, + { + "llm_name": "recursal/eagle-7b", + "tags": "LLM CHAT 9K", + "max_tokens": 10000, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-3.5-turbo-0613", + "tags": "LLM CHAT 4K", + "max_tokens": 4095, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4-turbo-preview", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "undi95/remm-slerp-l2-13b:extended", + "tags": "LLM CHAT 6K", + "max_tokens": 6144, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-hermes-2-mixtral-8x7b-dpo", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-hermes-2-mixtral-8x7b-sft", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-tiny", + "tags": "LLM CHAT 32K", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-small", + "tags": "LLM CHAT 32K", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-medium", + "tags": "LLM CHAT 32K", + "max_tokens": 32000, + "model_type": "chat" + }, + { + "llm_name": "austism/chronos-hermes-13b", + "tags": "LLM CHAT 4K", + 
"max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "neversleep/noromaid-mixtral-8x7b-instruct", + "tags": "LLM CHAT 8K", + "max_tokens": 8000, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-hermes-yi-34b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-7b-instruct-v0.2", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "cognitivecomputations/dolphin-mixtral-8x7b", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "google/gemini-pro", + "tags": "LLM CHAT 89K", + "max_tokens": 91728, + "model_type": "chat" + }, + { + "llm_name": "google/gemini-pro-vision", + "tags": "LLM IMAGE2TEXT 44K", + "max_tokens": 45875, + "model_type": "image2text" + }, + { + "llm_name": "mistralai/mixtral-8x7b", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mixtral-8x7b-instruct", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "rwkv/rwkv-5-world-3b", + "tags": "LLM CHAT 9K", + "max_tokens": 10000, + "model_type": "chat" + }, + { + "llm_name": "recursal/rwkv-5-3b-ai-town", + "tags": "LLM CHAT 9K", + "max_tokens": 10000, + "model_type": "chat" + }, + { + "llm_name": "togethercomputer/stripedhyena-nous-7b", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "togethercomputer/stripedhyena-hessian-7b", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "koboldai/psyfighter-13b-2", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "gryphe/mythomist-7b", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "openrouter/cinematika-7b", + "tags": "LLM CHAT 8K", + "max_tokens": 8000, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-capybara-7b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-capybara-7b:free", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "openchat/openchat-7b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "openchat/openchat-7b:free", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "neversleep/noromaid-20b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "gryphe/mythomist-7b:free", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "intel/neural-chat-7b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-2", + "tags": "LLM CHAT 195K", + "max_tokens": 200000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-2.1", + "tags": "LLM CHAT 195K", + "max_tokens": 200000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-instant-1.1", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-2:beta", + "tags": "LLM CHAT 195K", + "max_tokens": 200000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-2.1:beta", + "tags": "LLM CHAT 195K", + "max_tokens": 200000, + "model_type": "chat" + }, + { + "llm_name": "teknium/openhermes-2.5-mistral-7b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + 
"model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-capybara-34b", + "tags": "LLM CHAT 195K", + "max_tokens": 200000, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4-vision-preview", + "tags": "LLM IMAGE2TEXT 125K", + "max_tokens": 128000, + "model_type": "image2text" + }, + { + "llm_name": "lizpreciatior/lzlv-70b-fp16-hf", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "undi95/toppy-m-7b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "alpindale/goliath-120b", + "tags": "LLM CHAT 6K", + "max_tokens": 6144, + "model_type": "chat" + }, + { + "llm_name": "undi95/toppy-m-7b:free", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "openrouter/auto", + "tags": "LLM CHAT 195K", + "max_tokens": 200000, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-3.5-turbo-1106", + "tags": "LLM CHAT 16K", + "max_tokens": 16385, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4-1106-preview", + "tags": "LLM CHAT 125K", + "max_tokens": 128000, + "model_type": "chat" + }, + { + "llm_name": "huggingfaceh4/zephyr-7b-beta:free", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "google/palm-2-chat-bison-32k", + "tags": "LLM CHAT 89K", + "max_tokens": 91750, + "model_type": "chat" + }, + { + "llm_name": "google/palm-2-codechat-bison-32k", + "tags": "LLM CHAT 89K", + "max_tokens": 91750, + "model_type": "chat" + }, + { + "llm_name": "teknium/openhermes-2-mistral-7b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "open-orca/mistral-7b-openorca", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "gryphe/mythomax-l2-13b:extended", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "xwin-lm/xwin-lm-70b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-3.5-turbo-instruct", + "tags": "LLM CHAT 4K", + "max_tokens": 4095, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-7b-instruct-v0.1", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "mistralai/mistral-7b-instruct:free", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "pygmalionai/mythalion-13b", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-3.5-turbo-16k", + "tags": "LLM CHAT 16K", + "max_tokens": 16385, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4-32k", + "tags": "LLM CHAT 32K", + "max_tokens": 32767, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4-32k-0314", + "tags": "LLM CHAT 32K", + "max_tokens": 32767, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/codellama-34b-instruct", + "tags": "LLM CHAT 8K", + "max_tokens": 8192, + "model_type": "chat" + }, + { + "llm_name": "phind/phind-codellama-34b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "nousresearch/nous-hermes-llama2-13b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "mancer/weaver", + "tags": "LLM CHAT 8K", + "max_tokens": 8000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-2.0", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": 
"anthropic/claude-instant-1", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-1", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-1.2", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-instant-1.0", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-2.0:beta", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": "anthropic/claude-instant-1:beta", + "tags": "LLM CHAT 98K", + "max_tokens": 100000, + "model_type": "chat" + }, + { + "llm_name": "undi95/remm-slerp-l2-13b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "google/palm-2-chat-bison", + "tags": "LLM CHAT 25K", + "max_tokens": 25804, + "model_type": "chat" + }, + { + "llm_name": "google/palm-2-codechat-bison", + "tags": "LLM CHAT 19K", + "max_tokens": 20070, + "model_type": "chat" + }, + { + "llm_name": "gryphe/mythomax-l2-13b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-2-13b-chat", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "meta-llama/llama-2-70b-chat", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-3.5-turbo", + "tags": "LLM CHAT 16K", + "max_tokens": 16385, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-3.5-turbo-0125", + "tags": "LLM CHAT 16K", + "max_tokens": 16385, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-3.5-turbo-0301", + "tags": "LLM CHAT 4K", + "max_tokens": 4095, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4", + "tags": "LLM CHAT 8K", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "openai/gpt-4-0314", + "tags": "LLM CHAT 8K", + "max_tokens": 8191, + "model_type": "chat" + }, + { + "llm_name": "01-ai/yi-large", + "tags": "LLM CHAT 32K", + "max_tokens": 32768, + "model_type": "chat" + }, + { + "llm_name": "01-ai/yi-34b-200k", + "tags": "LLM CHAT 195K", + "max_tokens": 200000, + "model_type": "chat" + }, + { + "llm_name": "01-ai/yi-34b-chat", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "01-ai/yi-34b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + }, + { + "llm_name": "01-ai/yi-6b", + "tags": "LLM CHAT 4K", + "max_tokens": 4096, + "model_type": "chat" + } + ] + } + ] +} \ No newline at end of file diff --git a/rag/llm/__init__.py b/rag/llm/__init__.py index 4d780b4e80..2833319a44 100644 --- a/rag/llm/__init__.py +++ b/rag/llm/__init__.py @@ -45,7 +45,8 @@ "Tongyi-Qianwen": QWenCV, "ZHIPU-AI": Zhipu4V, "Moonshot": LocalCV, - 'Gemini':GeminiCV + 'Gemini':GeminiCV, + 'OpenRouter':OpenRouterCV } @@ -65,7 +66,8 @@ "Mistral": MistralChat, 'Gemini' : GeminiChat, "Bedrock": BedrockChat, - "Groq": GroqChat + "Groq": GroqChat, + 'OpenRouter':OpenRouterChat } diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py index c3701b1681..e883869a01 100644 --- a/rag/llm/chat_model.py +++ b/rag/llm/chat_model.py @@ -685,7 +685,6 @@ def chat_streamly(self, system, history, gen_conf): yield response._chunks[-1].usage_metadata.total_token_count - class GroqChat: def __init__(self, key, model_name,base_url=''): self.client = Groq(api_key=key) @@ -697,7 +696,6 @@ def chat(self, system, history, 
gen_conf): for k in list(gen_conf.keys()): if k not in ["temperature", "top_p", "max_tokens"]: del gen_conf[k] - ans = "" try: response = self.client.chat.completions.create( @@ -707,7 +705,7 @@ def chat(self, system, history, gen_conf): ) ans = response.choices[0].message.content if response.choices[0].finish_reason == "length": - ans += "...\nFor the content length reason, it stopped, continue?" if self.is_english( + ans += "...\nFor the content length reason, it stopped, continue?" if is_english( [ans]) else "······\n由于长度的原因,回答被截断了,要继续吗?" return ans, response.usage.total_tokens except Exception as e: @@ -734,11 +732,20 @@ def chat_streamly(self, system, history, gen_conf): ans += resp.choices[0].delta.content total_tokens += 1 if resp.choices[0].finish_reason == "length": - ans += "...\nFor the content length reason, it stopped, continue?" if self.is_english( + ans += "...\nFor the content length reason, it stopped, continue?" if is_english( [ans]) else "······\n由于长度的原因,回答被截断了,要继续吗?" yield ans except Exception as e: yield ans + "\n**ERROR**: " + str(e) - yield total_tokens \ No newline at end of file + yield total_tokens + + +# OpenRouter exposes an OpenAI-compatible API, so chat()/chat_streamly() are inherited from Base +class OpenRouterChat(Base): + def __init__(self, key, model_name, base_url="https://openrouter.ai/api/v1"): + self.base_url = base_url or "https://openrouter.ai/api/v1" + self.client = OpenAI(base_url=self.base_url, api_key=key) + self.model_name = model_name + diff --git a/rag/llm/cv_model.py b/rag/llm/cv_model.py index 19843a352b..09b7347f4f 100644 --- a/rag/llm/cv_model.py +++ b/rag/llm/cv_model.py @@ -23,6 +23,8 @@ import os import base64 from io import BytesIO +import json +import requests from api.utils import get_uuid from api.utils.file_utils import get_project_base_directory @@ -212,7 +214,7 @@ def __init__(self, key, model_name="gemini-1.0-pro-vision-latest", lang="Chinese self.model = GenerativeModel(model_name=self.model_name) self.model._client = _client self.lang = lang - + def describe(self, image, max_tokens=2048): from PIL.Image import open gen_config = {'max_output_tokens':max_tokens} @@ -227,6 +229,63 @@ def describe(self, image, max_tokens=2048): ) return res.text,res.usage_metadata.total_token_count + +class OpenRouterCV(Base): + def __init__( + self, + key, + model_name, + lang="Chinese", + base_url="https://openrouter.ai/api/v1/chat/completions", + ): + self.model_name = model_name + self.lang = lang + self.base_url = base_url or "https://openrouter.ai/api/v1/chat/completions" + self.key = key + + def describe(self, image, max_tokens=300): + b64 = self.image2base64(image) + response = requests.post( + url=self.base_url, + headers={ + "Authorization": f"Bearer {self.key}", + }, + data=json.dumps( + { + "model": self.model_name, + "messages": self.prompt(b64), + "max_tokens": max_tokens, + } + ), + ) + response = response.json() + return ( + response["choices"][0]["message"]["content"].strip(), + response["usage"]["total_tokens"], + ) + + def prompt(self, b64): + return [ + { + "role": "user", + "content": [ + { + "type": "image_url", + "image_url": {"url": f"data:image/jpeg;base64,{b64}"}, + }, + { + "type": "text", + "text": ( + "请用中文详细描述一下图中的内容,比如时间,地点,人物,事情,人物心情等,如果有数据请提取出数据。" + if self.lang.lower() == "chinese" + else "Please describe the content of this picture, like where, when, who, what happen. If it has number data, please extract them out."
+ ), + }, + ], + } + ] + + class LocalCV(Base): def __init__(self, key, model_name="glm-4v", lang="Chinese", **kwargs): pass diff --git a/web/src/assets/svg/llm/open-router.svg b/web/src/assets/svg/llm/open-router.svg new file mode 100644 index 0000000000..ba2bd69349 --- /dev/null +++ b/web/src/assets/svg/llm/open-router.svg @@ -0,0 +1,18 @@ + [18 lines of SVG markup for the open-router icon omitted] diff --git a/web/src/pages/user-setting/setting-model/index.tsx b/web/src/pages/user-setting/setting-model/index.tsx index 6f4319e858..cdf0d53b04 100644 --- a/web/src/pages/user-setting/setting-model/index.tsx +++ b/web/src/pages/user-setting/setting-model/index.tsx @@ -63,6 +63,7 @@ const IconMap = { Bedrock: 'bedrock', Gemini:'gemini', Groq: 'Groq', + OpenRouter:'open-router' }; const LlmIcon = ({ name }: { name: string }) => {
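
Usage sketch (illustrative, not part of the diff above): a minimal example of how the new OpenRouter provider could be exercised once the patch is applied. It assumes the factory dict holding the chat classes in rag/llm/__init__.py is ChatModel (as used by api/apps/llm_app.py) and that the CV dict is named CvModel; it also assumes OpenRouterChat relies on the OpenAI-compatible chat() implementation inherited from Base in rag/llm/chat_model.py. The API key, model names, and image path below are placeholders.

    from rag.llm import ChatModel, CvModel

    # Chat: OpenRouterChat only configures an OpenAI client against
    # https://openrouter.ai/api/v1; the request itself is issued by the
    # inherited Base.chat(system, history, gen_conf).
    chat_mdl = ChatModel["OpenRouter"](
        "sk-or-xxxx",                        # placeholder OpenRouter API key
        "mistralai/mixtral-8x7b-instruct",   # any llm_name registered above
    )
    answer, total_tokens = chat_mdl.chat(
        None,
        [{"role": "user", "content": "Hello! How are you doing!"}],
        {"temperature": 0.9, "max_tokens": 50},
    )

    # Vision: OpenRouterCV.describe() base64-encodes the image, POSTs it with
    # requests to https://openrouter.ai/api/v1/chat/completions, and returns
    # (description_text, total_tokens).
    cv_mdl = CvModel["OpenRouter"](
        "sk-or-xxxx", "openai/gpt-4-vision-preview", lang="English"
    )
    with open("sample.jpg", "rb") as f:
        # assumes Base.image2base64 accepts raw JPEG bytes, as for the other CV wrappers
        description, tokens = cv_mdl.describe(f.read(), max_tokens=300)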