fix self deployed llm lost (infiniflow#2510)
### What problem does this PR solve?

Fixes infiniflow#2509: models from self-deployed providers (Ollama, Xinference, LocalAI, LM-Studio) were treated as unavailable because the availability check only recognized Youdao, FastEmbed, and BAAI.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
KevinHuSh authored Sep 20, 2024
1 parent 8d3e6e9 commit 33d6361
Showing 1 changed file with 2 additions and 1 deletion.
api/apps/llm_app.py: 2 additions & 1 deletion

```diff
@@ -305,6 +305,7 @@ def my_llms():
 @manager.route('/list', methods=['GET'])
 @login_required
 def list_app():
+    self_deploied = ["Youdao","FastEmbed", "BAAI", "Ollama", "Xinference", "LocalAI", "LM-Studio"]
     model_type = request.args.get("model_type")
     try:
         objs = TenantLLMService.query(tenant_id=current_user.id)
@@ -313,7 +314,7 @@ def list_app():
         llms = [m.to_dict()
                 for m in llms if m.status == StatusEnum.VALID.value]
         for m in llms:
-            m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in ["Youdao","FastEmbed", "BAAI"]
+            m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in self_deploied
 
         llm_set = set([m["llm_name"] for m in llms])
         for o in objs:
```
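For context, here is a minimal, self-contained sketch of the check this patch changes. The provider names mirror the diff above; `mark_available`, the sample `llms` list, and the `facts` set are hypothetical stand-ins for the values that `list_app()` actually builds from `TenantLLMService` and `LLMService`:

```python
# Providers that run locally / self-deployed and therefore need no stored API key.
# The list mirrors `self_deploied` in the diff above.
SELF_DEPLOYED = ["Youdao", "FastEmbed", "BAAI", "Ollama", "Xinference",
                 "LocalAI", "LM-Studio"]

def mark_available(llms, facts):
    """Flag each model dict as available if its factory (fid) either has
    credentials configured for the tenant (in `facts`) or is self-deployed."""
    for m in llms:
        m["available"] = (
            m["fid"] in facts
            or m["llm_name"].lower() == "flag-embedding"
            or m["fid"] in SELF_DEPLOYED
        )
    return llms

# Hypothetical example: an Ollama model with no API key on record is now
# marked available, where before the fix it would have been treated as
# unavailable and effectively lost from the model list.
llms = [
    {"fid": "Ollama", "llm_name": "llama3", "available": False},
    {"fid": "OpenAI", "llm_name": "gpt-4o", "available": False},
]
facts = {"OpenAI"}  # factories the tenant has configured credentials for
print(mark_available(llms, facts))
```

With the extended list, models from self-deployed providers are reported as available even when no API key is stored for the tenant, rather than being treated as unavailable.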
