Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

remove /chat/fastchat API endpoint #2506

Merged
merged 1 commit into from
Dec 29, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 0 additions & 6 deletions server/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
from fastapi.middleware.cors import CORSMiddleware
from starlette.responses import RedirectResponse
from server.chat.chat import chat
from server.chat.openai_chat import openai_chat
from server.chat.search_engine_chat import search_engine_chat
from server.chat.completion import completion
from server.chat.feedback import chat_feedback
Expand Down Expand Up @@ -59,11 +58,6 @@ def mount_app_routes(app: FastAPI, run_mode: str = None):
summary="swagger 文档")(document)

# Tag: Chat
app.post("/chat/fastchat",
tags=["Chat"],
summary="与llm模型对话(直接与fastchat api对话)",
)(openai_chat)

app.post("/chat/chat",
tags=["Chat"],
summary="与llm模型对话(通过LLMChain)",
Expand Down
58 changes: 0 additions & 58 deletions server/chat/openai_chat.py

This file was deleted.

12 changes: 0 additions & 12 deletions tests/api/test_stream_chat_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,18 +48,6 @@ def dump_output(r, title):
}


def test_chat_fastchat(api="/chat/fastchat"):
    """Smoke-test the /chat/fastchat endpoint.

    Posts a streaming chat request built from the shared ``data["history"]``
    fixture plus one new user turn, and asserts only that the server answers
    with HTTP 200. The streamed body is dumped for manual inspection.
    """
    payload = {
        "stream": True,
        "messages": data["history"] + [{"role": "user", "content": "推荐一部科幻电影"}],
    }
    dump_input(payload, api)
    resp = requests.post(
        f"{api_base_url}{api}",
        headers=headers,
        json=payload,
        stream=True,
    )
    dump_output(resp, api)
    # Status code is the only assertion; content correctness is not checked here.
    assert resp.status_code == 200


def test_chat_chat(api="/chat/chat"):
url = f"{api_base_url}{api}"
dump_input(data, api)
Expand Down
36 changes: 0 additions & 36 deletions webui_pages/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,38 +259,6 @@ def get_prompt_template(
return self._get_response_value(response, value_func=lambda r: r.text)

# 对话相关操作

def chat_fastchat(
    self,
    messages: List[Dict],
    stream: bool = True,
    model: str = LLM_MODELS[0],
    temperature: float = TEMPERATURE,
    max_tokens: int = None,
    **kwargs: Any,
):
    '''
    Client-side wrapper for the api.py /chat/fastchat endpoint.

    Posts the OpenAI-style message list as a streaming request and
    returns a generator over the streamed response chunks
    (via self._httpx_stream2generator).
    '''
    body = dict(
        messages=messages,
        stream=stream,
        model=model,
        temperature=temperature,
        max_tokens=max_tokens,
    )

    resp = self.post(
        "/chat/fastchat",
        json=body,
        stream=True,
        **kwargs,
    )
    return self._httpx_stream2generator(resp)

def chat_chat(
self,
query: str,
Expand Down Expand Up @@ -1058,10 +1026,6 @@ def check_success_msg(data: Union[str, dict, list], key: str = "msg") -> str:
api = ApiRequest()
aapi = AsyncApiRequest()

# print(api.chat_fastchat(
# messages=[{"role": "user", "content": "hello"}]
# ))

# with api.chat_chat("你好") as r:
# for t in r.iter_text(None):
# print(t)
Expand Down