Skip to content

Commit

Permalink
Use model_dump
Browse files Browse the repository at this point in the history
  • Loading branch information
Josh-XT committed Oct 4, 2023
1 parent 9e4b9a0 commit 45a2766
Showing 1 changed file with 6 additions and 4 deletions.
10 changes: 6 additions & 4 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,10 +88,10 @@ class ChatCompletionsResponse(BaseModel):
)
async def chat_completions(c: ChatCompletions, user=Depends(verify_api_key)):
    """Handle an OpenAI-style chat-completions request.

    Args:
        c: Parsed request body (ChatCompletions model); ``c.stream``
            selects between a single JSON response and a streamed one.
        user: Injected by ``Depends(verify_api_key)`` — presumably the
            authenticated caller; not used directly in this handler.
            TODO confirm against verify_api_key.

    Returns:
        ChatCompletionsResponse for non-streaming requests, otherwise a
        StreamingResponse emitting server-sent events.
    """
    # model_dump() is the Pydantic v2 replacement for the deprecated .dict().
    if not c.stream:
        return ChatCompletionsResponse(
            LLM(**c.model_dump()).chat(messages=c.messages)
        )
    # Streaming path: wrap the generation in an SSE stream.
    return StreamingResponse(
        streaming_generation(data=LLM(**c.model_dump()).chat(messages=c.messages)),
        media_type="text/event-stream",
    )

Expand Down Expand Up @@ -134,10 +134,12 @@ class CompletionsResponse(BaseModel):
)
async def completions(c: Completions, user=Depends(verify_api_key)):
    """Handle an OpenAI-style text-completions request.

    Args:
        c: Parsed request body (Completions model); ``c.stream`` selects
            between a single JSON response and a streamed one.
        user: Injected by ``Depends(verify_api_key)`` — presumably the
            authenticated caller; not used directly in this handler.
            TODO confirm against verify_api_key.

    Returns:
        CompletionsResponse for non-streaming requests, otherwise a
        StreamingResponse emitting server-sent events.
    """
    # model_dump() is the Pydantic v2 replacement for the deprecated .dict().
    if not c.stream:
        return CompletionsResponse(
            LLM(**c.model_dump()).completion(prompt=c.prompt)
        )
    # Streaming path: wrap the generation in an SSE stream.
    return StreamingResponse(
        streaming_generation(data=LLM(**c.model_dump()).completion(prompt=c.prompt)),
        media_type="text/event-stream",
    )

Expand Down

0 comments on commit 45a2766

Please sign in to comment.