feat(opentrons-ai-server): update endpoint payload type
update endpoint payload type
koji committed May 21, 2024
1 parent f4ee5f5 commit 06146e7
Showing 3 changed files with 10 additions and 4 deletions.
6 changes: 3 additions & 3 deletions opentrons-ai-server/api/domain/openai_predict.py
@@ -94,15 +94,15 @@ class atomic_descr(BaseModel):
             descriptions.append(x)
         return descriptions

-    def refine_response(self, assitant_message: str) -> str:
-        if assitant_message is None:
+    def refine_response(self, assistant_message: str) -> str:
+        if assistant_message is None:
             return ""
         system_message: ChatCompletionMessageParam = {
             "role": "system",
             "content": f"{general_rules_1}\n Please leave useful comments for each command.",
         }

-        user_message: ChatCompletionMessageParam = {"role": "user", "content": assitant_message}
+        user_message: ChatCompletionMessageParam = {"role": "user", "content": assistant_message}

         response = self.client.chat.completions.create(
             model=self.settings.OPENAI_MODEL_NAME,
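
For orientation, a minimal usage sketch of the corrected `refine_response` signature follows; the module paths and the draft reply text are assumptions for illustration, not code from this commit.

```python
# Minimal sketch of the corrected signature; import paths are assumed to
# mirror the repository layout shown above, and the draft reply is made up.
from api.domain.openai_predict import OpenAIPredict
from api.settings import Settings  # assumed location of the Settings class

predictor = OpenAIPredict(settings=Settings.build())
draft_reply = "from opentrons import protocol_api  # ... first-pass protocol from the model"

# refine_response() re-prompts the model with general_rules_1 and returns a commented revision.
refined = predictor.refine_response(assistant_message=draft_reply)
```
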
2 changes: 1 addition & 1 deletion opentrons-ai-server/api/handler/function.py
@@ -29,7 +29,7 @@ def create_chat_completion(event: Dict[str, Any]) -> Dict[str, Any]:

     settings: Settings = Settings.build()
     openai: OpenAIPredict = OpenAIPredict(settings=settings)
-    response: Union[str, None] = openai.predict(prompt=body.message)
+    response: Union[str, None] = openai.predict(prompt=body.message, chat_completion_message_params=body.chat_history)

     if response is None or response == "":
         return create_response(HTTPStatus.NO_CONTENT, ChatResponse(reply="No response was generated", fake=body.fake).model_dump())
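
Each `Chat` entry in the request mirrors the role/content shape of the OpenAI client's `ChatCompletionMessageParam`, which is presumably why the history can be handed to `predict()` with little or no conversion. A rough sketch of that mapping (an assumed helper, not something this diff adds):

```python
from typing import List

from openai.types.chat import ChatCompletionMessageParam

from api.models.chat_request import Chat  # module path assumed from the repository layout


def to_message_params(history: List[Chat]) -> List[ChatCompletionMessageParam]:
    """Hypothetical helper: turn request-level Chat entries into the
    role/content dicts the OpenAI chat completions API expects."""
    return [{"role": chat.role, "content": chat.content} for chat in history]
```

Whether `predict()` consumes the `Chat` models as-is or converts them along these lines is not visible in this diff.
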
6 changes: 6 additions & 0 deletions opentrons-ai-server/api/models/chat_request.py
@@ -1,6 +1,12 @@
 from pydantic import BaseModel
+from typing import List


+class Chat(BaseModel):
+    role: str
+    content: str
+
 class ChatRequest(BaseModel):
     message: str
+    chat_history: List[Chat]
     fake: bool
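
Taken together, the endpoint payload now carries the running conversation alongside the new message. A minimal sketch of a request body the updated models would accept; the field values and the module path are illustrative assumptions, not part of this commit.

```python
# Illustrative payload validated against the updated ChatRequest model.
from api.models.chat_request import ChatRequest  # module path assumed from the repository layout

payload = {
    "message": "Now add a mix step after each transfer",
    "chat_history": [
        {"role": "user", "content": "Write an OT-2 protocol that transfers 50 uL across a plate"},
        {"role": "assistant", "content": "from opentrons import protocol_api\n# ..."},
    ],
    "fake": False,
}

body = ChatRequest.model_validate(payload)  # pydantic v2, consistent with model_dump() in the handler
assert body.chat_history[0].role == "user"
```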
