Commit ac591da: save

AlisoSouza committed Dec 11, 2024
1 parent a612355
Showing 5 changed files with 76 additions and 44 deletions.
1 change: 1 addition & 0 deletions nexus/actions/models.py
@@ -74,6 +74,7 @@ class Flow(models.Model):
)

editable = models.BooleanField(default=True)
send_llm_response_to_flow = models.BooleanField(default=False)

class Meta:
constraints = [
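Adding this column to the Flow model implies a schema migration, which is not among the five files in this commit. A minimal sketch of what such a migration could look like, assuming the Django app label is "actions" and with a placeholder dependency:

    # Sketch only, not part of this commit; app label and dependency are assumptions.
    from django.db import migrations, models


    class Migration(migrations.Migration):

        dependencies = [
            ("actions", "0001_initial"),  # placeholder; the real dependency will differ
        ]

        operations = [
            migrations.AddField(
                model_name="flow",
                name="send_llm_response_to_flow",
                field=models.BooleanField(default=False),
            ),
        ]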
11 changes: 8 additions & 3 deletions router/clients/flows/http/flow_start.py
@@ -19,19 +19,24 @@ def start_flow(
user: str,
urns: List,
user_message: str,
llm_message: str = None,
msg_event: dict = None,
attachments: list = None
) -> None:

url = f"{self.__host}/api/v2/internals/flow_starts/"
params = {
"message": user_message,
}

if llm_message:
params.update({"answer": llm_message})

payload = {
"user": user,
"flow": flow.uuid,
"urns": urns,
"params": {
"message": user_message,
}
"params": params
}

if msg_event:
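The new optional llm_message argument is forwarded to the flow-start request as an extra "answer" parameter alongside the user message. A hedged sketch of a call site (the client class name, its constructor, and the URN value are assumptions, not taken from this commit):

    # Sketch only: illustrates the new keyword argument.
    client = FlowStartHTTPClient(host="https://flows.example.org", internal_token="...")  # hypothetical constructor
    client.start_flow(
        flow=flow,                           # FlowDTO; flow.uuid is sent as "flow"
        user="bot@example.org",              # hypothetical user identifier
        urns=["whatsapp:5511999999999"],     # hypothetical contact URN
        user_message=message.text,
        llm_message=llm_response,            # when truthy, sent as params["answer"]
    )

When llm_message is None (the default), the request body is identical to what was sent before this change.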
1 change: 1 addition & 0 deletions router/entities/flow.py
@@ -9,3 +9,4 @@ class FlowDTO:
prompt: str
fallback: str
content_base_uuid: str
send_llm_response_to_flow: bool = False
3 changes: 2 additions & 1 deletion router/repositories/orm/__init__.py
@@ -104,7 +104,8 @@ def get_project_flow_by_name(
name=flow.name,
prompt=flow.prompt,
fallback=flow.fallback,
content_base_uuid=str(flow.content_base.uuid)
content_base_uuid=str(flow.content_base.uuid),
send_llm_response_to_flow=flow.send_llm_response_to_flow,
)

def project_flow_fallback(
104 changes: 64 additions & 40 deletions router/route.py
@@ -64,54 +64,57 @@ def route(
log_usecase,
message_log=None
):
try:
content_base: ContentBaseDTO = content_base_repository.get_content_base_by_project(message.project_uuid)

if classification == Classifier.CLASSIFICATION_OTHER:

print("[ + Fallback + ]")

fallback_flow: FlowDTO = flows_repository.project_flow_fallback(fallback=True)

if settings.USE_REDIS_CACHE_CONTEXT:
last_messages: List[ContactMessageDTO] = message_logs_repository.list_cached_messages(message.project_uuid, message.contact_urn)
else:
last_messages: List[ContactMessageDTO] = message_logs_repository.list_last_messages(message.project_uuid, message.contact_urn, 5)

agent: AgentDTO = content_base_repository.get_agent(content_base.uuid)
agent = agent.set_default_if_null()

instructions: List[InstructionDTO] = content_base_repository.list_instructions(content_base.uuid)
instructions: List[str] = [instruction.instruction for instruction in instructions]
def get_last_messages():
if settings.USE_REDIS_CACHE_CONTEXT:
last_messages: List[ContactMessageDTO] = message_logs_repository.list_cached_messages(message.project_uuid, message.contact_urn)
else:
last_messages: List[ContactMessageDTO] = message_logs_repository.list_last_messages(message.project_uuid, message.contact_urn, 5)

return last_messages

def get_agent():
agent: AgentDTO = content_base_repository.get_agent(content_base.uuid)
agent = agent.set_default_if_null()
return agent

def get_instructions():
instructions: List[InstructionDTO] = content_base_repository.list_instructions(content_base.uuid)
instructions: List[str] = [instruction.instruction for instruction in instructions]

if instructions == []:
instructions += settings.DEFAULT_INSTRUCTIONS

return instructions

def get_clean_chunks(full_chunks):
chunks: List[str] = []
for chunk in full_chunks:
full_page = chunk.get("full_page").replace("\x00", "\uFFFD")
try:
full_page.encode("latin-1")
chunks.append(full_page)
except UnicodeEncodeError:
full_page = fix_encoding(full_page)
chunks.append(full_page)

chunk["full_page"] = full_page
return chunks

def get_llm_response(bad_words_filter: bool = True):
last_messages: List[ContactMessageDTO] = get_last_messages()
agent: AgentDTO = get_agent()
instructions: List[str] = get_instructions()

if instructions == []:
instructions += settings.DEFAULT_INSTRUCTIONS

# TODO: Implement after changes on create_base_brain_structure usecase.
# response_language: str = get_language_codes(llm_config.language)

# if llm_config.model.lower() != "chatgpt":
# instructions.append(f"Sempre responda em {response_language}")
print(f"[+ Instructions: {instructions} +]")

full_chunks: List[Dict] = get_chunks(
indexer,
text=message.text,
content_base_uuid=content_base.uuid
)

print(f"[+ Instructions: {instructions} +]")

chunks: List[str] = []
for chunk in full_chunks:
full_page = chunk.get("full_page").replace("\x00", "\uFFFD")
try:
full_page.encode("latin-1")
chunks.append(full_page)
except UnicodeEncodeError:
full_page = fix_encoding(full_page)
chunks.append(full_page)

chunk["full_page"] = full_page
chunks: List[str] = get_clean_chunks(full_chunks)

print(f"[ + Chunks: {chunks} + ]")

@@ -125,7 +128,25 @@ def route(
last_messages=last_messages,
)

llm_response = bad_words_filter(llm_response)
if bad_words_filter:
llm_response = bad_words_filter(llm_response)

return llm_response

try:
content_base: ContentBaseDTO = content_base_repository.get_content_base_by_project(message.project_uuid)

if classification == Classifier.CLASSIFICATION_OTHER:
# print("[ + Fallback + ]")
# fallback_flow: FlowDTO = flows_repository.project_flow_fallback(fallback=True)
llm_response: str = get_llm_response()


# TODO: Implement after changes on create_base_brain_structure usecase.
# response_language: str = get_language_codes(llm_config.language)

# if llm_config.model.lower() != "chatgpt":
# instructions.append(f"Sempre responda em {response_language}")

print(f"[+ LLM Response: {llm_response} +]")

@@ -175,6 +196,9 @@ def route(

flow: FlowDTO = flows_repository.get_project_flow_by_name(name=classification)

if flow.send_llm_response_to_flow:
pass

log_usecase.update_log_field(
project_id=message.project_uuid,
content_base_id=content_base.uuid,
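The route.py changes leave the new branch "if flow.send_llm_response_to_flow:" as a placeholder (pass). A hedged sketch of how that branch might eventually use the pieces introduced in this commit, forwarding the generated answer to the started flow only when the flag is enabled (the flows client variable and the user identifier are assumptions based on the diff, not code from this commit):

    # Sketch only: one possible implementation of the placeholder branch.
    llm_message = None
    if flow.send_llm_response_to_flow:
        # Forward the LLM answer so the started flow receives it as params["answer"].
        llm_message = llm_response

    flows_client.start_flow(              # hypothetical client instance available inside route()
        flow=flow,
        user=flow_user_email,             # hypothetical; whichever user identifier route() uses
        urns=[message.contact_urn],
        user_message=message.text,
        llm_message=llm_message,
    )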