This repository has been archived by the owner on Mar 26, 2024. It is now read-only.

Merge pull request #27 from mertbozkir/grace/dev
Final PR for hackathon! 🧪
mertbozkir authored Jun 25, 2023
2 parents 8388e79 + 7a6b642 commit 793b628
Showing 3 changed files with 23 additions and 13 deletions.
23 changes: 16 additions & 7 deletions handlers/userinput.py
@@ -1,28 +1,37 @@
 import streamlit as st
+import ast
 from public import tpl_bot, tpl_user
 
 from .audio import handle_text_2_speech
 
 
 def handle_userinput(user_question):
     response = st.session_state.conversation({'question': user_question})
-    st.session_state.chat_history = response['chat_history']
+    chat_history_list = response['chat_history']
 
-    chat_history = st.session_state.chat_history
+    # Extract content from each message object
+    for i, message in enumerate(chat_history_list):
+        st.session_state.chat_history.insert(i, message.content)
+    # only keep the latest 12 chat history
+    if len(st.session_state.chat_history) >= 12:
+        st.session_state.chat_history = st.session_state.chat_history[:-2]
 
+    chat_history = st.session_state.chat_history
+    print(f"length of chat_history is {len(chat_history)}")
     for i, message in enumerate(chat_history):
-        if i % 2 == 0:  # User's message #FIXME
+        if i % 2 == 0:  # User's message
+            print(f'User question is {message}')
             st.write(
                 tpl_user.replace(
-                    '{{MSG}}', message.content,
+                    '{{MSG}}', message,
                 ), unsafe_allow_html=True,
             )
         else:  # AI message
             st.write(
                 tpl_bot.replace(
-                    '{{MSG}}', message.content,
+                    '{{MSG}}', message,
                 ), unsafe_allow_html=True,
             )
 
-    if len(chat_history) > 0:
-        handle_text_2_speech(chat_history[-1].content)
+    # if len(chat_history) > 0:
+    #     handle_text_2_speech(chat_history[-1].content)
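For orientation, here is a minimal, Streamlit-free sketch of the history bookkeeping the new handler performs. The FakeMessage class and the sample strings are hypothetical stand-ins for the chain's message objects, and a plain list stands in for st.session_state.chat_history:

from dataclasses import dataclass


@dataclass
class FakeMessage:
    # Hypothetical stand-in for a LangChain message object.
    content: str


def update_history(history, chat_history_list):
    # Extract content from each message object, inserting at the
    # front of the session list, exactly as the handler does.
    for i, message in enumerate(chat_history_list):
        history.insert(i, message.content)
    # Only keep the latest 12 entries, trimming two at a time.
    if len(history) >= 12:
        history = history[:-2]
    return history


history = []
turn = [
    FakeMessage('What is a DVC stage?'),
    FakeMessage('A stage is one step of a DVC pipeline.'),
]
history = update_history(history, turn)
print(history)  # the turn's contents, stored as plain strings

Storing plain strings rather than message objects is what lets the render loop pass message straight into the templates, which is why the .content accesses in the diff above were dropped.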
11 changes: 6 additions & 5 deletions utils/ai/open_ai.py
@@ -54,11 +54,12 @@ def upsert(data) -> Pinecone:
     return vectorstore
 
 
-def create_or_get_conversation_chain(vectorstore) -> BaseConversationalRetrievalChain:
-    template = """/
-    Can you give us the results as markdown code, in a funny way?
+def create_or_get_conversation_chain(vectorstore):
+    template = """
+    Return results as markdown code?
     """
-    llm = ChatOpenAI(model=OPENAI_CHAT_MODEL)
+    #llm = ChatOpenAI(model=OPENAI_CHAT_MODEL)
+    llm = ChatOpenAI()
     memory = ConversationBufferMemory(
         memory_key='chat_history', return_messages=True,
     )
@@ -70,5 +70,5 @@ def create_or_get_conversation_chain(vectorstore) -> BaseConversationalRetrievalChain:
         memory=memory,
         condense_question_prompt=prompt_template,
     )
-    ic(f'conversation_chain is {conversation_chain}')
+    # ic(f'conversation_chain is {conversation_chain}')
     return conversation_chain
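Taken together, the two hunks leave the function roughly as sketched below, assuming the 2023-era LangChain API this file imports from. The PromptTemplate construction is a hypothetical fill-in for the code elided between the hunks, with {chat_history} and {question} placeholders added so the condense step has variables to fill (the committed template string itself contains none):

from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate


def create_or_get_conversation_chain(vectorstore):
    # Hypothetical reconstruction of the elided prompt setup; a
    # condense-question prompt needs {chat_history} and {question}.
    prompt_template = PromptTemplate.from_template(
        'Given this chat history:\n{chat_history}\n'
        'Rephrase the follow-up as a standalone question, '
        'returning results as markdown code: {question}',
    )
    llm = ChatOpenAI()  # library default model; reads OPENAI_API_KEY from env
    memory = ConversationBufferMemory(
        memory_key='chat_history', return_messages=True,
    )
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
        condense_question_prompt=prompt_template,
    )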
2 changes: 1 addition & 1 deletion views/home.py
@@ -16,7 +16,7 @@ def home():
     if 'conversation' not in st.session_state:
         st.session_state.conversation = None
     if 'chat_history' not in st.session_state:
-        st.session_state.chat_history = None
+        st.session_state.chat_history = []
 
     st.header('Chat based on open-source documentation! :globe_with_meridians:')
     user_question = st.text_input('Ask a question about your dvc pipeline:')
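The switch from None to [] matters because the reworked handler above now calls st.session_state.chat_history.insert(...) on the very first question, which raises AttributeError on a None default. A minimal, Streamlit-free repro:

chat_history = None  # the old default
try:
    chat_history.insert(0, 'hello')
except AttributeError as exc:
    print(f'None default breaks the first turn: {exc}')

chat_history = []  # the new default
chat_history.insert(0, 'hello')  # safe on the very first question
print(chat_history)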
