diff --git a/README.md b/README.md index 6af3a5251..63144d85a 100644 --- a/README.md +++ b/README.md @@ -1 +1,35 @@ # rag-tutorial-v2 + +## Running with Ollama + +1. Install the following packages: +```shell +$ pip3 install langchain +$ pip3 install chromadb +$ pip3 install pypdf +$ pip3 install pytest +$ pip3 install langchain-community +$ pip3 install -U langchain-chroma +``` + +2. Make sure you have Ollama installed on your machine by following the instructions in the [Ollama GitHub repository](https://github.com/ollama/ollama) + +3. Once Ollama has been downloaded, start the server in another terminal window: +```shell +$ ollama serve +``` + +4. Pull the models needed: +```shell +$ ollama pull nomic-embed-text +$ ollama pull mistral +``` + +5. Run the program. Below is an example: +```shell +$ python3 query_data.py "how much total money does a player start with in Monopoly?" +``` + +The answer will be shown in the terminal. + + diff --git a/get_embedding_function.py b/get_embedding_function.py index 79d04113b..c38284574 100644 --- a/get_embedding_function.py +++ b/get_embedding_function.py @@ -3,8 +3,8 @@ def get_embedding_function(): - embeddings = BedrockEmbeddings( - credentials_profile_name="default", region_name="us-east-1" - ) - # embeddings = OllamaEmbeddings(model="nomic-embed-text") + #embeddings = BedrockEmbeddings( + # credentials_profile_name="default", region_name="us-east-1" + #) + embeddings = OllamaEmbeddings(model="nomic-embed-text") return embeddings diff --git a/query_data.py b/query_data.py index 33299e582..9e5dd246b 100644 --- a/query_data.py +++ b/query_data.py @@ -1,5 +1,6 @@ import argparse -from langchain.vectorstores.chroma import Chroma +#from langchain.vectorstores.chroma import Chroma +from langchain_chroma import Chroma from langchain.prompts import ChatPromptTemplate from langchain_community.llms.ollama import Ollama