refine agent directories. (opea-project#1109)
lkk12014402 authored Jan 6, 2025
1 parent b933b66 commit cf90932
Showing 44 changed files with 43 additions and 43 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/docker/compose/agent-compose.yaml
@@ -3,7 +3,7 @@

# this file should be run in the root of the repo
services:
-agent-langchain:
+agent:
build:
-dockerfile: comps/agent/langchain/Dockerfile
-image: ${REGISTRY:-opea}/agent-langchain:${TAG:-latest}
+dockerfile: comps/agent/src/Dockerfile
+image: ${REGISTRY:-opea}/agent:${TAG:-latest}
@@ -21,19 +21,19 @@ COPY comps /home/user/comps

RUN pip install --no-cache-dir --upgrade pip setuptools && \
if [ ${ARCH} = "cpu" ]; then \
-pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu -r /home/user/comps/agent/langchain/requirements.txt; \
+pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu -r /home/user/comps/agent/src/requirements.txt; \
else \
-pip install --no-cache-dir -r /home/user/comps/agent/langchain/requirements.txt; \
+pip install --no-cache-dir -r /home/user/comps/agent/src/requirements.txt; \
fi

ENV PYTHONPATH=$PYTHONPATH:/home/user

USER root

-RUN mkdir -p /home/user/comps/agent/langchain/status && chown -R user /home/user/comps/agent/langchain/status
+RUN mkdir -p /home/user/comps/agent/src/status && chown -R user /home/user/comps/agent/src/status

USER user

-WORKDIR /home/user/comps/agent/langchain/
+WORKDIR /home/user/comps/agent/src/

ENTRYPOINT ["python", "agent.py"]
14 changes: 7 additions & 7 deletions comps/agent/langchain/README.md → comps/agent/src/README.md
@@ -11,7 +11,7 @@ We currently support the following types of agents. Please refer to the example
1. ReAct: use `react_langchain`, `react_langgraph`, or `react_llama` as the strategy. First introduced in this seminal [paper](https://arxiv.org/abs/2210.03629), the ReAct agent engages in "reason-act-observe" cycles to solve problems. Please refer to this [doc](https://python.langchain.com/v0.2/docs/how_to/migrate_agent/) to understand the differences between the langchain and langgraph versions of ReAct agents. See the table below for the validated LLMs for each ReAct strategy.
2. RAG agent: use the `rag_agent` or `rag_agent_llama` strategy. This agent is specifically designed to improve RAG performance: it can rephrase the query, check the relevancy of the retrieved context, and iterate if the context is not relevant. See the table below for the validated LLMs for each RAG agent strategy.
3. Plan and execute: use the `plan_execute` strategy. This type of agent first makes a step-by-step plan for a user request and then executes the plan sequentially (or in parallel, to be implemented in the future). If the execution results solve the problem, the agent outputs an answer; otherwise, it replans and executes again.
-4. SQL agent: use the `sql_agent_llama` or `sql_agent` strategy. This agent is specifically designed and optimized for answering questions about data in SQL databases. Users need to specify `db_name` and `db_path` for the agent to access the SQL database. For more technical details, read the descriptions [here](src/strategy/sqlagent/README.md).
+4. SQL agent: use the `sql_agent_llama` or `sql_agent` strategy. This agent is specifically designed and optimized for answering questions about data in SQL databases. Users need to specify `db_name` and `db_path` for the agent to access the SQL database. For more technical details, read the descriptions [here](integrations/strategy/sqlagent/README.md).
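Whichever strategy is selected, the agent is exposed through the same microservice API once started (see Section 2 for launch commands). The sketch below shows what a client call might look like; the port, endpoint path, and payload are assumptions taken from the launch and validation examples later in this README, so confirm them against Section 3 before relying on this.

```python
# Minimal client sketch. Assumptions: the agent is reachable on port 9090 as
# in the docker run examples below, the endpoint is /v1/chat/completions, and
# the payload mirrors the "query" field used by the validation calls in the
# test scripts.
import requests

url = "http://localhost:9090/v1/chat/completions"
payload = {"query": "What is OPEA?"}

resp = requests.post(url, json=payload, timeout=300)
resp.raise_for_status()
print(resp.text)
```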

**Note**:

@@ -60,7 +60,7 @@ Examples of how to register tools can be found in [Section 4](#-4-provide-your-o

```bash
cd GenAIComps/ # back to GenAIComps/ folder
-docker build -t opea/agent-langchain:latest -f comps/agent/langchain/Dockerfile . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
+docker build -t opea/agent:latest -f comps/agent/src/Dockerfile . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
```

#### 2.2.1 Start Agent microservices with TGI
@@ -78,7 +78,7 @@ docker run -d --runtime=habana --name "comps-tgi-gaudi-service" -p 8080:80 -v ./
docker logs comps-tgi-gaudi-service

# Agent: react_llama strategy
-docker run -d --runtime=runc --name="comps-langchain-agent-endpoint" -v $WORKPATH/comps/agent/langchain/tools:/home/user/comps/agent/langchain/tools -p 9090:9090 --ipc=host -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=tgi -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent-langchain:latest
+docker run -d --runtime=runc --name="comps-langchain-agent-endpoint" -v $WORKPATH/comps/agent/langchain/tools:/home/user/comps/agent/langchain/tools -p 9090:9090 --ipc=host -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=tgi -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent:latest

# check status
docker logs comps-langchain-agent-endpoint
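Beyond tailing the container logs, a quick probe can confirm the agent port is accepting connections before any requests are sent; a minimal sketch, assuming port 9090 from the command above:

```python
# Hypothetical readiness probe: poll the agent's port until it accepts TCP
# connections, instead of eyeballing `docker logs`.
import socket
import time

def wait_ready(host: str = "localhost", port: int = 9090, timeout: float = 120.0) -> bool:
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with socket.create_connection((host, port), timeout=5):
                return True
        except OSError:
            time.sleep(5)  # the container may still be initializing
    return False

if __name__ == "__main__":
    print("agent ready:", wait_ready())
```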
@@ -105,7 +105,7 @@ docker run -d --runtime=habana --rm --name "comps-vllm-gaudi-service" -p 8080:80
docker logs comps-vllm-gaudi-service

# Agent
-docker run -d --runtime=runc --name="comps-langchain-agent-endpoint" -v $WORKPATH/comps/agent/langchain/tools:/home/user/comps/agent/langchain/tools -p 9090:9090 --ipc=host -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=vllm -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent-langchain:latest
+docker run -d --runtime=runc --name="comps-langchain-agent-endpoint" -v $WORKPATH/comps/agent/langchain/tools:/home/user/comps/agent/langchain/tools -p 9090:9090 --ipc=host -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=vllm -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent:latest

# check status
docker logs comps-langchain-agent-endpoint
@@ -114,7 +114,7 @@ docker logs comps-langchain-agent-endpoint
> debug mode
>
> ```bash
-> docker run --rm --runtime=runc --name="comps-langchain-agent-endpoint" -v ./comps/agent/langchain/:/home/user/comps/agent/langchain/ -p 9090:9090 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=vllm -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent-langchain:latest
+> docker run --rm --runtime=runc --name="comps-langchain-agent-endpoint" -v ./comps/agent/langchain/:/home/user/comps/agent/langchain/ -p 9090:9090 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=vllm -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent:latest
> ```
## 🚀 3. Validate Microservice
@@ -189,7 +189,7 @@ def opea_rag_query(query):

```bash
# Agent
-docker run -d --runtime=runc --name="comps-langchain-agent-endpoint" -v my_tools:/home/user/comps/agent/langchain/tools -p 9090:9090 --ipc=host -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=tgi -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent-langchain:latest
+docker run -d --runtime=runc --name="comps-langchain-agent-endpoint" -v my_tools:/home/user/comps/agent/langchain/tools -p 9090:9090 --ipc=host -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e model=${model} -e ip_address=${ip_address} -e strategy=react_llama -e llm_endpoint_url=http://${ip_address}:8080 -e llm_engine=tgi -e recursion_limit=15 -e require_human_feedback=false -e tools=/home/user/comps/agent/langchain/tools/custom_tools.yaml opea/agent:latest
```

- validate with my_tools
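For reference, a user-provided tool is ultimately just a Python function the agent can invoke, in the same spirit as the `opea_rag_query` example earlier in this section. The sketch below is illustrative: the function name and endpoint URL are placeholders, and the actual wiring happens through the mounted `custom_tools.yaml`.

```python
# Illustrative custom tool. The function name and endpoint URL are
# placeholders: point this at your own service and register the tool in
# custom_tools.yaml so the agent can discover it.
import requests

def search_knowledge_base(query: str) -> str:
    """Return retrieved context for a query from a retrieval service."""
    resp = requests.post(
        "http://localhost:8889/v1/retrievaltool",  # placeholder endpoint
        json={"text": query},
        timeout=60,
    )
    resp.raise_for_status()
    return resp.json().get("text", "")
```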
@@ -205,5 +205,5 @@ data: [DONE]

## 5. Customize agent strategy

-Advanced developers who want to implement their own agent strategies can add a separate folder under `src/strategy`, implement an agent by inheriting from the `BaseAgent` class, and register the strategy in `src/agent.py`. The architecture of this agent microservice is shown in the diagram below for reference.
+Advanced developers who want to implement their own agent strategies can add a separate folder under `integrations/strategy`, implement an agent by inheriting from the `BaseAgent` class, and register the strategy in `integrations/agent.py`. The architecture of this agent microservice is shown in the diagram below for reference.
![Architecture Overview](agent_arch.jpg)
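A rough skeleton of such a strategy is sketched below. The import path follows the layout introduced in this commit, but the module name and method signatures are assumptions; mirror one of the existing strategies under `integrations/strategy` for the actual interface.

```python
# Hypothetical custom-strategy skeleton. Method names are illustrative;
# copy the interface from an existing strategy (e.g. the react strategy).
from comps.agent.src.integrations.strategy.base_agent import BaseAgent  # path assumed

class MyCustomAgent(BaseAgent):
    def __init__(self, args, **kwargs):
        super().__init__(args, **kwargs)
        # build the planning/execution graph for your strategy here

    async def stream_generator(self, query, config):
        # yield intermediate steps, then the final answer, SSE-style
        yield "data: final answer\n\n"
        yield "data: [DONE]\n\n"
```

A new `strategy` name then needs to be mapped to this class in `integrations/agent.py` so that `instantiate_agent` can construct it.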
8 changes: 4 additions & 4 deletions comps/agent/langchain/agent.py → comps/agent/src/agent.py
@@ -14,10 +14,10 @@
sys.path.append(comps_path)

from comps import CustomLogger, GeneratedDoc, LLMParamsDoc, ServiceType, opea_microservices, register_microservice
-from comps.agent.langchain.src.agent import instantiate_agent
-from comps.agent.langchain.src.global_var import assistants_global_kv, threads_global_kv
-from comps.agent.langchain.src.thread import instantiate_thread_memory, thread_completion_callback
-from comps.agent.langchain.src.utils import get_args
+from comps.agent.src.integrations.agent import instantiate_agent
+from comps.agent.src.integrations.global_var import assistants_global_kv, threads_global_kv
+from comps.agent.src.integrations.thread import instantiate_thread_memory, thread_completion_callback
+from comps.agent.src.integrations.utils import get_args
from comps.cores.proto.api_protocol import (
AssistantsObject,
ChatCompletionRequest,
File renamed without changes
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes
File renamed without changes
6 changes: 3 additions & 3 deletions comps/agent/langchain/test.py → comps/agent/src/test.py
@@ -8,11 +8,11 @@

import pandas as pd
import requests
-from src.utils import get_args
+from integrations.utils import get_args


def test_agent_local(args):
-    from src.agent import instantiate_agent
+    from integrations.agent import instantiate_agent

if args.q == 0:
df = pd.DataFrame({"query": ["What is the Intel OPEA Project?"]})
@@ -148,7 +148,7 @@ def process_request(api, query, is_stream=False):


def test_ut(args):
-    from src.tools import get_tools_descriptions
+    from integrations.tools import get_tools_descriptions

tools = get_tools_descriptions("tools/custom_tools.py")
for tool in tools:
@@ -5,7 +5,7 @@
import json

import requests
-from src.utils import get_args
+from integrations.utils import get_args


def test_assistants_http(args):
File renamed without changes.
File renamed without changes.
File renamed without changes.
10 changes: 5 additions & 5 deletions tests/agent/sql_agent_test/test_sql_agent.sh
@@ -15,7 +15,7 @@ LOG_PATH="$WORKPATH/tests"
export WORKDIR=$(dirname "$WORKPATH")
echo $WORKDIR

-export agent_image="opea/agent-langchain:comps"
+export agent_image="opea/agent:comps"
export agent_container_name="test-comps-agent-endpoint"

export ip_address=$(hostname -I | awk '{print $1}')
@@ -29,7 +29,7 @@ export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-70B-Instruct"
export LLM_ENDPOINT_URL="http://${ip_address}:${vllm_port}"
export temperature=0.01
export max_new_tokens=4096
-export TOOLSET_PATH=$WORKPATH/comps/agent/langchain/tools/ # $WORKPATH/tests/agent/sql_agent_test/
+export TOOLSET_PATH=$WORKPATH/comps/agent/src/tools/ # $WORKPATH/tests/agent/sql_agent_test/
echo "TOOLSET_PATH=${TOOLSET_PATH}"
export recursion_limit=15
export db_name=california_schools
@@ -75,12 +75,12 @@ function build_docker_images() {
echo "Building the docker images"
cd $WORKPATH
echo $WORKPATH
-docker build --no-cache -t $agent_image --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy -f comps/agent/langchain/Dockerfile .
+docker build --no-cache -t $agent_image --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy -f comps/agent/src/Dockerfile .
if [ $? -ne 0 ]; then
echo "opea/agent-langchain built fail"
echo "opea/agent built fail"
exit 1
else
echo "opea/agent-langchain built successful"
echo "opea/agent built successful"
fi
}

32 changes: 16 additions & 16 deletions tests/agent/test_agent_langchain_on_intel_hpu.sh
@@ -20,7 +20,7 @@ ls $vllm_volume

export WORKPATH=$WORKPATH

-export agent_image="opea/agent-langchain:comps"
+export agent_image="opea/agent:comps"
export agent_container_name="test-comps-agent-endpoint"

export model=meta-llama/Meta-Llama-3.1-70B-Instruct
@@ -31,20 +31,20 @@ export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-70B-Instruct"
export LLM_ENDPOINT_URL="http://${ip_address}:${vllm_port}"
export temperature=0.01
export max_new_tokens=4096
-export TOOLSET_PATH=$WORKPATH/comps/agent/langchain/tools/
+export TOOLSET_PATH=$WORKPATH/comps/agent/src/tools/
echo "TOOLSET_PATH=${TOOLSET_PATH}"
export recursion_limit=15

function build_docker_images() {
echo "Building the docker images"
cd $WORKPATH
echo $WORKPATH
-docker build --no-cache -t $agent_image --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy -f comps/agent/langchain/Dockerfile .
+docker build --no-cache -t $agent_image --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy -f comps/agent/src/Dockerfile .
if [ $? -ne 0 ]; then
echo "opea/agent-langchain built fail"
echo "opea/agent built fail"
exit 1
else
echo "opea/agent-langchain built successful"
echo "opea/agent built successful"
fi
}

@@ -164,8 +164,8 @@ function start_vllm_service_70B() {
echo "Service started successfully"
}

-function start_react_langchain_agent_service() {
-echo "Starting react_langchain agent microservice"
+function start_react_agent_service() {
+echo "Starting react agent microservice"
docker compose -f $WORKPATH/tests/agent/react_langchain.yaml up -d
sleep 120s
docker logs test-comps-agent-endpoint
@@ -174,7 +174,7 @@ function start_react_langchain_agent_service() {


function start_react_langgraph_agent_service_openai() {
echo "Starting react_langchain agent microservice"
echo "Starting react agent microservice"
docker compose -f $WORKPATH/tests/agent/react_langgraph_openai.yaml up -d
sleep 120s
docker logs test-comps-agent-endpoint
@@ -253,7 +253,7 @@ function validate_microservice() {
# "query": "What is OPEA?"
# }')
CONTENT=$(python3 $WORKPATH/tests/agent/test.py)
-local EXIT_CODE=$(validate "$CONTENT" "OPEA" "test-agent-langchain")
+local EXIT_CODE=$(validate "$CONTENT" "OPEA" "test-agent")
echo "$EXIT_CODE"
local EXIT_CODE="${EXIT_CODE:0-1}"
echo "return value is $EXIT_CODE"
@@ -270,7 +270,7 @@ function validate_microservice_streaming() {
function validate_microservice_streaming() {
echo "Testing agent service - chat completion API"
CONTENT=$(python3 $WORKPATH/tests/agent/test.py --stream)
-local EXIT_CODE=$(validate "$CONTENT" "OPEA" "test-agent-langchain")
+local EXIT_CODE=$(validate "$CONTENT" "OPEA" "test-agent")
echo "$EXIT_CODE"
local EXIT_CODE="${EXIT_CODE:0-1}"
echo "return value is $EXIT_CODE"
@@ -286,16 +286,16 @@ function validate_microservice_streaming() {
function validate_assistant_api() {
cd $WORKPATH
echo "Testing agent service - assistant api"
-local CONTENT=$(python3 comps/agent/langchain/test_assistant_api.py --ip_addr ${ip_address} --ext_port 9095 --assistants_api_test --query 'What is Intel OPEA project?' 2>&1 | tee ${LOG_PATH}/test-agent-langchain-assistantsapi.log)
-local EXIT_CODE=$(validate "$CONTENT" "OPEA" "test-agent-langchain-assistantsapi")
+local CONTENT=$(python3 comps/agent/src/test_assistant_api.py --ip_addr ${ip_address} --ext_port 9095 --assistants_api_test --query 'What is Intel OPEA project?' 2>&1 | tee ${LOG_PATH}/test-agent-assistantsapi.log)
+local EXIT_CODE=$(validate "$CONTENT" "OPEA" "test-agent-assistantsapi")
echo "$EXIT_CODE"
local EXIT_CODE="${EXIT_CODE:0-1}"
echo "return value is $EXIT_CODE"
if [ "$EXIT_CODE" == "1" ]; then
echo "==================TGI logs ======================"
docker logs comps-tgi-gaudi-service
echo "==================Agent logs ======================"
-docker logs comps-langchain-agent-endpoint
+docker logs comps-agent-endpoint
exit 1
fi
}
@@ -357,7 +357,7 @@ function main() {
build_vllm_docker_images

# ==================== Tests with 70B model ====================
-# RAG agent, react_llama, react_langchain, assistant apis
+# RAG agent, react_llama, react, assistant apis

start_vllm_service_70B

@@ -376,8 +376,8 @@
echo "============================================="


-# # # test react_langchain
-start_react_langchain_agent_service
+# # # test react
+start_react_agent_service
echo "=============Testing ReAct Langchain============="
validate_microservice_streaming
validate_assistant_api
