Skip to content

Commit

Permalink
Fix intent detection code issue (#651)
Browse files Browse the repository at this point in the history
* update lvm tgi llama requirements

Signed-off-by: letonghan <[email protected]>

* fix intent detection issue

Signed-off-by: letonghan <[email protected]>

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Signed-off-by: letonghan <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
letonghan and pre-commit-ci[bot] authored Sep 10, 2024
1 parent 17bfb30 commit 4c0f527
Show file tree
Hide file tree
Showing 5 changed files with 28 additions and 16 deletions.
3 changes: 2 additions & 1 deletion comps/intent_detection/langchain/intent_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,8 @@ def llm_generate(input: LLMParamsDoc):
timeout=600,
)

prompt = PromptTemplate(template=IntentTemplate.generate_intent_template, input_variables=["query"])
prompt_template = 'Please identify the intent of the user query. You may only respond with "chitchat" or "QA" without explanations or engaging in conversation.### User Query: {query}, ### Response: '
prompt = PromptTemplate(template=prompt_template, input_variables=["query"])

llm_chain = LLMChain(prompt=prompt, llm=llm)

Expand Down
1 change: 1 addition & 0 deletions comps/intent_detection/langchain/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,4 @@ opentelemetry-exporter-otlp
opentelemetry-sdk
prometheus-fastapi-instrumentator
shortuuid
uvicorn
8 changes: 0 additions & 8 deletions comps/intent_detection/langchain/template.py

This file was deleted.

1 change: 1 addition & 0 deletions comps/lvms/tgi-llava/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ datasets
docarray[full]
fastapi
huggingface_hub
langchain-core
opentelemetry-api
opentelemetry-exporter-otlp
opentelemetry-sdk
Expand Down
31 changes: 24 additions & 7 deletions tests/intent_detection/test_intent_detection_langchain.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,10 @@ set -xe
WORKPATH=$(dirname "$PWD")
LOG_PATH="$WORKPATH/tests"
ip_address=$(hostname -I | awk '{print $1}')

function build_docker_images() {
cd $WORKPATH
docker build --no-cache -t opea/llm-tgi:latest -f comps/intent_detection/langchain/Dockerfile .
docker build --no-cache -t opea/intent-detection:comps -f comps/intent_detection/langchain/Dockerfile .
}

function start_service() {
Expand All @@ -22,8 +23,19 @@ function start_service() {
export TGI_LLM_ENDPOINT="http://${ip_address}:${tgi_endpoint}"
intent_port=5043
unset http_proxy
docker run -d --name="test-comps-intent-server" -p ${intent_port}:9000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e TGI_LLM_ENDPOINT=$TGI_LLM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/llm-tgi:latest
sleep 5m
docker run -d --name="test-comps-intent-server" -p ${intent_port}:9000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e TGI_LLM_ENDPOINT=$TGI_LLM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/intent-detection:comps

# check whether tgi is fully ready
n=0
until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
docker logs test-comps-intent-tgi-endpoint > ${LOG_PATH}/tgi.log
n=$((n+1))
if grep -q Connected ${LOG_PATH}/tgi.log; then
break
fi
sleep 5s
done
sleep 5s
}

function validate_microservice() {
Expand All @@ -33,11 +45,16 @@ function validate_microservice() {
-d '{"query":"What is Deep Learning?","max_new_tokens":10,"top_k":1,"temperature":0.001,"streaming":false}' \
-H 'Content-Type: application/json')

echo "==============="
echo $result
if [[ $result == *"QA"* ]]; then
echo $result
echo "Result correct."
else
echo "Result wrong. Received was $result"
docker logs test-comps-intent-server > ${LOG_PATH}/intent_detection.log
docker logs test-comps-intent-tgi-endpoint > ${LOG_PATH}/tgi.log
exit 1
fi

docker logs test-comps-intent-server >> ${LOG_PATH}/intent_detection.log
docker logs test-comps-intent-tgi-endpoint >> ${LOG_PATH}/tgi-endpoint.log
}

function stop_docker() {
Expand Down

0 comments on commit 4c0f527

Please sign in to comment.