diff --git a/comps/intent_detection/langchain/intent_detection.py b/comps/intent_detection/langchain/intent_detection.py
index 17224eb38..17b9bd4a7 100644
--- a/comps/intent_detection/langchain/intent_detection.py
+++ b/comps/intent_detection/langchain/intent_detection.py
@@ -31,7 +31,8 @@ def llm_generate(input: LLMParamsDoc):
         timeout=600,
     )
 
-    prompt = PromptTemplate(template=IntentTemplate.generate_intent_template, input_variables=["query"])
+    prompt_template = 'Please identify the intent of the user query. You may only respond with "chitchat" or "QA" without explanations or engaging in conversation.### User Query: {query}, ### Response: '
+    prompt = PromptTemplate(template=prompt_template, input_variables=["query"])
 
     llm_chain = LLMChain(prompt=prompt, llm=llm)
 
diff --git a/comps/intent_detection/langchain/requirements.txt b/comps/intent_detection/langchain/requirements.txt
index 55cf47ae7..a84c3734a 100644
--- a/comps/intent_detection/langchain/requirements.txt
+++ b/comps/intent_detection/langchain/requirements.txt
@@ -7,3 +7,4 @@ opentelemetry-exporter-otlp
 opentelemetry-sdk
 prometheus-fastapi-instrumentator
 shortuuid
+uvicorn
diff --git a/comps/intent_detection/langchain/template.py b/comps/intent_detection/langchain/template.py
deleted file mode 100644
index 1a425ce43..000000000
--- a/comps/intent_detection/langchain/template.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright (C) 2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-
-class IntentTemplate:
-    def generate_intent_template(query):
-        return f"""Please identify the intent of the user query. You may only respond with "chitchat" or "QA" without explanations or engaging in conversation. 
-### User Query: {query}, ### Response: """
diff --git a/comps/lvms/tgi-llava/requirements.txt b/comps/lvms/tgi-llava/requirements.txt
index 556dfb0c1..be1c23a6d 100644
--- a/comps/lvms/tgi-llava/requirements.txt
+++ b/comps/lvms/tgi-llava/requirements.txt
@@ -2,6 +2,7 @@ datasets
 docarray[full]
 fastapi
 huggingface_hub
+langchain-core
 opentelemetry-api
 opentelemetry-exporter-otlp
 opentelemetry-sdk
diff --git a/tests/intent_detection/test_intent_detection_langchain.sh b/tests/intent_detection/test_intent_detection_langchain.sh
index 108545f99..1d6e0f061 100644
--- a/tests/intent_detection/test_intent_detection_langchain.sh
+++ b/tests/intent_detection/test_intent_detection_langchain.sh
@@ -7,9 +7,10 @@ set -xe
 WORKPATH=$(dirname "$PWD")
 LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')
+
 function build_docker_images() {
     cd $WORKPATH
-    docker build --no-cache -t opea/llm-tgi:latest -f comps/intent_detection/langchain/Dockerfile .
+    docker build --no-cache -t opea/intent-detection:comps -f comps/intent_detection/langchain/Dockerfile .
 }
 
 function start_service() {
@@ -22,8 +23,19 @@ function start_service() {
     export TGI_LLM_ENDPOINT="http://${ip_address}:${tgi_endpoint}"
     intent_port=5043
     unset http_proxy
-    docker run -d --name="test-comps-intent-server" -p ${intent_port}:9000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e TGI_LLM_ENDPOINT=$TGI_LLM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/llm-tgi:latest
-    sleep 5m
+    docker run -d --name="test-comps-intent-server" -p ${intent_port}:9000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e TGI_LLM_ENDPOINT=$TGI_LLM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/intent-detection:comps
+
+    # check whether tgi is fully ready
+    n=0
+    until [[ "$n" -ge 100 ]]; do
+        docker logs test-comps-intent-tgi-endpoint > ${LOG_PATH}/tgi.log
+        n=$((n+1))
+        if grep -q Connected ${LOG_PATH}/tgi.log; then
+            break
+        fi
+        sleep 5s
+    done
+    sleep 5s
 }
 
 function validate_microservice() {
@@ -33,11 +45,16 @@ function validate_microservice() {
     -d '{"query":"What is Deep Learning?","max_new_tokens":10,"top_k":1,"temperature":0.001,"streaming":false}' \
     -H 'Content-Type: application/json')
 
-    echo "==============="
-    echo $result
+    if [[ $result == *"QA"* ]]; then
+        echo $result
+        echo "Result correct."
+    else
+        echo "Result wrong. Received was $result"
+        docker logs test-comps-intent-server > ${LOG_PATH}/intent_detection.log
+        docker logs test-comps-intent-tgi-endpoint > ${LOG_PATH}/tgi.log
+        exit 1
+    fi
 
-    docker logs test-comps-intent-server >> ${LOG_PATH}/intent_detection.log
-    docker logs test-comps-intent-tgi-endpoint >> ${LOG_PATH}/tgi-endpoint.log
 }
 
 function stop_docker() {