From 1b6989797fa00b21ccc58f1a1593f7f5bd81917a Mon Sep 17 00:00:00 2001
From: lvliang-intel
Date: Thu, 12 Sep 2024 00:08:14 +0800
Subject: [PATCH] Add OpenAI client access OPEA microservice UT cases (#653)

* Add OpenAI access microservice UT cases

Signed-off-by: lvliang-intel

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* add the absolute path

Signed-off-by: lvliang-intel

* refactor code

Signed-off-by: lvliang-intel

* add openai dependency

Signed-off-by: lvliang-intel

* install openai in scripts

Signed-off-by: lvliang-intel

* fix ci issue

Signed-off-by: lvliang-intel

---------

Signed-off-by: lvliang-intel
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .../test_embeddings_tei_langchain.sh        | 12 ++++++
 tests/llms/test_llms_text-generation_tgi.sh | 19 +++++++--
 tests/utils/validate_svc_with_openai.py     | 46 ++++++++++++++++++++
 3 files changed, 74 insertions(+), 3 deletions(-)
 create mode 100644 tests/utils/validate_svc_with_openai.py

diff --git a/tests/embeddings/test_embeddings_tei_langchain.sh b/tests/embeddings/test_embeddings_tei_langchain.sh
index 9dab9ee902..cd957a2b8d 100644
--- a/tests/embeddings/test_embeddings_tei_langchain.sh
+++ b/tests/embeddings/test_embeddings_tei_langchain.sh
@@ -46,6 +46,16 @@ function validate_microservice() {
     fi
 }
 
+function validate_microservice_with_openai() {
+    tei_service_port=5001
+    python3 ${WORKPATH}/tests/utils/validate_svc_with_openai.py "$ip_address" "$tei_service_port" "embedding"
+    if [ $? -ne 0 ]; then
+        docker logs test-comps-embedding-tei-endpoint
+        docker logs test-comps-embedding-tei-server
+        exit 1
+    fi
+}
+
 function stop_docker() {
     cid=$(docker ps -aq --filter "name=test-comps-embedding-*")
     if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
@@ -59,6 +69,8 @@ function main() {
     start_service
 
     validate_microservice
+    pip install openai
+    validate_microservice_with_openai
 
     stop_docker
     echo y | docker system prune
diff --git a/tests/llms/test_llms_text-generation_tgi.sh b/tests/llms/test_llms_text-generation_tgi.sh
index d8b01e7426..293bbb292b 100644
--- a/tests/llms/test_llms_text-generation_tgi.sh
+++ b/tests/llms/test_llms_text-generation_tgi.sh
@@ -60,6 +60,16 @@ function validate_microservice() {
     fi
 }
 
+function validate_microservice_with_openai() {
+    llm_service_port=5005
+    python3 ${WORKPATH}/tests/utils/validate_svc_with_openai.py "$ip_address" "$llm_service_port" "llm"
+    if [ $? -ne 0 ]; then
+        docker logs test-comps-llm-tgi-endpoint >> ${LOG_PATH}/llm-tgi.log
+        docker logs test-comps-llm-tgi-server >> ${LOG_PATH}/llm-tgi-server.log
+        exit 1
+    fi
+}
+
 function stop_docker() {
     cid=$(docker ps -aq --filter "name=test-comps-llm-tgi*")
     if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
-z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi @@ -70,15 +80,18 @@ function main() { stop_docker build_docker_images + pip install openai + llm_models=( Intel/neural-chat-7b-v3-3 - meta-llama/Llama-2-7b-chat-hf - meta-llama/Meta-Llama-3-8B-Instruct - microsoft/Phi-3-mini-4k-instruct + # meta-llama/Llama-2-7b-chat-hf + # meta-llama/Meta-Llama-3-8B-Instruct + # microsoft/Phi-3-mini-4k-instruct ) for model in "${llm_models[@]}"; do start_service "${model}" validate_microservice + validate_microservice_with_openai stop_docker done diff --git a/tests/utils/validate_svc_with_openai.py b/tests/utils/validate_svc_with_openai.py new file mode 100644 index 0000000000..2d16f281b0 --- /dev/null +++ b/tests/utils/validate_svc_with_openai.py @@ -0,0 +1,42 @@ +#!/bin/bash +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import os +import sys + +import openai + + +def validate_svc(ip_address, service_port, service_type): + openai.api_key = os.getenv("OPENAI_API_KEY", "empty") + + endpoint = f"http://{ip_address}:{service_port}" + client = openai.OpenAI( + api_key=openai.api_key, + base_url=endpoint + "/v1", + ) + + if service_type == "llm": + response = client.chat.completions.create(model="tgi", messages="What is Deep Learning?", max_tokens=128) + elif service_type == "embedding": + response = client.embeddings.create(model="tei", input="What is Deep Learning?") + else: + print(f"Unknown service type: {service_type}") + exit(1) + result = response.choices[0].text.strip() if service_type == "llm" else response.data[0].embedding + if "Deep Learning is" in result if service_type == "llm" else result: + print("Result correct.") + else: + print(f"Result wrong. Received was {result}") + exit(1) + + +if __name__ == "__main__": + if len(sys.argv) != 4: + print("Usage: python3 validate_svc_with_openai.py ") + exit(1) + ip_address = sys.argv[1] + service_port = sys.argv[2] + service_type = sys.argv[3] + validate_svc(ip_address, service_port, service_type)