From b541fd8bdb277b523fa13b247a274509c7dfebe3 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 30 Aug 2024 17:44:08 +0800 Subject: [PATCH] update image build yaml (#529) Signed-off-by: chensuyue Signed-off-by: zepan --- .github/workflows/_comps-workflow.yml | 79 +++++-------------- .../docker/compose/agent-compose-cd.yaml | 9 +++ .../compose/chathistory-compose-cd.yaml | 9 +++ .../docker/compose/dataprep-compose-cd.yaml | 25 ++++++ .../docker/compose/dataprep-compose.yaml | 8 -- .../docker/compose/embeddings-compose-cd.yaml | 16 ++++ .../docker/compose/guardrails-compose-cd.yaml | 8 ++ .../docker/compose/guardrails-compose.yaml | 4 - .../docker/compose/llms-compose-cd.yaml | 8 ++ .../docker/compose/lvms-compose-cd.yaml | 23 ++++++ .../docker/compose/nginx-compose-cd.yaml | 9 +++ .../compose/prompt_registry-compose-cd.yaml | 9 +++ .../docker/compose/reranks-compose-cd.yaml | 16 ++++ .../docker/compose/retrievers-compose.yaml | 16 ++++ .github/workflows/manual-comps-test.yml | 21 ++++- comps/nginx/docker/Dockerfile | 4 +- comps/retrievers/llamaindex/docker/Dockerfile | 1 + tests/test_embeddings_llama_index.sh | 16 ++-- tests/test_nginx.sh | 4 +- 19 files changed, 201 insertions(+), 84 deletions(-) create mode 100644 .github/workflows/docker/compose/agent-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/chathistory-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/dataprep-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/embeddings-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/guardrails-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/llms-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/lvms-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/nginx-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/prompt_registry-compose-cd.yaml create mode 100644 .github/workflows/docker/compose/reranks-compose-cd.yaml diff --git 
a/.github/workflows/_comps-workflow.yml b/.github/workflows/_comps-workflow.yml index 58cb02dba..c2a68b0ca 100644 --- a/.github/workflows/_comps-workflow.yml +++ b/.github/workflows/_comps-workflow.yml @@ -20,10 +20,17 @@ on: default: true required: false type: boolean - # scan: - # default: true - # required: false - # type: boolean + test: + default: true + description: "Test comps with docker compose" + required: false + type: boolean + mode: + default: "CD" + description: "Whether the test range is CI or CD" + required: false + type: string + jobs: #################################################################################################### # Image Build @@ -39,82 +46,38 @@ jobs: uses: actions/checkout@v4 - name: Clone required Repo + id: get-yaml-path run: | cd ${{ github.workspace }}/.github/workflows/docker/compose # service=$(echo ${{ inputs.service }} | cut -d'_' -f1) - docker_compose_yml=${{ github.workspace }}/.github/workflows/docker/compose/${{ inputs.service }}-compose.yaml - echo ${docker_compose_yml} + if [[ "${{ inputs.mode }}" == "CD" ]]; then + docker_compose_yml=${{ github.workspace }}/.github/workflows/docker/compose/${{ inputs.service }}-compose-cd.yaml + else + docker_compose_yml=${{ github.workspace }}/.github/workflows/docker/compose/${{ inputs.service }}-compose.yaml + fi + echo "docker_compose_path=${docker_compose_yml}" >> $GITHUB_OUTPUT if [[ $(grep -c "llava-tgi:" ${docker_compose_yml}) != 0 ]]; then git clone https://github.com/yuanwu2017/tgi-gaudi.git && cd tgi-gaudi && git checkout v2.0.4 fi if [[ $(grep -c "vllm-openvino:" ${docker_compose_yml}) != 0 ]]; then git clone https://github.com/vllm-project/vllm.git vllm-openvino fi - # echo "service=$service" >> $GITHUB_ENV - name: Build Image if: ${{ fromJSON(inputs.build) }} uses: opea-project/validation/actions/image-build@main with: - work_dir: ${{ github.workspace }}/ - docker_compose_path: ${{ github.workspace }}/.github/workflows/docker/compose/${{ 
inputs.service }}-compose.yaml + work_dir: ${{ github.workspace }} + docker_compose_path: ${{ steps.get-yaml-path.outputs.docker_compose_path }} registry: ${OPEA_IMAGE_REPO}opea tag: ${{ inputs.tag }} - # #################################################################################################### - # # Trivy Scan - # #################################################################################################### - # get-image-list: - # needs: [build-images] - # if: ${{ fromJSON(inputs.scan) && inputs.node == 'gaudi' }} - # runs-on: ubuntu-latest - # outputs: - # matrix: ${{ steps.scan-matrix.outputs.matrix }} - # steps: - # - name: Checkout out Repo - # uses: actions/checkout@v4 - - # - name: Set Matrix - # id: scan-matrix - # run: | - # pip install yq - # compose_path=${{ github.workspace }}/${{ inputs.example }}/docker/docker_build_compose.yaml - # echo "matrix=$(cat ${compose_path} | yq -r '.[]' | jq 'keys' | jq -c '.')" >> $GITHUB_OUTPUT - - # scan-images: - # needs: [get-image-list, build-images] - # if: ${{ fromJSON(inputs.scan) && inputs.node == 'gaudi'}} - # runs-on: "docker-build-${{ inputs.node }}" - # strategy: - # matrix: - # image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }} - # fail-fast: false - # steps: - # - name: Pull Image - # run: | - # docker pull ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }} - # echo "OPEA_IMAGE_REPO=${OPEA_IMAGE_REPO}" >> $GITHUB_ENV - - # - name: Scan Container - # uses: opea-project/validation/actions/trivy-scan@main - # with: - # image-ref: ${{ env.OPEA_IMAGE_REPO }}opea/${{ matrix.image }}:${{ inputs.tag }} - # output: ${{ matrix.image }}-scan.txt - - # - name: Cleanup - # if: always() - # run: docker rmi -f ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }} - - # - uses: actions/upload-artifact@v4.3.4 - # with: - # name: ${{ matrix.image }}-scan - # path: ${{ matrix.image }}-scan.txt - # overwrite: true 
#################################################################################################### # Docker Compose Test #################################################################################################### test-service-compose: needs: [build-images] + if: ${{ fromJSON(inputs.test) }} uses: ./.github/workflows/_run-docker-compose.yml with: tag: ${{ inputs.tag }} diff --git a/.github/workflows/docker/compose/agent-compose-cd.yaml b/.github/workflows/docker/compose/agent-compose-cd.yaml new file mode 100644 index 000000000..a285ecc34 --- /dev/null +++ b/.github/workflows/docker/compose/agent-compose-cd.yaml @@ -0,0 +1,9 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# this file should be run in the root of the repo +services: + comps-agent-langchain: + build: + dockerfile: comps/agent/langchain/docker/Dockerfile + image: ${REGISTRY}opea/comps-agent-langchain:${TAG:-latest} diff --git a/.github/workflows/docker/compose/chathistory-compose-cd.yaml b/.github/workflows/docker/compose/chathistory-compose-cd.yaml new file mode 100644 index 000000000..f8930cde8 --- /dev/null +++ b/.github/workflows/docker/compose/chathistory-compose-cd.yaml @@ -0,0 +1,9 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# this file should be run in the root of the repo +services: + chathistory-mongo-server: + build: + dockerfile: comps/chathistory/mongo/docker/Dockerfile + image: ${REGISTRY}opea/chathistory-mongo-server:${TAG:-latest} diff --git a/.github/workflows/docker/compose/dataprep-compose-cd.yaml b/.github/workflows/docker/compose/dataprep-compose-cd.yaml new file mode 100644 index 000000000..19f4c063d --- /dev/null +++ b/.github/workflows/docker/compose/dataprep-compose-cd.yaml @@ -0,0 +1,25 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# others: dataprep-redis-llama-index,dataprep-on-ray-redis +services: + dataprep-redis-llama-index: + build: + 
dockerfile: comps/dataprep/redis/llama_index/docker/Dockerfile + image: ${REGISTRY}opea/dataprep-redis-llama-index:${TAG:-latest} + dataprep-on-ray-redis: + build: + dockerfile: comps/dataprep/redis/langchain_ray/docker/Dockerfile + image: ${REGISTRY}opea/dataprep-on-ray-redis:${TAG:-latest} + dataprep-milvus: + build: + dockerfile: comps/dataprep/milvus/docker/Dockerfile + image: ${REGISTRY}opea/dataprep-milvus:${TAG:-latest} + dataprep-pgvector: + build: + dockerfile: comps/dataprep/pgvector/langchain/docker/Dockerfile + image: ${REGISTRY}opea/dataprep-pgvector:${TAG:-latest} + dataprep-pinecone: + build: + dockerfile: comps/dataprep/pinecone/docker/Dockerfile + image: ${REGISTRY}opea/dataprep-pinecone:${TAG:-latest} diff --git a/.github/workflows/docker/compose/dataprep-compose.yaml b/.github/workflows/docker/compose/dataprep-compose.yaml index 5cca84cb4..1671235f4 100644 --- a/.github/workflows/docker/compose/dataprep-compose.yaml +++ b/.github/workflows/docker/compose/dataprep-compose.yaml @@ -13,11 +13,3 @@ services: build: dockerfile: comps/dataprep/qdrant/docker/Dockerfile image: ${REGISTRY}opea/dataprep-qdrant:${TAG:-latest} - dataprep-redis-llama-index: - build: - dockerfile: comps/dataprep/redis/llama_index/docker/Dockerfile - image: ${REGISTRY}opea/dataprep-redis-llama-index:${TAG:-latest} - dataprep-on-ray-redis: - build: - dockerfile: comps/dataprep/redis/langchain_ray/docker/Dockerfile - image: ${REGISTRY}opea/dataprep-on-ray-redis:${TAG:-latest} diff --git a/.github/workflows/docker/compose/embeddings-compose-cd.yaml b/.github/workflows/docker/compose/embeddings-compose-cd.yaml new file mode 100644 index 000000000..3d08a1b53 --- /dev/null +++ b/.github/workflows/docker/compose/embeddings-compose-cd.yaml @@ -0,0 +1,16 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +services: + embedding-langchain-mosec-endpoint: + build: + dockerfile: comps/embeddings/langchain-mosec/mosec-docker/Dockerfile + image: 
${REGISTRY}opea/embedding-langchain-mosec-endpoint:${TAG:-latest} + embedding-langchain-mosec: + build: + dockerfile: comps/embeddings/langchain-mosec/docker/Dockerfile + image: ${REGISTRY}opea/embedding-langchain-mosec:${TAG:-latest} + embedding-tei-llama-index: + build: + dockerfile: comps/embeddings/llama_index/docker/Dockerfile + image: ${REGISTRY}opea/embedding-tei-llama-index:${TAG:-latest} diff --git a/.github/workflows/docker/compose/guardrails-compose-cd.yaml b/.github/workflows/docker/compose/guardrails-compose-cd.yaml new file mode 100644 index 000000000..e6365a99d --- /dev/null +++ b/.github/workflows/docker/compose/guardrails-compose-cd.yaml @@ -0,0 +1,8 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +services: + guardrails-pii-detection: + build: + dockerfile: comps/guardrails/pii_detection/docker/Dockerfile + image: ${REGISTRY}opea/guardrails-pii-detection:${TAG:-latest} diff --git a/.github/workflows/docker/compose/guardrails-compose.yaml b/.github/workflows/docker/compose/guardrails-compose.yaml index 30592aecb..eec433c25 100644 --- a/.github/workflows/docker/compose/guardrails-compose.yaml +++ b/.github/workflows/docker/compose/guardrails-compose.yaml @@ -9,7 +9,3 @@ services: build: dockerfile: comps/guardrails/llama_guard/docker/Dockerfile image: ${REGISTRY}opea/guardrails-tgi:${TAG:-latest} - guardrails-pii-detection: - build: - dockerfile: comps/guardrails/pii_detection/docker/Dockerfile - image: ${REGISTRY}opea/guardrails-pii-detection:${TAG:-latest} diff --git a/.github/workflows/docker/compose/llms-compose-cd.yaml b/.github/workflows/docker/compose/llms-compose-cd.yaml new file mode 100644 index 000000000..f60e0e921 --- /dev/null +++ b/.github/workflows/docker/compose/llms-compose-cd.yaml @@ -0,0 +1,8 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +services: + llm-native: + build: + dockerfile: comps/llms/text-generation/native/docker/Dockerfile + image: 
${REGISTRY}opea/llm-native:${TAG:-latest} diff --git a/.github/workflows/docker/compose/lvms-compose-cd.yaml b/.github/workflows/docker/compose/lvms-compose-cd.yaml new file mode 100644 index 000000000..fbdad3011 --- /dev/null +++ b/.github/workflows/docker/compose/lvms-compose-cd.yaml @@ -0,0 +1,23 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# this file should be run in the root of the repo +services: + lvm: + build: + dockerfile: comps/lvms/Dockerfile + image: ${REGISTRY}opea/lvm:${TAG:-latest} + # Xeon CPU + llava: + build: + dockerfile: comps/lvms/llava/Dockerfile + image: ${REGISTRY}opea/llava:${TAG:-latest} + # Gaudi2 HPU + llava_hpu: + build: + dockerfile: comps/lvms/llava/Dockerfile_hpu + image: ${REGISTRY}opea/llava_hpu:${TAG:-latest} + lvm-tgi: + build: + dockerfile: comps/lvms/Dockerfile_tgi + image: ${REGISTRY}opea/lvm-tgi:${TAG:-latest} diff --git a/.github/workflows/docker/compose/nginx-compose-cd.yaml b/.github/workflows/docker/compose/nginx-compose-cd.yaml new file mode 100644 index 000000000..e6cf05aa4 --- /dev/null +++ b/.github/workflows/docker/compose/nginx-compose-cd.yaml @@ -0,0 +1,9 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# this file should be run in the root of the repo +services: + nginx: + build: + dockerfile: comps/nginx/docker/Dockerfile + image: ${REGISTRY}opea/nginx:${TAG:-latest} diff --git a/.github/workflows/docker/compose/prompt_registry-compose-cd.yaml b/.github/workflows/docker/compose/prompt_registry-compose-cd.yaml new file mode 100644 index 000000000..52923a2f1 --- /dev/null +++ b/.github/workflows/docker/compose/prompt_registry-compose-cd.yaml @@ -0,0 +1,9 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# this file should be run in the root of the repo +services: + promptregistry-mongo-server: + build: + dockerfile: comps/prompt_registry/mongo/docker/Dockerfile + image: 
${REGISTRY}opea/promptregistry-mongo-server:${TAG:-latest} diff --git a/.github/workflows/docker/compose/reranks-compose-cd.yaml b/.github/workflows/docker/compose/reranks-compose-cd.yaml new file mode 100644 index 000000000..85339c8b8 --- /dev/null +++ b/.github/workflows/docker/compose/reranks-compose-cd.yaml @@ -0,0 +1,16 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +services: + reranking-fastrag: + build: + dockerfile: comps/reranks/fastrag/docker/Dockerfile + image: ${REGISTRY}opea/reranking-fastrag:${TAG:-latest} + reranking-langchain-mosec-endpoint: + build: + dockerfile: comps/reranks/langchain-mosec/mosec-docker/Dockerfile + image: ${REGISTRY}opea/reranking-langchain-mosec-endpoint:${TAG:-latest} + reranking-langchain-mosec: + build: + dockerfile: comps/reranks/langchain-mosec/docker/Dockerfile + image: ${REGISTRY}opea/reranking-langchain-mosec:${TAG:-latest} diff --git a/.github/workflows/docker/compose/retrievers-compose.yaml b/.github/workflows/docker/compose/retrievers-compose.yaml index d9de4b27f..289871ed5 100644 --- a/.github/workflows/docker/compose/retrievers-compose.yaml +++ b/.github/workflows/docker/compose/retrievers-compose.yaml @@ -11,3 +11,19 @@ services: build: dockerfile: comps/retrievers/haystack/qdrant/docker/Dockerfile image: ${REGISTRY}opea/retriever-qdrant:${TAG:-latest} + retriever-pgvector: + build: + dockerfile: comps/retrievers/langchain/pgvector/docker/Dockerfile + image: ${REGISTRY}opea/retriever-pgvector:${TAG:-latest} + retriever-pinecone: + build: + dockerfile: comps/retrievers/langchain/pinecone/docker/Dockerfile + image: ${REGISTRY}opea/retriever-pinecone:${TAG:-latest} + retriever-milvus: + build: + dockerfile: comps/retrievers/langchain/milvus/docker/Dockerfile + image: ${REGISTRY}opea/retriever-milvus:${TAG:-latest} + retriever-redis-llamaindex: + build: + dockerfile: comps/retrievers/llamaindex/docker/Dockerfile + image: ${REGISTRY}opea/retriever-redis-llamaindex:${TAG:-latest} diff
--git a/.github/workflows/manual-comps-test.yml b/.github/workflows/manual-comps-test.yml index 010cd0d7a..bde3bf9fa 100644 --- a/.github/workflows/manual-comps-test.yml +++ b/.github/workflows/manual-comps-test.yml @@ -7,7 +7,7 @@ on: inputs: services: default: "asr" - description: "List of services to test [agent_langchain,asr,chathistory_mongo,dataprep_milvus...]" #,embeddings,guardrails,knowledgegraphs,llms,lvms,prompt_registry,ragas,reranks,retrievers,tts,vectorstores,web_retrievers]" + description: "List of services to test [agent,asr,chathistory,dataprep,embeddings,guardrails,llms,lvms,nginx,prompt_registry,reranks,retrievers,tts,web_retrievers]" required: true type: string build: @@ -15,6 +15,21 @@ on: description: "Build test required images for Comps" required: false type: boolean + test: + default: true + description: "Test comps with docker compose" + required: false + type: boolean + tag: + default: "comps" + description: "Tag to apply to images" + required: true + type: string + mode: + default: "CD" + description: "Whether the test range is CI or CD" + required: false + type: string permissions: read-all @@ -40,6 +55,8 @@ jobs: uses: ./.github/workflows/_comps-workflow.yml with: service: ${{ matrix.service }} - tag: "comps" + tag: ${{ inputs.tag }} node: gaudi + mode: ${{ inputs.mode }} + test: ${{ inputs.test }} secrets: inherit diff --git a/comps/nginx/docker/Dockerfile b/comps/nginx/docker/Dockerfile index 6816fb0a5..447d3946a 100644 --- a/comps/nginx/docker/Dockerfile +++ b/comps/nginx/docker/Dockerfile @@ -6,7 +6,7 @@ FROM nginx:alpine RUN apk add --no-cache gettext -COPY nginx.conf.template /etc/nginx/nginx.conf.template +COPY comps/nginx/docker/nginx.conf.template /etc/nginx/nginx.conf.template ENV FRONTEND_SERVICE_IP=localhost ENV FRONTEND_SERVICE_PORT=5173 @@ -14,7 +14,7 @@ ENV BACKEND_SERVICE_NAME=chatqna ENV BACKEND_SERVICE_IP=localhost ENV BACKEND_SERVICE_PORT=8888 -COPY start-nginx.sh /usr/local/bin/start-nginx.sh +COPY 
comps/nginx/docker/start-nginx.sh /usr/local/bin/start-nginx.sh RUN chmod +x /usr/local/bin/start-nginx.sh CMD ["/usr/local/bin/start-nginx.sh"] diff --git a/comps/retrievers/llamaindex/docker/Dockerfile b/comps/retrievers/llamaindex/docker/Dockerfile index fec155b05..4b022718a 100644 --- a/comps/retrievers/llamaindex/docker/Dockerfile +++ b/comps/retrievers/llamaindex/docker/Dockerfile @@ -4,6 +4,7 @@ FROM ubuntu:22.04 RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ + python3-pip \ libgl1-mesa-glx \ libjemalloc-dev diff --git a/tests/test_embeddings_llama_index.sh b/tests/test_embeddings_llama_index.sh index 048726044..81eac442b 100644 --- a/tests/test_embeddings_llama_index.sh +++ b/tests/test_embeddings_llama_index.sh @@ -11,12 +11,12 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { cd $WORKPATH echo $(pwd) - docker build --no-cache -t opea/embedding-tei-llamaindex:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/llama_index/docker/Dockerfile . + docker build --no-cache -t opea/embedding-tei-llama-index:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/llama_index/docker/Dockerfile . if [ $? 
-ne 0 ]; then - echo "opea/embedding-tei-llamaindex built fail" + echo "opea/embedding-tei-llama-index built fail" exit 1 else - echo "opea/embedding-tei-llamaindex built successful" + echo "opea/embedding-tei-llama-index built successful" fi } @@ -24,17 +24,17 @@ function start_service() { tei_endpoint=5001 model="BAAI/bge-large-en-v1.5" revision="refs/pr/5" - docker run -d --name="test-comps-embedding-tei-llamaindex-endpoint" -p $tei_endpoint:80 -v ./data:/data -e http_proxy=$http_proxy -e https_proxy=$https_proxy --pull always ghcr.io/huggingface/text-embeddings-inference:cpu-1.2 --model-id $model --revision $revision + docker run -d --name="test-comps-embedding-tei-llama-index-endpoint" -p $tei_endpoint:80 -v ./data:/data -e http_proxy=$http_proxy -e https_proxy=$https_proxy --pull always ghcr.io/huggingface/text-embeddings-inference:cpu-1.2 --model-id $model --revision $revision export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:${tei_endpoint}" tei_service_port=5010 - docker run -d --name="test-comps-embedding-tei-llamaindex-server" -e http_proxy=$http_proxy -e https_proxy=$https_proxy -p ${tei_service_port}:6000 --ipc=host -e TEI_EMBEDDING_ENDPOINT=$TEI_EMBEDDING_ENDPOINT opea/embedding-tei-llamaindex:comps + docker run -d --name="test-comps-embedding-tei-llama-index-server" -e http_proxy=$http_proxy -e https_proxy=$https_proxy -p ${tei_service_port}:6000 --ipc=host -e TEI_EMBEDDING_ENDPOINT=$TEI_EMBEDDING_ENDPOINT opea/embedding-tei-llama-index:comps sleep 3m } function validate_microservice() { tei_service_port=5010 URL="http://${ip_address}:$tei_service_port/v1/embeddings" - docker logs test-comps-embedding-tei-llamaindex-server >> ${LOG_PATH}/embedding.log + docker logs test-comps-embedding-tei-llama-index-server >> ${LOG_PATH}/embedding.log HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -d '{"text":"What is Deep Learning?"}' -H 'Content-Type: application/json' "$URL") if [ "$HTTP_STATUS" -eq 200 ]; then echo "[ embedding - llama_index ] 
HTTP status is 200. Checking content..." @@ -44,12 +44,12 @@ function validate_microservice() { echo "[ embedding - llama_index ] Content is as expected." else echo "[ embedding - llama_index ] Content does not match the expected result: $CONTENT" - docker logs test-comps-embedding-tei-llamaindex-server >> ${LOG_PATH}/embedding.log + docker logs test-comps-embedding-tei-llama-index-server >> ${LOG_PATH}/embedding.log exit 1 fi else echo "[ embedding - llama_index ] HTTP status is not 200. Received status was $HTTP_STATUS" - docker logs test-comps-embedding-tei-llamaindex-server >> ${LOG_PATH}/embedding.log + docker logs test-comps-embedding-tei-llama-index-server >> ${LOG_PATH}/embedding.log exit 1 fi } diff --git a/tests/test_nginx.sh b/tests/test_nginx.sh index 3675a5753..626c6974a 100644 --- a/tests/test_nginx.sh +++ b/tests/test_nginx.sh @@ -9,8 +9,8 @@ LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { - cd $WORKPATH/comps/nginx/docker - docker build --no-cache --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -t opea/nginx:comps -f ./Dockerfile . + cd $WORKPATH + docker build --no-cache --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -t opea/nginx:comps -f comps/nginx/docker/Dockerfile . if [ $? -ne 0 ]; then echo "opea/nginx built fail" exit 1