From ed8dbaac4713459f2be2172bcd8222ddb763e935 Mon Sep 17 00:00:00 2001
From: ZePan110
Date: Thu, 28 Nov 2024 13:36:14 +0800
Subject: [PATCH] Revert "WA for the issue of vllm Dockerfile.cpu build failure (#1195)" (#1206)

---
 .github/workflows/_example-workflow.yml               |  2 +-
 ChatQnA/docker_image_build/build.yaml                 | 12 ++++++------
 ..._vllm_on_xeon.sh => test_compose_vllm_on_xeon.sh}  |  0
 3 files changed, 7 insertions(+), 7 deletions(-)
 rename ChatQnA/tests/{_test_compose_vllm_on_xeon.sh => test_compose_vllm_on_xeon.sh} (100%)

diff --git a/.github/workflows/_example-workflow.yml b/.github/workflows/_example-workflow.yml
index b05d67eed..a86ac2592 100644
--- a/.github/workflows/_example-workflow.yml
+++ b/.github/workflows/_example-workflow.yml
@@ -75,7 +75,7 @@ jobs:
           docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
           if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
             git clone https://github.com/vllm-project/vllm.git
-            cd vllm && git checkout 446c780 && cd ../
+            cd vllm && git rev-parse HEAD && cd ../
           fi
           if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
             git clone https://github.com/HabanaAI/vllm-fork.git
diff --git a/ChatQnA/docker_image_build/build.yaml b/ChatQnA/docker_image_build/build.yaml
index bb33c57ba..a8fa36bc3 100644
--- a/ChatQnA/docker_image_build/build.yaml
+++ b/ChatQnA/docker_image_build/build.yaml
@@ -113,12 +113,12 @@ services:
       dockerfile: comps/guardrails/llama_guard/langchain/Dockerfile
     extends: chatqna
     image: ${REGISTRY:-opea}/guardrails-tgi:${TAG:-latest}
-  # vllm:
-  #   build:
-  #     context: vllm
-  #     dockerfile: Dockerfile.cpu
-  #   extends: chatqna
-  #   image: ${REGISTRY:-opea}/vllm:${TAG:-latest}
+  vllm:
+    build:
+      context: vllm
+      dockerfile: Dockerfile.cpu
+    extends: chatqna
+    image: ${REGISTRY:-opea}/vllm:${TAG:-latest}
   vllm-gaudi:
     build:
       context: vllm-fork
diff --git a/ChatQnA/tests/_test_compose_vllm_on_xeon.sh b/ChatQnA/tests/test_compose_vllm_on_xeon.sh
similarity index 100%
rename from ChatQnA/tests/_test_compose_vllm_on_xeon.sh
rename to ChatQnA/tests/test_compose_vllm_on_xeon.sh