From 618f45bab12cecc0689db2c5b6c3f62a600737cd Mon Sep 17 00:00:00 2001
From: lvliang-intel
Date: Thu, 7 Nov 2024 11:28:48 +0800
Subject: [PATCH] Upgrade habana docker version to 1.18.0 (#854)

* Upgrade habana docker version to 1.18.0

Signed-off-by: lvliang-intel

* fix issues

Signed-off-by: lvliang-intel

* fix ci issue

Signed-off-by: lvliang-intel

---------

Signed-off-by: lvliang-intel
---
 .github/workflows/scripts/freeze_images.sh                  | 3 +--
 comps/animation/wav2lip/dependency/Dockerfile.intel_hpu     | 2 +-
 comps/asr/whisper/dependency/Dockerfile.intel_hpu           | 3 +--
 .../embeddings/multimodal/bridgetower/Dockerfile.intel_hpu  | 2 +-
 comps/finetuning/Dockerfile.intel_hpu                       | 3 +--
 comps/image2image/Dockerfile.intel_hpu                      | 3 +--
 comps/image2video/dependency/Dockerfile.intel_hpu           | 3 +--
 comps/llms/text-generation/native/langchain/Dockerfile      | 3 +--
 comps/llms/text-generation/native/llama_index/Dockerfile    | 3 +--
 .../vllm/langchain/dependency/Dockerfile.intel_hpu          | 3 +--
 .../vllm/llama_index/dependency/Dockerfile.intel_hpu        | 4 +---
 comps/llms/text-generation/vllm/ray/dependency/Dockerfile   | 6 +++---
 comps/lvms/llama-vision/Dockerfile                          | 2 +-
 comps/lvms/llama-vision/Dockerfile_guard                    | 2 +-
 comps/lvms/llama-vision/Dockerfile_tp                       | 2 +-
 comps/lvms/llava/dependency/Dockerfile.intel_hpu            | 3 +--
 comps/text2image/Dockerfile.intel_hpu                       | 3 +--
 comps/tts/speecht5/dependency/Dockerfile.intel_hpu          | 3 +--
 .../llms/test_llms_text-generation_vllm_ray_on_intel_hpu.sh | 4 ++--
 19 files changed, 22 insertions(+), 35 deletions(-)

diff --git a/.github/workflows/scripts/freeze_images.sh b/.github/workflows/scripts/freeze_images.sh
index 27743fd90..51c2341f2 100644
--- a/.github/workflows/scripts/freeze_images.sh
+++ b/.github/workflows/scripts/freeze_images.sh
@@ -5,8 +5,7 @@

 declare -A dict
 dict["langchain/langchain"]="docker://docker.io/langchain/langchain"
-# dict["vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2"]="docker://vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2"
-dict["opea/habanalabs:1.16.1-pytorch-installer-2.2.2"]="docker://docker.io/opea/habanalabs:1.16.1-pytorch-installer-2.2.2"
+dict["vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0"]="docker://vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0"

 function get_latest_version() {
     repo_image=$1
diff --git a/comps/animation/wav2lip/dependency/Dockerfile.intel_hpu b/comps/animation/wav2lip/dependency/Dockerfile.intel_hpu
index e7fa9414d..8ac0666f0 100644
--- a/comps/animation/wav2lip/dependency/Dockerfile.intel_hpu
+++ b/comps/animation/wav2lip/dependency/Dockerfile.intel_hpu
@@ -1,6 +1,6 @@
 # Use a base image
 # FROM python:3.11-slim
-FROM vault.habana.ai/gaudi-docker/1.16.2/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest AS hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 # Set environment variables
 ENV LANG=en_US.UTF-8
diff --git a/comps/asr/whisper/dependency/Dockerfile.intel_hpu b/comps/asr/whisper/dependency/Dockerfile.intel_hpu
index c7620f965..52f01f299 100644
--- a/comps/asr/whisper/dependency/Dockerfile.intel_hpu
+++ b/comps/asr/whisper/dependency/Dockerfile.intel_hpu
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
diff --git a/comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu b/comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu
index b0326346f..7363b49d7 100644
--- a/comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu
+++ b/comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu
@@ -2,7 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest AS hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/comps/finetuning/Dockerfile.intel_hpu b/comps/finetuning/Dockerfile.intel_hpu
index 8c1484c62..ee4b6608b 100644
--- a/comps/finetuning/Dockerfile.intel_hpu
+++ b/comps/finetuning/Dockerfile.intel_hpu
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # Use the same python version with ray
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 ENV DEVICE="hpu"

diff --git a/comps/image2image/Dockerfile.intel_hpu b/comps/image2image/Dockerfile.intel_hpu
index a47d9dfb0..78b60b868 100644
--- a/comps/image2image/Dockerfile.intel_hpu
+++ b/comps/image2image/Dockerfile.intel_hpu
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/comps/image2video/dependency/Dockerfile.intel_hpu b/comps/image2video/dependency/Dockerfile.intel_hpu
index 050a8c391..272a23fb8 100644
--- a/comps/image2video/dependency/Dockerfile.intel_hpu
+++ b/comps/image2video/dependency/Dockerfile.intel_hpu
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/comps/llms/text-generation/native/langchain/Dockerfile b/comps/llms/text-generation/native/langchain/Dockerfile
index 45c8569bf..a3f5d9b24 100644
--- a/comps/llms/text-generation/native/langchain/Dockerfile
+++ b/comps/llms/text-generation/native/langchain/Dockerfile
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 ENV LANG=en_US.UTF-8
 ARG REPO=https://github.com/huggingface/optimum-habana.git
diff --git a/comps/llms/text-generation/native/llama_index/Dockerfile b/comps/llms/text-generation/native/llama_index/Dockerfile
index fb9ec2f73..e9f4e37f2 100644
--- a/comps/llms/text-generation/native/llama_index/Dockerfile
+++ b/comps/llms/text-generation/native/llama_index/Dockerfile
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 ENV LANG=en_US.UTF-8
 ARG REPO=https://github.com/huggingface/optimum-habana.git
diff --git a/comps/llms/text-generation/vllm/langchain/dependency/Dockerfile.intel_hpu b/comps/llms/text-generation/vllm/langchain/dependency/Dockerfile.intel_hpu
index e636e7316..f3703e4e7 100644
--- a/comps/llms/text-generation/vllm/langchain/dependency/Dockerfile.intel_hpu
+++ b/comps/llms/text-generation/vllm/langchain/dependency/Dockerfile.intel_hpu
@@ -1,8 +1,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0

-#FROM vault.habana.ai/gaudi-docker/1.17.0/ubuntu22.04/habanalabs/pytorch-installer-2.3.1:latest
-FROM vault.habana.ai/gaudi-docker/1.16.2/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 RUN git clone https://github.com/HabanaAI/vllm-fork.git /workspace/vllm

diff --git a/comps/llms/text-generation/vllm/llama_index/dependency/Dockerfile.intel_hpu b/comps/llms/text-generation/vllm/llama_index/dependency/Dockerfile.intel_hpu
index 9f7577ae2..8166f471e 100644
--- a/comps/llms/text-generation/vllm/llama_index/dependency/Dockerfile.intel_hpu
+++ b/comps/llms/text-generation/vllm/llama_index/dependency/Dockerfile.intel_hpu
@@ -1,9 +1,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0

-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
-
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/comps/llms/text-generation/vllm/ray/dependency/Dockerfile b/comps/llms/text-generation/vllm/ray/dependency/Dockerfile
index 684f171bb..6746f9da0 100644
--- a/comps/llms/text-generation/vllm/ray/dependency/Dockerfile
+++ b/comps/llms/text-generation/vllm/ray/dependency/Dockerfile
@@ -1,8 +1,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0

-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 ENV LANG=en_US.UTF-8

@@ -16,7 +15,8 @@ RUN pip install --no-cache-dir --upgrade-strategy eager optimum[habana] && \
 RUN pip install --no-cache-dir -v git+https://github.com/HabanaAI/vllm-fork.git@cf6952d
 RUN pip install --no-cache-dir "ray>=2.10" "ray[serve,tune]>=2.10"

-RUN sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config && \
+RUN ssh-keygen -A && \
+    sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config && \
     service ssh restart

 ENV PYTHONPATH=$PYTHONPATH:/root:/home/user/vllm/ray
diff --git a/comps/lvms/llama-vision/Dockerfile b/comps/lvms/llama-vision/Dockerfile
index a43b0f9e9..b68a796e3 100644
--- a/comps/lvms/llama-vision/Dockerfile
+++ b/comps/lvms/llama-vision/Dockerfile
@@ -2,7 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-FROM vault.habana.ai/gaudi-docker/1.17.1/ubuntu22.04/habanalabs/pytorch-installer-2.3.1:latest as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 ENV LANG=en_US.UTF-8

diff --git a/comps/lvms/llama-vision/Dockerfile_guard b/comps/lvms/llama-vision/Dockerfile_guard
index cfc087e1e..61a613ed5 100644
--- a/comps/lvms/llama-vision/Dockerfile_guard
+++ b/comps/lvms/llama-vision/Dockerfile_guard
@@ -2,7 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-FROM vault.habana.ai/gaudi-docker/1.17.1/ubuntu22.04/habanalabs/pytorch-installer-2.3.1:latest as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 ENV LANG=en_US.UTF-8

diff --git a/comps/lvms/llama-vision/Dockerfile_tp b/comps/lvms/llama-vision/Dockerfile_tp
index 4987bc63b..6c046b770 100644
--- a/comps/lvms/llama-vision/Dockerfile_tp
+++ b/comps/lvms/llama-vision/Dockerfile_tp
@@ -2,7 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-FROM vault.habana.ai/gaudi-docker/1.17.1/ubuntu22.04/habanalabs/pytorch-installer-2.3.1:latest as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu

 ENV LANG=en_US.UTF-8

diff --git a/comps/lvms/llava/dependency/Dockerfile.intel_hpu b/comps/lvms/llava/dependency/Dockerfile.intel_hpu
index 1837fb8ee..e4da185f4 100644
--- a/comps/lvms/llava/dependency/Dockerfile.intel_hpu
+++ b/comps/lvms/llava/dependency/Dockerfile.intel_hpu
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/comps/text2image/Dockerfile.intel_hpu b/comps/text2image/Dockerfile.intel_hpu
index ffa228cf8..798a56aac 100644
--- a/comps/text2image/Dockerfile.intel_hpu
+++ b/comps/text2image/Dockerfile.intel_hpu
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/comps/tts/speecht5/dependency/Dockerfile.intel_hpu b/comps/tts/speecht5/dependency/Dockerfile.intel_hpu
index 2e4b5c3f3..6ac7418da 100644
--- a/comps/tts/speecht5/dependency/Dockerfile.intel_hpu
+++ b/comps/tts/speecht5/dependency/Dockerfile.intel_hpu
@@ -2,8 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0

 # HABANA environment
-# FROM vault.habana.ai/gaudi-docker/1.16.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.2:latest as hpu
-FROM opea/habanalabs:1.16.1-pytorch-installer-2.2.2 as hpu
+FROM vault.habana.ai/gaudi-docker/1.18.0/ubuntu22.04/habanalabs/pytorch-installer-2.4.0 AS hpu
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/tests/llms/test_llms_text-generation_vllm_ray_on_intel_hpu.sh b/tests/llms/test_llms_text-generation_vllm_ray_on_intel_hpu.sh
index 8f9dbec64..d55e8f6a2 100644
--- a/tests/llms/test_llms_text-generation_vllm_ray_on_intel_hpu.sh
+++ b/tests/llms/test_llms_text-generation_vllm_ray_on_intel_hpu.sh
@@ -35,7 +35,7 @@ function build_docker_images() {
 }

 function start_service() {
-    export LLM_MODEL="facebook/opt-125m"
+    export LLM_MODEL="Intel/neural-chat-7b-v3-3"
     port_number=5031
     docker run -d --rm \
         --name="test-comps-vllm-ray-service" \
@@ -78,7 +78,7 @@ function validate_microservice() {
     port_number=5031
     result=$(http_proxy="" curl http://${ip_address}:$port_number/v1/chat/completions \
         -H "Content-Type: application/json" \
-        -d '{"model": "facebook/opt-125m", "messages": [{"role": "user", "content": "How are you?"}]}')
+        -d '{"model": "Intel/neural-chat-7b-v3-3", "messages": [{"role": "user", "content": "How are you?"}]}')
     if [[ $result == *"message"* ]]; then
         echo "Result correct."
     else