switch to using upstream 'tgi-gaudi' on HuggingFace (#616)
* switch to using upstream 'tgi-gaudi' on HuggingFace

Signed-off-by: Abolfazl Shahbazi <[email protected]>
ashahba authored Sep 6, 2024
1 parent a8a46bc commit 90cc44f
Showing 3 changed files with 1 addition and 19 deletions.
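
In effect, the CI and test flow no longer clone yuanwu2017/tgi-gaudi and build a local opea/llava-tgi:comps image; they run the Gaudi build of Text Generation Inference published on ghcr.io instead. A minimal sketch of the equivalent manual invocation, with the image tag, model, port mapping, and token limits taken from the updated test script below (the standalone docker pull is shown only for clarity; docker run would fetch the image on first use anyway):

    # Pull the upstream Habana Gaudi build of Text Generation Inference.
    docker pull ghcr.io/huggingface/tgi-gaudi:2.0.4

    # Serve the LLaVA-NeXT model on Gaudi with the same flags the test uses
    # (host port 5027 mapped to the container's port 80).
    docker run -d --name="test-comps-lvm-llava-tgi" \
        -p 5027:80 --runtime=habana --cap-add=sys_nice --ipc=host \
        -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true \
        -e SKIP_TOKENIZER_IN_TGI=true \
        -e HABANA_VISIBLE_DEVICES=all \
        -e OMPI_MCA_btl_vader_single_copy_mechanism=none \
        ghcr.io/huggingface/tgi-gaudi:2.0.4 \
        --model-id llava-hf/llava-v1.6-mistral-7b-hf \
        --max-input-tokens 4096 --max-total-tokens 8192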
3 changes: 0 additions & 3 deletions .github/workflows/_comps-workflow.yml
@@ -66,9 +66,6 @@ jobs:
           fi
           cd ${{ github.workspace }}
-          if [[ $(grep -c "llava-tgi:" ${docker_compose_yml}) != 0 ]]; then
-            git clone https://github.com/yuanwu2017/tgi-gaudi.git && cd tgi-gaudi && git checkout v2.0.4 && cd ../
-          fi
           if [[ $(grep -c "vllm-openvino:" ${docker_compose_yml}) != 0 ]]; then
             git clone https://github.com/vllm-project/vllm.git vllm-openvino
           fi
5 changes: 0 additions & 5 deletions .github/workflows/docker/compose/lvms-compose-cd.yaml
@@ -21,8 +21,3 @@ services:
     build:
       dockerfile: comps/lvms/Dockerfile_tgi
     image: ${REGISTRY:-opea}/lvm-tgi:${TAG:-latest}
-  llava-tgi:
-    build:
-      context: tgi-gaudi
-      dockerfile: Dockerfile
-    image: ${REGISTRY:-opea}/llava-tgi:${TAG:-latest}
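
The surviving lvm-tgi entry tags its image as ${REGISTRY:-opea}/lvm-tgi:${TAG:-latest}, so registry and tag fall back to opea and latest when the variables are unset. A small sketch of overriding them when building from this compose file (the service name lvm-tgi and the registry/tag values below are assumptions for illustration, not part of the commit):

    # Override the defaults in the compose file's
    # ${REGISTRY:-opea}/lvm-tgi:${TAG:-latest} image reference.
    export REGISTRY=my-registry.example.com/opea   # hypothetical registry
    export TAG=comps                               # hypothetical tag
    docker compose -f .github/workflows/docker/compose/lvms-compose-cd.yaml build lvm-tgi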
12 changes: 1 addition & 11 deletions tests/test_lvms_tgi_llava_next.sh
@@ -10,16 +10,6 @@ ip_address=$(hostname -I | awk '{print $1}')
 function build_docker_images() {
     cd $WORKPATH
     echo $(pwd)
-    git clone https://github.com/yuanwu2017/tgi-gaudi.git && cd tgi-gaudi && git checkout v2.0.4
-    docker build --no-cache -t opea/llava-tgi:comps .
-    if [ $? -ne 0 ]; then
-        echo "opea/llava-tgi built fail"
-        exit 1
-    else
-        echo "opea/llava-tgi built successful"
-    fi
-
-    cd ..
     docker build --no-cache -t opea/lvm-tgi:comps -f comps/lvms/Dockerfile_tgi .
     if [ $? -ne 0 ]; then
         echo "opea/lvm-tgi built fail"
@@ -32,7 +22,7 @@ function build_docker_images() {
 function start_service() {
     unset http_proxy
     model="llava-hf/llava-v1.6-mistral-7b-hf"
-    docker run -d --name="test-comps-lvm-llava-tgi" -e http_proxy=$http_proxy -e https_proxy=$https_proxy -p 5027:80 --runtime=habana -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true -e SKIP_TOKENIZER_IN_TGI=true -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --ipc=host opea/llava-tgi:comps --model-id $model --max-input-tokens 4096 --max-total-tokens 8192
+    docker run -d --name="test-comps-lvm-llava-tgi" -e http_proxy=$http_proxy -e https_proxy=$https_proxy -p 5027:80 --runtime=habana -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true -e SKIP_TOKENIZER_IN_TGI=true -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --ipc=host ghcr.io/huggingface/tgi-gaudi:2.0.4 --model-id $model --max-input-tokens 4096 --max-total-tokens 8192
     docker run -d --name="test-comps-lvm-tgi" -e LVM_ENDPOINT=http://$ip_address:5027 -e http_proxy=$http_proxy -e https_proxy=$https_proxy -p 5028:9399 --ipc=host opea/lvm-tgi:comps
     sleep 3m
 }
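
Since start_service only sleeps for a fixed 3 minutes, anyone reproducing the test locally might prefer to poll the container until it is actually serving. A sketch, assuming the upstream tgi-gaudi image exposes TGI's usual /health endpoint on the mapped host port 5027 (this check is not part of the test script):

    # Wait until the Gaudi TGI container answers on its health endpoint.
    until curl -sf http://localhost:5027/health > /dev/null; do
        echo "waiting for tgi-gaudi on port 5027..."
        sleep 10
    done
    echo "tgi-gaudi is ready"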
