diff --git a/ChatQnA/docker/gaudi/README.md b/ChatQnA/docker/gaudi/README.md
index 8156741092..f54c29d560 100644
--- a/ChatQnA/docker/gaudi/README.md
+++ b/ChatQnA/docker/gaudi/README.md
@@ -93,6 +93,7 @@ Then run the command `docker images`, you will have the following 8 Docker Image
 Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.
 
 ```bash
+export no_proxy=${your_no_proxy}
 export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
diff --git a/ChatQnA/docker/gaudi/docker_compose.yaml b/ChatQnA/docker/gaudi/docker_compose.yaml
index ab6633119e..5015ff44e2 100644
--- a/ChatQnA/docker/gaudi/docker_compose.yaml
+++ b/ChatQnA/docker/gaudi/docker_compose.yaml
@@ -19,6 +19,7 @@ services:
     ports:
       - "6007:6007"
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
@@ -35,6 +36,7 @@ services:
       - SYS_NICE
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       HABANA_VISIBLE_DEVICES: all
@@ -49,6 +51,7 @@ services:
       - "6000:6000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
@@ -65,6 +68,7 @@ services:
       - "7000:7000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
@@ -82,6 +86,7 @@ services:
       - "./data:/data"
     shm_size: 1g
    environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -97,6 +102,7 @@ services:
       - "8000:8000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT}
@@ -115,6 +121,7 @@ services:
     volumes:
       - "./data:/data"
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -136,6 +143,7 @@ services:
       - "9000:9000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -161,6 +169,7 @@ services:
     ports:
       - "8888:8888"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -178,6 +187,7 @@ services:
     ports:
       - "5173:5173"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - CHAT_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
diff --git a/CodeGen/docker/gaudi/README.md b/CodeGen/docker/gaudi/README.md
index 4173b5206b..4a2030d561 100644
--- a/CodeGen/docker/gaudi/README.md
+++ b/CodeGen/docker/gaudi/README.md
@@ -51,6 +51,7 @@ Then run the command `docker images`, you will have the following 3 Docker image
 Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.
 
 ```bash
+export no_proxy=${your_no_proxy}
 export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export LLM_MODEL_ID="meta-llama/CodeLlama-7b-hf"
diff --git a/CodeGen/docker/gaudi/docker_compose.yaml b/CodeGen/docker/gaudi/docker_compose.yaml
index 507ff434a8..c5cb22c09a 100644
--- a/CodeGen/docker/gaudi/docker_compose.yaml
+++ b/CodeGen/docker/gaudi/docker_compose.yaml
@@ -13,6 +13,7 @@ services:
     volumes:
       - "./data:/data"
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       HABANA_VISIBLE_DEVICES: all
@@ -32,6 +33,7 @@ services:
       - "9000:9000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -48,6 +50,7 @@ services:
     ports:
       - "7778:7778"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -62,6 +65,7 @@ services:
     ports:
       - "5173:5173"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - BASIC_URL=${BACKEND_SERVICE_ENDPOINT}
diff --git a/CodeTrans/docker/gaudi/README.md b/CodeTrans/docker/gaudi/README.md
index 698c63eeaa..87cd9f3ea1 100755
--- a/CodeTrans/docker/gaudi/README.md
+++ b/CodeTrans/docker/gaudi/README.md
@@ -47,6 +47,7 @@ Then run the command `docker images`, you will have the following Docker Images:
 Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below. Notice that the `LLM_MODEL_ID` indicates the LLM model used for TGI service.
 
 ```bash
+export no_proxy=${your_no_proxy}
 export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export LLM_MODEL_ID="HuggingFaceH4/mistral-7b-grok"
diff --git a/CodeTrans/docker/gaudi/docker_compose.yaml b/CodeTrans/docker/gaudi/docker_compose.yaml
index e612e78645..12fd41c32d 100644
--- a/CodeTrans/docker/gaudi/docker_compose.yaml
+++ b/CodeTrans/docker/gaudi/docker_compose.yaml
@@ -13,6 +13,7 @@ services:
     volumes:
       - "./data:/data"
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       HABANA_VISIBLE_DEVICES: all
@@ -30,6 +31,7 @@ services:
       - "9000:9000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -47,6 +49,7 @@ services:
     ports:
       - "7777:7777"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -61,6 +64,7 @@ services:
     ports:
       - "5173:5173"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - BASE_URL=${BACKEND_SERVICE_ENDPOINT}
diff --git a/CodeTrans/docker/xeon/README.md b/CodeTrans/docker/xeon/README.md
index 5b38b64fd8..ac13ea71f2 100755
--- a/CodeTrans/docker/xeon/README.md
+++ b/CodeTrans/docker/xeon/README.md
@@ -55,6 +55,7 @@ Then run the command `docker images`, you will have the following Docker Images:
 Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below. Notice that the `LLM_MODEL_ID` indicates the LLM model used for TGI service.
 
 ```bash
+export no_proxy=${your_no_proxy}
 export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export LLM_MODEL_ID="HuggingFaceH4/mistral-7b-grok"
diff --git a/CodeTrans/docker/xeon/docker_compose.yaml b/CodeTrans/docker/xeon/docker_compose.yaml
index 60b187a58b..f5f5a2dca5 100644
--- a/CodeTrans/docker/xeon/docker_compose.yaml
+++ b/CodeTrans/docker/xeon/docker_compose.yaml
@@ -14,6 +14,7 @@ services:
       - "./data:/data"
     shm_size: 1g
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -25,6 +26,7 @@ services:
       - "9000:9000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -42,6 +44,7 @@ services:
     ports:
       - "7777:7777"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -56,6 +59,7 @@ services:
     ports:
       - "5173:5173"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - BASE_URL=${BACKEND_SERVICE_ENDPOINT}
diff --git a/DocSum/docker/gaudi/README.md b/DocSum/docker/gaudi/README.md
index 8d1b8fdb50..c495df4cd8 100644
--- a/DocSum/docker/gaudi/README.md
+++ b/DocSum/docker/gaudi/README.md
@@ -58,6 +58,7 @@ Then run the command `docker images`, you will have the following Docker Images:
 Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.
 
 ```bash
+export no_proxy=${your_no_proxy}
 export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
diff --git a/DocSum/docker/gaudi/docker_compose.yaml b/DocSum/docker/gaudi/docker_compose.yaml
index bca27bab4a..8e0021b0d8 100644
--- a/DocSum/docker/gaudi/docker_compose.yaml
+++ b/DocSum/docker/gaudi/docker_compose.yaml
@@ -11,6 +11,7 @@ services:
     ports:
       - "8008:80"
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -31,6 +32,7 @@ services:
       - "9000:9000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -48,6 +50,7 @@ services:
     ports:
       - "8888:8888"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -62,6 +65,7 @@ services:
     ports:
       - "5173:5173"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - DOC_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
diff --git a/DocSum/docker/xeon/README.md b/DocSum/docker/xeon/README.md
index 3fc834f40a..a575a1f9f6 100644
--- a/DocSum/docker/xeon/README.md
+++ b/DocSum/docker/xeon/README.md
@@ -59,6 +59,7 @@ Then run the command `docker images`, you will have the following Docker Images:
 Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.
 
 ```bash
+export no_proxy=${your_no_proxy}
 export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
diff --git a/DocSum/docker/xeon/docker_compose.yaml b/DocSum/docker/xeon/docker_compose.yaml
index badbcb321c..8fad40f3a4 100644
--- a/DocSum/docker/xeon/docker_compose.yaml
+++ b/DocSum/docker/xeon/docker_compose.yaml
@@ -11,6 +11,7 @@ services:
     ports:
       - "8008:80"
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -28,6 +29,7 @@ services:
       - "9000:9000"
     ipc: host
     environment:
+      no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -45,6 +47,7 @@ services:
     ports:
       - "8888:8888"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -59,6 +62,7 @@ services:
     ports:
       - "5173:5173"
     environment:
+      - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
       - DOC_BASE_URL=${BACKEND_SERVICE_ENDPOINT}