diff --git a/CodeTrans/README.md b/CodeTrans/README.md
index b6a223576..240a97f08 100644
--- a/CodeTrans/README.md
+++ b/CodeTrans/README.md
@@ -20,7 +20,7 @@ Currently we support two ways of deploying Code Translation services on docker:
   docker pull opea/codetrans:latest
   ```

-2. Start services using the docker images `built from source`: [Guide](./docker)
+2. Start services using the docker images `built from source`: [Guide](./docker/xeon/README.md)

### Required Models

@@ -44,6 +44,8 @@ To set up environment variables for deploying Code Translation services, follow
   # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
   export no_proxy="Your_No_Proxy"
   export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
+   # Example: NGINX_PORT=80
+   export NGINX_PORT=${your_nginx_port}
   ```

2. If you are in a proxy environment, also set the proxy-related environment variables:
diff --git a/CodeTrans/docker/docker_build_compose.yaml b/CodeTrans/docker/docker_build_compose.yaml
index 2f2849619..95bcef181 100644
--- a/CodeTrans/docker/docker_build_compose.yaml
+++ b/CodeTrans/docker/docker_build_compose.yaml
@@ -22,3 +22,9 @@ services:
      dockerfile: comps/llms/text-generation/tgi/Dockerfile
    extends: codetrans
    image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
+  nginx:
+    build:
+      context: GenAIComps/comps/nginx/docker
+      dockerfile: ./Dockerfile
+    extends: codetrans
+    image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
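For reviewers who want to try the new build target in isolation: the `nginx` service added above builds through the same compose file as the existing images. A minimal sketch, assuming it is run from `CodeTrans/docker/` with `GenAIComps` already cloned there (the build context the service declares):

```bash
cd CodeTrans/docker
git clone https://github.com/opea-project/GenAIComps.git   # provides the nginx build context

# Build just the new image; REGISTRY/TAG fall back to opea/latest as in the compose file.
docker compose -f docker_build_compose.yaml build nginx
docker images | grep nginx   # expect opea/nginx:latest
```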
diff --git a/CodeTrans/docker/gaudi/README.md b/CodeTrans/docker/gaudi/README.md
index 6367e0c36..e58c00c0d 100755
--- a/CodeTrans/docker/gaudi/README.md
+++ b/CodeTrans/docker/gaudi/README.md
@@ -13,7 +13,7 @@
git clone https://github.com/opea-project/GenAIComps.git
cd GenAIComps
```

-### 2. Build the LLM Docker Image with the following command
+### 2. Build the LLM Docker Image

```bash
docker build -t opea/llm-tgi:latest --no-cache --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/text-generation/tgi/Dockerfile .
```
@@ -34,11 +34,19 @@
cd GenAIExamples/CodeTrans/docker/ui
docker build -t opea/codetrans-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile .
```

+### 5. Build Nginx Docker Image
+
+```bash
+cd GenAIComps/comps/nginx/docker
+docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./Dockerfile .
+```
+
Then run the command `docker images`; you should see the following Docker images:

- `opea/llm-tgi:latest`
- `opea/codetrans:latest`
- `opea/codetrans-ui:latest`
+- `opea/nginx:latest`

## 🚀 Start Microservices

@@ -54,17 +62,30 @@ Change the `LLM_MODEL_ID` below for your needs.

### Setup Environment Variables

-```bash
-export no_proxy=${your_no_proxy}
-export http_proxy=${your_http_proxy}
-export https_proxy=${your_http_proxy}
-export LLM_MODEL_ID="HuggingFaceH4/mistral-7b-grok"
-export TGI_LLM_ENDPOINT="http://${host_ip}:8008"
-export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
-export MEGA_SERVICE_HOST_IP=${host_ip}
-export LLM_SERVICE_HOST_IP=${host_ip}
-export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:7777/v1/codetrans"
-```
+1. Set the required environment variables:
+
+   ```bash
+   # Example: host_ip="192.168.1.1"
+   export host_ip="External_Public_IP"
+   # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
+   export no_proxy="Your_No_Proxy"
+   export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
+   # Example: NGINX_PORT=80
+   export NGINX_PORT=${your_nginx_port}
+   ```
+
+2. If you are in a proxy environment, also set the proxy-related environment variables:
+
+   ```bash
+   export http_proxy="Your_HTTP_Proxy"
+   export https_proxy="Your_HTTPS_Proxy"
+   ```
+
+3. Set up other environment variables:
+
+   ```bash
+   source ../set_env.sh
+   ```

### Start Microservice Docker Containers

@@ -101,9 +122,24 @@
curl http://${host_ip}:7777/v1/codetrans \
  -d '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'
```

+4. Nginx Service
+
+```bash
+curl http://${host_ip}:${NGINX_PORT}/v1/codetrans \
+  -H "Content-Type: application/json" \
+  -d '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'
+```
+
## 🚀 Launch the UI

+### Launch with Original Port
+
Open this URL `http://{host_ip}:5173` in your browser to access the frontend.
+
+### Launch with Nginx
+
+If you want to launch the UI using Nginx, open the URL `http://{host_ip}:{NGINX_PORT}` in your browser to access the frontend.
+
![image](https://github.com/intel-ai-tce/GenAIExamples/assets/21761437/71214938-819c-4979-89cb-c03d937cd7b5)

Here is an example of code translation.
diff --git a/CodeTrans/docker/gaudi/compose.yaml b/CodeTrans/docker/gaudi/compose.yaml
index e206d13be..f6285d2ef 100644
--- a/CodeTrans/docker/gaudi/compose.yaml
+++ b/CodeTrans/docker/gaudi/compose.yaml
@@ -64,6 +64,25 @@ services:
      - BASE_URL=${BACKEND_SERVICE_ENDPOINT}
    ipc: host
    restart: always
+  codetrans-gaudi-nginx-server:
+    image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
+    container_name: codetrans-gaudi-nginx-server
+    depends_on:
+      - codetrans-gaudi-backend-server
+      - codetrans-gaudi-ui-server
+    ports:
+      - "${NGINX_PORT:-80}:80"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+      - FRONTEND_SERVICE_IP=${FRONTEND_SERVICE_IP}
+      - FRONTEND_SERVICE_PORT=${FRONTEND_SERVICE_PORT}
+      - BACKEND_SERVICE_NAME=${BACKEND_SERVICE_NAME}
+      - BACKEND_SERVICE_IP=${BACKEND_SERVICE_IP}
+      - BACKEND_SERVICE_PORT=${BACKEND_SERVICE_PORT}
+    ipc: host
+    restart: always

networks:
  default:
diff --git a/CodeTrans/docker/set_env.sh b/CodeTrans/docker/set_env.sh
index c51012aa0..5eae8f0cd 100644
--- a/CodeTrans/docker/set_env.sh
+++ b/CodeTrans/docker/set_env.sh
@@ -9,3 +9,8 @@ export TGI_LLM_ENDPOINT="http://${host_ip}:8008"
export MEGA_SERVICE_HOST_IP=${host_ip}
export LLM_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:7777/v1/codetrans"
+export FRONTEND_SERVICE_IP=${host_ip}
+export FRONTEND_SERVICE_PORT=5173
+export BACKEND_SERVICE_NAME=codetrans
+export BACKEND_SERVICE_IP=${host_ip}
+export BACKEND_SERVICE_PORT=7777
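The five new exports in `set_env.sh` feed straight into the nginx container's environment, so a quick sanity check after sourcing the script can save a debugging round trip. A minimal bash sketch, assuming `host_ip` was exported first:

```bash
export host_ip="192.168.1.1"   # example value; use your machine's external IP
source CodeTrans/docker/set_env.sh

# Warn about any variable the compose files reference but the shell left empty.
for var in LLM_MODEL_ID TGI_LLM_ENDPOINT BACKEND_SERVICE_ENDPOINT \
           FRONTEND_SERVICE_IP FRONTEND_SERVICE_PORT BACKEND_SERVICE_NAME \
           BACKEND_SERVICE_IP BACKEND_SERVICE_PORT; do
  [ -n "${!var}" ] || echo "WARNING: ${var} is empty"
done
```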
diff --git a/CodeTrans/docker/xeon/README.md b/CodeTrans/docker/xeon/README.md
index 4d3fa6b54..8ee9bc856 100755
--- a/CodeTrans/docker/xeon/README.md
+++ b/CodeTrans/docker/xeon/README.md
@@ -42,11 +42,19 @@
cd GenAIExamples/CodeTrans/docker/ui
docker build -t opea/codetrans-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile .
```

+### 5. Build Nginx Docker Image
+
+```bash
+cd GenAIComps/comps/nginx/docker
+docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./Dockerfile .
+```
+
Then run the command `docker images`; you should see the following Docker images:

- `opea/llm-tgi:latest`
- `opea/codetrans:latest`
- `opea/codetrans-ui:latest`
+- `opea/nginx:latest`

## 🚀 Start Microservices

@@ -62,17 +70,30 @@ Change the `LLM_MODEL_ID` below for your needs.

### Setup Environment Variables

-```bash
-export no_proxy=${your_no_proxy}
-export http_proxy=${your_http_proxy}
-export https_proxy=${your_http_proxy}
-export LLM_MODEL_ID="HuggingFaceH4/mistral-7b-grok"
-export TGI_LLM_ENDPOINT="http://${host_ip}:8008"
-export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
-export MEGA_SERVICE_HOST_IP=${host_ip}
-export LLM_SERVICE_HOST_IP=${host_ip}
-export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:7777/v1/codetrans"
-```
+1. Set the required environment variables:
+
+   ```bash
+   # Example: host_ip="192.168.1.1"
+   export host_ip="External_Public_IP"
+   # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
+   export no_proxy="Your_No_Proxy"
+   export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
+   # Example: NGINX_PORT=80
+   export NGINX_PORT=${your_nginx_port}
+   ```
+
+2. If you are in a proxy environment, also set the proxy-related environment variables:
+
+   ```bash
+   export http_proxy="Your_HTTP_Proxy"
+   export https_proxy="Your_HTTPS_Proxy"
+   ```
+
+3. Set up other environment variables:
+
+   ```bash
+   source ../set_env.sh
+   ```

### Start Microservice Docker Containers

@@ -108,3 +129,27 @@
curl http://${host_ip}:7777/v1/codetrans \
  -H "Content-Type: application/json" \
  -d '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'
```
+
+4. Nginx Service
+
+```bash
+curl http://${host_ip}:${NGINX_PORT}/v1/codetrans \
+  -H "Content-Type: application/json" \
+  -d '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'
+```
+
+## 🚀 Launch the UI
+
+### Launch with Original Port
+
+Open this URL `http://{host_ip}:5173` in your browser to access the frontend.
+
+### Launch with Nginx
+
+If you want to launch the UI using Nginx, open the URL `http://{host_ip}:{NGINX_PORT}` in your browser to access the frontend.
+
+![image](https://github.com/intel-ai-tce/GenAIExamples/assets/21761437/71214938-819c-4979-89cb-c03d937cd7b5)
+
+Here is an example of code translation.
+
+![image](https://github.com/intel-ai-tce/GenAIExamples/assets/21761437/be543e96-ddcd-4ee0-9f2c-4e99fee77e37)
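The curl checks in both READMEs assume every container is already serving; in practice the TGI service needs time to download and load the model, so the first requests can fail. A retry loop makes a more forgiving smoke test. A minimal sketch, assuming `host_ip` and `NGINX_PORT` are exported as described above:

```bash
payload='{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'

# Poll the Nginx route until the megaservice answers, up to ~3 minutes.
for attempt in $(seq 1 18); do
  if curl -sf "http://${host_ip}:${NGINX_PORT}/v1/codetrans" \
       -H "Content-Type: application/json" -d "${payload}" > /dev/null; then
    echo "codetrans is reachable through Nginx"
    break
  fi
  echo "attempt ${attempt}: not ready yet, retrying in 10s"
  sleep 10
done
```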
diff --git a/CodeTrans/docker/xeon/compose.yaml b/CodeTrans/docker/xeon/compose.yaml
index e62989122..122028b56 100644
--- a/CodeTrans/docker/xeon/compose.yaml
+++ b/CodeTrans/docker/xeon/compose.yaml
@@ -59,6 +59,26 @@ services:
      - BASE_URL=${BACKEND_SERVICE_ENDPOINT}
    ipc: host
    restart: always
+  codetrans-xeon-nginx-server:
+    image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
+    container_name: codetrans-xeon-nginx-server
+    depends_on:
+      - codetrans-xeon-backend-server
+      - codetrans-xeon-ui-server
+    ports:
+      - "${NGINX_PORT:-80}:80"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+      - FRONTEND_SERVICE_IP=${FRONTEND_SERVICE_IP}
+      - FRONTEND_SERVICE_PORT=${FRONTEND_SERVICE_PORT}
+      - BACKEND_SERVICE_NAME=${BACKEND_SERVICE_NAME}
+      - BACKEND_SERVICE_IP=${BACKEND_SERVICE_IP}
+      - BACKEND_SERVICE_PORT=${BACKEND_SERVICE_PORT}
+    ipc: host
+    restart: always
+
networks:
  default:
    driver: bridge
diff --git a/CodeTrans/tests/test_codetrans_on_gaudi.sh b/CodeTrans/tests/test_codetrans_on_gaudi.sh
index b70448b5c..e86b8d007 100644
--- a/CodeTrans/tests/test_codetrans_on_gaudi.sh
+++ b/CodeTrans/tests/test_codetrans_on_gaudi.sh
@@ -19,7 +19,7 @@ function build_docker_images() {
    git clone https://github.com/opea-project/GenAIComps.git
    echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="codetrans codetrans-ui llm-tgi"
+    service_list="codetrans codetrans-ui llm-tgi nginx"
    docker compose -f docker_build_compose.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
    docker pull ghcr.io/huggingface/tgi-gaudi:2.0.1
@@ -37,6 +37,12 @@ function start_services() {
    export MEGA_SERVICE_HOST_IP=${ip_address}
    export LLM_SERVICE_HOST_IP=${ip_address}
    export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans"
+    export FRONTEND_SERVICE_IP=${ip_address}
+    export FRONTEND_SERVICE_PORT=5173
+    export BACKEND_SERVICE_NAME=codetrans
+    export BACKEND_SERVICE_IP=${ip_address}
+    export BACKEND_SERVICE_PORT=7777
+    export NGINX_PORT=80
    sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env
@@ -108,6 +114,15 @@ function validate_megaservice() {
        "mega-codetrans" \
        "codetrans-gaudi-backend-server" \
        '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'
+
+    # test the megaservice via nginx
+    validate_services \
+        "${ip_address}:80/v1/codetrans" \
+        "print" \
+        "mega-codetrans-nginx" \
+        "codetrans-gaudi-nginx-server" \
+        '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'
+
}

function validate_frontend() {
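`validate_services` is defined earlier in these test scripts and is not shown in this diff; judging by the call sites above, it takes an endpoint, an expected substring, a service label, a container name, and a request payload. A hypothetical minimal re-implementation of that pattern, for running the same check by hand:

```bash
# Hypothetical stand-in for the validate_services helper used above; the real
# one lives in the test scripts. Argument order matches the call sites.
validate_services() {
    local url="$1" expected="$2" service_name="$3" container="$4" payload="$5"
    local response
    response=$(curl -s "http://${url}" -H "Content-Type: application/json" -d "${payload}")
    if echo "${response}" | grep -q "${expected}"; then
        echo "[ ${service_name} ] result matches '${expected}'"
    else
        echo "[ ${service_name} ] unexpected result, dumping ${container} logs:"
        docker logs "${container}"
        return 1
    fi
}
```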
diff --git a/CodeTrans/tests/test_codetrans_on_xeon.sh b/CodeTrans/tests/test_codetrans_on_xeon.sh
index be3fab39f..beca7dd53 100644
--- a/CodeTrans/tests/test_codetrans_on_xeon.sh
+++ b/CodeTrans/tests/test_codetrans_on_xeon.sh
@@ -19,10 +19,10 @@ function build_docker_images() {
    git clone https://github.com/opea-project/GenAIComps.git
    echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="codetrans codetrans-ui llm-tgi"
+    service_list="codetrans codetrans-ui llm-tgi nginx"
    docker compose -f docker_build_compose.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
-    docker pull ghcr.io/huggingface/text-generation-inference:1.4
+    docker pull ghcr.io/huggingface/text-generation-inference:sha-e4201f4-intel-cpu
    docker images && sleep 1s
}
@@ -36,6 +36,12 @@ function start_services() {
    export MEGA_SERVICE_HOST_IP=${ip_address}
    export LLM_SERVICE_HOST_IP=${ip_address}
    export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans"
+    export FRONTEND_SERVICE_IP=${ip_address}
+    export FRONTEND_SERVICE_PORT=5173
+    export BACKEND_SERVICE_NAME=codetrans
+    export BACKEND_SERVICE_IP=${ip_address}
+    export BACKEND_SERVICE_PORT=7777
+    export NGINX_PORT=80
    sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env
@@ -109,6 +115,14 @@ function validate_megaservice() {
        "codetrans-xeon-backend-server" \
        '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'

+    # test the megaservice via nginx
+    validate_services \
+        "${ip_address}:80/v1/codetrans" \
+        "print" \
+        "mega-codetrans-nginx" \
+        "codetrans-xeon-nginx-server" \
+        '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}'
+
}

function validate_frontend() {
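One operational note on the tests: both scripts export `NGINX_PORT=80`, so they will fail on a host that already has something bound to port 80. A pre-flight check along these lines (assuming `ss` from iproute2 is available) makes that failure mode explicit:

```bash
# Fail fast if port 80 is taken before docker compose tries to bind it.
if ss -ltn 2>/dev/null | grep -q ':80 '; then
  echo "Port 80 is already in use; export a different NGINX_PORT." >&2
  exit 1
fi
```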