From bc4bbfa8494be5b08b8c1a21f38769de11991507 Mon Sep 17 00:00:00 2001
From: Dina Suehiro Jones
Date: Fri, 13 Sep 2024 19:10:25 -0700
Subject: [PATCH] Fix issues with the VisualQnA instructions (#809)

Signed-off-by: Dina Suehiro Jones
Signed-off-by: dmsuehir
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 VisualQnA/Dockerfile                          |  2 +-
 .../docker_compose/intel/cpu/xeon/README.md   | 15 ++++++++++-----
 .../docker_compose/intel/hpu/gaudi/README.md  | 19 ++++++++++++-------
 3 files changed, 23 insertions(+), 13 deletions(-)

diff --git a/VisualQnA/Dockerfile b/VisualQnA/Dockerfile
index f090d3e68..588cd379d 100644
--- a/VisualQnA/Dockerfile
+++ b/VisualQnA/Dockerfile
@@ -23,7 +23,7 @@ RUN pip install --no-cache-dir --upgrade pip && \
 
 COPY ./visualqna.py /home/user/visualqna.py
 
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+ENV PYTHONPATH=/home/user/GenAIComps
 
 USER user
 
diff --git a/VisualQnA/docker_compose/intel/cpu/xeon/README.md b/VisualQnA/docker_compose/intel/cpu/xeon/README.md
index 9b1127881..9f8b65de0 100644
--- a/VisualQnA/docker_compose/intel/cpu/xeon/README.md
+++ b/VisualQnA/docker_compose/intel/cpu/xeon/README.md
@@ -41,10 +41,12 @@ git clone https://github.com/opea-project/GenAIComps.git
 cd GenAIComps
 ```
 
-### 1. Build LVM Image
+### 1. Build LVM and NGINX Docker Images
 
 ```bash
 docker build --no-cache -t opea/lvm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/tgi-llava/Dockerfile .
+
+docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/nginx/Dockerfile .
 ```
 
 ### 2. Build MegaService Docker Image
@@ -55,7 +57,7 @@ To construct the Mega Service, we utilize the [GenAIComps](https://github.com/op
 git clone https://github.com/opea-project/GenAIExamples.git
 cd GenAIExamples/VisualQnA
 docker build --no-cache -t opea/visualqna:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
-cd ../../..
+cd ../..
 ```
 
 ### 3. Build UI Docker Image
@@ -65,7 +67,7 @@ Build frontend Docker image via below command:
 ```bash
 cd GenAIExamples/VisualQnA/ui
 docker build --no-cache -t opea/visualqna-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f docker/Dockerfile .
-cd ../../../..
+cd ../../..
 ```
 
 ### 4. Pull TGI Xeon Image
@@ -74,12 +76,13 @@ cd ../../../..
 ```bash
 docker pull ghcr.io/huggingface/text-generation-inference:sha-e4201f4-intel-cpu
 ```
 
-Then run the command `docker images`, you will have the following 4 Docker Images:
+Then run the command `docker images`, and you will see the following 5 Docker images:
 
 1. `ghcr.io/huggingface/text-generation-inference:sha-e4201f4-intel-cpu`
 2. `opea/lvm-tgi:latest`
 3. `opea/visualqna:latest`
 4. `opea/visualqna-ui:latest`
+5. `opea/nginx:latest`
 
 ## 🚀 Start Microservices
@@ -98,7 +101,7 @@ export host_ip="External_Public_IP"
 **Append the value of the public IP address to the no_proxy list**
 
 ```
-export your_no_proxy=${your_no_proxy},"External_Public_IP"
+export your_no_proxy="${your_no_proxy},${host_ip}"
 ```
 
 ```bash
@@ -131,6 +134,8 @@ docker compose -f compose.yaml up -d
 
 Follow the instructions to validate MicroServices.
 
+> Note: If you see an "Internal Server Error" from the `curl` command, wait a few minutes for the microservices to be ready and then try again.
+
 1. LLM Microservice
 
 ```bash
diff --git a/VisualQnA/docker_compose/intel/hpu/gaudi/README.md b/VisualQnA/docker_compose/intel/hpu/gaudi/README.md
index c45367c70..4a0b5f0bf 100644
--- a/VisualQnA/docker_compose/intel/hpu/gaudi/README.md
+++ b/VisualQnA/docker_compose/intel/hpu/gaudi/README.md
@@ -13,10 +13,12 @@ git clone https://github.com/opea-project/GenAIComps.git
 cd GenAIComps
 ```
 
-### 2. Build LLM Image
+### 2. Build LVM and NGINX Docker Images
 
 ```bash
 docker build --no-cache -t opea/lvm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/tgi-llava/Dockerfile .
+
+docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/nginx/Dockerfile .
 ```
 
 ### 3. Pull TGI Gaudi Image
@@ -31,9 +33,9 @@ To construct the Mega Service, we utilize the [GenAIComps](https://github.com/op
 
 ```bash
 git clone https://github.com/opea-project/GenAIExamples.git
-cd GenAIExamples/VisualQnA/docker
+cd GenAIExamples/VisualQnA
 docker build --no-cache -t opea/visualqna:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
-cd ../../..
+cd ../..
 ```
 
 ### 5. Build UI Docker Image
@@ -41,17 +43,18 @@ cd ../../..
 
 Build frontend Docker image via below command:
 
 ```bash
-cd GenAIExamples/VisualQnA//
+cd GenAIExamples/VisualQnA/ui
 docker build --no-cache -t opea/visualqna-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile .
-cd ../../../..
+cd ../../..
 ```
 
-Then run the command `docker images`, you will have the following 4 Docker Images:
+Then run the command `docker images`, and you will see the following 5 Docker images:
 
-1. `opea/llava-tgi:latest`
+1. `ghcr.io/huggingface/tgi-gaudi:2.0.4`
 2. `opea/lvm-tgi:latest`
 3. `opea/visualqna:latest`
 4. `opea/visualqna-ui:latest`
+5. `opea/nginx:latest`
 
 ## 🚀 Start MicroServices and MegaService
@@ -89,6 +92,8 @@ docker compose -f compose.yaml up -d
 
 Follow the instructions to validate MicroServices.
 
+> Note: If you see an "Internal Server Error" from the `curl` command, wait a few minutes for the microservices to be ready and then try again.
+
 1. LLM Microservice
 
 ```bash
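
A note on the `Dockerfile` change above: `$PYTHONPATH` is normally unset at image build time, so the old line `ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps` expanded to `:/home/user/GenAIComps`. The empty entry in front of the colon lands on `sys.path`, where Python treats it as the current working directory, which can shadow installed packages. A minimal sketch of the difference (illustrative only, not part of the patch):

```bash
# Simulating the old value: the stray leading colon produces an empty
# sys.path entry, which Python resolves to the current working directory.
PYTHONPATH=":/home/user/GenAIComps" python3 -c 'import sys; print(sys.path[:3])'

# The fixed value adds only the intended directory.
PYTHONPATH="/home/user/GenAIComps" python3 -c 'import sys; print(sys.path[:3])'
```

On the "Internal Server Error" note added to both READMEs: the error typically clears once TGI finishes downloading and loading the model. Instead of retrying by hand, you can poll TGI's `/health` endpoint until it returns 200. A minimal sketch, assuming the TGI container publishes its HTTP port as 8399 (check the port mapping in your `compose.yaml` and adjust):

```bash
# Poll until TGI reports healthy, giving up after about 5 minutes.
for attempt in $(seq 1 30); do
  if curl -sf "http://${host_ip}:8399/health" > /dev/null; then
    echo "TGI is ready"
    break
  fi
  echo "Waiting for the model to load (attempt ${attempt})..."
  sleep 10
done
```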