Skip to content

Commit

Permalink
update image build for 2 new examples (#729)
Browse files Browse the repository at this point in the history
Signed-off-by: chensuyue <[email protected]>
Signed-off-by: Chendi.Xue <[email protected]>
  • Loading branch information
chensuyue committed Sep 10, 2024
1 parent fa12083 commit 0869029
Show file tree
Hide file tree
Showing 8 changed files with 85 additions and 22 deletions.
2 changes: 1 addition & 1 deletion DocIndexRetriever/docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ WORKDIR /home/user/GenAIComps
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt

COPY GenAIExamples/DocIndexRetriever/docker/retrieval_tool.py /home/user/retrieval_tool.py
COPY ./retrieval_tool.py /home/user/retrieval_tool.py

ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps

Expand Down
3 changes: 2 additions & 1 deletion DocIndexRetriever/docker/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,8 @@ DocRetriever are the most widely adopted use case for leveraging the different m
```bash
cd ..
git clone https://github.com/opea-project/GenAIExamples.git
docker build --no-cache -t opea/doc-index-retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f GenAIExamples/DocIndexRetriever/docker/Dockerfile .
cd GenAIExamples/DocIndexRetriever/docker
docker build --no-cache -t opea/doc-index-retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./Dockerfile .
```

## 3. Start all the services Docker Containers
Expand Down
36 changes: 36 additions & 0 deletions DocIndexRetriever/docker/docker_build_compose.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# Image build definitions for the DocIndexRetriever example.
# The microservice images extend doc-index-retriever so they inherit its
# proxy build args; only context/dockerfile/image differ per service.
services:
  doc-index-retriever:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      # context defaults to this compose file's directory
      dockerfile: ./Dockerfile
    image: ${REGISTRY:-opea}/doc-index-retriever:${TAG:-latest}
  embedding-tei:
    build:
      context: GenAIComps
      dockerfile: comps/embeddings/langchain/docker/Dockerfile
    extends: doc-index-retriever
    image: ${REGISTRY:-opea}/embedding-tei:${TAG:-latest}
  retriever-redis:
    build:
      context: GenAIComps
      dockerfile: comps/retrievers/langchain/redis/docker/Dockerfile
    # fixed: was "extends: chatqna" — no chatqna service is defined in
    # this file; all sibling services extend doc-index-retriever
    extends: doc-index-retriever
    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
  reranking-tei:
    build:
      context: GenAIComps
      dockerfile: comps/reranks/tei/docker/Dockerfile
    extends: doc-index-retriever
    image: ${REGISTRY:-opea}/reranking-tei:${TAG:-latest}
  dataprep-on-ray-redis:
    build:
      context: GenAIComps
      dockerfile: comps/dataprep/redis/langchain_ray/docker/Dockerfile
    extends: doc-index-retriever
    image: ${REGISTRY:-opea}/dataprep-on-ray-redis:${TAG:-latest}
2 changes: 1 addition & 1 deletion DocIndexRetriever/docker/gaudi/docker_compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ services:
image: opea/reranking-tei:latest
container_name: reranking-tei-server
ports:
- "18000:8000"
- "8000:8000"
ipc: host
entrypoint: python local_reranking.py
environment:
Expand Down
44 changes: 36 additions & 8 deletions DocIndexRetriever/docker/xeon/docker_compose.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@

# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

Expand All @@ -9,34 +8,38 @@ services:
image: redis/redis-stack:7.2.0-v9
container_name: redis-vector-db
ports:
- "16379:6379"
- "6379:6379"
- "8001:8001"
dataprep-redis-service:
image: opea/dataprep-on-ray-redis:latest
image: opea/dataprep-redis:latest
container_name: dataprep-redis-server
depends_on:
- redis-vector-db
ports:
- "6007:6007"
- "6008:6008"
- "6009:6009"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
REDIS_URL: ${REDIS_URL}
INDEX_NAME: ${INDEX_NAME}
TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
tei-embedding-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.2
container_name: tei-embedding-server
ports:
- "8090:80"
- "6006:80"
volumes:
- "./data:/data"
shm_size: 1g
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
command: --model-id ${EMBEDDING_MODEL_ID}
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
embedding:
image: opea/embedding-tei:latest
container_name: embedding-tei-server
Expand All @@ -49,6 +52,7 @@ services:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
Expand All @@ -68,17 +72,40 @@ services:
https_proxy: ${https_proxy}
REDIS_URL: ${REDIS_URL}
INDEX_NAME: ${INDEX_NAME}
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
LANGCHAIN_PROJECT: "opea-retriever-service"
restart: unless-stopped

tei-reranking-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.2
container_name: tei-reranking-server
ports:
- "8808:80"
volumes:
- "./data:/data"
shm_size: 1g
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
HF_HUB_DISABLE_PROGRESS_BARS: 1
HF_HUB_ENABLE_HF_TRANSFER: 0
command: --model-id ${RERANK_MODEL_ID} --auto-truncate

reranking:
image: opea/reranking-tei:latest
container_name: reranking-tei-server
container_name: reranking-tei-xeon-server
volumes:
- ${WORKDIR}/GenAIComps/comps/:/home/user/comps
depends_on:
- tei-reranking-service
ports:
- "18000:8000"
- "8000:8000"
ipc: host
entrypoint: python local_reranking.py
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
Expand All @@ -99,6 +126,7 @@ services:
- tei-embedding-service
- embedding
- retriever
- tei-reranking-service
- reranking
ports:
- "8889:8889"
Expand Down
7 changes: 3 additions & 4 deletions DocIndexRetriever/tests/test_docindexretriever_on_gaudi.sh
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,11 @@ LOG_PATH="$WORKPATH/tests"
ip_address=$(hostname -I | awk '{print $1}')

function build_docker_images() {
cd $WORKPATH/../../
cd $WORKPATH/docker
if [ ! -d "GenAIComps" ] ; then
git clone https://github.com/opea-project/GenAIComps.git
fi
cd GenAIComps
git status

docker build -t opea/embedding-tei:latest -f comps/embeddings/langchain/docker/Dockerfile .
docker build -t opea/retriever-redis:latest -f comps/retrievers/langchain/redis/docker/Dockerfile .
Expand All @@ -25,8 +24,8 @@ function build_docker_images() {
docker pull ghcr.io/huggingface/tgi-gaudi:latest
docker pull redis/redis-stack:7.2.0-v9

cd $WORKPATH/../../
docker build -t opea/doc-index-retriever:latest -f GenAIExamples/DocIndexRetriever/docker/Dockerfile .
cd $WORKPATH/docker
docker build -t opea/doc-index-retriever:latest -f ./Dockerfile .
}

function start_services() {
Expand Down
10 changes: 5 additions & 5 deletions ProductivitySuite/docker/docker_build_compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41,19 +41,19 @@ services:
dockerfile: comps/dataprep/redis/langchain/docker/Dockerfile
extends: chatqna
image: ${REGISTRY:-opea}/dataprep-redis:${TAG:-latest}
promptregistry-mongo:
promptregistry-mongo-server:
build:
context: GenAIComps
dockerfile: comps/prompt_registry/mongo/docker/Dockerfile
extends: chatqna
image: ${REGISTRY:-opea}/promptregistry-mongo-server:${TAG:-latest}
chathistory-mongo:
chathistory-mongo-server:
build:
context: GenAIComps
dockerfile: comps/chathistory/mongo/docker/Dockerfile
extends: chatqna
image: ${REGISTRY:-opea}/chathistory-mongo-server:${TAG:-latest}
productivity-suite-react-ui:
productivity-suite-react-ui-server:
build:
context: ui
dockerfile: ./docker/Dockerfile.react
Expand Down Expand Up @@ -86,13 +86,13 @@ services:
context: ../../FaqGen/docker/
dockerfile: ./Dockerfile
image: ${REGISTRY:-opea}/faqgen:${TAG:-latest}
llm_faqgen:
llm-faqgen-tgi:
build:
context: GenAIComps
dockerfile: comps/llms/faq-generation/tgi/Dockerfile
extends: faqgen
image: ${REGISTRY:-opea}/llm-faqgen-tgi:${TAG:-latest}
llm_docsum_server:
llm-docsum-tgi:
build:
context: GenAIComps
dockerfile: comps/llms/summarization/tgi/Dockerfile
Expand Down
3 changes: 1 addition & 2 deletions ProductivitySuite/tests/test_productivitysuite_on_xeon.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,7 @@ function build_docker_images() {
git clone https://github.com/opea-project/GenAIComps.git

echo "Build all the images with --no-cache, check docker_image_build.log for details..."
service_list="chatqna dataprep-redis embedding-tei promptregistry-mongo llm_docsum_server llm_faqgen chathistory-mongo retriever-redis reranking-tei llm-tgi productivity-suite-react-ui codegen docsum faqgen"
docker compose -f docker_build_compose.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
docker compose -f docker_build_compose.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log

docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
docker pull ghcr.io/huggingface/text-generation-inference:2.1.0
Expand Down

0 comments on commit 0869029

Please sign in to comment.