Skip to content

Commit

Permalink
Helm chart: Add default minimal pod security (#133)
Browse files Browse the repository at this point in the history
* Helm chart: Add default minimal pod security

Signed-off-by: Lianhao Lu <[email protected]>
  • Loading branch information
lianhao committed Jun 26, 2024
1 parent e12f4ec commit 8fcf0ad
Show file tree
Hide file tree
Showing 36 changed files with 328 additions and 179 deletions.
12 changes: 6 additions & 6 deletions .github/workflows/scripts/e2e/chart_test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -10,24 +10,24 @@ IMAGE_REPO=${OPEA_IMAGE_REPO:-""}

function init_codegen() {
    # Prepare the codegen chart values files for e2e testing by rewriting
    # every '*values.yaml' in place with environment-specific settings.
    # NOTE(review): scopes mix 'find .' and 'find ..' — presumably the script
    # runs inside the chart directory and some substitutions must also cover
    # sibling charts; confirm against the workflow's working directory.

    # Insert the ${IMAGE_REPO} prefix (empty by default) before every
    # "repository: opea/" image reference.
    find .. -name '*values.yaml' -type f -exec sed -i "s#repository: opea/*#repository: ${IMAGE_REPO}opea/#g" {} \;
    # Replace the placeholder with the runner's real huggingface token.
    find . -name '*values.yaml' -type f -exec sed -i "s#insert-your-huggingface-token-here#$(cat /home/$USER_ID/.cache/huggingface/token)#g" {} \;
    # Point the model-cache mount at $CHART_MOUNT (both the legacy 'volume'
    # key and the newer 'modelUseHostPath' key).
    find . -name '*values.yaml' -type f -exec sed -i "s#volume: .*#volume: $CHART_MOUNT#g" {} \;
    find . -name '*values.yaml' -type f -exec sed -i "s#modelUseHostPath: .*#modelUseHostPath: $CHART_MOUNT#g" {} \;
    # Force fresh image pulls during e2e runs.
    find .. -name '*values.yaml' -type f -exec sed -i "s#pullPolicy: IfNotPresent#pullPolicy: Always#g" {} \;
}

function init_chatqna() {
    # Prepare the chatqna chart values files for e2e testing by rewriting
    # every '*values.yaml' in place with environment-specific settings.
    # NOTE(review): scopes mix 'find .' and 'find ..' — presumably the script
    # runs inside the chart directory and some substitutions must also cover
    # sibling charts; confirm against the workflow's working directory.

    # Point the model-cache mount at $CHART_MOUNT (both the legacy 'volume'
    # key and the newer 'modelUseHostPath' key).
    find . -name '*values.yaml' -type f -exec sed -i "s#volume: /mnt#volume: $CHART_MOUNT#g" {} \;
    find . -name '*values.yaml' -type f -exec sed -i "s#modelUseHostPath: /mnt#modelUseHostPath: $CHART_MOUNT#g" {} \;
    # Insert the ${IMAGE_REPO} prefix before every "repository: opea/"
    # image reference.
    find .. -name '*values.yaml' -type f -exec sed -i "s#repository: opea/*#repository: ${IMAGE_REPO}opea/#g" {} \;
    # Replace the placeholder with the runner's real huggingface token.
    find . -name '*values.yaml' -type f -exec sed -i "s#insert-your-huggingface-token-here#$(cat /home/$USER_ID/.cache/huggingface/token)#g" {} \;
    # Force fresh image pulls during e2e runs.
    find .. -name '*values.yaml' -type f -exec sed -i "s#pullPolicy: IfNotPresent#pullPolicy: Always#g" {} \;
}

function validate_codegen() {
Expand Down
16 changes: 8 additions & 8 deletions helm-charts/chatqna/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,17 @@ helm dependency update chatqna
export HFTOKEN="insert-your-huggingface-token-here"
export MODELDIR="/mnt"
export MODELNAME="Intel/neural-chat-7b-v3-3"
helm install chatqna chatqna --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} --set global.volume=${MODELDIR} --set llm-uservice.tgi.LLM_MODEL_ID=${MODELNAME}
helm install chatqna chatqna --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} --set global.modelUseHostPath=${MODELDIR} --set llm-uservice.tgi.LLM_MODEL_ID=${MODELNAME}
# To use Gaudi device
# helm install chatqna chatqna --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} --values chatqna/gaudi-values.yaml
```

## Values

| Key | Type | Default | Description |
| ------------------------------- | ------ | ----------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- |
| image.repository | string | `"opea/chatqna:latest"` | |
| service.port | string | `"8888"` | |
| global.HUGGINGFACEHUB_API_TOKEN | string | `""` | Your own Hugging Face API token |
| global.volume | string | `"/mnt"` | Cached models directory, tgi will not download if the model is cached here. The "volume" will be mounted to container as /data directory |
| llm-uservice.tgi.LLM_MODEL_ID | string | `"Intel/neural-chat-7b-v3-3"` | Models id from https://huggingface.co/, or predownloaded model directory |
| Key | Type | Default | Description |
| ------------------------------- | ------ | ----------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- |
| image.repository | string | `"opea/chatqna:latest"` | |
| service.port | string | `"8888"` | |
| global.HUGGINGFACEHUB_API_TOKEN | string | `""` | Your own Hugging Face API token |
| global.modelUseHostPath | string | `"/mnt"` | Cached models directory, tgi will not download if the model is cached here. The host path "volume" will be mounted to container as /data directory |
| llm-uservice.tgi.LLM_MODEL_ID | string | `"Intel/neural-chat-7b-v3-3"` | Models id from https://huggingface.co/, or predownloaded model directory |
15 changes: 14 additions & 1 deletion helm-charts/chatqna/gaudi-values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,16 @@ service:
type: ClusterIP
port: 8888

securityContext:
allowPrivilegeEscalation: false
runAsNonRoot: true
runAsUser: 1000
capabilities:
drop:
- ALL
seccompProfile:
type: RuntimeDefault

# To override values in subchart llm-uservice
llm-uservice:
# To override values in subchart tgi
Expand All @@ -30,11 +40,14 @@ llm-uservice:
resources:
limits:
habana.ai/gaudi: 1

global:
http_proxy:
https_proxy:
no_proxy:
HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
volume: /mnt
LANGCHAIN_TRACING_V2: false
LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
# set modelUseHostPath to host directory if you want to use hostPath volume for model storage
# comment out modelUseHostPath if you want to download the model from huggingface
modelUseHostPath: /mnt
14 changes: 13 additions & 1 deletion helm-charts/chatqna/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,16 @@ service:
type: ClusterIP
port: 8888

securityContext:
allowPrivilegeEscalation: false
runAsNonRoot: true
runAsUser: 1000
capabilities:
drop:
- ALL
seccompProfile:
type: RuntimeDefault

# To override values in subchart llm-uservice
llm-uservice:
# To override values in subchart tgi
Expand All @@ -30,6 +40,8 @@ global:
https_proxy:
no_proxy:
HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
volume: /mnt
LANGCHAIN_TRACING_V2: false
LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
# set modelUseHostPath to host directory if you want to use hostPath volume for model storage
# comment out modelUseHostPath if you want to download the model from huggingface
modelUseHostPath: /mnt
16 changes: 8 additions & 8 deletions helm-charts/codegen/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,17 @@ helm dependency update codegen
export HFTOKEN="insert-your-huggingface-token-here"
export MODELDIR="/mnt"
export MODELNAME="m-a-p/OpenCodeInterpreter-DS-6.7B"
helm install codegen codegen --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} --set global.volume=${MODELDIR} --set llm-uservice.tgi.LLM_MODEL_ID=${MODELNAME}
helm install codegen codegen --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} --set global.modelUseHostPath=${MODELDIR} --set llm-uservice.tgi.LLM_MODEL_ID=${MODELNAME}
# To use Gaudi device
# helm install codegen codegen --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} --values codegen/gaudi-values.yaml
```

## Values

| Key | Type | Default | Description |
| ------------------------------- | ------ | -------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- |
| image.repository | string | `"opea/codegen:latest"` | |
| service.port | string | `"7778"` | |
| global.HUGGINGFACEHUB_API_TOKEN | string | `""` | Your own Hugging Face API token |
| global.volume | string | `"/mnt"` | Cached models directory, tgi will not download if the model is cached here. The "volume" will be mounted to container as /data directory |
| llm-uservice.tgi.LLM_MODEL_ID | string | `"ise-uiuc/Magicoder-S-DS-6.7B"` | Models id from https://huggingface.co/, or predownloaded model directory |
| Key | Type | Default | Description |
| ------------------------------- | ------ | -------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| image.repository | string | `"opea/codegen:latest"` | |
| service.port | string | `"7778"` | |
| global.HUGGINGFACEHUB_API_TOKEN | string | `""` | Your own Hugging Face API token |
| global.modelUseHostPath | string | `"/mnt"` | Cached models directory, tgi will not download if the model is cached here. The host path "modelUseHostPath" will be mounted to container as /data directory |
| llm-uservice.tgi.LLM_MODEL_ID | string | `"ise-uiuc/Magicoder-S-DS-6.7B"` | Models id from https://huggingface.co/, or predownloaded model directory |
16 changes: 14 additions & 2 deletions helm-charts/codegen/gaudi-values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,16 @@ service:
type: ClusterIP
port: 7778

securityContext:
allowPrivilegeEscalation: false
runAsNonRoot: true
runAsUser: 1000
capabilities:
drop:
- ALL
seccompProfile:
type: RuntimeDefault

# To override values in subchart llm-uservice
llm-uservice:
image:
Expand All @@ -28,18 +38,20 @@ llm-uservice:
tgi:
LLM_MODEL_ID: ise-uiuc/Magicoder-S-DS-6.7B
# LLM_MODEL_ID: /data/Magicoder-S-DS-6.7B
volume: /mnt
image:
repository: ghcr.io/huggingface/tgi-gaudi
tag: "1.2.1"
resources:
limits:
habana.ai/gaudi: 1

global:
http_proxy:
https_proxy:
no_proxy:
HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
volume: /mnt
LANGCHAIN_TRACING_V2: false
LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
# set modelUseHostPath to host directory if you want to use hostPath volume for model storage
# comment out modelUseHostPath if you want to download the model from huggingface
modelUseHostPath: /mnt
14 changes: 13 additions & 1 deletion helm-charts/codegen/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,16 @@ service:
type: ClusterIP
port: 7778

securityContext:
allowPrivilegeEscalation: false
runAsNonRoot: true
runAsUser: 1000
capabilities:
drop:
- ALL
seccompProfile:
type: RuntimeDefault

# To override values in subchart llm-uservice
llm-uservice:
image:
Expand All @@ -34,6 +44,8 @@ global:
https_proxy:
no_proxy:
HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
volume: /mnt
LANGCHAIN_TRACING_V2: false
LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
# set modelUseHostPath to host directory if you want to use hostPath volume for model storage
# comment out modelUseHostPath if you want to download the model from huggingface
modelUseHostPath: /mnt
14 changes: 7 additions & 7 deletions helm-charts/common/embedding-usvc/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,14 @@ To install the chart, run the following:
```console
$ export MODELDIR="/mnt"
$ export MODELNAME="BAAI/bge-base-en-v1.5"
$ helm install embedding embedding-usvc --set global.volume=${MODELDIR} --set tei.EMBEDDING_MODEL_ID=${MODELNAME}
$ helm install embedding embedding-usvc --set global.modelUseHostPath=${MODELDIR} --set tei.EMBEDDING_MODEL_ID=${MODELNAME}
```

## Values

| Key | Type | Default | Description |
| ---------------------- | ------ | ----------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- |
| image.repository | string | `"opea/embedding-tei:latest"` | |
| service.port | string | `"6000"` | |
| tei.EMBEDDING_MODEL_ID | string | `"BAAI/bge-base-en-v1.5"` | Models id from https://huggingface.co/, or predownloaded model directory |
| global.volume | string | `"/mnt"` | Cached models directory, tgi will not download if the model is cached here. The "volume" will be mounted to container as /data directory |
| Key | Type | Default | Description |
| ----------------------- | ------ | ----------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| image.repository | string | `"opea/embedding-tei:latest"` | |
| service.port | string | `"6000"` | |
| tei.EMBEDDING_MODEL_ID | string | `"BAAI/bge-base-en-v1.5"` | Models id from https://huggingface.co/, or predownloaded model directory |
| global.modelUseHostPath | string | `"/mnt"` | Cached models directory, tgi will not download if the model is cached here. The host path "modelUseHostPath" will be mounted to container as /data directory |
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ spec:
value: {{ .Values.global.LANGCHAIN_API_KEY }}
- name: LANGCHAIN_PROJECT
value: "opea-embedding-service"

securityContext:
{{- toYaml .Values.securityContext | nindent 12 }}
image: "{{ .Values.image.repository }}"
Expand Down
24 changes: 16 additions & 8 deletions helm-charts/common/embedding-usvc/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,15 @@ podAnnotations: {}
podSecurityContext: {}
# fsGroup: 2000

securityContext: {}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
securityContext:
allowPrivilegeEscalation: false
runAsNonRoot: true
runAsUser: 1000
capabilities:
drop:
- ALL
seccompProfile:
type: RuntimeDefault

service:
type: ClusterIP
Expand Down Expand Up @@ -64,5 +66,11 @@ tei:
pullPolicy: IfNotPresent
# Overrides the image tag whose default is the chart appVersion.
tag: "cpu-1.2"

global:
volume: /mnt
http_proxy:
https_proxy:
no_proxy:
# set modelUseHostPath to host directory if you want to use hostPath volume for model storage
# comment out modelUseHostPath if you want to download the model from huggingface
modelUseHostPath: /mnt
Loading

0 comments on commit 8fcf0ad

Please sign in to comment.