---
basehub:
  # Copied from https://github.com/dask/helm-chart/blob/master/daskhub/values.yaml
  # FIXME: Properly use the upstream chart.
  jupyterhub:
    singleuser:
      # Almost everyone using dask by default wants JupyterLab
      defaultUrl: /lab
      # z2jh network-policy label: allow user pods to open connections to the
      # proxy pod's HTTP port (used so the Dask dashboard can be proxied).
      extraLabels:
        hub.jupyter.org/network-access-proxy-http: "true"
      cloudMetadata:
        # Don't block access to AWS cloud metadata
        # If we don't, our users can't access S3 buckets / other AWS services
        # without an explicit identity
        # FIXME: Provide an explicit identity for users instead
        blockWithIptables: false
      # Kubernetes ServiceAccount mounted into user pods.
      # NOTE(review): presumably this is what carries cloud access for users
      # (see the cloudMetadata note above) — confirm how user-sa is provisioned.
      serviceAccountName: user-sa
      extraEnv:
        # About DASK_ prefixed variables we set:
        #
        # 1. k8s native variable expansion is applied with $(MY_ENV) syntax. The
        #    order variables are defined matters though and we are under the
        #    mercy of how KubeSpawner renders our passed dictionaries.
        #
        # 2. Dask loads local YAML config.
        #
        # 3. Dask loads environment variables prefixed DASK_.
        #    - DASK_ is stripped
        #    - Capitalization is ignored
        #    - Double underscore means a nested configuration
        #    - `ast.literal_eval` is used to parse values
        #
        # 4. dask-gateway and dask-distributed looks at its config and expands
        #    expressions in {} again, sometimes only with the environment
        #    variables as context but sometimes also with additional variables.
        #
        # References:
        # - K8s expansion: https://kubernetes.io/docs/tasks/inject-data-application/define-interdependent-environment-variables/
        # - KubeSpawner issue: https://github.com/jupyterhub/kubespawner/issues/491
        # - Dask config: https://docs.dask.org/en/latest/configuration.html
        # - Exploration issue: https://github.com/2i2c-org/infrastructure/issues/442
        #
        # DASK_GATEWAY__CLUSTER__OPTIONS__IMAGE makes the default worker image
        # match the singleuser image.
        DASK_GATEWAY__CLUSTER__OPTIONS__IMAGE: "{JUPYTER_IMAGE_SPEC}"
        # DASK_GATEWAY__CLUSTER__OPTIONS__ENVIRONMENT makes some environment
        # variables be copied over to the worker nodes from the user nodes.
        DASK_GATEWAY__CLUSTER__OPTIONS__ENVIRONMENT: '{"SCRATCH_BUCKET": "$(SCRATCH_BUCKET)", "PANGEO_SCRATCH": "$(PANGEO_SCRATCH)"}'
        # DASK_DISTRIBUTED__DASHBOARD__LINK makes the suggested link to the
        # dashboard account for the /user/<username>/<server-name> prefix in the path.
        # JUPYTERHUB_SERVICE_PREFIX has leading and trailing slashes as appropriate
        DASK_DISTRIBUTED__DASHBOARD__LINK: "{JUPYTERHUB_SERVICE_PREFIX}proxy/{port}/status"
    hub:
      services:
        dask-gateway:
          # Don't display a dask-gateway entry under 'services',
          # as dask-gateway has no UI
          display: false
      extraConfig:
        # Python executed by JupyterHub at startup in the hub pod: derives the
        # gateway address from the proxy service env vars, injects DASK_GATEWAY__*
        # into spawned user pods, and fills in the dask-gateway service URL.
        daskhub-01-add-dask-gateway-values: |
          # 1. Sets `DASK_GATEWAY__PROXY_ADDRESS` in the singleuser environment.
          # 2. Adds the URL for the Dask Gateway JupyterHub service.
          import os
          # These are set by jupyterhub.
          release_name = os.environ["HELM_RELEASE_NAME"]
          release_namespace = os.environ["POD_NAMESPACE"]
          if "PROXY_HTTP_SERVICE_HOST" in os.environ:
              # https is enabled, we want to use the internal http service.
              gateway_address = "http://{}:{}/services/dask-gateway/".format(
                  os.environ["PROXY_HTTP_SERVICE_HOST"],
                  os.environ["PROXY_HTTP_SERVICE_PORT"],
              )
              print("Setting DASK_GATEWAY__ADDRESS {} from HTTP service".format(gateway_address))
          else:
              gateway_address = "http://proxy-public/services/dask-gateway"
              print("Setting DASK_GATEWAY__ADDRESS {}".format(gateway_address))
          # Internal address to connect to the Dask Gateway.
          c.KubeSpawner.environment.setdefault("DASK_GATEWAY__ADDRESS", gateway_address)
          # Internal address for the Dask Gateway proxy.
          c.KubeSpawner.environment.setdefault("DASK_GATEWAY__PROXY_ADDRESS", "gateway://traefik-{}-dask-gateway.{}:80".format(release_name, release_namespace))
          # Relative address for the dashboard link.
          c.KubeSpawner.environment.setdefault("DASK_GATEWAY__PUBLIC_ADDRESS", "/services/dask-gateway/")
          # Use JupyterHub to authenticate with Dask Gateway.
          c.KubeSpawner.environment.setdefault("DASK_GATEWAY__AUTH__TYPE", "jupyterhub")
          # Adds Dask Gateway as a JupyterHub service to make the gateway available at
          # {HUB_URL}/services/dask-gateway
          service_url = "http://traefik-{}-dask-gateway.{}".format(release_name, release_namespace)
          for service in c.JupyterHub.services:
              if service["name"] == "dask-gateway":
                  if not service.get("url", None):
                      print("Adding dask-gateway service URL")
                      service.setdefault("url", service_url)
                  break
          else:
              print("dask-gateway service not found, this should not happen!")
  dask-gateway:
    enabled: true # Enabling dask-gateway will install Dask Gateway as a dependency.
    # Further Dask Gateway configuration goes here
    # See https://github.com/dask/dask-gateway/blob/master/resources/helm/dask-gateway/values.yaml
    controller:
      nodeSelector:
        k8s.dask.org/node-purpose: core
    gateway:
      nodeSelector:
        k8s.dask.org/node-purpose: core
      backend:
        scheduler:
          extraPodConfig:
            serviceAccountName: user-sa
            tolerations:
              # Let's put schedulers on notebook nodes, since they aren't ephemeral
              # dask can recover from dead workers, but not dead schedulers
              - key: "hub.jupyter.org/dedicated"
                operator: "Equal"
                value: "user"
                effect: "NoSchedule"
              # NOTE(review): underscore variant of the taint key above —
              # presumably for clusters where '/' is not allowed in taint keys;
              # confirm both forms are still needed.
              - key: "hub.jupyter.org_dedicated"
                operator: "Equal"
                value: "user"
                effect: "NoSchedule"
            nodeSelector:
              k8s.dask.org/node-purpose: scheduler
          cores:
            request: 0.01
            limit: 1
          memory:
            request: 128M
            limit: 1G
        worker:
          # NOTE(review): workers run as uid/gid 1000 — presumably to match the
          # image's non-root notebook user; confirm against the worker image.
          extraContainerConfig:
            securityContext:
              runAsGroup: 1000
              runAsUser: 1000
          extraPodConfig:
            serviceAccountName: user-sa
            securityContext:
              fsGroup: 1000
            tolerations:
              - key: "k8s.dask.org/dedicated"
                operator: "Equal"
                value: "worker"
                effect: "NoSchedule"
              # NOTE(review): underscore variant of the taint key above —
              # presumably for clusters where '/' is not allowed in taint keys;
              # confirm both forms are still needed.
              - key: "k8s.dask.org_dedicated"
                operator: "Equal"
                value: "worker"
                effect: "NoSchedule"
            nodeSelector:
              # Dask workers get their own pre-emptible pool
              k8s.dask.org/node-purpose: worker
      # TODO: figure out a replacement for userLimits.
      extraConfig:
        optionHandler: |
          from dask_gateway_server.options import Options, Integer, Float, String, Mapping
          import string
          # Escape a string to be dns-safe in the same way that KubeSpawner does it.
          # Reference https://github.com/jupyterhub/kubespawner/blob/616f72c4aee26c3d2127c6af6086ec50d6cda383/kubespawner/spawner.py#L1828-L1835
          # Adapted from https://github.com/minrk/escapism to avoid installing the package
          # in the dask-gateway api pod which would have been problematic.
          def escape_string_label_safe(to_escape):
              safe_chars = set(string.ascii_lowercase + string.digits)
              escape_char = "-"
              chars = []
              for c in to_escape:
                  if c in safe_chars:
                      chars.append(c)
                  else:
                      # escape one character
                      buf = []
                      # UTF-8 uses 1 to 4 bytes per character, depending on the Unicode symbol
                      # so we need to transform each byte to its hex value
                      for byte in c.encode("utf8"):
                          buf.append(escape_char)
                          # %X is the hex value of the byte
                          buf.append('%X' % byte)
                      escaped_hex_char = "".join(buf)
                      chars.append(escaped_hex_char)
              return u''.join(chars)
          def cluster_options(user):
              safe_username = escape_string_label_safe(user.name)
              def option_handler(options):
                  if ":" not in options.image:
                      raise ValueError("When specifying an image you must also provide a tag")
                  extra_annotations = {
                      "hub.jupyter.org/username": safe_username,
                      "prometheus.io/scrape": "true",
                      "prometheus.io/port": "8787",
                  }
                  extra_labels = {
                      "hub.jupyter.org/username": safe_username,
                  }
                  return {
                      "worker_cores_limit": options.worker_cores,
                      "worker_cores": options.worker_cores,
                      "worker_memory": "%fG" % options.worker_memory,
                      "image": options.image,
                      "scheduler_extra_pod_annotations": extra_annotations,
                      "worker_extra_pod_annotations": extra_annotations,
                      "scheduler_extra_pod_labels": extra_labels,
                      "worker_extra_pod_labels": extra_labels,
                      "environment": options.environment,
                  }
              return Options(
                  Integer("worker_cores", 2, min=1, label="Worker Cores"),
                  Float("worker_memory", 4, min=1, label="Worker Memory (GiB)"),
                  # The default image is set via DASK_GATEWAY__CLUSTER__OPTIONS__IMAGE env variable
                  String("image", label="Image"),
                  Mapping("environment", {}, label="Environment Variables"),
                  handler=option_handler,
              )
          c.Backend.cluster_options = cluster_options
        idle: |
          # timeout after 30 minutes of inactivity
          c.KubeClusterConfig.idle_timeout = 1800
      prefix: "/services/dask-gateway" # Users connect to the Gateway through the JupyterHub service.
      auth:
        type: jupyterhub # Use JupyterHub to authenticate with Dask Gateway
    traefik:
      nodeSelector:
        k8s.dask.org/node-purpose: core
      service:
        type: ClusterIP # Access Dask Gateway through JupyterHub. To access the Gateway from outside JupyterHub, this must be changed to a `LoadBalancer`.
# A placeholder as global values that can be referenced from the same location
# of any chart should be possible to provide, but aren't necessarily provided or
# used.
global: {}