# app.toml.template
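# App manifest template for publishing H2O LLM Studio to H2O AI Cloud
# (keys follow the platform's app.toml format).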
[App]
Name = "ai.h2o.llmstudio"
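# {{VERSION}} is a template placeholder, substituted with the release version
# when this template is rendered (exact tooling not shown here).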
Version = "{{VERSION}}"
Title = "H2O LLM Studio"
Description = "A framework and no-code GUI designed for fine-tuning state-of-the-art large language models (LLMs)"
LongDescription = "about.md"
Tags = ["GENERATIVE_AI", "NLP", "DATA_SCIENCE"]
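
# Runtime configuration: container runtime, port, and the resources requested
# from the platform for each app instance.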
[Runtime]
# Must be installed in the server config
RuntimeVersion = "llm_studio"
AppMode = "container"
Port = 10101
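# Resource reservations for a single instance: 10 CPUs, 118 GiB of RAM
# (request equals limit) and one GPU.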
CPUReservation = "10"
MemoryReservation = "118Gi"
MemoryLimit = "118Gi"
GPUCount = 1
VolumeMount = "/home/llmstudio/mount"
VolumeSize = "1Ti"
ResourceVolumeSize = "1Ti"
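# Shared memory, OIDC login and path-based (BASE_URL) routing are enabled;
# the interpretation of these flags here is an assumption based on their names.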
EnableSHM = true
EnableOIDC = true
RoutingMode = "BASE_URL"
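
# Environment variables injected into the app container.
# H2O_LLM_STUDIO_WORKDIR points LLM Studio's working directory (datasets and
# experiment outputs) at the volume declared in VolumeMount above.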
[[Env]]
Name = "H2O_LLM_STUDIO_WORKDIR"
Value = "/home/llmstudio/mount"
[[Env]]
Name = "HOME"
Value = "/home/llmstudio"
[[Env]]
Name = "H2O_LLM_STUDIO_ENABLE_HEAP"
Value = "True"
[[Env]]
Name = "H2O_WAVE_PRIVATE_DIR"
Value = "/download/@/home/llmstudio/mount/output/download"
[[Env]]
Name = "HF_HUB_ENABLE_HF_TRANSFER"
Value = "0"