# docker-compose.yml
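# Three-stage BigDL Friesian Wide & Deep (wnd) training pipeline:
# csv-to-parquet -> preprocessing -> friesian-training, chained with
# depends_on / service_completed_successfully so each stage starts only after
# the previous one has exited cleanly.
#
# All three services build from Dockerfile.friesian-training and read these
# variables from the environment (or an .env file next to this compose file):
#   FINAL_IMAGE_NAME - tag prefix for the built training image
#   DATASET_DIR      - host directory with the input data, mounted at /dataset
#   MODEL_OUTPUT     - host directory for the trained model, mounted at /model
#   http_proxy / https_proxy / no_proxy - optional proxy settings
#
# The working_dir /workspace/BigDL/python/friesian/example/wnd suggests the
# BigDL repository is expected to be checked out in the directory this file is
# run from (it is mounted as /workspace via $PWD).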
services:
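  # Stage 1: convert the raw CSV day file (day_0.csv, presumably a Criteo-style
  # day file as used by the Friesian wnd example) into Parquet with
  # csv_to_parquet.py so the preprocessing stage can read it from
  # /dataset/data-parquet.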
  csv-to-parquet:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      dockerfile: Dockerfile.friesian-training
    command: conda run -n bigdl --no-capture-output python3 csv_to_parquet.py --input /dataset/data-csv/day_0.csv --output /dataset/data-parquet/day_0.parquet
    environment:
      - DATASET_DIR=${DATASET_DIR}
      - MODEL_OUTPUT=${MODEL_OUTPUT}
      - http_proxy=${http_proxy}
      - https_proxy=${https_proxy}
      - no_proxy=${no_proxy}
    image: ${FINAL_IMAGE_NAME}:training-ubuntu-20.04
    privileged: true
    volumes:
      - ${DATASET_DIR}:/dataset
      - ${MODEL_OUTPUT}:/model
      - $PWD:/workspace
    working_dir: /workspace/BigDL/python/friesian/example/wnd
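  # Stage 2: run wnd_preprocessing.py over the Parquet data (--days 0-0, i.e.
  # day_0 only), capping rare categorical values with --frequency_limit 15 and
  # hashing what appear to be two crossed features into 10,000 buckets each
  # (--cross_sizes 10000,10000). The executor_cores/executor_memory flags size
  # the executors; adjust them to the machine running the job.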
  preprocessing:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      dockerfile: Dockerfile.friesian-training
    command: conda run -n bigdl --no-capture-output python wnd_preprocessing.py --executor_cores 36 --executor_memory 50g --days 0-0 --input_folder /dataset/data-parquet --output_folder /dataset/data-processed --frequency_limit 15 --cross_sizes 10000,10000
    depends_on:
      csv-to-parquet:
        condition: service_completed_successfully
    environment:
      - DATASET_DIR=${DATASET_DIR}
      - MODEL_OUTPUT=${MODEL_OUTPUT}
      - http_proxy=${http_proxy}
      - https_proxy=${https_proxy}
      - no_proxy=${no_proxy}
    image: ${FINAL_IMAGE_NAME}:training-ubuntu-20.04
    privileged: true
    volumes:
      - ${DATASET_DIR}:/dataset
      - ${MODEL_OUTPUT}:/model
      - $PWD:/workspace
    working_dir: /workspace/BigDL/python/friesian/example/wnd
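  # Stage 3: run wnd_train.py on the processed dataset in /dataset/data-processed
  # and write the trained model to /model (the host directory in MODEL_OUTPUT).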
  friesian-training:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      dockerfile: Dockerfile.friesian-training
    command: conda run -n bigdl --no-capture-output python wnd_train.py --executor_cores 36 --executor_memory 50g --data_dir /dataset/data-processed --model_dir /model
    depends_on:
      preprocessing:
        condition: service_completed_successfully
    environment:
      - DATASET_DIR=${DATASET_DIR}
      - MODEL_OUTPUT=${MODEL_OUTPUT}
      - http_proxy=${http_proxy}
      - https_proxy=${https_proxy}
      - no_proxy=${no_proxy}
    image: ${FINAL_IMAGE_NAME}:training-ubuntu-20.04
    privileged: true
    volumes:
      - ${DATASET_DIR}:/dataset
      - ${MODEL_OUTPUT}:/model
      - $PWD:/workspace
    working_dir: /workspace/BigDL/python/friesian/example/wnd
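
# A minimal way to run the full pipeline (a sketch, assuming Docker Compose v2
# and an .env file next to this compose file; the values below are placeholders
# to adjust for your environment):
#
#   # .env
#   FINAL_IMAGE_NAME=friesian
#   DATASET_DIR=/path/to/dataset
#   MODEL_OUTPUT=/path/to/model-output
#
#   docker compose up friesian-training
#
# Because of the depends_on chain, bringing up friesian-training builds the
# image if needed and runs csv-to-parquet and preprocessing to completion
# before training starts.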