-
Notifications
You must be signed in to change notification settings - Fork 0
/
docker-compose.yml
101 lines (99 loc) · 4.09 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
# docker-compose.yml — Spark data-lake stack for the TFM project.
#
# Topology:
#   - 1 Spark master + 2 workers (Bitnami Spark 3.5)
#   - 3 one-shot spark-submit jobs (Kafka ingest, bronze->silver, historical transform)
#   - NiFi (ingestion UI), Zookeeper + Kafka (messaging), Jupyter (exploration)
#
# NOTE(review): the `version` key is obsolete and ignored by Compose v2+,
# kept for backward compatibility with older docker-compose binaries.
# NOTE(review): host paths are Windows-specific (D:/...); adjust per machine.
# NOTE(review): the target dir is mounted at /home and the source dir at
# /home/datalake (nested bind mounts) — the jar is expected at
# /home/TFMDataLake-1.0-SNAPSHOT.jar by the spark-submit commands below.
version: '2'
services:
  # Spark master — workers and jobs reach it at spark://spark:7077.
  spark:
    image: bitnami/spark:3.5
    hostname: spark
    container_name: spark_master
    environment:
      - SPARK_MODE=master
      # Security features disabled: local development only.
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    ports:
      - '8080:8080'  # Spark master web UI
    volumes:
      - D:/Archivos_uni/TFM/TFMDataLake/src/main/scala/org/tfmupm:/home/datalake
      - D:/Archivos_uni/TFM/TFMDataLake/target:/home

  # Worker 1 — 1 core / 1 GB, registers with the master above.
  spark-worker-1:
    image: bitnami/spark:3.5
    container_name: spark_worker_1
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://spark:7077
      - SPARK_WORKER_MEMORY=1G
      - SPARK_WORKER_CORES=1
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    volumes:
      - D:/Archivos_uni/TFM/TFMDataLake/src/main/scala/org/tfmupm:/home/datalake
      - D:/Archivos_uni/TFM/TFMDataLake/target:/home

  # Worker 2 — identical to worker 1.
  spark-worker-2:
    image: bitnami/spark:3.5
    container_name: spark_worker_2
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://spark:7077
      - SPARK_WORKER_MEMORY=1G
      - SPARK_WORKER_CORES=1
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    volumes:
      - D:/Archivos_uni/TFM/TFMDataLake/src/main/scala/org/tfmupm:/home/datalake
      - D:/Archivos_uni/TFM/TFMDataLake/target:/home

  # One-shot job: reads from Kafka and writes Delta tables (bronze layer).
  spark-submit-kafkareaderwriter:
    image: bitnami/spark:3.5
    command: /opt/bitnami/spark/bin/spark-submit --class org.tfmupm.KafkaReaderWriterDocker --packages io.delta:delta-spark_2.12:3.1.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.0 --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog" /home/TFMDataLake-1.0-SNAPSHOT.jar
    volumes:
      - D:/Archivos_uni/TFM/TFMDataLake/src/main/scala/org/tfmupm:/home/datalake
      - D:/Archivos_uni/TFM/TFMDataLake/target:/home

  # One-shot job: promotes bronze Delta data to the silver layer.
  # Starts after the Kafka ingest job container has been started
  # (depends_on orders startup only; it does not wait for completion).
  spark-submit-bronzetosilver:
    image: bitnami/spark:3.5
    command: /opt/bitnami/spark/bin/spark-submit --class org.tfmupm.BronzeToSilverDocker --packages io.delta:delta-spark_2.12:3.1.0 --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog" /home/TFMDataLake-1.0-SNAPSHOT.jar
    volumes:
      - D:/Archivos_uni/TFM/TFMDataLake/src/main/scala/org/tfmupm:/home/datalake
      - D:/Archivos_uni/TFM/TFMDataLake/target:/home
    depends_on:
      - spark-submit-kafkareaderwriter

  # One-shot job: transforms the historical database dump.
  spark-submit-historicaldbtransformer:
    image: bitnami/spark:3.5
    command: /opt/bitnami/spark/bin/spark-submit --class org.tfmupm.SparkReadDocker --packages io.delta:delta-spark_2.12:3.1.0 --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog" /home/TFMDataLake-1.0-SNAPSHOT.jar
    volumes:
      - D:/Archivos_uni/TFM/TFMDataLake/src/main/scala/org/tfmupm:/home/datalake
      - D:/Archivos_uni/TFM/TFMDataLake/target:/home

  # Apache NiFi — data-ingestion UI on 8443; 5050 exposed for a NiFi listener.
  nifi:
    image: apache/nifi:latest
    environment:
      - NIFI_WEB_HTTP_PORT=8443
    ports:
      - "8443:8443"
      - "5050:5050"

  # Zookeeper — required by the wurstmeister Kafka image below.
  zookeeper:
    image: bitnami/zookeeper:latest
    environment:
      - ALLOW_ANONYMOUS_LOGIN=yes  # dev only: no auth
    ports:
      - '2181:2181'

  # Kafka broker — advertises itself as kafka:9092 (reachable from other
  # containers on the compose network, NOT from the host by that name).
  kafka:
    image: wurstmeister/kafka:2.12-2.3.0
    ports:
      - "9092:9092"
    environment:
      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
      - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092
      # FIX(review): removed stray space after '=' — the leading space in the
      # env value breaks the listener URI parse.
      - KAFKA_LISTENERS=PLAINTEXT://kafka:9092
      - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1  # single-broker setup
      # topic:partitions:replication-factor
      - KAFKA_CREATE_TOPICS=nifitopic:1:1,nificontinuous:1:1
    depends_on:
      - zookeeper

  # Jupyter — token disabled (dev only); source tree mounted for exploration.
  jupyter:
    image: jupyter/base-notebook:latest
    ports:
      - "8888:8888"
    volumes:
      - D:/Archivos_uni/TFM/TFMDataLake/src/main/scala/org/tfmupm:/home/datalake
    command: start-notebook.sh --NotebookApp.token=''