# docker-compose.yaml
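# Compose stack for a standalone Apache Spark cluster: one master, replicated
# workers, a history server, and two one-shot spark-submit services that run
# the Mata Elang KaspaCore streaming jobs (enrichment and aggregation).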
version: "3.4"
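
# The x-* blocks below are Compose extension fields holding YAML anchors; each
# service merges them in with a "<<: *anchor" merge key, so shared environment
# variables and config-file mounts are declared once.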
x-spark-default-env: &spark-default-env
  SPARK_NO_DAEMONIZE: "true"
  SPARK_SCALA_VERSION: "2.13"
x-spark-default: &spark-default
  env_file: .env
  volumes:
    - ./conf/app.properties:/opt/spark/conf/app.properties
    - ./conf/spark-defaults.conf:/opt/spark/conf/spark-defaults.conf
    - ./conf/log4j2.properties:/opt/spark/conf/log4j2.properties
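
# The .env file mounted via x-spark-default above supplies values not set
# inline; at minimum the submit services below reference SPARK_EVENTLOG_DIR
# and SPARK_APP_JAR_PATH. A sketch with illustrative values (hypothetical,
# not taken from this repo):
#   SPARK_APP_JAR_PATH=/opt/spark/jars/kaspacore.jar
#   SPARK_EVENTLOG_DIR=/opt/spark/events
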
x-spark-worker-default-env: &spark-worker-default-env
  <<: *spark-default-env
  SPARK_MASTER: spark://spark-master:7077
  SPARK_WORKER_DIR: /opt/spark/work-dir
x-spark-submit-default-env: &spark-submit-default-env
  <<: *spark-default-env
  SPARK_MASTER_HOST: spark-master
  SPARK_MASTER_PORT: 7077
  DEPLOY_MODE: cluster
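
# DEPLOY_MODE=cluster (above) makes spark-submit launch each job's driver on a
# worker, so the short-lived submit containers can exit once the job is handed
# off to the cluster.
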
services:
  spark-master:
    <<: *spark-default
    image: mataelang/spark:3.3.1-scala2.13
    ports:
      - "8080:8080"
    environment:
      <<: *spark-default-env
      SPARK_MASTER_HOST: spark-master
      SPARK_MASTER_PORT: 7077
      SPARK_MASTER_WEBUI_PORT: 8080
      SPARK_DAEMON_MEMORY: 1g
    command: >
      bash -c "/opt/spark/sbin/start-master.sh"
    deploy:
      mode: replicated
      replicas: 1
      resources:
        limits:
          cpus: "1"
          memory: 1G
        reservations:
          cpus: "1"
          memory: 1G
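
  # The master Web UI (SPARK_MASTER_WEBUI_PORT above) is published on the host
  # at http://localhost:8080.
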
  spark-worker:
    <<: *spark-default
    image: mataelang/spark:3.3.1-scala2.13
    depends_on:
      - spark-master
    # ports:
    #   - "8181-8182:8081"
    #   - "4041-4042:4040"
    environment:
      <<: *spark-worker-default-env
      SPARK_WORKER_CORES: 2
      SPARK_WORKER_MEMORY: 4G
    command: >
      bash -c "/opt/spark/sbin/start-worker.sh $$SPARK_MASTER"
    deploy:
      mode: replicated
      replicas: 2
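
  # Two worker replicas by default; with Compose v2 the count can also be
  # overridden at run time, e.g. "docker compose up -d --scale spark-worker=3".
  # Uncommenting the port ranges above exposes the worker UI (8081) and the
  # application UI (4040) for up to two replicas.
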
  spark-historyserver:
    <<: *spark-default
    image: mataelang/spark:3.3.1-scala2.13
    depends_on:
      - spark-master
    ports:
      - target: 18080
        published: 18080
        protocol: tcp
        mode: host
    environment:
      <<: *spark-default-env
      SPARK_DAEMON_MEMORY: 1G
    command: >
      bash -c "/opt/spark/sbin/start-history-server.sh"
    deploy:
      mode: replicated
      replicas: 1
      resources:
        limits:
          cpus: "1"
          memory: 1G
        reservations:
          cpus: "1"
          memory: 1G
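
  # History-server UI at http://localhost:18080 (published in host mode). It
  # reads completed-application event logs from spark.eventLog.dir, which is
  # expected to be set in the mounted conf/spark-defaults.conf or via .env.
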
  spark-submit-enrich:
    <<: *spark-default
    image: mataelang/spark:3.3.1-scala2.13
    restart: "no"
    depends_on:
      - spark-master
      - spark-worker
    working_dir: /opt/spark
    environment:
      <<: *spark-submit-default-env
      TOTAL_EXECUTOR_CORES: 1
      SPARK_DRIVER_MEMORY: 1g
      SPARK_EXECUTOR_CORES: 1
      SPARK_EXECUTOR_MEMORY: 1g
      SPARK_APP_UI: 4040
      SPARK_APP_CLASSNAME: org.mataelang.kaspacore.jobs.SensorEnrichDataStreamJob
      SPARK_APP_NAME: SensorEnrichDataStreamJob
    command: >
      bash -c "/opt/spark/bin/spark-submit \\
        --class $$SPARK_APP_CLASSNAME \\
        --name $$SPARK_APP_NAME \\
        --total-executor-cores $$TOTAL_EXECUTOR_CORES \\
        --conf spark.ui.port=$$SPARK_APP_UI \\
        --conf spark.submit.deployMode=$$DEPLOY_MODE \\
        --conf spark.driver.memory=$$SPARK_DRIVER_MEMORY \\
        --conf spark.executor.cores=$$SPARK_EXECUTOR_CORES \\
        --conf spark.executor.memory=$$SPARK_EXECUTOR_MEMORY \\
        --conf spark.eventLog.dir=$$SPARK_EVENTLOG_DIR \\
        --files conf/app.properties \\
        $$SPARK_APP_JAR_PATH
        "
  spark-submit-aggr:
    <<: *spark-default
    image: mataelang/spark:3.3.1-scala2.13
    restart: "no"
    depends_on:
      - spark-master
      - spark-worker
    working_dir: /opt/spark
    environment:
      <<: *spark-submit-default-env
      TOTAL_EXECUTOR_CORES: 1
      SPARK_DRIVER_MEMORY: 2g
      SPARK_EXECUTOR_CORES: 1
      SPARK_EXECUTOR_MEMORY: 2g
      SPARK_SHUFFLE_PARTITION: 1
      SPARK_APP_UI: 4041
      SPARK_APP_CLASSNAME: org.mataelang.kaspacore.jobs.SensorAggregationStreamJob
      SPARK_APP_NAME: SensorAggregationStreamJob
    command: >
      bash -c "/opt/spark/bin/spark-submit \\
        --class $$SPARK_APP_CLASSNAME \\
        --name $$SPARK_APP_NAME \\
        --total-executor-cores $$TOTAL_EXECUTOR_CORES \\
        --conf spark.ui.port=$$SPARK_APP_UI \\
        --conf spark.submit.deployMode=$$DEPLOY_MODE \\
        --conf spark.driver.memory=$$SPARK_DRIVER_MEMORY \\
        --conf spark.executor.cores=$$SPARK_EXECUTOR_CORES \\
        --conf spark.executor.memory=$$SPARK_EXECUTOR_MEMORY \\
        --conf spark.eventLog.dir=$$SPARK_EVENTLOG_DIR \\
        --conf spark.sql.shuffle.partitions=$$SPARK_SHUFFLE_PARTITION \\
        --conf spark.sql.codegen.aggregate.map.twolevel.enabled=false \\
        --conf spark.sql.streaming.metricsEnabled=true \\
        --files conf/app.properties \\
        $$SPARK_APP_JAR_PATH
        "