This commit strips the Spark configuration spec down to an empty stub, deleting every unit and property definition. First the header and the validation units:

```diff
+---
+# Not used. Kept for compatibility with Dockerfile.
 version: 0.1.0
 spec:
-  units:
-    - unit: &unitPassword
-        name: "password"
-        regex: "^[a-zA-Z]\\w{5,20}$"
-    - unit: &unitDirectory
-        name: "directory"
-        regex: "^(.*)/?([^/]+)$"
-        examples:
-          - "/tmp/xyz"
-    - unit: &unitMemory
-        name: "memory"
-        regex: "(^\\p{N}+)(?:\\s*)((?:b|k|m|g|t|p|kb|mb|gb|tb|pb)\\b$)"
-        examples:
-          - "1024b"
-          - "1024kb"
-          - "500m"
-          - "1g"
-
```
Next, the three worker/daemon sizing properties (note `toVersion` here, matching the camelCase of `fromVersion`; the file had it as `to_version`):

```diff
-properties:
-  - property: &sparkWorkerCores
-      propertyNames:
-        - name: "SPARK_WORKER_CORES"
-          kind:
-            type: "file"
-            file: "spark-env.sh"
-      datatype:
-        type: "integer"
-      roles:
-        - name: "slave"
-          required: false
-      asOfVersion: "0.6.2"
-      description: "Total number of cores to allow Spark jobs to use on the machine (default: all available cores)"
-
-  - property: &sparkWorkerMemory
-      propertyNames:
-        - name: "SPARK_WORKER_MEMORY"
-          kind:
-            type: "file"
-            file: "spark-env.sh"
-      datatype:
-        type: "string"
-        unit: *unitMemory
-      roles:
-        - name: "slave"
-          required: false
-      asOfVersion: "0.6.2"
-      description: "Total amount of memory to allow Spark jobs to use on the machine, e.g. 1000M, 2G (default: total memory minus 1 GB); note that each job's individual memory is configured using SPARK_MEM."
-
-  - property: &sparkDaemonMemory
-      propertyNames:
-        - name: "SPARK_DAEMON_MEMORY"
-          kind:
-            type: "file"
-            file: "spark-env.sh"
-      datatype:
-        type: "string"
-        unit: *unitMemory
-      defaultValues:
-        - fromVersion: "0.6.2"
-          toVersion: "1.4.1"
-          value: "512m"
-        - fromVersion: "1.5.0"
-          value: "1g"
-      roles:
-        - name: "master"
-          required: false
-        - name: "slave"
-          required: false
-        - name: "history-server"
-          required: false
-      asOfVersion: "0.6.2"
-      description: "Memory to allocate to the Spark master and worker daemons themselves (default: 512m/1g)"
-
```
Then the event-log pair for `spark-defaults.conf`:

```diff
-  - property: &sparkEventLogEnabled
-      propertyNames:
-        - name: "spark.eventLog.enabled"
-          kind:
-            type: "file"
-            file: "spark-defaults.conf"
-      datatype:
-        type: "bool"
-      defaultValues:
-        - fromVersion: "1.0.0"
-          value: "false"
-      recommendedValues:
-        - fromVersion: "1.0.0"
-          value: "true"
-      roles:
-        - name: "master"
-          required: false
-        - name: "slave"
-          required: false
-        - name: "history-server"
-          required: false
-      asOfVersion: "1.0.0"
-      description: "Whether to log Spark events, useful for reconstructing the Web UI after the application has finished."
-
-  - property: &sparkEventLogDir
-      propertyNames:
-        - name: "spark.eventLog.dir"
-          kind:
-            type: "file"
-            file: "spark-defaults.conf"
-      datatype:
-        type: "string"
-        unit: *unitDirectory
-      defaultValues:
-        - fromVersion: "1.0.0"
-          value: "/tmp/spark-events"
-      recommendedValues:
-        - fromVersion: "1.0.0"
-          value: "/stackable/log"
-      roles:
-        - name: "master"
-          required: false
-        - name: "slave"
-          required: false
-      asOfVersion: "1.0.0"
-      expandsTo:
-        - property: *sparkEventLogEnabled
-          value: "true"
-      description: "Base directory in which Spark events are logged, if spark.eventLog.enabled is true. Within this base directory, Spark creates a sub-directory for each application, and logs the events specific to the application in this directory. Users may want to set this to a unified location like an HDFS directory so history files can be read by the history server."
-
```
The history-server properties follow:

```diff
-  - property: &sparkHistoryLogDirectory
-      propertyNames:
-        - name: "spark.history.fs.logDirectory"
-          kind:
-            type: "file"
-            file: "spark-defaults.conf"
-      datatype:
-        type: "string"
-        unit: *unitDirectory
-      defaultValues:
-        - fromVersion: "1.1.0"
-          value: "/tmp/spark-events"
-      recommendedValues:
-        - fromVersion: "1.0.0"
-          value: "/stackable/log"
-      roles:
-        - name: "history-server"
-          required: true
-      expandsTo:
-        - property: *sparkEventLogEnabled
-          value: "true"
-      asOfVersion: "1.1.0"
-      description: "For the filesystem history provider, the URL to the directory containing application event logs to load. This can be a local file:// path, an HDFS path hdfs://namenode/shared/spark-logs, or that of an alternative filesystem supported by the Hadoop APIs."
-
-  - property: &sparkHistoryStorePath
-      propertyNames:
-        - name: "spark.history.store.path"
-          kind:
-            type: "file"
-            file: "spark-defaults.conf"
-      datatype:
-        type: "string"
-        unit: *unitDirectory
-      roles:
-        - name: "history-server"
-          required: false
-      asOfVersion: "2.3.0"
-      description: "Local directory where to cache application history data. If set, the history server will store application data on disk instead of keeping it in memory. The data written to disk will be re-used in the event of a history server restart."
-
```
Then the authentication pair (the original description referenced `SPARK_AUTHENTICATE`; the property it actually depends on is `spark.authenticate`):

```diff
-  - property: &sparkAuthenticate
-      propertyNames:
-        - name: "spark.authenticate"
-          kind:
-            type: "file"
-            file: "spark-defaults.conf"
-      datatype:
-        type: "bool"
-      defaultValues:
-        - fromVersion: "1.0.0"
-          value: "false"
-      recommendedValues:
-        - fromVersion: "1.0.0"
-          value: "true"
-      roles:
-        - name: "master"
-          required: false
-        - name: "slave"
-          required: false
-        - name: "history-server"
-          required: false
-      asOfVersion: "1.0.0"
-      description: "Whether Spark authenticates its internal connections."
-
-  - property: &sparkAuthenticateSecret
-      propertyNames:
-        - name: "spark.authenticate.secret"
-          kind:
-            type: "file"
-            file: "spark-defaults.conf"
-      datatype:
-        type: "string"
-        unit: *unitPassword
-      recommendedValues:
-        - fromVersion: "1.0.0"
-          value: "secret"
-      roles:
-        - name: "master"
-          required: false
-        - name: "slave"
-          required: false
-        - name: "history-server"
-          required: false
-      asOfVersion: "1.0.0"
-      expandsTo:
-        - property: *sparkAuthenticate
-          value: "true"
-      description: "The secret key used in the authentication. spark.authenticate must be set to true."
-
```
Next, port retry behaviour and the first environment-variable property:

```diff
-  - property: &sparkPortMaxRetries
-      propertyNames:
-        - name: "spark.port.maxRetries"
-          kind:
-            type: "file"
-            file: "spark-defaults.conf"
-      datatype:
-        type: "integer"
-      defaultValues:
-        - fromVersion: "1.1.1"
-          value: "16"
-      recommendedValues:
-        - fromVersion: "1.1.1"
-          value: "0"
-      roles:
-        - name: "master"
-          required: true
-        - name: "slave"
-          required: true
-        - name: "history-server"
-          required: true
-      asOfVersion: "1.1.1"
-      description: "Maximum number of retries when binding to a port before giving up. When a port is given a specific value (non 0), each subsequent retry will increment the port used in the previous attempt by 1 before retrying. This essentially allows it to try a range of ports from the start port specified to port + maxRetries."
-
-  - property: &sparkNoDaemonize
-      propertyNames:
-        - name: "SPARK_NO_DAEMONIZE"
-          kind:
-            type: "env"
-      datatype:
-        type: "bool"
-      recommendedValues:
-        - fromVersion: "2.0.0"
-          value: "true"
-      roles:
-        - name: "master"
-          required: true
-        - name: "slave"
-          required: true
-        - name: "history-server"
-          required: true
-      asOfVersion: "2.0.0"
-      description: "Run Spark processes in the foreground if true. Useful for systemd (default: false)."
-
```
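`SPARK_NO_DAEMONIZE` has `kind.type: "env"`, so it is exported into the daemon's environment rather than written to a config file. A hypothetical launcher fragment showing the recommended setting; the start script path assumes a standard Spark layout under `$SPARK_HOME`:

```sh
# Hypothetical launcher fragment: keep the master in the foreground,
# as the recommended SPARK_NO_DAEMONIZE=true requests.
export SPARK_NO_DAEMONIZE=true
"$SPARK_HOME/sbin/start-master.sh"
```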
Finally, `SPARK_CONF_DIR` is removed and the stub lists are added:

```diff
-  - property: &sparkConfDir
-      propertyNames:
-        - name: "SPARK_CONF_DIR"
-          kind:
-            type: "env"
-      datatype:
-        type: "string"
-      recommendedValues:
-        - fromVersion: "1.1.1"
-          value: "/stackable/config"
-      roles:
-        - name: "master"
-          required: true
-        - name: "slave"
-          required: true
-        - name: "history-server"
-          required: true
-      asOfVersion: "1.1.1"
-      description: "To specify a configuration directory other than the default “SPARK_HOME/conf”, you can set SPARK_CONF_DIR. Spark will use the configuration files (spark-defaults.conf, spark-env.sh, log4j.properties, etc.) from this directory."
+  units: []
+properties: []
```
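Applied, the whole file reduces to the stub below, kept (per its own comment) only so that references to it, such as from the Dockerfile, continue to work:

```yaml
---
# Not used. Kept for compatibility with Dockerfile.
version: 0.1.0
spec:
  units: []
properties: []
```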