@@ -20,30 +20,34 @@ customConfig:
       type: vector
       version: "2"
   transforms:
+    validEvents:
+      type: filter
+      inputs: [vector]
+      condition: is_null(.errors)

     # SparkHistoryServer spark-history

     filteredSparkHistoryAutomaticLogConfigSparkHistory:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .pod == "spark-history-node-automatic-log-config-0" &&
         .container == "spark-history"
     filteredSparkHistoryAutomaticLogConfigVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .pod == "spark-history-node-automatic-log-config-0" &&
         .container == "vector"
     filteredSparkHistoryCustomLogConfigSparkHistory:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .pod == "spark-history-node-custom-log-config-0" &&
         .container == "spark-history"
     filteredSparkHistoryCustomLogConfigVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .pod == "spark-history-node-custom-log-config-0" &&
         .container == "vector"
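The new `validEvents` transform acts as a single validity gate between the `vector` source and all of the `filtered*` transforms below it: events whose `.errors` field is populated are dropped once, in one place, instead of leaking into every per-container filter. A minimal self-contained sketch of the same pattern (the console sink is illustrative only and not part of this change):

    sources:
      vector:
        # Receive events from Vector agents
        type: vector
        address: 0.0.0.0:6000
        version: "2"
    transforms:
      validEvents:
        type: filter
        inputs: [vector]
        # VRL condition: keep only events without a populated .errors field
        condition: is_null(.errors)
    sinks:
      out:
        # Illustrative sink; the test config fans out into filter
        # transforms and a blackhole instead
        type: console
        inputs: [validEvents]
        encoding:
          codec: json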
@@ -52,42 +56,42 @@ customConfig:

     filteredSparkAutomaticLogConfigDriverSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-automatic-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "spark"
     filteredSparkAutomaticLogConfigDriverJob:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-automatic-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "job"
     filteredSparkAutomaticLogConfigDriverVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-automatic-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "vector"
     filteredSparkAutomaticLogConfigExecutorSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-automatic-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "spark"
     filteredSparkAutomaticLogConfigExecutorJob:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-automatic-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "job"
     filteredSparkAutomaticLogConfigExecutorVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-automatic-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
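A side note on the conditions this diff leaves untouched: VRL rejects unhandled fallible expressions at compile time. `.pod` has no known type and `ends_with` expects a string, so the bang variant `string!(.pod)` asserts the type (raising a runtime error, and dropping the event, if `.pod` is not a string) and thereby makes the call infallible. Roughly:

      # rejected by the VRL compiler: .pod might not be a string
      # condition: ends_with(.pod, "-driver")
      # accepted: string! asserts the type, making the call infallible
      condition: >-
        ends_with(string!(.pod), "-driver")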
@@ -97,42 +101,42 @@ customConfig:

     filteredSparkCustomLogConfigDriverSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-custom-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "spark"
     filteredSparkCustomLogConfigDriverJob:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-custom-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "job"
     filteredSparkCustomLogConfigDriverVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-custom-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "vector"
     filteredSparkCustomLogConfigExecutorSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-custom-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "spark"
     filteredSparkCustomLogConfigExecutorJob:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-custom-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "job"
     filteredSparkCustomLogConfigExecutorVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "spark-custom-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
@@ -142,42 +146,42 @@ customConfig:

     filteredPysparkAutomaticLogConfigDriverSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-automatic-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "spark"
     filteredPysparkAutomaticLogConfigDriverRequirements:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-automatic-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "requirements"
     filteredPysparkAutomaticLogConfigDriverVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-automatic-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "vector"
     filteredPysparkAutomaticLogConfigExecutorSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-automatic-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "spark"
     filteredPysparkAutomaticLogConfigExecutorRequirements:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-automatic-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "requirements"
     filteredPysparkAutomaticLogConfigExecutorVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-automatic-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
@@ -187,42 +191,42 @@ customConfig:

     filteredPysparkCustomLogConfigDriverSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-custom-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "spark"
     filteredPysparkCustomLogConfigDriverRequirements:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-custom-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "requirements"
     filteredPysparkCustomLogConfigDriverVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-custom-log-config" &&
         ends_with(string!(.pod), "-driver") &&
         .container == "vector"
     filteredPysparkCustomLogConfigExecutorSpark:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-custom-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "spark"
     filteredPysparkCustomLogConfigExecutorRequirements:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-custom-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
         .container == "requirements"
     filteredPysparkCustomLogConfigExecutorVector:
       type: filter
-      inputs: [vector]
+      inputs: [validEvents]
       condition: >-
         .cluster == "pyspark-custom-log-config" &&
         ends_with(string!(.pod), "-exec-1") &&
@@ -239,15 +243,16 @@ customConfig:
         is_null(.logger) ||
         is_null(.message)
   sinks:
-    out:
+    test:
       inputs: [filtered*]
+      type: blackhole
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
+    aggregator:
+      inputs: [vector]
       type: vector
       address: {{ lookup('env', 'VECTOR_AGGREGATOR') }}
       buffer:
         # Avoid back pressure from VECTOR_AGGREGATOR. The test should
         # not fail if the aggregator is not available.
         when_full: drop_newest
-{% else %}
-      type: blackhole
 {% endif %}
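For reference, with `VECTOR_AGGREGATOR` set at template time, the sinks block of this hunk renders to roughly the following (the address placeholder stands in for the looked-up value):

  sinks:
    test:
      # Terminate the filtered test streams without side effects
      inputs: [filtered*]
      type: blackhole
    aggregator:
      # Forward the raw source stream to the external aggregator
      inputs: [vector]
      type: vector
      address: <VECTOR_AGGREGATOR>
      buffer:
        # Avoid back pressure from VECTOR_AGGREGATOR. The test should
        # not fail if the aggregator is not available.
        when_full: drop_newest

Without `VECTOR_AGGREGATOR`, only the `test` blackhole sink remains. Before this change, a single `out` sink switched between the two roles via `{% else %}`, so the blackhole and the aggregator forwarding could never be active at the same time.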