Skip to content

Commit e9e4660

Browse files
committed
Bump image version to 3.3.0-stackable0.2.0 (#145)
# Description Fixes stackabletech/docker-images#172 Tests passed on gke `v1.22.12-gke.300`
1 parent 5fe5be4 commit e9e4660

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

44 files changed

+59
-491
lines changed

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,12 @@ All notable changes to this project will be documented in this file.
44

55
## [Unreleased]
66

7+
### Changed
8+
9+
- Bumped image to `3.3.0-stackable0.2.0` in tests and docs ([#145])
10+
11+
[#145]: https://github.com/stackabletech/spark-k8s-operator/pull/145
12+
713
## [0.5.0] - 2022-09-06
814

915
### Added

docs/modules/ROOT/examples/example-encapsulated.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ metadata:
55
name: spark-pi
66
spec:
77
version: "1.0"
8-
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0 # <1>
8+
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0 # <1>
99
mode: cluster
1010
mainClass: org.apache.spark.examples.SparkPi
1111
mainApplicationFile: /stackable/spark/examples/jars/spark-examples_2.12-3.3.0.jar # <2>

docs/modules/ROOT/examples/example-sparkapp-configmap.yaml

Lines changed: 1 addition & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -6,38 +6,29 @@ metadata:
66
namespace: default
77
spec:
88
version: "1.0"
9-
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
9+
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
1010
mode: cluster
1111
mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.1.0.jar # <3>
1212
mainClass: tech.stackable.demo.spark.NYTLCReport
1313
volumes:
14-
- name: job-deps
15-
persistentVolumeClaim:
16-
claimName: pvc-ksv
1714
- name: cm-job-arguments
1815
configMap:
1916
name: cm-job-arguments # <4>
2017
args:
2118
- "--input /arguments/job-args.txt" # <5>
2219
sparkConf:
2320
"spark.hadoop.fs.s3a.aws.credentials.provider": "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
24-
"spark.driver.extraClassPath": "/dependencies/jars/*"
25-
"spark.executor.extraClassPath": "/dependencies/jars/*"
2621
driver:
2722
cores: 1
2823
coreLimit: "1200m"
2924
memory: "512m"
3025
volumeMounts:
31-
- name: job-deps
32-
mountPath: /dependencies
3326
- name: cm-job-arguments # <6>
3427
mountPath: /arguments # <7>
3528
executor:
3629
cores: 1
3730
instances: 3
3831
memory: "512m"
3932
volumeMounts:
40-
- name: job-deps
41-
mountPath: /dependencies
4233
- name: cm-job-arguments # <6>
4334
mountPath: /arguments # <7>

docs/modules/ROOT/examples/example-sparkapp-external-dependencies.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ metadata:
66
namespace: default
77
spec:
88
version: "1.0"
9-
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
9+
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
1010
mode: cluster
1111
mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny_tlc_report.py # <1>
1212
args:

docs/modules/ROOT/examples/example-sparkapp-image.yaml

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ metadata:
77
spec:
88
version: "1.0"
99
image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0 # <1>
10-
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
10+
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
1111
mode: cluster
1212
mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py # <2>
1313
args:
@@ -17,23 +17,11 @@ spec:
1717
- tabulate==0.8.9 # <4>
1818
sparkConf: # <5>
1919
"spark.hadoop.fs.s3a.aws.credentials.provider": "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
20-
"spark.driver.extraClassPath": "/dependencies/jars/*"
21-
"spark.executor.extraClassPath": "/dependencies/jars/*"
22-
volumes:
23-
- name: job-deps # <6>
24-
persistentVolumeClaim:
25-
claimName: pvc-ksv
2620
driver:
2721
cores: 1
2822
coreLimit: "1200m"
2923
memory: "512m"
30-
volumeMounts:
31-
- name: job-deps
32-
mountPath: /dependencies # <7>
3324
executor:
3425
cores: 1
3526
instances: 3
3627
memory: "512m"
37-
volumeMounts:
38-
- name: job-deps
39-
mountPath: /dependencies # <7>

docs/modules/ROOT/examples/example-sparkapp-pvc.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ metadata:
66
namespace: default
77
spec:
88
version: "1.0"
9-
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
9+
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
1010
mode: cluster
1111
mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.0-SNAPSHOT.jar # <1>
1212
mainClass: org.example.App # <2>

docs/modules/ROOT/examples/example-sparkapp-s3-private.yaml

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ metadata:
55
name: example-sparkapp-s3-private
66
spec:
77
version: "1.0"
8-
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
8+
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
99
mode: cluster
1010
mainApplicationFile: s3a://my-bucket/spark-examples_2.12-3.3.0.jar # <1>
1111
mainClass: org.apache.spark.examples.SparkPi # <2>
@@ -23,21 +23,11 @@ spec:
2323
spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider" # <6>
2424
spark.driver.extraClassPath: "/dependencies/jars/hadoop-aws-3.2.0.jar:/dependencies/jars/aws-java-sdk-bundle-1.11.375.jar"
2525
spark.executor.extraClassPath: "/dependencies/jars/hadoop-aws-3.2.0.jar:/dependencies/jars/aws-java-sdk-bundle-1.11.375.jar"
26-
volumes:
27-
- name: spark-pi-deps # <7>
28-
persistentVolumeClaim:
29-
claimName: spark-pi-pvc
3026
driver:
3127
cores: 1
3228
coreLimit: "1200m"
3329
memory: "512m"
34-
volumeMounts:
35-
- name: spark-pi-deps
36-
mountPath: /dependencies # <8>
3730
executor:
3831
cores: 1
3932
instances: 3
4033
memory: "512m"
41-
volumeMounts:
42-
- name: spark-pi-deps
43-
mountPath: /dependencies # <8>

docs/modules/ROOT/pages/usage.adoc

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,6 @@ include::example$example-sparkapp-image.yaml[]
4242
<3> Job argument (external)
4343
<4> List of python job requirements: these will be installed in the pods via `pip`
4444
<5> Spark dependencies: the credentials provider (the user knows what is relevant here) plus dependencies needed to access external resources (in this case, in an S3 store)
45-
<6> the name of the volume mount backed by a `PersistentVolumeClaim` that must be pre-existing
46-
<7> the path on the volume mount: this is referenced in the `sparkConf` section where the extra class path is defined for the driver and executors
4745

4846
=== JVM (Scala): externally located artifact and dataset
4947

@@ -71,8 +69,6 @@ include::example$example-sparkapp-s3-private.yaml[]
7169
<4> Credentials referencing a secretClass (not shown in this example)
7270
<5> Spark dependencies: the credentials provider (the user knows what is relevant here) plus dependencies needed to access external resources...
7371
<6> ...in this case, in an S3 store, accessed with the credentials defined in the secret
74-
<7> the name of the volume mount backed by a `PersistentVolumeClaim` that must be pre-existing
75-
<8> the path on the volume mount: this is referenced in the `sparkConf` section where the extra class path is defined for the driver and executors
7672

7773
=== JVM (Scala): externally located artifact accessed with job arguments provided via configuration map
7874

@@ -174,7 +170,7 @@ Below are listed the CRD fields that can be defined by the user:
174170
|User-supplied image containing spark-job dependencies that will be copied to the specified volume mount
175171

176172
|`spec.sparkImage`
177-
| Spark image which will be deployed to driver and executor pods, which must contain spark environment needed by the job e.g. `docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0`
173+
| Spark image which will be deployed to driver and executor pods, which must contain spark environment needed by the job e.g. `docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0`
178174

179175
|`spec.sparkImagePullPolicy`
180176
| Optional Enum (one of `Always`, `IfNotPresent` or `Never`) that determines the pull policy of the spark job image

docs/modules/getting_started/examples/code/getting_started.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ metadata:
5353
namespace: default
5454
spec:
5555
version: "1.0"
56-
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
56+
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
5757
mode: cluster
5858
mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
5959
driver:

docs/modules/getting_started/examples/code/getting_started.sh.j2

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ metadata:
5353
namespace: default
5454
spec:
5555
version: "1.0"
56-
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
56+
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
5757
mode: cluster
5858
mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
5959
driver:

examples/ny-tlc-report-external-dependencies.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ metadata:
66
namespace: default
77
spec:
88
version: "1.0"
9-
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
9+
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
1010
# Always | IfNotPresent | Never
1111
sparkImagePullPolicy: IfNotPresent
1212
mode: cluster

examples/ny-tlc-report-image.yaml

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ spec:
88
version: "1.0"
99
# everything under /jobs will be copied to /stackable/spark/jobs
1010
image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0
11-
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
11+
sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
1212
sparkImagePullPolicy: IfNotPresent
1313
mode: cluster
1414
mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py
@@ -27,23 +27,11 @@ spec:
2727
accessStyle: Path
2828
sparkConf:
2929
spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
30-
spark.driver.extraClassPath: "/dependencies/jars/*"
31-
spark.executor.extraClassPath: "/dependencies/jars/*"
32-
volumes:
33-
- name: job-deps
34-
persistentVolumeClaim:
35-
claimName: pvc-ksv
3630
driver:
3731
cores: 1
3832
coreLimit: "1200m"
3933
memory: "512m"
40-
volumeMounts:
41-
- name: job-deps
42-
mountPath: /dependencies
4334
executor:
4435
cores: 1
4536
instances: 3
4637
memory: "512m"
47-
volumeMounts:
48-
- name: job-deps
49-
mountPath: /dependencies

examples/ny-tlc-report.yaml

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -11,17 +11,13 @@ apiVersion: spark.stackable.tech/v1alpha1
1111
kind: SparkApplication
1212
metadata:
1313
name: spark-ny-cm
14-
namespace: default
1514
spec:
1615
version: "1.0"
17-
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
16+
sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
1817
mode: cluster
1918
mainApplicationFile: s3a://my-bucket/ny-tlc-report-1.1.0-3.3.0.jar
2019
mainClass: tech.stackable.demo.spark.NYTLCReport
2120
volumes:
22-
- name: job-deps
23-
persistentVolumeClaim:
24-
claimName: pvc-ksv
2521
- name: cm-job-arguments
2622
configMap:
2723
name: cm-job-arguments
@@ -37,23 +33,17 @@ spec:
3733
accessStyle: Path
3834
sparkConf:
3935
spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
40-
spark.driver.extraClassPath: "/dependencies/jars/*"
41-
spark.executor.extraClassPath: "/dependencies/jars/*"
4236
driver:
4337
cores: 1
4438
coreLimit: "1200m"
4539
memory: "512m"
4640
volumeMounts:
47-
- name: job-deps
48-
mountPath: /dependencies
4941
- name: cm-job-arguments
5042
mountPath: /arguments
5143
executor:
5244
cores: 1
5345
instances: 3
5446
memory: "512m"
5547
volumeMounts:
56-
- name: job-deps
57-
mountPath: /dependencies
5848
- name: cm-job-arguments
5949
mountPath: /arguments

tests/templates/kuttl/pyspark-ny-public-s3-image/00-assert.yaml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
---
22
apiVersion: kuttl.dev/v1beta1
33
kind: TestAssert
4-
metadata:
5-
name: minio
64
timeout: 900
75
---
86
apiVersion: apps/v1

tests/templates/kuttl/pyspark-ny-public-s3-image/02-assert.yaml

Lines changed: 0 additions & 13 deletions
This file was deleted.

tests/templates/kuttl/pyspark-ny-public-s3-image/02-deps-volume.yaml

Lines changed: 0 additions & 52 deletions
This file was deleted.

tests/templates/kuttl/pyspark-ny-public-s3-image/10-assert.yaml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
---
22
apiVersion: kuttl.dev/v1beta1
33
kind: TestAssert
4-
metadata:
5-
name: pyspark-ny-public-s3-image
64
timeout: 900
75
---
86
# The Job starting the whole process

tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -26,23 +26,11 @@ spec:
2626
accessStyle: Path
2727
sparkConf:
2828
spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
29-
spark.driver.extraClassPath: "/dependencies/jars/*"
30-
spark.executor.extraClassPath: "/dependencies/jars/*"
31-
volumes:
32-
- name: job-deps
33-
persistentVolumeClaim:
34-
claimName: pyspark-ny-pvc
3529
driver:
3630
cores: 1
3731
coreLimit: "1200m"
3832
memory: "512m"
39-
volumeMounts:
40-
- name: job-deps
41-
mountPath: /dependencies/jars
4233
executor:
4334
cores: 1
4435
instances: 3
4536
memory: "512m"
46-
volumeMounts:
47-
- name: job-deps
48-
mountPath: /dependencies/jars

tests/templates/kuttl/pyspark-ny-public-s3/00-assert.yaml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
---
22
apiVersion: kuttl.dev/v1beta1
33
kind: TestAssert
4-
metadata:
5-
name: minio
64
timeout: 900
75
---
86
apiVersion: apps/v1

0 commit comments

Comments
 (0)