diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6aa44079..4ab17f12 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,12 @@ All notable changes to this project will be documented in this file.
 
 ## [Unreleased]
 
+### Changed
+
+- Bumped image to `3.3.0-stackable0.2.0` in tests and docs ([#145])
+
+[#145]: https://github.com/stackabletech/spark-k8s-operator/pull/145
+
 ## [0.5.0] - 2022-09-06
 
 ### Added
diff --git a/docs/modules/ROOT/examples/example-encapsulated.yaml b/docs/modules/ROOT/examples/example-encapsulated.yaml
index aa18e940..5e39020b 100644
--- a/docs/modules/ROOT/examples/example-encapsulated.yaml
+++ b/docs/modules/ROOT/examples/example-encapsulated.yaml
@@ -5,7 +5,7 @@ metadata:
   name: spark-pi
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0 # <1>
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0 # <1>
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
   mainApplicationFile: /stackable/spark/examples/jars/spark-examples_2.12-3.3.0.jar # <2>
diff --git a/docs/modules/ROOT/examples/example-sparkapp-configmap.yaml b/docs/modules/ROOT/examples/example-sparkapp-configmap.yaml
index 6f7cbdaa..24652fa0 100644
--- a/docs/modules/ROOT/examples/example-sparkapp-configmap.yaml
+++ b/docs/modules/ROOT/examples/example-sparkapp-configmap.yaml
@@ -6,14 +6,11 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.1.0.jar # <3>
   mainClass: tech.stackable.demo.spark.NYTLCReport
   volumes:
-    - name: job-deps
-      persistentVolumeClaim:
-        claimName: pvc-ksv
     - name: cm-job-arguments
       configMap:
         name: cm-job-arguments # <4>
@@ -21,15 +18,11 @@ spec:
     - "--input /arguments/job-args.txt" # <5>
   sparkConf:
     "spark.hadoop.fs.s3a.aws.credentials.provider": "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    "spark.driver.extraClassPath": "/dependencies/jars/*"
-    "spark.executor.extraClassPath": "/dependencies/jars/*"
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
     volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies
       - name: cm-job-arguments # <6>
         mountPath: /arguments # <7>
   executor:
@@ -37,7 +30,5 @@ spec:
     instances: 3
     memory: "512m"
     volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies
       - name: cm-job-arguments # <6>
         mountPath: /arguments # <7>
diff --git a/docs/modules/ROOT/examples/example-sparkapp-external-dependencies.yaml b/docs/modules/ROOT/examples/example-sparkapp-external-dependencies.yaml
index 4898571e..57952a04 100644
--- a/docs/modules/ROOT/examples/example-sparkapp-external-dependencies.yaml
+++ b/docs/modules/ROOT/examples/example-sparkapp-external-dependencies.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny_tlc_report.py # <1>
   args:
diff --git a/docs/modules/ROOT/examples/example-sparkapp-image.yaml b/docs/modules/ROOT/examples/example-sparkapp-image.yaml
index 8985a5a6..73bc79ab 100644
--- a/docs/modules/ROOT/examples/example-sparkapp-image.yaml
+++ b/docs/modules/ROOT/examples/example-sparkapp-image.yaml
@@ -7,7 +7,7 @@ metadata:
 spec:
   version: "1.0"
   image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0 # <1>
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py # <2>
   args:
@@ -17,23 +17,11 @@ spec:
     - tabulate==0.8.9 # <4>
   sparkConf: # <5>
     "spark.hadoop.fs.s3a.aws.credentials.provider": "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    "spark.driver.extraClassPath": "/dependencies/jars/*"
-    "spark.executor.extraClassPath": "/dependencies/jars/*"
-  volumes:
-    - name: job-deps # <6>
-      persistentVolumeClaim:
-        claimName: pvc-ksv
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies # <7>
   executor:
     cores: 1
     instances: 3
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies # <7>
diff --git a/docs/modules/ROOT/examples/example-sparkapp-pvc.yaml b/docs/modules/ROOT/examples/example-sparkapp-pvc.yaml
index 25559184..ee221561 100644
--- a/docs/modules/ROOT/examples/example-sparkapp-pvc.yaml
+++ b/docs/modules/ROOT/examples/example-sparkapp-pvc.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.0-SNAPSHOT.jar # <1>
   mainClass: org.example.App # <2>
diff --git a/docs/modules/ROOT/examples/example-sparkapp-s3-private.yaml b/docs/modules/ROOT/examples/example-sparkapp-s3-private.yaml
index da0a06f6..d5b7ae5b 100644
--- a/docs/modules/ROOT/examples/example-sparkapp-s3-private.yaml
+++ b/docs/modules/ROOT/examples/example-sparkapp-s3-private.yaml
@@ -5,7 +5,7 @@ metadata:
   name: example-sparkapp-s3-private
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: s3a://my-bucket/spark-examples_2.12-3.3.0.jar # <1>
   mainClass: org.apache.spark.examples.SparkPi # <2>
@@ -23,21 +23,11 @@ spec:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider" # <6>
     spark.driver.extraClassPath: "/dependencies/jars/hadoop-aws-3.2.0.jar:/dependencies/jars/aws-java-sdk-bundle-1.11.375.jar"
     spark.executor.extraClassPath: "/dependencies/jars/hadoop-aws-3.2.0.jar:/dependencies/jars/aws-java-sdk-bundle-1.11.375.jar"
-  volumes:
-    - name: spark-pi-deps # <7>
-      persistentVolumeClaim:
-        claimName: spark-pi-pvc
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    volumeMounts:
-      - name: spark-pi-deps
-        mountPath: /dependencies # <8>
   executor:
     cores: 1
     instances: 3
     memory: "512m"
-    volumeMounts:
-      - name: spark-pi-deps
-        mountPath: /dependencies # <8>
diff --git a/docs/modules/ROOT/pages/usage.adoc b/docs/modules/ROOT/pages/usage.adoc
index fe0b5c6c..fbcf591a 100644
--- a/docs/modules/ROOT/pages/usage.adoc
+++ b/docs/modules/ROOT/pages/usage.adoc
@@ -42,8 +42,6 @@ include::example$example-sparkapp-image.yaml[]
 <3> Job argument (external)
 <4> List of python job requirements: these will be installed in the pods via `pip`
 <5> Spark dependencies: the credentials provider (the user knows what is relevant here) plus dependencies needed to access external resources (in this case, in an S3 store)
-<6> the name of the volume mount backed by a `PersistentVolumeClaim` that must be pre-existing
-<7> the path on the volume mount: this is referenced in the `sparkConf` section where the extra class path is defined for the driver and executors
 
 === JVM (Scala): externally located artifact and dataset
 
@@ -71,8 +69,6 @@ include::example$example-sparkapp-s3-private.yaml[]
 <4> Credentials referencing a secretClass (not shown in this example)
 <5> Spark dependencies: the credentials provider (the user knows what is relevant here) plus dependencies needed to access external resources...
 <6> ...in this case, in an S3 store, accessed with the credentials defined in the secret
-<7> the name of the volume mount backed by a `PersistentVolumeClaim` that must be pre-existing
-<8> the path on the volume mount: this is referenced in the `sparkConf` section where the extra class path is defined for the driver and executors
 
 === JVM (Scala): externally located artifact accessed with job arguments provided via configuration map
 
@@ -174,7 +170,7 @@ Below are listed the CRD fields that can be defined by the user:
 |User-supplied image containing spark-job dependencies that will be copied to the specified volume mount
 
 |`spec.sparkImage`
-| Spark image which will be deployed to driver and executor pods, which must contain spark environment needed by the job e.g. `docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0`
+| Spark image which will be deployed to driver and executor pods, which must contain spark environment needed by the job e.g. `docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0`
 
 |`spec.sparkImagePullPolicy`
 | Optional Enum (one of `Always`, `IfNotPresent` or `Never`) that determines the pull policy of the spark job image
diff --git a/docs/modules/getting_started/examples/code/getting_started.sh b/docs/modules/getting_started/examples/code/getting_started.sh
index 8e263de8..c7dd01b2 100755
--- a/docs/modules/getting_started/examples/code/getting_started.sh
+++ b/docs/modules/getting_started/examples/code/getting_started.sh
@@ -53,7 +53,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
   driver:
diff --git a/docs/modules/getting_started/examples/code/getting_started.sh.j2 b/docs/modules/getting_started/examples/code/getting_started.sh.j2
index db56695d..1647b1c8 100755
--- a/docs/modules/getting_started/examples/code/getting_started.sh.j2
+++ b/docs/modules/getting_started/examples/code/getting_started.sh.j2
@@ -53,7 +53,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
   driver:
diff --git a/examples/ny-tlc-report-external-dependencies.yaml b/examples/ny-tlc-report-external-dependencies.yaml
index 60921759..3271b2c7 100644
--- a/examples/ny-tlc-report-external-dependencies.yaml
+++ b/examples/ny-tlc-report-external-dependencies.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
   # Always | IfNotPresent | Never
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
diff --git a/examples/ny-tlc-report-image.yaml b/examples/ny-tlc-report-image.yaml
index 198631b7..a402f3c3 100644
--- a/examples/ny-tlc-report-image.yaml
+++ b/examples/ny-tlc-report-image.yaml
@@ -8,7 +8,7 @@ spec:
   version: "1.0"
   # everything under /jobs will be copied to /stackable/spark/jobs
   image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.2.0
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py
@@ -27,23 +27,11 @@ spec:
       accessStyle: Path
   sparkConf:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    spark.driver.extraClassPath: "/dependencies/jars/*"
-    spark.executor.extraClassPath: "/dependencies/jars/*"
-  volumes:
-    - name: job-deps
-      persistentVolumeClaim:
-        claimName: pvc-ksv
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies
   executor:
     cores: 1
     instances: 3
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies
diff --git a/examples/ny-tlc-report.yaml b/examples/ny-tlc-report.yaml
index 6d0b7e7d..37ce2eae 100644
--- a/examples/ny-tlc-report.yaml
+++ b/examples/ny-tlc-report.yaml
@@ -11,17 +11,13 @@ apiVersion: spark.stackable.tech/v1alpha1
 kind: SparkApplication
 metadata:
   name: spark-ny-cm
-  namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.1.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
   mode: cluster
   mainApplicationFile: s3a://my-bucket/ny-tlc-report-1.1.0-3.3.0.jar
   mainClass: tech.stackable.demo.spark.NYTLCReport
   volumes:
-    - name: job-deps
-      persistentVolumeClaim:
-        claimName: pvc-ksv
     - name: cm-job-arguments
       configMap:
         name: cm-job-arguments
@@ -37,15 +33,11 @@ spec:
       accessStyle: Path
   sparkConf:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    spark.driver.extraClassPath: "/dependencies/jars/*"
-    spark.executor.extraClassPath: "/dependencies/jars/*"
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
     volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies
       - name: cm-job-arguments
         mountPath: /arguments
   executor:
@@ -53,7 +45,5 @@ spec:
     instances: 3
     memory: "512m"
     volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies
       - name: cm-job-arguments
         mountPath: /arguments
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3-image/00-assert.yaml b/tests/templates/kuttl/pyspark-ny-public-s3-image/00-assert.yaml
index 4a33d6c0..863f6070 100644
--- a/tests/templates/kuttl/pyspark-ny-public-s3-image/00-assert.yaml
+++ b/tests/templates/kuttl/pyspark-ny-public-s3-image/00-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: minio
 timeout: 900
 ---
 apiVersion: apps/v1
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3-image/02-assert.yaml b/tests/templates/kuttl/pyspark-ny-public-s3-image/02-assert.yaml
deleted file mode 100644
index fa3246a3..00000000
--- a/tests/templates/kuttl/pyspark-ny-public-s3-image/02-assert.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-apiVersion: kuttl.dev/v1beta1
-kind: TestAssert
-metadata:
-  name: pyspark-ny-deps-job
-timeout: 900
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: pyspark-ny-deps-job
-status:
-  succeeded: 1
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3-image/02-deps-volume.yaml b/tests/templates/kuttl/pyspark-ny-public-s3-image/02-deps-volume.yaml
deleted file mode 100644
index 97b8d49a..00000000
--- a/tests/templates/kuttl/pyspark-ny-public-s3-image/02-deps-volume.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: pyspark-ny-pvc
-spec:
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 1Gi
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: pyspark-ny-deps-job
-spec:
-  template:
-    spec:
-      nodeSelector:
-        node: "1"
-      restartPolicy: Never
-      securityContext:
-        runAsGroup: 0
-        runAsUser: 1000
-      serviceAccountName: integration-tests-sa
-      volumes:
-        - name: job-deps
-          persistentVolumeClaim:
-            claimName: pyspark-ny-pvc
-      containers:
-        - name: aws-deps
-          image: docker.stackable.tech/stackable/tools:0.2.0-stackable0
-          env:
-            - name: DEST_DIR
-              value: "/dependencies/jars"
-            - name: AWS
-              value: "1.11.1026"
-            - name: HADOOP
-              value: "3.3.3"
-          command:
-            [
-              "bash",
-              "-x",
-              "-o",
-              "pipefail",
-              "-c",
-              "curl -L https://search.maven.org/remotecontent?filepath=org/apache/hadoop/hadoop-aws/${HADOOP}/hadoop-aws-${HADOOP}.jar -o ${DEST_DIR}/hadoop-aws-${HADOOP}.jar && curl -L https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${AWS}/aws-java-sdk-bundle-${AWS}.jar -o ${DEST_DIR}/aws-java-sdk-bundle-${AWS}.jar",
-            ]
-          volumeMounts:
-            - name: job-deps
-              mountPath: /dependencies/jars
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3-image/10-assert.yaml b/tests/templates/kuttl/pyspark-ny-public-s3-image/10-assert.yaml
index 9d20d20e..d3dc9bab 100644
--- a/tests/templates/kuttl/pyspark-ny-public-s3-image/10-assert.yaml
+++ b/tests/templates/kuttl/pyspark-ny-public-s3-image/10-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: pyspark-ny-public-s3-image
 timeout: 900
 ---
 # The Job starting the whole process
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2
index ed35659d..755b3a95 100644
--- a/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2
@@ -26,23 +26,11 @@ spec:
       accessStyle: Path
   sparkConf:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    spark.driver.extraClassPath: "/dependencies/jars/*"
-    spark.executor.extraClassPath: "/dependencies/jars/*"
-  volumes:
-    - name: job-deps
-      persistentVolumeClaim:
-        claimName: pyspark-ny-pvc
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies/jars
   executor:
     cores: 1
     instances: 3
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies/jars
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3/00-assert.yaml b/tests/templates/kuttl/pyspark-ny-public-s3/00-assert.yaml
index 4a33d6c0..863f6070 100644
--- a/tests/templates/kuttl/pyspark-ny-public-s3/00-assert.yaml
+++ b/tests/templates/kuttl/pyspark-ny-public-s3/00-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: minio
 timeout: 900
 ---
 apiVersion: apps/v1
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3/02-assert.yaml b/tests/templates/kuttl/pyspark-ny-public-s3/02-assert.yaml
deleted file mode 100644
index fa3246a3..00000000
--- a/tests/templates/kuttl/pyspark-ny-public-s3/02-assert.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-apiVersion: kuttl.dev/v1beta1
-kind: TestAssert
-metadata:
-  name: pyspark-ny-deps-job
-timeout: 900
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: pyspark-ny-deps-job
-status:
-  succeeded: 1
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3/02-deps-volume.yaml b/tests/templates/kuttl/pyspark-ny-public-s3/02-deps-volume.yaml
deleted file mode 100644
index 97b8d49a..00000000
--- a/tests/templates/kuttl/pyspark-ny-public-s3/02-deps-volume.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: pyspark-ny-pvc
-spec:
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 1Gi
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: pyspark-ny-deps-job
-spec:
-  template:
-    spec:
-      nodeSelector:
-        node: "1"
-      restartPolicy: Never
-      securityContext:
-        runAsGroup: 0
-        runAsUser: 1000
-      serviceAccountName: integration-tests-sa
-      volumes:
-        - name: job-deps
-          persistentVolumeClaim:
-            claimName: pyspark-ny-pvc
-      containers:
-        - name: aws-deps
-          image: docker.stackable.tech/stackable/tools:0.2.0-stackable0
-          env:
-            - name: DEST_DIR
-              value: "/dependencies/jars"
-            - name: AWS
-              value: "1.11.1026"
-            - name: HADOOP
-              value: "3.3.3"
-          command:
-            [
-              "bash",
-              "-x",
-              "-o",
-              "pipefail",
-              "-c",
-              "curl -L https://search.maven.org/remotecontent?filepath=org/apache/hadoop/hadoop-aws/${HADOOP}/hadoop-aws-${HADOOP}.jar -o ${DEST_DIR}/hadoop-aws-${HADOOP}.jar && curl -L https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${AWS}/aws-java-sdk-bundle-${AWS}.jar -o ${DEST_DIR}/aws-java-sdk-bundle-${AWS}.jar",
-            ]
-          volumeMounts:
-            - name: job-deps
-              mountPath: /dependencies/jars
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3/10-assert.yaml b/tests/templates/kuttl/pyspark-ny-public-s3/10-assert.yaml
index 7772be5d..0c6b9b75 100644
--- a/tests/templates/kuttl/pyspark-ny-public-s3/10-assert.yaml
+++ b/tests/templates/kuttl/pyspark-ny-public-s3/10-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: pyspark-ny-public-s3
 timeout: 900
 ---
 # The Job starting the whole process
diff --git a/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2
index c02a3844..023d3dff 100644
--- a/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2
@@ -25,23 +25,11 @@ spec:
       accessStyle: Path
   sparkConf:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    spark.driver.extraClassPath: "/dependencies/jars/*"
-    spark.executor.extraClassPath: "/dependencies/jars/*"
-  volumes:
-    - name: job-deps
-      persistentVolumeClaim:
-        claimName: pyspark-ny-pvc
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies/jars
   executor:
     cores: 1
     instances: 3
     memory: "512m"
-    volumeMounts:
-      - name: job-deps
-        mountPath: /dependencies/jars
diff --git a/tests/templates/kuttl/spark-examples/00-assert.yaml b/tests/templates/kuttl/spark-examples/00-assert.yaml
new file mode 100644
index 00000000..5baf8caa
--- /dev/null
+++ b/tests/templates/kuttl/spark-examples/00-assert.yaml
@@ -0,0 +1,9 @@
+---
+apiVersion: kuttl.dev/v1beta1
+kind: TestAssert
+timeout: 900
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: integration-tests-sa
diff --git a/tests/templates/kuttl/spark-examples/00-serviceaccount.yaml.j2 b/tests/templates/kuttl/spark-examples/00-serviceaccount.yaml.j2
new file mode 100644
index 00000000..9cbf0351
--- /dev/null
+++ b/tests/templates/kuttl/spark-examples/00-serviceaccount.yaml.j2
@@ -0,0 +1,29 @@
+---
+kind: Role
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+  name: use-integration-tests-scc
+rules:
+{% if test_scenario['values']['openshift'] == "true" %}
+  - apiGroups: ["security.openshift.io"]
+    resources: ["securitycontextconstraints"]
+    resourceNames: ["privileged"]
+    verbs: ["use"]
+{% endif %}
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: integration-tests-sa
+---
+kind: RoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+  name: use-integration-tests-scc
+subjects:
+  - kind: ServiceAccount
+    name: integration-tests-sa
+roleRef:
+  kind: Role
+  name: use-integration-tests-scc
+  apiGroup: rbac.authorization.k8s.io
diff --git a/tests/templates/kuttl/spark-examples/10-assert.yaml b/tests/templates/kuttl/spark-examples/10-assert.yaml
index e8c13bc2..4b3b0dc3 100644
--- a/tests/templates/kuttl/spark-examples/10-assert.yaml
+++ b/tests/templates/kuttl/spark-examples/10-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: spark-examples
 timeout: 900
 ---
 # The Job starting the whole process
diff --git a/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
index b40d9898..71848c1e 100644
--- a/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
@@ -14,8 +14,6 @@ spec:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    nodeSelector:
-      node: "1"
   executor:
     cores: 1
     instances: 1
diff --git a/tests/templates/kuttl/spark-ny-public-s3/00-assert.yaml b/tests/templates/kuttl/spark-ny-public-s3/00-assert.yaml
index 4a33d6c0..863f6070 100644
--- a/tests/templates/kuttl/spark-ny-public-s3/00-assert.yaml
+++ b/tests/templates/kuttl/spark-ny-public-s3/00-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: minio
 timeout: 900
 ---
 apiVersion: apps/v1
diff --git a/tests/templates/kuttl/spark-ny-public-s3/02-assert.yaml b/tests/templates/kuttl/spark-ny-public-s3/02-assert.yaml
deleted file mode 100644
index aa359720..00000000
--- a/tests/templates/kuttl/spark-ny-public-s3/02-assert.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-apiVersion: kuttl.dev/v1beta1
-kind: TestAssert
-metadata:
-  name: spark-ny-deps-job
-timeout: 900
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: spark-ny-deps-job
-status:
-  succeeded: 1
diff --git a/tests/templates/kuttl/spark-ny-public-s3/02-deps-volume.yaml b/tests/templates/kuttl/spark-ny-public-s3/02-deps-volume.yaml
deleted file mode 100644
index 9cf648a4..00000000
--- a/tests/templates/kuttl/spark-ny-public-s3/02-deps-volume.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: spark-ny-pvc
-spec:
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 1Gi
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: spark-ny-deps-job
-spec:
-  template:
-    spec:
-      nodeSelector:
-        node: "1"
-      restartPolicy: Never
-      volumes:
-        - name: job-deps
-          persistentVolumeClaim:
-            claimName: spark-ny-pvc
-      securityContext:
-        runAsGroup: 0
-        runAsUser: 1000
-      serviceAccountName: integration-tests-sa
-      containers:
-        - name: aws-deps
-          image: docker.stackable.tech/stackable/tools:0.2.0-stackable0
-          env:
-            - name: DEST_DIR
-              value: "/dependencies/jars"
-            - name: AWS
-              value: "1.11.1026"
-            - name: HADOOP
-              value: "3.3.3"
-          command:
-            [
-              "bash",
-              "-x",
-              "-o",
-              "pipefail",
-              "-c",
-              "curl -L https://search.maven.org/remotecontent?filepath=org/apache/hadoop/hadoop-aws/${HADOOP}/hadoop-aws-${HADOOP}.jar -o ${DEST_DIR}/hadoop-aws-${HADOOP}.jar && curl -L https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${AWS}/aws-java-sdk-bundle-${AWS}.jar -o ${DEST_DIR}/aws-java-sdk-bundle-${AWS}.jar",
-            ]
-          volumeMounts:
-            - name: job-deps
-              mountPath: /dependencies/jars
diff --git a/tests/templates/kuttl/spark-ny-public-s3/10-assert.yaml b/tests/templates/kuttl/spark-ny-public-s3/10-assert.yaml
index 76c719f3..6e532520 100644
--- a/tests/templates/kuttl/spark-ny-public-s3/10-assert.yaml
+++ b/tests/templates/kuttl/spark-ny-public-s3/10-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: spark-ny-cm
 timeout: 900
 ---
 # The Job starting the whole process
diff --git a/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
index 8831e58d..765adada 100644
--- a/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
@@ -19,9 +19,6 @@ spec:
   mainClass: tech.stackable.demo.spark.NYTLCReport
   mainApplicationFile: s3a://my-bucket/ny-tlc-report-1.1.0-{{ test_scenario['values']['spark'] }}.jar
   volumes:
-    - name: spark-ny-deps
-      persistentVolumeClaim:
-        claimName: spark-ny-pvc
     - name: cm-job-arguments
       configMap:
         name: cm-job-arguments
@@ -37,27 +34,17 @@ spec:
       accessStyle: Path
   sparkConf:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    spark.driver.extraClassPath: "/dependencies/jars/*"
-    spark.executor.extraClassPath: "/dependencies/jars/*"
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
     volumeMounts:
-      - name: spark-ny-deps
-        mountPath: /dependencies/jars
       - name: cm-job-arguments
         mountPath: /arguments
-    nodeSelector:
-      node: "1"
   executor:
     cores: 1
     instances: 3
     memory: "512m"
     volumeMounts:
-      - name: spark-ny-deps
-        mountPath: /dependencies/jars
       - name: cm-job-arguments
         mountPath: /arguments
-    nodeSelector:
-      node: "1"
diff --git a/tests/templates/kuttl/spark-pi-private-s3/00-assert.yaml b/tests/templates/kuttl/spark-pi-private-s3/00-assert.yaml
index 4a33d6c0..863f6070 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/00-assert.yaml
+++ b/tests/templates/kuttl/spark-pi-private-s3/00-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: minio
 timeout: 900
 ---
 apiVersion: apps/v1
diff --git a/tests/templates/kuttl/spark-pi-private-s3/00-s3-secret.yaml b/tests/templates/kuttl/spark-pi-private-s3/00-s3-secret.yaml
index 235e1049..0845c0a9 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/00-s3-secret.yaml
+++ b/tests/templates/kuttl/spark-pi-private-s3/00-s3-secret.yaml
@@ -10,7 +10,7 @@ stringData:
   accessKeyId: minioAccessKey
   secretAccessKey: minioSecretKey
   # The following two entries are used by the Bitnami chart for MinIO to
-  # set up creedentials for accessing buckets managed by the MinIO tenant.
+  # set up credentials for accessing buckets managed by the MinIO tenant.
   root-user: minioAccessKey
   root-password: minioSecretKey
 ---
diff --git a/tests/templates/kuttl/spark-pi-private-s3/02-assert.yaml b/tests/templates/kuttl/spark-pi-private-s3/02-assert.yaml
deleted file mode 100644
index ec6db489..00000000
--- a/tests/templates/kuttl/spark-pi-private-s3/02-assert.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-apiVersion: kuttl.dev/v1beta1
-kind: TestAssert
-metadata:
-  name: spark-pi-deps-job
-timeout: 900
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: spark-pi-deps-job
-status:
-  succeeded: 1
diff --git a/tests/templates/kuttl/spark-pi-private-s3/02-deps-volume.yaml b/tests/templates/kuttl/spark-pi-private-s3/02-deps-volume.yaml
deleted file mode 100644
index 0b97e947..00000000
--- a/tests/templates/kuttl/spark-pi-private-s3/02-deps-volume.yaml
+++ /dev/null
@@ -1,54 +0,0 @@
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: spark-pi-private-pvc
-spec:
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 1Gi
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: spark-pi-deps-job
-spec:
-  template:
-    spec:
-      nodeSelector:
-        node: "1"
-      restartPolicy: Never
-      volumes:
-        - name: job-deps
-          persistentVolumeClaim:
-            claimName: spark-pi-private-pvc
-      serviceAccountName: integration-tests-sa
-      securityContext:
-        runAsGroup: 0
-        runAsUser: 1000
-      containers:
-        - name: aws-deps
-          image: docker.stackable.tech/stackable/tools:0.2.0-stackable0
-          env:
-            - name: DEST_DIR
-              value: "/dependencies/jars"
-            - name: AWS
-              value: "1.11.1026"
-            - name: HADOOP
-              value: "3.3.3"
-          command:
-            [
-              "bash",
-              "-x",
-              "-o",
-              "pipefail",
-              "-c",
-              "curl -L https://search.maven.org/remotecontent?filepath=org/apache/hadoop/hadoop-aws/${HADOOP}/hadoop-aws-${HADOOP}.jar -o ${DEST_DIR}/hadoop-aws-${HADOOP}.jar && curl -L https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${AWS}/aws-java-sdk-bundle-${AWS}.jar -o ${DEST_DIR}/aws-java-sdk-bundle-${AWS}.jar",
-            ]
-          volumeMounts:
-            - name: job-deps
-              mountPath: /dependencies/jars
-          securityContext:
-            runAsUser: 0
diff --git a/tests/templates/kuttl/spark-pi-private-s3/10-assert.yaml b/tests/templates/kuttl/spark-pi-private-s3/10-assert.yaml
index fda7e6cc..57e548ac 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/10-assert.yaml
+++ b/tests/templates/kuttl/spark-pi-private-s3/10-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: spark-pi-private-s3
 timeout: 900
 ---
 # The Job starting the whole process
diff --git a/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
index e6b701a2..3d744cc5 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
@@ -20,29 +20,13 @@ spec:
       accessStyle: Path
       credentials:
         secretClass: s3-credentials-class
-  volumes:
-    - name: spark-pi-deps
-      persistentVolumeClaim:
-        claimName: spark-pi-private-pvc
   sparkConf:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"
-    spark.driver.extraClassPath: "/dependencies/jars/*"
-    spark.executor.extraClassPath: "/dependencies/jars/*"
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    volumeMounts:
-      - name: spark-pi-deps
-        mountPath: /dependencies/jars
-    nodeSelector:
-      node: "1"
   executor:
     cores: 1
     instances: 1
     memory: "512m"
-    volumeMounts:
-      - name: spark-pi-deps
-        mountPath: /dependencies/jars
-    nodeSelector:
-      node: "1"
diff --git a/tests/templates/kuttl/spark-pi-public-s3/00-assert.yaml b/tests/templates/kuttl/spark-pi-public-s3/00-assert.yaml
index 4a33d6c0..863f6070 100644
--- a/tests/templates/kuttl/spark-pi-public-s3/00-assert.yaml
+++ b/tests/templates/kuttl/spark-pi-public-s3/00-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: minio
 timeout: 900
 ---
 apiVersion: apps/v1
diff --git a/tests/templates/kuttl/spark-pi-public-s3/02-assert.yaml b/tests/templates/kuttl/spark-pi-public-s3/02-assert.yaml
deleted file mode 100644
index ec6db489..00000000
--- a/tests/templates/kuttl/spark-pi-public-s3/02-assert.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-apiVersion: kuttl.dev/v1beta1
-kind: TestAssert
-metadata:
-  name: spark-pi-deps-job
-timeout: 900
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: spark-pi-deps-job
-status:
-  succeeded: 1
diff --git a/tests/templates/kuttl/spark-pi-public-s3/02-deps-volume.yaml b/tests/templates/kuttl/spark-pi-public-s3/02-deps-volume.yaml
deleted file mode 100644
index 15d54839..00000000
--- a/tests/templates/kuttl/spark-pi-public-s3/02-deps-volume.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: spark-pi-public-pvc
-spec:
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 1Gi
----
-apiVersion: batch/v1
-kind: Job
-metadata:
-  name: spark-pi-deps-job
-spec:
-  template:
-    spec:
-      nodeSelector:
-        node: "1"
-      restartPolicy: Never
-      volumes:
-        - name: job-deps
-          persistentVolumeClaim:
-            claimName: spark-pi-public-pvc
-      serviceAccountName: integration-tests-sa
-      securityContext:
-        runAsGroup: 0
-        runAsUser: 1000
-      containers:
-        - name: aws-deps
-          image: docker.stackable.tech/stackable/tools:0.2.0-stackable0
-          env:
-            - name: DEST_DIR
-              value: "/stackable/dependencies/jars"
-            - name: AWS
-              value: "1.11.1026"
-            - name: HADOOP
-              value: "3.3.3"
-          command:
-            [
-              "bash",
-              "-x",
-              "-o",
-              "pipefail",
-              "-c",
-              "curl -L https://search.maven.org/remotecontent?filepath=org/apache/hadoop/hadoop-aws/${HADOOP}/hadoop-aws-${HADOOP}.jar -o ${DEST_DIR}/hadoop-aws-${HADOOP}.jar && curl -L https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${AWS}/aws-java-sdk-bundle-${AWS}.jar -o ${DEST_DIR}/aws-java-sdk-bundle-${AWS}.jar",
-            ]
-          volumeMounts:
-            - name: job-deps
-              mountPath: /stackable/dependencies/jars
diff --git a/tests/templates/kuttl/spark-pi-public-s3/10-assert.yaml b/tests/templates/kuttl/spark-pi-public-s3/10-assert.yaml
index 5c3c55ad..211c8f4d 100644
--- a/tests/templates/kuttl/spark-pi-public-s3/10-assert.yaml
+++ b/tests/templates/kuttl/spark-pi-public-s3/10-assert.yaml
@@ -1,8 +1,6 @@
 ---
 apiVersion: kuttl.dev/v1beta1
 kind: TestAssert
-metadata:
-  name: spark-pi-public-s3
 timeout: 900
 ---
 # The Job starting the whole process
diff --git a/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
index 3b27cbce..884e8fbb 100644
--- a/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
@@ -10,10 +10,6 @@ spec:
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
   mainApplicationFile: s3a://my-bucket/spark-examples_2.12-{{ test_scenario['values']['spark'] }}.jar
-  volumes:
-    - name: spark-pi-deps
-      persistentVolumeClaim:
-        claimName: spark-pi-public-pvc
   s3bucket:
     inline:
       bucketName: my-bucket
@@ -24,23 +20,11 @@ spec:
       accessStyle: Path
   sparkConf:
     spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
-    spark.driver.extraClassPath: "/stackable/dependencies/jars/*"
-    spark.executor.extraClassPath: "/stackable/dependencies/jars/*"
   driver:
     cores: 1
     coreLimit: "1200m"
     memory: "512m"
-    volumeMounts:
-      - name: spark-pi-deps
-        mountPath: /stackable/dependencies/jars
-    nodeSelector:
-      node: "1"
   executor:
     cores: 1
     instances: 1
     memory: "512m"
-    volumeMounts:
-      - name: spark-pi-deps
-        mountPath: /stackable/dependencies/jars
-    nodeSelector:
-      node: "1"
diff --git a/tests/test-definition.yaml b/tests/test-definition.yaml
index 8a0162ad..68cc5047 100644
--- a/tests/test-definition.yaml
+++ b/tests/test-definition.yaml
@@ -13,7 +13,7 @@ dimensions:
       - 3.3.0
   - name: stackable
     values:
-      - 0.1.0
+      - 0.2.0
  - name: ny-tlc-report
    values:
      - 0.1.0
@@ -41,6 +41,7 @@ tests:
    dimensions:
      - spark
      - stackable
+      - openshift
  - name: pyspark-ny-public-s3
    dimensions:
      - spark
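
For reference, a minimal SparkApplication manifest as it stands after this change — a sketch condensed from example-encapsulated.yaml and the driver/executor blocks above (the name and resource values are copied from the examples and are illustrative). Note that the PVC-backed volumes, volumeMounts and extraClassPath entries removed throughout this diff are no longer needed, since the `3.3.0-stackable0.2.0` image already ships the required dependencies:

---
apiVersion: spark.stackable.tech/v1alpha1
kind: SparkApplication
metadata:
  name: spark-pi
spec:
  version: "1.0"
  # bumped image; Spark and its example jars are bundled inside it
  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.2.0
  mode: cluster
  mainClass: org.apache.spark.examples.SparkPi
  mainApplicationFile: /stackable/spark/examples/jars/spark-examples_2.12-3.3.0.jar
  driver:
    cores: 1
    coreLimit: "1200m"
    memory: "512m"
  executor:
    cores: 1
    instances: 1
    memory: "512m"

Applied the usual way, e.g. `kubectl apply -f <manifest>.yaml`, as in getting_started.sh above.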