diff --git a/CHANGELOG.md b/CHANGELOG.md
index 49c2d264..d495f6f5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ All notable changes to this project will be documented in this file.
 ### Added
 
 - Generate OLM bundle for Release 23.4.0 ([#238]).
+- Add support for Spark 3.4.0 ([#243]).
 
 ### Changed
 
@@ -15,10 +16,15 @@ All notable changes to this project will be documented in this file.
 - Use testing-tools 0.2.0 ([#236])
 - Run as root group ([#241]).
 
+### Fixed
+
+- Fix quoting issues when spark config values contain spaces ([#243]).
+
 [#235]: https://github.com/stackabletech/spark-k8s-operator/pull/235
 [#236]: https://github.com/stackabletech/spark-k8s-operator/pull/236
 [#238]: https://github.com/stackabletech/spark-k8s-operator/pull/238
 [#241]: https://github.com/stackabletech/spark-k8s-operator/pull/241
+[#243]: https://github.com/stackabletech/spark-k8s-operator/pull/243
 
 ## [23.4.0] - 2023-04-17
 
diff --git a/deploy/helm/spark-k8s-operator/templates/roles.yaml b/deploy/helm/spark-k8s-operator/templates/roles.yaml
index 934b920e..42daa6a4 100644
--- a/deploy/helm/spark-k8s-operator/templates/roles.yaml
+++ b/deploy/helm/spark-k8s-operator/templates/roles.yaml
@@ -29,6 +29,7 @@ rules:
     verbs:
       - create
       - delete
+      - deletecollection
       - get
       - list
       - patch
diff --git a/docs/modules/spark-k8s/examples/example-encapsulated.yaml b/docs/modules/spark-k8s/examples/example-encapsulated.yaml
index 57e28902..1aa0305b 100644
--- a/docs/modules/spark-k8s/examples/example-encapsulated.yaml
+++ b/docs/modules/spark-k8s/examples/example-encapsulated.yaml
@@ -5,9 +5,9 @@ metadata:
   name: spark-pi
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.3.0 # <1>
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev # <1>
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: /stackable/spark/examples/jars/spark-examples_2.12-3.3.0.jar # <2>
+  mainApplicationFile: /stackable/spark/examples/jars/spark-examples.jar # <2>
   executor:
     instances: 3
diff --git a/docs/modules/spark-k8s/examples/example-history-app.yaml b/docs/modules/spark-k8s/examples/example-history-app.yaml
index aa36dd4a..e54c3824 100644
--- a/docs/modules/spark-k8s/examples/example-history-app.yaml
+++ b/docs/modules/spark-k8s/examples/example-history-app.yaml
@@ -5,11 +5,11 @@ metadata:
   name: spark-pi-s3-1
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: s3a://my-bucket/spark-examples_2.12-3.3.0.jar
+  mainApplicationFile: s3a://my-bucket/spark-examples.jar
   s3connection: # <1>
     inline:
       host: test-minio
diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml
index 2c8b12f0..262dcb14 100644
--- a/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml
+++ b/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
   mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.1.0.jar # <3>
   mainClass: tech.stackable.demo.spark.NYTLCReport
diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml
index b63df83c..fcdd502d 100644
--- a/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml
+++ b/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
   mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny_tlc_report.py # <1>
   args:
diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml
index d4e5cfea..d88af990 100644
--- a/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml
+++ b/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml
@@ -7,7 +7,7 @@ metadata:
 spec:
   version: "1.0"
   image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0 # <1>
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
   mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py # <2>
   args:
diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml
index a4e8ca60..2703b440 100644
--- a/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml
+++ b/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
   mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.0-SNAPSHOT.jar # <1>
   mainClass: org.example.App # <2>
diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml
index d1152bb3..174dbf31 100644
--- a/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml
+++ b/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml
@@ -5,9 +5,9 @@ metadata:
   name: example-sparkapp-s3-private
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
-  mainApplicationFile: s3a://my-bucket/spark-examples_2.12-3.3.0.jar # <1>
+  mainApplicationFile: s3a://my-bucket/spark-examples.jar # <1>
   mainClass: org.apache.spark.examples.SparkPi # <2>
   s3connection: # <3>
     inline:
diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml
index f2dbac68..72007aa0 100644
--- a/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml
+++ b/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
   mainApplicationFile: local:///stackable/spark/examples/src/main/python/streaming/hdfs_wordcount.py
   args:
diff --git a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh
index 4dfe59b3..4a79f79a 100755
--- a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh
+++ b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh
@@ -58,7 +58,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
   mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
   driver:
diff --git a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2 b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2
index ad4ca29a..426a76e1 100755
--- a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2
+++ b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2
@@ -58,7 +58,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-0.0.0-dev
   mode: cluster
   mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py
   driver:
diff --git a/docs/modules/spark-k8s/partials/supported-versions.adoc b/docs/modules/spark-k8s/partials/supported-versions.adoc
index f26a4c8e..fc374a5b 100644
--- a/docs/modules/spark-k8s/partials/supported-versions.adoc
+++ b/docs/modules/spark-k8s/partials/supported-versions.adoc
@@ -5,3 +5,4 @@
 - 3.2.1-hadoop3.2
 - 3.2.1-hadoop3.2-python39
 - 3.3.0-hadoop3
+- 3.4.0-hadoop3
diff --git a/examples/ny-tlc-report-external-dependencies.yaml b/examples/ny-tlc-report-external-dependencies.yaml
index 3f9ee16e..f69e1c9c 100644
--- a/examples/ny-tlc-report-external-dependencies.yaml
+++ b/examples/ny-tlc-report-external-dependencies.yaml
@@ -6,7 +6,7 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev
   # Always | IfNotPresent | Never
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
diff --git a/examples/ny-tlc-report-image.yaml b/examples/ny-tlc-report-image.yaml
index db5d8e10..d5b74d23 100644
--- a/examples/ny-tlc-report-image.yaml
+++ b/examples/ny-tlc-report-image.yaml
@@ -8,7 +8,7 @@ spec:
   version: "1.0"
   # everything under /jobs will be copied to /stackable/spark/jobs
   image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py
diff --git a/examples/ny-tlc-report.yaml b/examples/ny-tlc-report.yaml
index 0d281e1c..8b58e4b4 100644
--- a/examples/ny-tlc-report.yaml
+++ b/examples/ny-tlc-report.yaml
@@ -13,7 +13,7 @@ metadata:
   name: spark-ny-cm
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.3.0
+  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev
   mode: cluster
   mainApplicationFile: s3a://my-bucket/ny-tlc-report-1.1.0-3.3.0.jar
   mainClass: tech.stackable.demo.spark.NYTLCReport
diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs
index d144940f..5cd4c287 100644
--- a/rust/crd/src/lib.rs
+++ b/rust/crd/src/lib.rs
@@ -618,7 +618,7 @@ impl SparkApplication {
         }
         // ...before being added to the command collection
         for (key, value) in submit_conf {
-            submit_cmd.push(format!("--conf {key}={value}"));
+            submit_cmd.push(format!("--conf \"{key}={value}\""));
         }
 
         submit_cmd.extend(
@@ -939,7 +939,7 @@ spec:
   sparkImage: docker.stackable.tech/stackable/spark-k8s:3.2.1-hadoop3.2-python39-aws1.11.375-stackable0.3.0
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/spark-examples_2.12-3.2.1.jar
+  mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/spark-examples.jar
   sparkConf:
     "spark.hadoop.fs.s3a.aws.credentials.provider": "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"
   driver:
@@ -960,7 +960,7 @@ spec:
             spark_application.spec.main_class
         );
         assert_eq!(
-            Some("s3a://stackable-spark-k8s-jars/jobs/spark-examples_2.12-3.2.1.jar".to_string()),
+            Some("s3a://stackable-spark-k8s-jars/jobs/spark-examples.jar".to_string()),
             spark_application.spec.main_application_file
         );
         assert_eq!(
@@ -1113,7 +1113,7 @@ spec:
     - name: myregistrykey
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: local:///stackable/spark/examples/jars/spark-examples_2.12-3.2.1.jar
+  mainApplicationFile: local:///stackable/spark/examples/jars/spark-examples.jar
   sparkConf:
     spark.kubernetes.node.selector.node: "2"
   driver:
diff --git a/rust/operator-binary/src/history_controller.rs b/rust/operator-binary/src/history_controller.rs
index 4af2938e..c7859003 100644
--- a/rust/operator-binary/src/history_controller.rs
+++ b/rust/operator-binary/src/history_controller.rs
@@ -536,9 +536,9 @@ fn command_args(s3logdir: &S3LogDir) -> Vec<String> {
 
     if let Some(secret_dir) = s3logdir.credentials_mount_path() {
         command.extend(vec![
-            format!("export AWS_ACCESS_KEY_ID=$(cat {secret_dir}/{ACCESS_KEY_ID})"),
+            format!("export AWS_ACCESS_KEY_ID=\"$(cat {secret_dir}/{ACCESS_KEY_ID})\""),
             "&&".to_string(),
-            format!("export AWS_SECRET_ACCESS_KEY=$(cat {secret_dir}/{SECRET_ACCESS_KEY})"),
+            format!("export AWS_SECRET_ACCESS_KEY=\"$(cat {secret_dir}/{SECRET_ACCESS_KEY})\""),
             "&&".to_string(),
         ]);
     }
diff --git a/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2 b/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2
index 32a4b650..16053929 100644
--- a/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2
+++ b/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2
@@ -6,12 +6,11 @@ metadata:
 spec:
   version: "1.0"
   sparkImage: docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}
-  sparkImagePullPolicy: IfNotPresent
   image: docker.stackable.tech/stackable/ny-tlc-report:{{ test_scenario['values']['ny-tlc-report'] }}
   vectorAggregatorConfigMapName: spark-vector-aggregator-discovery
   mode: cluster
   mainClass: org.apache.spark.examples.SparkALS
-  mainApplicationFile: local:///stackable/spark/examples/jars/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar
+  mainApplicationFile: local:///stackable/spark/examples/jars/spark-examples.jar
   job:
     logging:
       enableVectorAgent: true
diff --git a/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2 b/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2
index 9e59a166..b3ee4007 100644
--- a/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2
+++ b/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2
@@ -40,12 +40,11 @@ metadata:
 spec:
   version: "1.0"
   sparkImage: docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}
-  sparkImagePullPolicy: IfNotPresent
   image: docker.stackable.tech/stackable/ny-tlc-report:{{ test_scenario['values']['ny-tlc-report'] }}
   vectorAggregatorConfigMapName: spark-vector-aggregator-discovery
   mode: cluster
   mainClass: org.apache.spark.examples.SparkALS
-  mainApplicationFile: local:///stackable/spark/examples/jars/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar
+  mainApplicationFile: local:///stackable/spark/examples/jars/spark-examples.jar
   job:
     logging:
       enableVectorAgent: true
diff --git a/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2 b/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2
index 7dce0c13..939ecb7f 100644
--- a/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2
+++ b/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2
@@ -6,7 +6,6 @@ metadata:
 spec:
   version: "1.0"
   sparkImage: docker.stackable.tech/stackable/pyspark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}
-  sparkImagePullPolicy: IfNotPresent
   vectorAggregatorConfigMapName: spark-vector-aggregator-discovery
   mode: cluster
   mainApplicationFile: local:///stackable/spark/examples/src/main/python/als.py
diff --git a/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2 b/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2
index 2addf6b9..3dcaa696 100644
--- a/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2
+++ b/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2
@@ -40,7 +40,6 @@ metadata:
 spec:
   version: "1.0"
   sparkImage: docker.stackable.tech/stackable/pyspark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}
-  sparkImagePullPolicy: IfNotPresent
   vectorAggregatorConfigMapName: spark-vector-aggregator-discovery
   mode: cluster
   mainApplicationFile: local:///stackable/spark/examples/src/main/python/als.py
diff --git a/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
index a3d97806..3c48ff6c 100644
--- a/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
@@ -12,7 +12,7 @@ spec:
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkALS
-  mainApplicationFile: "local:///stackable/spark/examples/jars/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar"
+  mainApplicationFile: "local:///stackable/spark/examples/jars/spark-examples.jar"
   job:
     logging:
       enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
diff --git a/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2
index 6535f39b..87436a23 100644
--- a/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2
@@ -4,10 +4,9 @@ kind: TestStep
 commands:
   # give minio enough time to start
   - command: sleep 10
-  - command: kubectl cp -n $NAMESPACE spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar minio-client:/tmp
+  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/spark-examples.jar
   - command: kubectl exec -n $NAMESPACE minio-client -- sh -c 'mc alias set test-minio http://test-minio:9000 $$MINIO_SERVER_ACCESS_KEY $$MINIO_SERVER_SECRET_KEY'
   - command: kubectl exec -n $NAMESPACE minio-client -- mc mb test-minio/my-bucket
   - command: kubectl exec -n $NAMESPACE eventlog-minio-client -- sh -c 'mc alias set eventlog-minio http://eventlog-minio:9000 $$MINIO_SERVER_ACCESS_KEY $$MINIO_SERVER_SECRET_KEY'
   - command: kubectl exec -n $NAMESPACE eventlog-minio-client -- mc mb eventlog-minio/spark-logs/eventlogs
-  - script: >-
-      kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar test-minio/my-bucket
+  - command: kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples.jar test-minio/my-bucket
diff --git a/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2
index e144baf2..a8717ff3 100644
--- a/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2
@@ -12,7 +12,7 @@ spec:
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: "s3a://my-bucket/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar"
+  mainApplicationFile: "s3a://my-bucket/spark-examples.jar"
   s3connection:
     reference: spark-data-s3-connection
   logFileDirectory:
diff --git a/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2
index 2f3e2871..ef042659 100644
--- a/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2
@@ -12,7 +12,7 @@ spec:
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: "s3a://my-bucket/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar"
+  mainApplicationFile: "s3a://my-bucket/spark-examples.jar"
   s3connection:
     reference: spark-data-s3-connection
   logFileDirectory:
diff --git a/tests/templates/kuttl/spark-history-server/spark-examples_2.12-3.3.0.jar b/tests/templates/kuttl/spark-history-server/spark-examples_3.3.0.jar
similarity index 100%
rename from tests/templates/kuttl/spark-history-server/spark-examples_2.12-3.3.0.jar
rename to tests/templates/kuttl/spark-history-server/spark-examples_3.3.0.jar
diff --git a/tests/templates/kuttl/spark-history-server/spark-examples_3.4.0.jar b/tests/templates/kuttl/spark-history-server/spark-examples_3.4.0.jar
new file mode 100644
index 00000000..da6bb311
Binary files /dev/null and b/tests/templates/kuttl/spark-history-server/spark-examples_3.4.0.jar differ
diff --git a/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2
index 491f9a6b..b3b042e1 100644
--- a/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2
@@ -4,14 +4,10 @@ kind: TestStep
 commands:
   # give minio enough time to start
   - command: sleep 10
-  - command: kubectl cp -n $NAMESPACE ny-tlc-report-1.1.0-{{ test_scenario['values']['examples'] }}.jar minio-client:/tmp
+  - command: kubectl cp -n $NAMESPACE ny-tlc-report-1.1.0-{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/ny-tlc-report.jar
   - command: kubectl cp -n $NAMESPACE yellow_tripdata_2021-07.csv minio-client:/tmp
   - command: kubectl exec -n $NAMESPACE minio-client -- sh -c 'mc alias set test-minio http://test-minio:9000 $$MINIO_SERVER_ACCESS_KEY $$MINIO_SERVER_SECRET_KEY'
   - command: kubectl exec -n $NAMESPACE minio-client -- mc mb test-minio/my-bucket
   - command: kubectl exec -n $NAMESPACE minio-client -- mc policy set public test-minio/my-bucket
-  - script: >-
-      kubectl exec -n $NAMESPACE minio-client --
-      mc cp /tmp/ny-tlc-report-1.1.0-{{ test_scenario['values']['examples'] }}.jar test-minio/my-bucket
-  - script: >-
-      kubectl exec -n $NAMESPACE minio-client --
-      mc cp /tmp/yellow_tripdata_2021-07.csv test-minio/my-bucket
+  - command: kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/ny-tlc-report.jar test-minio/my-bucket
+  - command: kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/yellow_tripdata_2021-07.csv test-minio/my-bucket
diff --git a/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
index 47e03495..cfc7535f 100644
--- a/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
@@ -20,7 +20,7 @@ spec:
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainClass: tech.stackable.demo.spark.NYTLCReport
-  mainApplicationFile: "s3a://my-bucket/ny-tlc-report-1.1.0-{{ test_scenario['values']['examples'] }}.jar"
+  mainApplicationFile: "s3a://my-bucket/ny-tlc-report.jar"
   volumes:
     - name: cm-job-arguments
       configMap:
diff --git a/tests/templates/kuttl/spark-ny-public-s3/ny-tlc-report-1.1.0-3.4.0.jar b/tests/templates/kuttl/spark-ny-public-s3/ny-tlc-report-1.1.0-3.4.0.jar
new file mode 100644
index 00000000..6466711d
Binary files /dev/null and b/tests/templates/kuttl/spark-ny-public-s3/ny-tlc-report-1.1.0-3.4.0.jar differ
diff --git a/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2
index 565d4e1c..52d7bd3c 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2
@@ -4,8 +4,7 @@ kind: TestStep
 commands:
   # give minio enough time to start
   - command: sleep 10
-  - command: kubectl cp -n $NAMESPACE spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar minio-client:/tmp
+  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/spark-examples.jar
   - command: kubectl exec -n $NAMESPACE minio-client -- sh -c 'mc alias set test-minio http://test-minio:9000 $$MINIO_SERVER_ACCESS_KEY $$MINIO_SERVER_SECRET_KEY'
   - command: kubectl exec -n $NAMESPACE minio-client -- mc mb test-minio/my-bucket
-  - script: >-
-      kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar test-minio/my-bucket
+  - command: kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples.jar test-minio/my-bucket
diff --git a/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
index 807e2123..32cd6028 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
@@ -12,7 +12,7 @@ spec:
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: s3a://my-bucket/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar
+  mainApplicationFile: s3a://my-bucket/spark-examples.jar
   s3connection:
     inline:
       host: test-minio
diff --git a/tests/templates/kuttl/spark-pi-private-s3/spark-examples_2.12-3.3.0.jar b/tests/templates/kuttl/spark-pi-private-s3/spark-examples_3.3.0.jar
similarity index 100%
rename from tests/templates/kuttl/spark-pi-private-s3/spark-examples_2.12-3.3.0.jar
rename to tests/templates/kuttl/spark-pi-private-s3/spark-examples_3.3.0.jar
diff --git a/tests/templates/kuttl/spark-pi-private-s3/spark-examples_3.4.0.jar b/tests/templates/kuttl/spark-pi-private-s3/spark-examples_3.4.0.jar
new file mode 100644
index 00000000..da6bb311
Binary files /dev/null and b/tests/templates/kuttl/spark-pi-private-s3/spark-examples_3.4.0.jar differ
diff --git a/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2
index 94ce5723..e06a76c8 100644
--- a/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2
@@ -4,9 +4,8 @@ kind: TestStep
 commands:
   # give minio enough time to start
   - command: sleep 10
-  - command: kubectl cp -n $NAMESPACE spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar minio-client:/tmp
+  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/spark-examples.jar
   - command: kubectl exec -n $NAMESPACE minio-client -- sh -c 'mc alias set test-minio http://test-minio:9000 $$MINIO_SERVER_ACCESS_KEY $$MINIO_SERVER_SECRET_KEY'
   - command: kubectl exec -n $NAMESPACE minio-client -- mc mb test-minio/my-bucket
   - command: kubectl exec -n $NAMESPACE minio-client -- mc policy set public test-minio/my-bucket
-  - script: >-
-      kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar test-minio/my-bucket
+  - command: kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples.jar test-minio/my-bucket
diff --git a/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
index a16012ff..0565659a 100644
--- a/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
@@ -12,7 +12,7 @@ spec:
   sparkImagePullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: s3a://my-bucket/spark-examples_2.12-{{ test_scenario['values']['examples'] }}.jar
+  mainApplicationFile: s3a://my-bucket/spark-examples.jar
   s3connection:
     inline:
       host: test-minio
diff --git a/tests/templates/kuttl/spark-pi-public-s3/spark-examples_2.12-3.3.0.jar b/tests/templates/kuttl/spark-pi-public-s3/spark-examples_3.3.0.jar
similarity index 100%
rename from tests/templates/kuttl/spark-pi-public-s3/spark-examples_2.12-3.3.0.jar
rename to tests/templates/kuttl/spark-pi-public-s3/spark-examples_3.3.0.jar
diff --git a/tests/templates/kuttl/spark-pi-public-s3/spark-examples_3.4.0.jar b/tests/templates/kuttl/spark-pi-public-s3/spark-examples_3.4.0.jar
new file mode 100644
index 00000000..da6bb311
Binary files /dev/null and b/tests/templates/kuttl/spark-pi-public-s3/spark-examples_3.4.0.jar differ
diff --git a/tests/test-definition.yaml b/tests/test-definition.yaml
index 001761fd..d48703e8 100644
--- a/tests/test-definition.yaml
+++ b/tests/test-definition.yaml
@@ -10,64 +10,46 @@ dimensions:
   - name: spark
     values:
       - 3.3.0-stackable0.0.0-dev
+      - 3.4.0-stackable0.0.0-dev
   - name: ny-tlc-report
     values:
       - 0.1.0
-  - name: examples
-    values:
-      - 3.3.0
 tests:
   - name: spark-history-server
     dimensions:
       - spark
-      - stackable
       - openshift
-      - examples
   - name: spark-pi-private-s3
     dimensions:
       - spark
-      - stackable
       - openshift
-      - examples
  - name: spark-pi-public-s3
    dimensions:
      - spark
-      - stackable
      - openshift
-      - examples
  - name: spark-ny-public-s3
    dimensions:
      - spark
-      - stackable
      - openshift
-      - examples
  - name: spark-examples
    dimensions:
      - spark
-      - stackable
      - openshift
-      - examples
  - name: pyspark-ny-public-s3
    dimensions:
      - spark
-      - stackable
      - openshift
  - name: pyspark-ny-public-s3-image
    dimensions:
      - spark
-      - stackable
      - ny-tlc-report
      - openshift
  - name: resources
    dimensions:
      - spark
-      - stackable
      - openshift
-      - examples
  - name: logging
    dimensions:
      - spark
-      - stackable
      - ny-tlc-report
      - openshift
-      - examples
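As a standalone illustration of the quoting fix recorded in the CHANGELOG and applied in rust/crd/src/lib.rs above: wrapping the whole key=value pair in escaped quotes keeps a conf value containing spaces as a single shell word when the submit command is later run through a shell. The sketch below is not operator code; the helper name conf_args and the example property value are hypothetical, chosen only to mirror the format! call shown in the patch.

// Standalone sketch (assumptions noted above), mirroring the quoting from the patch.
use std::collections::BTreeMap;

// Hypothetical helper: the real operator pushes such strings onto its submit command vector.
fn conf_args(submit_conf: &BTreeMap<String, String>) -> Vec<String> {
    submit_conf
        .iter()
        // Quote the whole pair so a value with spaces is not split into separate words.
        .map(|(key, value)| format!("--conf \"{key}={value}\""))
        .collect()
}

fn main() {
    let mut submit_conf = BTreeMap::new();
    // Illustrative value containing spaces.
    submit_conf.insert(
        "spark.driver.extraJavaOptions".to_string(),
        "-Dhttp.proxyHost=proxy -Dhttp.proxyPort=8080".to_string(),
    );
    for arg in conf_args(&submit_conf) {
        // Prints: --conf "spark.driver.extraJavaOptions=-Dhttp.proxyHost=proxy -Dhttp.proxyPort=8080"
        println!("{arg}");
    }
}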