This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new b005fe9  [SPARK-54997] Use Spark 4.1.1 in docs, examples, tests, and 
default values
b005fe9 is described below

commit b005fe92755771a37877ad030f80117585962a45
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Sat Jan 10 13:00:25 2026 +0900

    [SPARK-54997] Use Spark 4.1.1 in docs, examples, tests, and default values
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use Spark 4.1.1 instead of 4.1.0 in docs, examples, tests, 
and default values.
    
    ### Why are the changes needed?
    
    To use the latest bug-fix version.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, but this is expected.
    
    ### How was this patch tested?
    
    Pass the CIs and manual tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #452 from dongjoon-hyun/SPARK-54997.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 README.md                                              | 18 +++++++++---------
 examples/cluster-on-yunikorn.yaml                      |  2 +-
 examples/cluster-with-hpa-template.yaml                |  2 +-
 examples/cluster-with-hpa.yaml                         |  2 +-
 examples/cluster-with-template.yaml                    |  2 +-
 examples/cluster.yaml                                  |  2 +-
 examples/dfs-read-write.yaml                           |  2 +-
 examples/pi-java17.yaml                                |  2 +-
 examples/pi-on-volcano.yaml                            |  2 +-
 examples/pi-on-yunikorn.yaml                           |  2 +-
 examples/pi-python.yaml                                |  2 +-
 examples/pi-statefulset.yaml                           |  2 +-
 examples/pi-v1alpha1.yaml                              |  2 +-
 examples/pi-v1beta1.yaml                               |  2 +-
 examples/pi-with-driver-timeout.yaml                   |  2 +-
 examples/pi-with-eventlog.yaml                         |  2 +-
 examples/pi-with-one-pod.yaml                          |  2 +-
 examples/pi-with-spark-connect-plugin.yaml             |  2 +-
 examples/pi-with-template.yaml                         |  2 +-
 examples/pi.yaml                                       |  2 +-
 examples/prod-cluster-with-three-workers.yaml          |  2 +-
 examples/qa-cluster-with-one-worker.yaml               |  2 +-
 examples/spark-connect-server-with-spark-cluster.yaml  |  2 +-
 examples/spark-connect-server.yaml                     |  2 +-
 examples/spark-history-server.yaml                     |  2 +-
 examples/spark-thrift-server.yaml                      |  2 +-
 examples/sql.yaml                                      |  2 +-
 examples/stream-word-count.yaml                        |  2 +-
 examples/word-count.yaml                               |  2 +-
 .../spark/k8s/operator/SparkClusterResourceSpec.java   |  2 +-
 tests/benchmark/sparkapps.sh                           |  2 +-
 .../spark-example-retain-duration.yaml                 |  2 +-
 tests/e2e/resource-selector/chainsaw-test.yaml         |  4 ++--
 tests/e2e/spark-versions/chainsaw-test.yaml            |  8 ++++----
 .../spark-cluster-example-succeeded.yaml               |  2 +-
 .../e2e/state-transition/spark-example-succeeded.yaml  |  2 +-
 tests/e2e/watched-namespaces/spark-example.yaml        |  2 +-
 37 files changed, 49 insertions(+), 49 deletions(-)

diff --git a/README.md b/README.md
index 83a04e5..44712d8 100644
--- a/README.md
+++ b/README.md
@@ -75,21 +75,21 @@ $ kubectl port-forward prod-master-0 6066 &
 $ ./examples/submit-pi-to-prod.sh
 {
   "action" : "CreateSubmissionResponse",
-  "message" : "Driver successfully submitted as driver-20251219002524-0000",
-  "serverSparkVersion" : "4.1.0",
-  "submissionId" : "driver-20251219002524-0000",
+  "message" : "Driver successfully submitted as driver-20260110030233-0000",
+  "serverSparkVersion" : "4.1.1",
+  "submissionId" : "driver-20260110030233-0000",
   "success" : true
 }
 
-$ curl http://localhost:6066/v1/submissions/status/driver-20251219002524-0000/
+$ curl http://localhost:6066/v1/submissions/status/driver-20260110030233-0000/
 {
   "action" : "SubmissionStatusResponse",
   "driverState" : "FINISHED",
-  "serverSparkVersion" : "4.1.0",
-  "submissionId" : "driver-20251219002524-0000",
+  "serverSparkVersion" : "4.1.1",
+  "submissionId" : "driver-20260110030233-0000",
   "success" : true,
-  "workerHostPort" : "10.1.0.190:46501",
-  "workerId" : "worker-20251219002506-10.1.0.190-46501"
+  "workerHostPort" : "10.1.1.172:44233",
+  "workerId" : "worker-20260110030145-10.1.1.172-44233"
 }
 
 $ kubectl delete sparkcluster prod
@@ -122,7 +122,7 @@ Events:
   Normal  Scheduling         1s    yunikorn  default/pi-on-yunikorn-0-driver 
is queued and waiting for allocation
   Normal  Scheduled          1s    yunikorn  Successfully assigned 
default/pi-on-yunikorn-0-driver to node docker-desktop
   Normal  PodBindSuccessful  1s    yunikorn  Pod 
default/pi-on-yunikorn-0-driver is successfully bound to node docker-desktop
-  Normal  Pulled             0s    kubelet   Container image 
"apache/spark:4.1.0-scala" already present on machine
+  Normal  Pulled             0s    kubelet   Container image 
"apache/spark:4.1.1-scala" already present on machine
   Normal  Created            0s    kubelet   Created container: 
spark-kubernetes-driver
   Normal  Started            0s    kubelet   Started container 
spark-kubernetes-driver
 
diff --git a/examples/cluster-on-yunikorn.yaml 
b/examples/cluster-on-yunikorn.yaml
index 7b0cc7d..f43b6b3 100644
--- a/examples/cluster-on-yunikorn.yaml
+++ b/examples/cluster-on-yunikorn.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-on-yunikorn
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
diff --git a/examples/cluster-with-hpa-template.yaml 
b/examples/cluster-with-hpa-template.yaml
index 8cf6304..b63e2d1 100644
--- a/examples/cluster-with-hpa-template.yaml
+++ b/examples/cluster-with-hpa-template.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-hpa-template
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
diff --git a/examples/cluster-with-hpa.yaml b/examples/cluster-with-hpa.yaml
index 4bb3d48..d165e1a 100644
--- a/examples/cluster-with-hpa.yaml
+++ b/examples/cluster-with-hpa.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-hpa
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
diff --git a/examples/cluster-with-template.yaml 
b/examples/cluster-with-template.yaml
index 1388ec6..a50b43f 100644
--- a/examples/cluster-with-template.yaml
+++ b/examples/cluster-with-template.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-template
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
diff --git a/examples/cluster.yaml b/examples/cluster.yaml
index 830fb4c..ae4a88c 100644
--- a/examples/cluster.yaml
+++ b/examples/cluster.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
diff --git a/examples/dfs-read-write.yaml b/examples/dfs-read-write.yaml
index b3bc209..6c97ebf 100644
--- a/examples/dfs-read-write.yaml
+++ b/examples/dfs-read-write.yaml
@@ -41,4 +41,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-java17.yaml b/examples/pi-java17.yaml
index 9392619..d30186b 100644
--- a/examples/pi-java17.yaml
+++ b/examples/pi-java17.yaml
@@ -28,4 +28,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-on-volcano.yaml b/examples/pi-on-volcano.yaml
index acda3cc..979e065 100644
--- a/examples/pi-on-volcano.yaml
+++ b/examples/pi-on-volcano.yaml
@@ -33,4 +33,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-on-yunikorn.yaml b/examples/pi-on-yunikorn.yaml
index f3d498b..0985d88 100644
--- a/examples/pi-on-yunikorn.yaml
+++ b/examples/pi-on-yunikorn.yaml
@@ -34,4 +34,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-python.yaml b/examples/pi-python.yaml
index e313810..da7df72 100644
--- a/examples/pi-python.yaml
+++ b/examples/pi-python.yaml
@@ -27,4 +27,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-statefulset.yaml b/examples/pi-statefulset.yaml
index 62bef4c..298e6c8 100644
--- a/examples/pi-statefulset.yaml
+++ b/examples/pi-statefulset.yaml
@@ -28,4 +28,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-v1alpha1.yaml b/examples/pi-v1alpha1.yaml
index 6cf25d2..860ecf0 100644
--- a/examples/pi-v1alpha1.yaml
+++ b/examples/pi-v1alpha1.yaml
@@ -28,4 +28,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-v1beta1.yaml b/examples/pi-v1beta1.yaml
index 34c199c..cd70fc6 100644
--- a/examples/pi-v1beta1.yaml
+++ b/examples/pi-v1beta1.yaml
@@ -28,4 +28,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-with-driver-timeout.yaml 
b/examples/pi-with-driver-timeout.yaml
index 6157a5c..a8c59b5 100644
--- a/examples/pi-with-driver-timeout.yaml
+++ b/examples/pi-with-driver-timeout.yaml
@@ -32,4 +32,4 @@ spec:
     resourceRetainPolicy: OnFailure
     ttlAfterStopMillis: 10000
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-with-eventlog.yaml b/examples/pi-with-eventlog.yaml
index b2dda11..965b95e 100644
--- a/examples/pi-with-eventlog.yaml
+++ b/examples/pi-with-eventlog.yaml
@@ -37,4 +37,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-with-one-pod.yaml b/examples/pi-with-one-pod.yaml
index ac55eab..0aa4db1 100644
--- a/examples/pi-with-one-pod.yaml
+++ b/examples/pi-with-one-pod.yaml
@@ -26,4 +26,4 @@ spec:
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-with-spark-connect-plugin.yaml 
b/examples/pi-with-spark-connect-plugin.yaml
index c7ce99d..a28bc70 100644
--- a/examples/pi-with-spark-connect-plugin.yaml
+++ b/examples/pi-with-spark-connect-plugin.yaml
@@ -30,4 +30,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi-with-template.yaml b/examples/pi-with-template.yaml
index a6d4daa..e6a3544 100644
--- a/examples/pi-with-template.yaml
+++ b/examples/pi-with-template.yaml
@@ -38,4 +38,4 @@ spec:
         priorityClassName: system-cluster-critical
         terminationGracePeriodSeconds: 0
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/pi.yaml b/examples/pi.yaml
index 3ff038b..a70e3aa 100644
--- a/examples/pi.yaml
+++ b/examples/pi.yaml
@@ -30,4 +30,4 @@ spec:
     resourceRetainPolicy: OnFailure
     ttlAfterStopMillis: 10000
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/prod-cluster-with-three-workers.yaml 
b/examples/prod-cluster-with-three-workers.yaml
index a3a6531..a4ffa7a 100644
--- a/examples/prod-cluster-with-three-workers.yaml
+++ b/examples/prod-cluster-with-three-workers.yaml
@@ -18,7 +18,7 @@ metadata:
   name: prod
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
diff --git a/examples/qa-cluster-with-one-worker.yaml 
b/examples/qa-cluster-with-one-worker.yaml
index 1ffbe14..b4387d0 100644
--- a/examples/qa-cluster-with-one-worker.yaml
+++ b/examples/qa-cluster-with-one-worker.yaml
@@ -18,7 +18,7 @@ metadata:
   name: qa
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
diff --git a/examples/spark-connect-server-with-spark-cluster.yaml 
b/examples/spark-connect-server-with-spark-cluster.yaml
index d7b41cb..d1bb648 100644
--- a/examples/spark-connect-server-with-spark-cluster.yaml
+++ b/examples/spark-connect-server-with-spark-cluster.yaml
@@ -27,4 +27,4 @@ spec:
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.ui.reverseProxy: "true"
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/spark-connect-server.yaml 
b/examples/spark-connect-server.yaml
index 5ab877d..aa64c12 100644
--- a/examples/spark-connect-server.yaml
+++ b/examples/spark-connect-server.yaml
@@ -31,4 +31,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/spark-history-server.yaml 
b/examples/spark-history-server.yaml
index f56e28b..b3df1ab 100644
--- a/examples/spark-history-server.yaml
+++ b/examples/spark-history-server.yaml
@@ -37,7 +37,7 @@ spec:
     spark.hadoop.fs.s3a.secret.key: "test"
     spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   applicationTolerations:
     restartConfig:
       restartPolicy: Always
diff --git a/examples/spark-thrift-server.yaml 
b/examples/spark-thrift-server.yaml
index 5844c80..c3e3a1f 100644
--- a/examples/spark-thrift-server.yaml
+++ b/examples/spark-thrift-server.yaml
@@ -29,7 +29,7 @@ spec:
     spark.kubernetes.executor.podNamePrefix: "spark-thrift-server"
     spark.scheduler.mode: "FAIR"
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   applicationTolerations:
     restartConfig:
       restartPolicy: Always
diff --git a/examples/sql.yaml b/examples/sql.yaml
index 0fc1e93..e7e4183 100644
--- a/examples/sql.yaml
+++ b/examples/sql.yaml
@@ -27,4 +27,4 @@ spec:
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/stream-word-count.yaml b/examples/stream-word-count.yaml
index 6b2821a..f079237 100644
--- a/examples/stream-word-count.yaml
+++ b/examples/stream-word-count.yaml
@@ -37,4 +37,4 @@ spec:
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git a/examples/word-count.yaml b/examples/word-count.yaml
index 8496d79..b3af71a 100644
--- a/examples/word-count.yaml
+++ b/examples/word-count.yaml
@@ -29,4 +29,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
diff --git 
a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
 
b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
index 012c1e0..8962aaf 100644
--- 
a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
+++ 
b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
@@ -68,7 +68,7 @@ public class SparkClusterResourceSpec {
     String clusterName = cluster.getMetadata().getName();
     String scheduler = conf.get(Config.KUBERNETES_SCHEDULER_NAME().key(), 
"default-scheduler");
     String namespace = conf.get(Config.KUBERNETES_NAMESPACE().key(), 
clusterNamespace);
-    String image = conf.get(Config.CONTAINER_IMAGE().key(), 
"apache/spark:4.1.0");
+    String image = conf.get(Config.CONTAINER_IMAGE().key(), 
"apache/spark:4.1.1");
     ClusterSpec spec = cluster.getSpec();
     String version = spec.getRuntimeVersions().getSparkVersion();
     StringBuilder options = new StringBuilder();
diff --git a/tests/benchmark/sparkapps.sh b/tests/benchmark/sparkapps.sh
index d72b89d..c15dda9 100755
--- a/tests/benchmark/sparkapps.sh
+++ b/tests/benchmark/sparkapps.sh
@@ -46,7 +46,7 @@ spec:
     spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     spark.kubernetes.driver.request.cores: "100m"
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
 ---
 EOF
 done
diff --git 
a/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml 
b/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
index 8a83f7a..be50659 100644
--- a/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
+++ b/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
@@ -32,4 +32,4 @@ spec:
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
   runtimeVersions:
-    sparkVersion: 4.1.0
+    sparkVersion: "4.1.1"
diff --git a/tests/e2e/resource-selector/chainsaw-test.yaml 
b/tests/e2e/resource-selector/chainsaw-test.yaml
index fed366f..902d1b7 100644
--- a/tests/e2e/resource-selector/chainsaw-test.yaml
+++ b/tests/e2e/resource-selector/chainsaw-test.yaml
@@ -27,9 +27,9 @@ spec:
         - name: SPARK_APPLICATION_NAME
           value: "spark-example-resource-selector"
         - name: SPARK_VERSION
-          value: "4.1.0"
+          value: "4.1.1"
         - name: IMAGE
-          value: "apache/spark:4.1.0-scala"
+          value: "apache/spark:4.1.1-scala"
   steps:
     - name: install-spark-application-and-apply-label
       try:
diff --git a/tests/e2e/spark-versions/chainsaw-test.yaml 
b/tests/e2e/spark-versions/chainsaw-test.yaml
index ac29035..0bbd6d2 100644
--- a/tests/e2e/spark-versions/chainsaw-test.yaml
+++ b/tests/e2e/spark-versions/chainsaw-test.yaml
@@ -23,13 +23,13 @@ spec:
   scenarios:
   - bindings:
       - name: "SPARK_VERSION"
-        value: "4.1.0"
+        value: "4.1.1"
       - name: "SCALA_VERSION"
         value: "2.13"
       - name: "JAVA_VERSION"
         value: "17"
       - name: "IMAGE"
-        value: "apache/spark:4.1.0-scala-java17"
+        value: "apache/spark:4.1.1-scala-java17"
   - bindings:
       - name: "SPARK_VERSION"
         value: "3.5.7"
@@ -41,13 +41,13 @@ spec:
         value: 'apache/spark:3.5.7-scala2.12-java17-ubuntu'
   - bindings:
       - name: "SPARK_VERSION"
-        value: "4.1.0"
+        value: "4.1.1"
       - name: "SCALA_VERSION"
         value: "2.13"
       - name: "JAVA_VERSION"
         value: "21"
       - name: "IMAGE"
-        value: 'apache/spark:4.1.0-scala'
+        value: 'apache/spark:4.1.1-scala'
   steps:
     - name: install-spark-application
       try:
diff --git a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml 
b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
index 07a6eed..6541bf4 100644
--- a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
@@ -19,7 +19,7 @@ metadata:
   namespace: default
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0"
+    sparkVersion: "4.1.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
diff --git a/tests/e2e/state-transition/spark-example-succeeded.yaml 
b/tests/e2e/state-transition/spark-example-succeeded.yaml
index 531bc8d..5eec269 100644
--- a/tests/e2e/state-transition/spark-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-example-succeeded.yaml
@@ -28,4 +28,4 @@ spec:
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
   runtimeVersions:
-    sparkVersion: 4.1.0
+    sparkVersion: "4.1.1"
diff --git a/tests/e2e/watched-namespaces/spark-example.yaml 
b/tests/e2e/watched-namespaces/spark-example.yaml
index 49e02ac..13b071c 100644
--- a/tests/e2e/watched-namespaces/spark-example.yaml
+++ b/tests/e2e/watched-namespaces/spark-example.yaml
@@ -28,4 +28,4 @@ spec:
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
   runtimeVersions:
-    sparkVersion: 4.1.0
+    sparkVersion: "4.1.1"


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to