[spark] branch master updated: [SPARK-41373][SQL][ERROR] Rename CAST_WITH_FUN_SUGGESTION to CAST_WITH_FUNC_SUGGESTION
This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git The following commit(s) were added to refs/heads/master by this push: new 811921be3ba [SPARK-41373][SQL][ERROR] Rename CAST_WITH_FUN_SUGGESTION to CAST_WITH_FUNC_SUGGESTION 811921be3ba is described below commit 811921be3bacb2edb1d382257561429a0a604adb Author: Rui Wang AuthorDate: Sun Dec 4 00:44:11 2022 +0300 [SPARK-41373][SQL][ERROR] Rename CAST_WITH_FUN_SUGGESTION to CAST_WITH_FUNC_SUGGESTION ### What changes were proposed in this pull request? Rename CAST_WITH_FUN_SUGGESTION to CAST_WITH_FUNC_SUGGESTION. This is just because `_FUN_SUGGESTION` could have another meaning; `CAST_WITH_FUNC_SUGGESTION` is clearer. I didn't choose to rename it to `CAST_WITH_SUGGESTION` because there is a `CAST_WITH_CONF_SUGGESTION`, so we need to differentiate. ### Why are the changes needed? Better error message name. ### Does this PR introduce _any_ user-facing change? NO ### How was this patch tested? Existing UT. Closes #38892 from amaliujia/improve_error_message. Authored-by: Rui Wang Signed-off-by: Max Gekk --- core/src/main/resources/error/error-classes.json | 2 +- .../org/apache/spark/sql/catalyst/expressions/Cast.scala | 2 +- .../spark/sql/catalyst/expressions/CastSuiteBase.scala | 12 ++-- .../spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 347b9a14862..7d5c272a77f 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -197,7 +197,7 @@ "If you have to cast <srcType> to <targetType>, you can set <config> as <configVal>." ] }, - "CAST_WITH_FUN_SUGGESTION" : { + "CAST_WITH_FUNC_SUGGESTION" : { "message" : [ "cannot cast <srcType> to <targetType>.", "To convert values from <srcType> to <targetType>, you can use the functions <functionNames> instead." 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala index a302298d99c..23152adc0ca 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala @@ -419,7 +419,7 @@ object Cast extends QueryErrorsBase { fallbackConf: Option[(String, String)]): DataTypeMismatch = { def withFunSuggest(names: String*): DataTypeMismatch = { DataTypeMismatch( -errorSubClass = "CAST_WITH_FUN_SUGGESTION", +errorSubClass = "CAST_WITH_FUNC_SUGGESTION", messageParameters = Map( "srcType" -> toSQLType(from), "targetType" -> toSQLType(to), diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala index 6d972a8482a..68b3d5c8446 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala @@ -545,7 +545,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { protected def checkInvalidCastFromNumericType(to: DataType): Unit = { cast(1.toByte, to).checkInputDataTypes() == DataTypeMismatch( -errorSubClass = "CAST_WITH_FUN_SUGGESTION", +errorSubClass = "CAST_WITH_FUNC_SUGGESTION", messageParameters = Map( "srcType" -> toSQLType(Literal(1.toByte).dataType), "targetType" -> toSQLType(to), @@ -554,7 +554,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { ) cast(1.toShort, to).checkInputDataTypes() == DataTypeMismatch( -errorSubClass = "CAST_WITH_FUN_SUGGESTION", +errorSubClass = "CAST_WITH_FUNC_SUGGESTION", messageParameters = Map( "srcType" -> toSQLType(Literal(1.toShort).dataType), "targetType" -> toSQLType(to), @@ -563,7 +563,7 @@ 
abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { ) cast(1, to).checkInputDataTypes() == DataTypeMismatch( -errorSubClass = "CAST_WITH_FUN_SUGGESTION", +errorSubClass = "CAST_WITH_FUNC_SUGGESTION", messageParameters = Map( "srcType" -> toSQLType(Literal(1).dataType), "targetType" -> toSQLType(to), @@ -572,7 +572,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { ) cast(1L, to).checkInputDataTypes() == DataTypeMismatch( -
[spark] branch branch-3.3 updated: [SPARK-41253][K8S][TESTS] Make Spark K8S volcano IT work in Github Action
This is an automated email from the ASF dual-hosted git repository. yikun pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/spark.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new 821997bec37 [SPARK-41253][K8S][TESTS] Make Spark K8S volcano IT work in Github Action 821997bec37 is described below commit 821997bec3703ec52db9b1deb667e11e76296c48 Author: Yikun Jiang AuthorDate: Fri Dec 2 22:44:50 2022 -0800 [SPARK-41253][K8S][TESTS] Make Spark K8S volcano IT work in Github Action ### What changes were proposed in this pull request? This patch makes the Spark K8s Volcano IT runnable in a GitHub Action resource-limited env. It will help downstream communities like Volcano to enable the Spark IT tests in GitHub Action. BTW, there is no plan to enable the Volcano test in the Spark community; this patch only makes the test work but **DOES NOT** enable the Volcano test in Apache Spark GA — it will help downstream testing. - Change the parallel job number from 4 to 2 (only 1 job in each queue) if in the GitHub Action env. - Get the specified `spark.kubernetes.[driver|executor].request.cores` - Set the queue limit according to the specified [driver|executor].request.cores, just like we did in the normal test: https://github.com/apache/spark/commit/883a481e44a1f91ef3fc3aea2838a598cbd6cf0f ### Why are the changes needed? It helps downstream communities who want to use free GitHub Action hosted resources to enable the Spark IT tests in GitHub Action. ### Does this PR introduce _any_ user-facing change? No, test only. ### How was this patch tested? 
- Test on my local env with enough resource (default): ``` $ build/sbt -Pvolcano -Pkubernetes -Pkubernetes-integration-tests -Dtest.include.tags=volcano "kubernetes-integration-tests/test" [info] KubernetesSuite: [info] VolcanoSuite: [info] - Run SparkPi with volcano scheduler (10 seconds, 410 milliseconds) [info] - SPARK-38187: Run SparkPi Jobs with minCPU (25 seconds, 489 milliseconds) [info] - SPARK-38187: Run SparkPi Jobs with minMemory (25 seconds, 518 milliseconds) [info] - SPARK-38188: Run SparkPi jobs with 2 queues (only 1 enabled) (14 seconds, 349 milliseconds) [info] - SPARK-38188: Run SparkPi jobs with 2 queues (all enabled) (23 seconds, 516 milliseconds) [info] - SPARK-38423: Run driver job to validate priority order (16 seconds, 404 milliseconds) [info] YuniKornSuite: [info] Run completed in 2 minutes, 34 seconds. [info] Total number of tests run: 6 [info] Suites: completed 3, aborted 0 [info] Tests: succeeded 6, failed 0, canceled 0, ignored 0, pending 0 [info] All tests passed. 
[success] Total time: 439 s (07:19), completed 2022-12-3 8:58:50 ``` - Test on Github Action with `volcanoMaxConcurrencyJobNum`: https://github.com/Yikun/spark/pull/192 ``` $ build/sbt -Pvolcano -Psparkr -Pkubernetes -Pkubernetes-integration-tests -Dspark.kubernetes.test.driverRequestCores=0.5 -Dspark.kubernetes.test.executorRequestCores=0.2 -Dspark.kubernetes.test.volcanoMaxConcurrencyJobNum=1 -Dtest.include.tags=volcano "kubernetes-integration-tests/test" [info] VolcanoSuite: [info] - Run SparkPi with volcano scheduler (18 seconds, 122 milliseconds) [info] - SPARK-38187: Run SparkPi Jobs with minCPU (53 seconds, 964 milliseconds) [info] - SPARK-38187: Run SparkPi Jobs with minMemory (54 seconds, 523 milliseconds) [info] - SPARK-38188: Run SparkPi jobs with 2 queues (only 1 enabled) (22 seconds, 185 milliseconds) [info] - SPARK-38188: Run SparkPi jobs with 2 queues (all enabled) (33 seconds, 349 milliseconds) [info] - SPARK-38423: Run driver job to validate priority order (32 seconds, 435 milliseconds) [info] YuniKornSuite: [info] Run completed in 4 minutes, 16 seconds. [info] Total number of tests run: 6 [info] Suites: completed 3, aborted 0 [info] Tests: succeeded 6, failed 0, canceled 0, ignored 0, pending 0 [info] All tests passed. [warn] In the last 494 seconds, 7.296 (1.5%) were spent in GC. [Heap: 3.12GB free of 3.83GB, max 3.83GB] Consider increasing the JVM heap using `-Xmx` or try a different collector, e.g. `-XX:+UseG1GC`, for better performance. [success] Total time: 924 s (15:24), completed Dec 3, 2022 12:49:42 AM ``` - CI passed Closes #38789 from Yikun/SPARK-41253. 
Authored-by: Yikun Jiang Signed-off-by: Dongjoon Hyun (cherry picked from commit 72d58d5f8a847bac53cf01b137780c7e4e2664d7) Signed-off-by: Yikun Jiang --- .../kubernetes/integration-tests/README.md | 8 .../volcano/driver-podgroup-template-cpu-2u.yml| 23 -- .../deploy/k8s/integrationtest/TestConstants.scala | 2 + .../k8s/integrationtest/VolcanoTestsSuite.scala| 52 +- 4 files changed, 51 insertions(+), 34 deletions(-) diff
[spark] branch branch-3.3 updated: [SPARK-38921][K8S][TESTS] Use k8s-client to create queue resource in Volcano IT
This is an automated email from the ASF dual-hosted git repository. yikun pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/spark.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new 20cc2b6104e [SPARK-38921][K8S][TESTS] Use k8s-client to create queue resource in Volcano IT 20cc2b6104e is described below commit 20cc2b6104e1670be3295ed52be54bb40de1b1ce Author: Yikun Jiang AuthorDate: Thu Aug 11 08:28:57 2022 -0700 [SPARK-38921][K8S][TESTS] Use k8s-client to create queue resource in Volcano IT ### What changes were proposed in this pull request? Use the fabric8io/k8s-client to create the queue resource in the Volcano IT. ### Why are the changes needed? Use the k8s-client to create the Volcano queue to: - Make the code easy to understand - Enable the ability to set queue capacity dynamically. This will help support running the Volcano test in a resource-limited env (such as GitHub Action). ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Volcano IT passed Closes #36219 from Yikun/SPARK-38921. 
Authored-by: Yikun Jiang Signed-off-by: Dongjoon Hyun (cherry picked from commit a49f66fe49d4d4bbfb41da2e5bbb5af4bd64d1da) Signed-off-by: Yikun Jiang --- .../src/test/resources/volcano/disable-queue.yml | 24 --- .../volcano/disable-queue0-enable-queue1.yml | 31 - .../volcano/driver-podgroup-template-cpu-2u.yml| 2 +- .../volcano/driver-podgroup-template-memory-3g.yml | 2 +- .../src/test/resources/volcano/enable-queue.yml| 24 --- .../volcano/enable-queue0-enable-queue1.yml| 29 - .../src/test/resources/volcano/queue-2u-3g.yml | 25 .../k8s/integrationtest/VolcanoTestsSuite.scala| 74 +++--- 8 files changed, 52 insertions(+), 159 deletions(-) diff --git a/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/disable-queue.yml b/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/disable-queue.yml deleted file mode 100644 index d9f8c36471e..000 --- a/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/disable-queue.yml +++ /dev/null @@ -1,24 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -#http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -apiVersion: scheduling.volcano.sh/v1beta1 -kind: Queue -metadata: - name: queue -spec: - weight: 1 - capability: -cpu: "0.001" diff --git a/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/disable-queue0-enable-queue1.yml b/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/disable-queue0-enable-queue1.yml deleted file mode 100644 index 82e479478cc..000 --- a/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/disable-queue0-enable-queue1.yml +++ /dev/null @@ -1,31 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -#http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -apiVersion: scheduling.volcano.sh/v1beta1 -kind: Queue -metadata: - name: queue0 -spec: - weight: 1 - capability: -cpu: "0.001" -apiVersion: scheduling.volcano.sh/v1beta1 -kind: Queue -metadata: - name: queue1 -spec: - weight: 1 diff --git a/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/driver-podgroup-template-cpu-2u.yml b/resource-managers/kubernetes/integration-tests/src/test/resources/volcano/driver-podgroup-template-cpu-2u.yml index e6d53ddc8b5..4a784f0f864