This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 49c07b7  fix merge mistakes
49c07b7 is described below

commit 49c07b7335a89533bc25d6ef45f7877b43b6a98d
Author: Wenchen Fan <wenc...@databricks.com>
AuthorDate: Thu Mar 5 20:26:14 2020 +0800

    fix merge mistakes
---
 sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala  | 1 -
 .../apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala     | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index cc9c2ae..3dbfc65 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -2112,7 +2112,6 @@ object SQLConf {
       "MapFromEntries, StringToMap, MapConcat and TransformKeys. When EXCEPTION, the query " +
       "fails if duplicated map keys are detected. When LAST_WIN, the map key that is inserted " +
       "at last takes precedence.")
-    .version("3.0.0")
     .stringConf
     .transform(_.toUpperCase(Locale.ROOT))
     .checkValues(MapKeyDedupPolicy.values.map(_.toString))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
index e5f1fa6..500b6cc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
@@ -609,7 +609,7 @@ class AdaptiveQueryExecSuite
     withSQLConf(
       SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true",
       SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
-      SQLConf.SHUFFLE_TARGET_POSTSHUFFLE_INPUT_SIZEDateFormatter.key -> "2000") {
+      SQLConf.SHUFFLE_TARGET_POSTSHUFFLE_INPUT_SIZE.key -> "2000") {
       withTempView("skewData1", "skewData2") {
         spark
           .range(0, 1000, 1, 10)

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
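
The second hunk above only corrects a config constant name passed to the
withSQLConf test helper. For readers unfamiliar with that helper, the
following is a minimal, self-contained sketch of the "set, run, restore"
pattern such a helper follows. It is not Spark's implementation: the
WithConfSketch object, the withConf method, and the in-memory key/value
store are illustrative stand-ins; only the two config key strings in main()
are the values the real SQLConf constants resolve to.

    // Sketch only: mirrors the shape of a withSQLConf-style helper,
    // not Spark's actual SQLConf machinery.
    import scala.collection.mutable

    object WithConfSketch {
      // Stand-in for a mutable configuration store.
      private val settings = mutable.Map[String, String]()

      def set(key: String, value: String): Unit = settings(key) = value
      def getOption(key: String): Option[String] = settings.get(key)
      def unset(key: String): Unit = settings.remove(key)

      // Apply the given pairs for the duration of `body`, then restore the
      // previous values (or remove keys that were not set before).
      def withConf(pairs: (String, String)*)(body: => Unit): Unit = {
        val previous = pairs.map { case (k, _) => k -> getOption(k) }
        pairs.foreach { case (k, v) => set(k, v) }
        try body
        finally previous.foreach {
          case (k, Some(v)) => set(k, v)
          case (k, None)    => unset(k)
        }
      }

      def main(args: Array[String]): Unit = {
        withConf(
          "spark.sql.adaptive.enabled" -> "true",
          "spark.sql.autoBroadcastJoinThreshold" -> "-1") {
          // Overrides are visible only inside the block.
          assert(getOption("spark.sql.adaptive.enabled").contains("true"))
        }
        // Outside the block the overrides have been rolled back.
        assert(getOption("spark.sql.adaptive.enabled").isEmpty)
      }
    }

The try/finally restore step is what makes the fixed test hunk safe: the
temporary shuffle-size and broadcast-threshold overrides cannot leak into
other tests even if the body throws.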